code | apis | extract_api
---|---|---|
# -*- coding: utf-8 -*-
import locale
locale.setlocale(locale.LC_ALL, 'pt_BR')
default_notas = [
100, 50, 20, 10, 5, 2
]
default_moedas = [
1, 0.50, 0.25, 0.10, 0.05, 0.01
]
value = float(input())
print("NOTAS:")
for nota in default_notas:
    count = int(value / nota)
    print("{} nota(s) de R$ {},00".format(count, nota))
    value -= count * nota
print("MOEDAS:")
for moeda in default_moedas:
    count = int(value / moeda)
    print("{} moeda(s) de R$ {:.2f}".format(count, moeda))
    value -= count * moeda
| [
"locale.setlocale"
] | [((38, 78), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '"""pt_BR"""'], {}), "(locale.LC_ALL, 'pt_BR')\n", (54, 78), False, 'import locale\n')] |
import pandas as pd
from os import walk
from os.path import join
from datetime import datetime, timedelta
import agg, agg0, agg1, agg2, agg3
def aggregate(tree, data_path):
    for (_, _, filenames) in walk(data_path):
        for filename in sorted(filenames):
            if filename.endswith('.parquet'):
                filepath = join(data_path, filename)
                df = pd.read_parquet(filepath)
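                # Assumed layout: 'date' and 'time' arrive as separate columns (e.g. '21Dec2015' and '22:17:56') and are merged into one datetime below.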
                df['datetime'] = pd.to_datetime(df['date'].astype(str) + ' ' + df['time'], errors='coerce', format='%d%b%Y %H:%M:%S')
                df.drop(axis=1, columns=['date', 'time'], inplace=True)
                df.rename(columns={'i/f_name' : 'if_name', 'i/f_dir' : 'if_dir'}, inplace=True)
                print(f' {datetime.now()} -> {filename} ({len(df.index)} rows)')
                ts = datetime.now()
                agg.aggregate(tree, df)
                yield tree, datetime.now() - ts
        break
def test():
    tree_conf = {
        "name":"1second",
        "range":"5000s",
        "delta":"1s"
    }
    params_conf = [ [ ["src"],
["dst"],
[ "src", "dst" ]
],
[ ["src"],
["dst"],
["service"],
[ "src", "dst", "service" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
[ "src", "dst", "service", "proxy_src_ip" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag" ],
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
["orig"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto",
"orig" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
["orig"],
["type"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto",
"orig", "type" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
["orig"],
["type"],
["if_name"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto",
"orig", "type", "if_name" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
["orig"],
["type"],
["if_name"],
["if_dir"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto",
"orig", "type", "if_name", "if_dir" ]
],
[ ["src"],
["dst"],
["service"],
["proxy_src_ip"],
["SCADA_Tag"],
["s_port"],
["appi_name"],
["proto"],
["orig"],
["type"],
["if_name"],
["if_dir"],
["Modbus_Function_Code"],
[ "src", "dst", "service", "proxy_src_ip", "SCADA_Tag", "s_port", "appi_name", "proto",
"orig", "type", "if_name", "if_dir", "Modbus_Function_Code" ]
]
]
    params_rel = [[['src', 'src & dst'],
['dst', 'src & dst']
],
[['src', 'src & dst & service'],
['dst', 'src & dst & service'],
['service', 'src & dst & service'],
],
[['src', 'src & dst & service & proxy_src_ip'],
['dst', 'src & dst & service & proxy_src_ip'],
['service', 'src & dst & service & proxy_src_ip'],
['proxy_src_ip', 'src & dst & service & proxy_src_ip'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
['orig','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['orig','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
['type','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['orig','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['type','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
['if_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['orig','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['type','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['if_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
['if_dir','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir'],
],
[['src', 'src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['dst','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['service','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['proxy_src_ip','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['SCADA_Tag','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['s_port','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['appi_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['proto','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['orig','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['type','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['if_name','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['if_dir','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
['Modbus_Function_Code','src & dst & service & proxy_src_ip & SCADA_Tag & s_port & appi_name & proto & orig & type & if_name & if_dir & Modbus_Function_Code'],
]
]
    data_path = 'data_example'
    n_files = 4
    cnt_tests = 5
    f = open('results/test2.csv', 'w')
    for conf, relative in zip(params_conf, params_rel):
        for aggN in [agg0, agg3]:
            print(f'{datetime.now()} -> {aggN.__name__} ({conf})')
            tree = aggN.AggTree(tree_conf, conf)
            agg_time = timedelta(0)
            for cnt, (tree, td) in enumerate(aggregate(tree, data_path)):
                agg_time += td
                if cnt + 1 == n_files:
                    # count of queues
                    print(f'{len(conf[-1])},{aggN.__name__},cnt_queu,{len(tree.tree.queue)}')
                    f.write(f'{len(conf[-1])},{aggN.__name__},cnt_queu,{len(tree.tree.queue)}\n')
                    f.flush()
                    # time to aggregate 100000 elements
                    agg_time /= (5 * n_files)
                    agg_time = int(agg_time.total_seconds() * 1000) # ms
                    print(f'{len(conf[-1])},{aggN.__name__},agg_time,{agg_time}')
                    f.write(f'{len(conf[-1])},{aggN.__name__},agg_time,{agg_time}\n')
                    f.flush()
                    # time to filter relative
                    rel_time = timedelta(0)
                    for _ in range(cnt_tests):
                        tm = datetime.now()
                        tree.filter(['1second'],
                                    relative=relative)
                        rel_time += (datetime.now() - tm)
                    rel_time /= cnt_tests
                    rel_time = rel_time.total_seconds() * 1000
                    print(f'{len(conf[-1])},{aggN.__name__},rel_time,{rel_time}')
                    f.write(f'{len(conf[-1])},{aggN.__name__},rel_time,{rel_time}\n')
                    f.flush()
                    print('')
                    break
    f.close()
if __name__ == '__main__':
    test() | [
"pandas.read_parquet",
"os.path.join",
"datetime.datetime.now",
"agg.aggregate",
"datetime.timedelta",
"os.walk"
] | [((209, 224), 'os.walk', 'walk', (['data_path'], {}), '(data_path)\n', (213, 224), False, 'from os import walk\n'), ((17104, 17116), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (17113, 17116), False, 'from datetime import datetime, timedelta\n'), ((343, 368), 'os.path.join', 'join', (['data_path', 'filename'], {}), '(data_path, filename)\n', (347, 368), False, 'from os.path import join\n'), ((390, 415), 'pandas.read_parquet', 'pd.read_parquet', (['filepath'], {}), '(filepath)\n', (405, 415), True, 'import pandas as pd\n'), ((823, 837), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (835, 837), False, 'from datetime import datetime, timedelta\n'), ((854, 877), 'agg.aggregate', 'agg.aggregate', (['tree', 'df'], {}), '(tree, df)\n', (867, 877), False, 'import agg, agg0, agg1, agg2, agg3\n'), ((18030, 18042), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (18039, 18042), False, 'from datetime import datetime, timedelta\n'), ((16972, 16986), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16984, 16986), False, 'from datetime import datetime, timedelta\n'), ((18119, 18133), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18131, 18133), False, 'from datetime import datetime, timedelta\n'), ((747, 761), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (759, 761), False, 'from datetime import datetime, timedelta\n'), ((906, 920), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (918, 920), False, 'from datetime import datetime, timedelta\n'), ((18275, 18289), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18287, 18289), False, 'from datetime import datetime, timedelta\n')] |
import pytest
from krake.data import Key
from krake.data.serializable import Serializable
def test_key_format_object():
"""Test the format_object method of :class:`Key`."""
class Spaceship(Serializable):
name: str
namespace: str
key = Key("/space/spaceship/{namespace}/{name}")
spaceship = Spaceship(name="Rocinante", namespace="solar-system")
etcd_key = key.format_object(spaceship)
assert etcd_key == "/space/spaceship/solar-system/Rocinante"
def test_key_format_kwargs():
"""Test the format_kwargs method of :class:`Key`."""
key = Key("/space/spaceship/{namespace}/{name}")
# Valid parameters
etcd_key = key.format_kwargs(name="Rocinante", namespace="solar-system")
assert etcd_key == "/space/spaceship/solar-system/Rocinante"
# Invalid parameters
with pytest.raises(TypeError, match="Missing required keyword argument 'name'"):
key.format_kwargs(namespace="galaxy")
with pytest.raises(
TypeError, match="Got unexpected keyword argument parameter 'propulsion'"
):
key.format_kwargs(
name="Enterprise", namespace="Universe", propulsion="warp-drive"
)
def test_key_matches():
"""Test the matches method of :class:`Key`."""
key = Key("/space/spaceship/{namespace}/{name}")
# Valid matches
assert key.matches("/space/spaceship/milky-way/Bebop")
assert key.matches("/space/spaceship/Andromeda/heart_of_gold")
# Invalid matches
assert not key.matches("/space/spaceship/")
assert not key.matches("/space/spaceship/rebels")
assert not key.matches("/space/spaceship/empire/TIE/Fighter")
assert not key.matches("/space/spaceship/empire/TIE|Fighter")
assert not key.matches("/space/spaceship/empire/TIE Fighter")
def test_key_prefix():
"""Test the prefix method of :class:`Key`."""
key = Key("/space/spaceship/{namespace}/{name}/{propulsion}")
# Valid parameters
assert key.prefix(namespace="belt") == "/space/spaceship/belt/"
assert (
key.prefix(namespace="belt", name="voyager") == "/space/spaceship/belt/voyager/"
)
# Invalid parameters
with pytest.raises(
TypeError, match="Got parameter 'name' without preceding parameter 'namespace'"
):
key.prefix(name="Battlestar")
with pytest.raises(
TypeError, match="Got parameter 'propulsion' without preceding parameter 'name'"
):
key.prefix(namespace="belt", propulsion="antimatter")
| [
"krake.data.Key",
"pytest.raises"
] | [((267, 309), 'krake.data.Key', 'Key', (['"""/space/spaceship/{namespace}/{name}"""'], {}), "('/space/spaceship/{namespace}/{name}')\n", (270, 309), False, 'from krake.data import Key\n'), ((590, 632), 'krake.data.Key', 'Key', (['"""/space/spaceship/{namespace}/{name}"""'], {}), "('/space/spaceship/{namespace}/{name}')\n", (593, 632), False, 'from krake.data import Key\n'), ((1271, 1313), 'krake.data.Key', 'Key', (['"""/space/spaceship/{namespace}/{name}"""'], {}), "('/space/spaceship/{namespace}/{name}')\n", (1274, 1313), False, 'from krake.data import Key\n'), ((1869, 1924), 'krake.data.Key', 'Key', (['"""/space/spaceship/{namespace}/{name}/{propulsion}"""'], {}), "('/space/spaceship/{namespace}/{name}/{propulsion}')\n", (1872, 1924), False, 'from krake.data import Key\n'), ((834, 908), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""Missing required keyword argument \'name\'"""'}), '(TypeError, match="Missing required keyword argument \'name\'")\n', (847, 908), False, 'import pytest\n'), ((966, 1059), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""Got unexpected keyword argument parameter \'propulsion\'"""'}), '(TypeError, match=\n "Got unexpected keyword argument parameter \'propulsion\'")\n', (979, 1059), False, 'import pytest\n'), ((2160, 2259), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""Got parameter \'name\' without preceding parameter \'namespace\'"""'}), '(TypeError, match=\n "Got parameter \'name\' without preceding parameter \'namespace\'")\n', (2173, 2259), False, 'import pytest\n'), ((2318, 2418), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""Got parameter \'propulsion\' without preceding parameter \'name\'"""'}), '(TypeError, match=\n "Got parameter \'propulsion\' without preceding parameter \'name\'")\n', (2331, 2418), False, 'import pytest\n')] |
import wgan
import pandas as pd
import torch
import numpy as np
import torch.nn.functional as F
from matplotlib import pyplot as plt
########################################
# setup
########################################
df = pd.read_feather("data/original_data/cps_merged.feather").drop("u75",1).drop("u74",1)
df = df.loc[df.t==0,]
continuous_vars_0 = ["age", "education", "re74", "re75", "re78"]
continuous_lower_bounds_0 = {"re74": 0, "re75": 0, "re78": 0, "age": 0}
categorical_vars_0 = ["black", "hispanic", "married", "nodegree"]
context_vars_0 = ["t"]
dw = wgan.DataWrapper(df, continuous_vars_0, categorical_vars_0, context_vars_0, continuous_lower_bounds_0)
x, context = dw.preprocess(df)
a = lambda *args, **kwargs: torch.optim.Adam(betas=(0, 0.9), *args, **kwargs)
oa = lambda *args, **kwargs: wgan.OAdam(betas=(0, 0.9), *args, **kwargs)
spec = wgan.Specifications(dw, batch_size=512, max_epochs=int(3e3), print_every=500, optimizer=a, generator_optimizer=oa, critic_lr=1e-4, generator_lr=1e-4)
########################################
# define penalties
########################################
def monotonicity_penalty_kernreg(factor, h=0.1, idx_out=4, idx_in=0, x_min=None, x_max=None, data_wrapper=None):
    """
    Adds Kernel Regression monotonicity penalty.
    Incentivizes monotonicity of the mean of cat(x_hat, context)[:, dim_out] conditional on cat(x_hat, context)[:, dim_in].
    Parameters
    ----------
    x_hat: torch.tensor
        generated data
    context: torch.tensor
        context data
    Returns
    -------
    torch.tensor
    """
    if data_wrapper is not None:
        x_std = torch.cat(data_wrapper.stds, -1).squeeze()[idx_in]
        x_mean = torch.cat(data_wrapper.means, -1).squeeze()[idx_in]
        x_min, x_max = ((x-x_mean)/(x_std+1e-3) for x in (x_min, x_max))
    if x_min is None: x_min = x.min()
    if x_max is None: x_max = x.max()
    def penalty(x_hat, context):
        y, x = (torch.cat([x_hat, context], -1)[:, idx] for idx in (idx_out, idx_in))
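        # Truncated-quadratic (Epanechnikov-style) kernel for the local conditional-mean estimate; decreases of that mean across the fixed 20-point grid are what get penalized below.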
        k = lambda x: (1-x.pow(2)).clamp_min(0)
        x_grid = ((x_max-x_min)*torch.arange(20, device=x.device)/20 + x_min).detach()
        W = k((x_grid.unsqueeze(-1) - x)/h).detach()
        W = W/(W.sum(-1, True) + 1e-2)
        y_mean = (W*y).sum(-1).squeeze()
        return (factor * (y_mean[:-1]-y_mean[1:])).clamp_min(0).sum()
    return penalty
def monotonicity_penalty_chetverikov(factor, bound=0, idx_out=4, idx_in=0):
    """
    Adds Chetverikov monotonicity test penalty.
    Incentivizes monotonicity of the mean of cat(x_hat, context)[:, dim_out] conditional on cat(x_hat, context)[:, dim_in].
    Parameters
    ----------
    x_hat: torch.tensor
        generated data
    context: torch.tensor
        context data
    Returns
    -------
    torch.tensor
    """
    def penalty(x_hat, context):
        y, x = (torch.cat([x_hat, context], -1)[:, idx] for idx in (idx_out, idx_in))
        argsort = torch.argsort(x)
        y, x = y[argsort], x[argsort]
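        # Difference-based local variance estimate (squared first differences of the sorted y); the last value is repeated so sigma keeps length n.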
        sigma = (y[:-1] - y[1:]).pow(2)
        sigma = torch.cat([sigma, sigma[-1:]])
        k = lambda x: 0.75*F.relu(1-x.pow(2))
        h_max = torch.tensor((x.max()-x.min()).detach()/2).to(x_hat.device)
        n = y.size(0)
        h_min = 0.4*h_max*(np.log(n)/n)**(1/3)
        l_max = int((h_min/h_max).log()/np.log(0.5))
        H = h_max * (torch.tensor([0.5])**torch.arange(l_max)).to(x_hat.device)
        x_dist = (x.unsqueeze(-1) - x) # i, j
        Q = k(x_dist.unsqueeze(-1) / H) # i, j, h
        Q = (Q.unsqueeze(0) * Q.unsqueeze(1)).detach() # i, j, x, h
        y_dist = (y - y.unsqueeze(-1)) # i, j
        sgn = torch.sign(x_dist) * (x_dist.abs() > 1e-8) # i, j
        b = ((y_dist * sgn).unsqueeze(-1).unsqueeze(-1) * Q).sum(0).sum(0) # x, h
        V = ((sgn.unsqueeze(-1).unsqueeze(-1) * Q).sum(1).pow(2)* sigma.unsqueeze(-1).unsqueeze(-1)).sum(0) # x, h
        T = b / (V + 1e-2)
        return T.max().clamp_min(0) * factor
    return penalty
mode = "load"
if mode == "train":
########################################
# train and save models
########################################
gennone, critnone = wgan.Generator(spec), wgan.Critic(spec)
wgan.train(gennone, critnone, x, context, spec)
torch.save(genchet, "monotonicity_penalty/genchet.torch")
torch.save(critchet, "monotonicity_penalty/critchet.torch")
genkern, critkern = wgan.Generator(spec), wgan.Critic(spec)
wgan.train(genkern, critkern, x, context, spec, monotonicity_penalty_kernreg(1, h=1, idx_in=0, idx_out=4, x_min=0, x_max=90, data_wrapper=dw))
torch.save(genkern, "monotonicity_penalty/genkern.torch")
torch.save(critkern, "monotonicity_penalty/critkern.torch")
genchet, critchet = wgan.Generator(spec), wgan.Critic(spec)
wgan.train(genchet, critchet, x, context, spec, monotonicity_penalty_chetverikov(1, idx_in=0, idx_out=4))
torch.save(gennone, "monotonicity_penalty/gennone.torch")
torch.save(critnone, "monotonicity_penalty/critnone.torch")
elif mode == "load":
########################################
# load models
########################################
genchet = torch.load("monotonicity_penalty/genchet.torch", map_location=torch.device('cpu'))
critchet = torch.load("monotonicity_penalty/critchet.torch", map_location=torch.device('cpu'))
genkern = torch.load("monotonicity_penalty/genkern.torch", map_location=torch.device('cpu'))
critkern = torch.load("monotonicity_penalty/critkern.torch", map_location=torch.device('cpu'))
gennone = torch.load("monotonicity_penalty/gennone.torch", map_location=torch.device('cpu'))
critnone = torch.load("monotonicity_penalty/critnone.torch", map_location=torch.device('cpu'))
########################################
# produce figures
########################################
# sample data
df_none = dw.apply_generator(gennone, df.sample(int(5e5), replace=True)).reset_index(drop=True)
df_kern = dw.apply_generator(genkern, df.sample(int(5e5), replace=True)).reset_index(drop=True)
df_chet = dw.apply_generator(genchet, df.sample(int(5e5), replace=True)).reset_index(drop=True)
# Kernel Smoother for plotting
def y_smooth(x, y, h):
    x, y = torch.tensor(x), torch.tensor(y)
    k = lambda x: (1-x.pow(2)).clamp_min(0)
    x_grid = (x.max()-x.min())*torch.arange(20)/20 + x.min()
    W = k((x_grid.unsqueeze(-1) - x)/h)
    W = W/W.sum(-1, True)
    return x_grid, (W*y).sum(-1)
# Compare conditional means
plt.figure(figsize=(10, 6))
for df_, lab in zip((df, df_none, df_kern, df_chet), ("Original Data", "Unpenalized WGAN", "Kernel Regression Penalty", "Chetverikov Penalty")):
    x_, y = df_.age.to_numpy(), df_.re78.to_numpy()
    x_grid, y_hat = y_smooth(x_, y, 1)
    plt.plot(x_grid, y_hat, label=lab)
plt.ylabel("Earnings 1978")
plt.xlabel("Age")
plt.legend()
plt.savefig("figures/monotonicity.pdf", format="pdf")
# Compare overall fits
f, a = plt.subplots(4, 6, figsize=(15, 10), sharex="col", sharey="col")
for i, (ax, df_, n) in enumerate(zip(a, [df, df_none, df_kern, df_chet], ["Original", "Unpenalized WGAN", "Kernel Regression Penalty", "Chetverikov Penalty"])):
    ax[0].set_ylabel(n)
    ax[0].matshow(df_.drop(["t"], 1).corr())
    ax[1].hist(df_.re78, density=True)
    ax[2].hist(df_.age, density=True)
    ax[3].hist(df_.re74, density=True)
    ax[4].hist(df_.education, density=True)
    ax[5].hist(df_.married, density=True)
    for _ in range(1,6): ax[_].set_yticklabels([])
for i, n in enumerate(["Correlation", "Earnings 1978", "Age", "Earnings 1974", "Education", "Married"]):
    a[0, i].set_title(n)
plt.savefig("figures/monotonicity_fit.pdf", format="pdf")
| [
"wgan.DataWrapper",
"matplotlib.pyplot.ylabel",
"numpy.log",
"torch.arange",
"pandas.read_feather",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"torch.argsort",
"wgan.train",
"matplotlib.pyplot.savefig",
"torch.sign",
"torch.save",
"torch.cat",
"matplotlib.pyplot.legend",
"torch.device",
"torch.optim.Adam",
"wgan.Critic",
"torch.tensor",
"matplotlib.pyplot.figure",
"wgan.Generator",
"wgan.OAdam",
"matplotlib.pyplot.subplots"
] | [((568, 674), 'wgan.DataWrapper', 'wgan.DataWrapper', (['df', 'continuous_vars_0', 'categorical_vars_0', 'context_vars_0', 'continuous_lower_bounds_0'], {}), '(df, continuous_vars_0, categorical_vars_0, context_vars_0,\n continuous_lower_bounds_0)\n', (584, 674), False, 'import wgan\n'), ((6270, 6297), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (6280, 6297), True, 'from matplotlib import pyplot as plt\n'), ((6567, 6594), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Earnings 1978"""'], {}), "('Earnings 1978')\n", (6577, 6594), True, 'from matplotlib import pyplot as plt\n'), ((6595, 6612), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Age"""'], {}), "('Age')\n", (6605, 6612), True, 'from matplotlib import pyplot as plt\n'), ((6613, 6625), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6623, 6625), True, 'from matplotlib import pyplot as plt\n'), ((6626, 6679), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures/monotonicity.pdf"""'], {'format': '"""pdf"""'}), "('figures/monotonicity.pdf', format='pdf')\n", (6637, 6679), True, 'from matplotlib import pyplot as plt\n'), ((6711, 6775), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(4)', '(6)'], {'figsize': '(15, 10)', 'sharex': '"""col"""', 'sharey': '"""col"""'}), "(4, 6, figsize=(15, 10), sharex='col', sharey='col')\n", (6723, 6775), True, 'from matplotlib import pyplot as plt\n'), ((7371, 7428), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures/monotonicity_fit.pdf"""'], {'format': '"""pdf"""'}), "('figures/monotonicity_fit.pdf', format='pdf')\n", (7382, 7428), True, 'from matplotlib import pyplot as plt\n'), ((730, 779), 'torch.optim.Adam', 'torch.optim.Adam', (['*args'], {'betas': '(0, 0.9)'}), '(*args, betas=(0, 0.9), **kwargs)\n', (746, 779), False, 'import torch\n'), ((809, 852), 'wgan.OAdam', 'wgan.OAdam', (['*args'], {'betas': '(0, 0.9)'}), '(*args, betas=(0, 0.9), **kwargs)\n', (819, 852), False, 'import wgan\n'), ((4008, 4055), 'wgan.train', 'wgan.train', (['gennone', 'critnone', 'x', 'context', 'spec'], {}), '(gennone, critnone, x, context, spec)\n', (4018, 4055), False, 'import wgan\n'), ((4060, 4117), 'torch.save', 'torch.save', (['genchet', '"""monotonicity_penalty/genchet.torch"""'], {}), "(genchet, 'monotonicity_penalty/genchet.torch')\n", (4070, 4117), False, 'import torch\n'), ((4123, 4182), 'torch.save', 'torch.save', (['critchet', '"""monotonicity_penalty/critchet.torch"""'], {}), "(critchet, 'monotonicity_penalty/critchet.torch')\n", (4133, 4182), False, 'import torch\n'), ((4399, 4456), 'torch.save', 'torch.save', (['genkern', '"""monotonicity_penalty/genkern.torch"""'], {}), "(genkern, 'monotonicity_penalty/genkern.torch')\n", (4409, 4456), False, 'import torch\n'), ((4462, 4521), 'torch.save', 'torch.save', (['critkern', '"""monotonicity_penalty/critkern.torch"""'], {}), "(critkern, 'monotonicity_penalty/critkern.torch')\n", (4472, 4521), False, 'import torch\n'), ((4701, 4758), 'torch.save', 'torch.save', (['gennone', '"""monotonicity_penalty/gennone.torch"""'], {}), "(gennone, 'monotonicity_penalty/gennone.torch')\n", (4711, 4758), False, 'import torch\n'), ((4764, 4823), 'torch.save', 'torch.save', (['critnone', '"""monotonicity_penalty/critnone.torch"""'], {}), "(critnone, 'monotonicity_penalty/critnone.torch')\n", (4774, 4823), False, 'import torch\n'), ((6532, 6566), 'matplotlib.pyplot.plot', 'plt.plot', (['x_grid', 'y_hat'], {'label': 'lab'}), '(x_grid, y_hat, label=lab)\n', (6540, 6566), True, 'from matplotlib import 
pyplot as plt\n'), ((2830, 2846), 'torch.argsort', 'torch.argsort', (['x'], {}), '(x)\n', (2843, 2846), False, 'import torch\n'), ((2929, 2959), 'torch.cat', 'torch.cat', (['[sigma, sigma[-1:]]'], {}), '([sigma, sigma[-1:]])\n', (2938, 2959), False, 'import torch\n'), ((3964, 3984), 'wgan.Generator', 'wgan.Generator', (['spec'], {}), '(spec)\n', (3978, 3984), False, 'import wgan\n'), ((3986, 4003), 'wgan.Critic', 'wgan.Critic', (['spec'], {}), '(spec)\n', (3997, 4003), False, 'import wgan\n'), ((4208, 4228), 'wgan.Generator', 'wgan.Generator', (['spec'], {}), '(spec)\n', (4222, 4228), False, 'import wgan\n'), ((4230, 4247), 'wgan.Critic', 'wgan.Critic', (['spec'], {}), '(spec)\n', (4241, 4247), False, 'import wgan\n'), ((4547, 4567), 'wgan.Generator', 'wgan.Generator', (['spec'], {}), '(spec)\n', (4561, 4567), False, 'import wgan\n'), ((4569, 4586), 'wgan.Critic', 'wgan.Critic', (['spec'], {}), '(spec)\n', (4580, 4586), False, 'import wgan\n'), ((6014, 6029), 'torch.tensor', 'torch.tensor', (['x'], {}), '(x)\n', (6026, 6029), False, 'import torch\n'), ((6031, 6046), 'torch.tensor', 'torch.tensor', (['y'], {}), '(y)\n', (6043, 6046), False, 'import torch\n'), ((3464, 3482), 'torch.sign', 'torch.sign', (['x_dist'], {}), '(x_dist)\n', (3474, 3482), False, 'import torch\n'), ((230, 286), 'pandas.read_feather', 'pd.read_feather', (['"""data/original_data/cps_merged.feather"""'], {}), "('data/original_data/cps_merged.feather')\n", (245, 286), True, 'import pandas as pd\n'), ((1897, 1928), 'torch.cat', 'torch.cat', (['[x_hat, context]', '(-1)'], {}), '([x_hat, context], -1)\n', (1906, 1928), False, 'import torch\n'), ((2746, 2777), 'torch.cat', 'torch.cat', (['[x_hat, context]', '(-1)'], {}), '([x_hat, context], -1)\n', (2755, 2777), False, 'import torch\n'), ((3171, 3182), 'numpy.log', 'np.log', (['(0.5)'], {}), '(0.5)\n', (3177, 3182), True, 'import numpy as np\n'), ((5031, 5050), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5043, 5050), False, 'import torch\n'), ((5130, 5149), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5142, 5149), False, 'import torch\n'), ((5228, 5247), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5240, 5247), False, 'import torch\n'), ((5327, 5346), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5339, 5346), False, 'import torch\n'), ((5425, 5444), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5437, 5444), False, 'import torch\n'), ((5524, 5543), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (5536, 5543), False, 'import torch\n'), ((6118, 6134), 'torch.arange', 'torch.arange', (['(20)'], {}), '(20)\n', (6130, 6134), False, 'import torch\n'), ((1597, 1629), 'torch.cat', 'torch.cat', (['data_wrapper.stds', '(-1)'], {}), '(data_wrapper.stds, -1)\n', (1606, 1629), False, 'import torch\n'), ((1661, 1694), 'torch.cat', 'torch.cat', (['data_wrapper.means', '(-1)'], {}), '(data_wrapper.means, -1)\n', (1670, 1694), False, 'import torch\n'), ((3115, 3124), 'numpy.log', 'np.log', (['n'], {}), '(n)\n', (3121, 3124), True, 'import numpy as np\n'), ((3201, 3220), 'torch.tensor', 'torch.tensor', (['[0.5]'], {}), '([0.5])\n', (3213, 3220), False, 'import torch\n'), ((3222, 3241), 'torch.arange', 'torch.arange', (['l_max'], {}), '(l_max)\n', (3234, 3241), False, 'import torch\n'), ((2039, 2072), 'torch.arange', 'torch.arange', (['(20)'], {'device': 'x.device'}), '(20, device=x.device)\n', (2051, 2072), False, 'import torch\n')] |
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
import os
Base = declarative_base()
class Model(Base):
    __tablename__ = 'model'
    # Here we define the columns for the table 'model'
    ID = Column(Integer, primary_key=True)
    MODEL = Column(String(50), nullable=False)
    AI = Column(String(20), nullable=False)
    TIMESTAMP = Column(DateTime, nullable=False)
path = os.getcwd()
path = os.path.join(path, "db/model.db")
connection_string = "sqlite:///{path}".format(path=path)
# Create an engine that stores data in the local directory's db/model.db file
engine = create_engine(connection_string)
# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)
| [
"sqlalchemy.create_engine",
"os.path.join",
"os.getcwd",
"sqlalchemy.String",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.Column"
] | [((168, 186), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (184, 186), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((481, 492), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (490, 492), False, 'import os\n'), ((500, 533), 'os.path.join', 'os.path.join', (['path', '"""db/model.db"""'], {}), "(path, 'db/model.db')\n", (512, 533), False, 'import os\n'), ((662, 694), 'sqlalchemy.create_engine', 'create_engine', (['connection_string'], {}), '(connection_string)\n', (675, 694), False, 'from sqlalchemy import create_engine\n'), ((299, 332), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (305, 332), False, 'from sqlalchemy import Column, Integer, String, DateTime\n'), ((440, 472), 'sqlalchemy.Column', 'Column', (['DateTime'], {'nullable': '(False)'}), '(DateTime, nullable=False)\n', (446, 472), False, 'from sqlalchemy import Column, Integer, String, DateTime\n'), ((352, 362), 'sqlalchemy.String', 'String', (['(50)'], {}), '(50)\n', (358, 362), False, 'from sqlalchemy import Column, Integer, String, DateTime\n'), ((396, 406), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (402, 406), False, 'from sqlalchemy import Column, Integer, String, DateTime\n')] |
from typing import Dict, Tuple, Union
import torch
import torch.nn as nn
from .sac import SAC
from hlrl.torch.common import polyak_average
class SACRecurrent(SAC):
"""
Soft-Actor-Critic with a recurrent network.
"""
    def forward(
        self,
        observation: torch.Tensor,
        hidden_state: Tuple[torch.Tensor, torch.Tensor]
    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """
        Get the model output for a batch of observations
        Args:
            observation: A batch of observations from the environment.
            hidden_state (torch.Tensor): The hidden state.
        Returns:
            The action, Q-val, and new hidden state.
        """
        # Only going to update the hidden state using the policy hidden state
        action, _, _, new_hidden = self.policy(
            observation, hidden_state
        )
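        # Score the sampled action with the first critic; the critic's returned hidden state is discarded here.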
        q_val, _ = self.q_func1(observation, action, hidden_state)
        return action, q_val, new_hidden
    def step(
        self,
        observation: torch.Tensor,
        hidden_state: Tuple[torch.Tensor, torch.Tensor]
    ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
        """
        Get the model action for a single observation of the environment.
        Args:
            observation: A single observation from the environment.
            hidden_state: The hidden state.
        Returns:
            The action, Q-value of the action and hidden state.
        """
        with torch.no_grad():
            action, q_val, new_hidden = self(observation, hidden_state)
        new_hidden = [nh for nh in new_hidden]
        return action, q_val, new_hidden
    def reset_hidden_state(self) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Resets the hidden state for the network.
        Returns:
            The default hidden state of the network.
        """
        return [
            tens.to(self.device)
            for tens in self.policy.reset_hidden_state()
        ]
    def get_critic_loss(
        self,
        rollouts: Dict[str, torch.Tensor]
    ) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]:
        """
        Calculates the loss for the Q-function/functions.
        Args:
            rollouts: The (s, a, r, s', t) of training data for the network.
        Returns:
            The batch-wise loss for the Q-function/functions.
        """
        states = rollouts["state"]
        actions = rollouts["action"]
        rewards = rollouts["reward"]
        next_states = rollouts["next_state"]
        terminals = rollouts["terminal"]
        hidden_states = rollouts["hidden_state"]
        next_hiddens = rollouts["next_hidden_state"]
        with torch.no_grad():
            next_actions, next_log_probs, _, _ = self.policy(
                next_states, next_hiddens
            )
            next_log_probs = next_log_probs.sum(-1, keepdim=True)
            q_targ_pred, _ = self.q_func_targ1(
                next_states, next_actions, next_hiddens
            )
            if self.twin:
                q_targ_pred2, _ = self.q_func_targ2(
                    next_states, next_actions, next_hiddens
                )
                q_targ_pred = torch.min(q_targ_pred, q_targ_pred2)
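            # Entropy-regularized (soft) Bellman target: r + gamma * (1 - done) * (min Q_target - temperature * log pi).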
            q_targ = q_targ_pred - self.temperature * next_log_probs
            q_next = rewards + (1 - terminals) * self._discount * q_targ
        q_pred, _ = self.q_func1(states, actions, hidden_states)
        q_loss = self.q_loss_func(q_pred, q_next)
        if self.twin:
            q_pred2, _ = self.q_func2(states, actions, hidden_states)
            q_loss2 = self.q_loss_func(q_pred2, q_next)
            q_loss = (q_loss, q_loss2)
        return q_loss
    def get_actor_loss(
        self,
        rollouts: Dict[str, torch.Tensor]
    ) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        Calculates the loss for the actor/policy.
        Args:
            rollouts: The (s, a, r, s', t) of training data for the network.
        Returns:
            The batch-wise loss for the actor/policy and the log probability of
            a sampled action on the current policy.
        """
        states = rollouts["state"]
        hidden_states = rollouts["hidden_state"]
        pred_actions, pred_log_probs, _, _ = self.policy(states, hidden_states)
        pred_log_probs = pred_log_probs.sum(-1, keepdim=True)
        p_q_pred, _ = self.q_func1(states, pred_actions, hidden_states)
        if self.twin:
            p_q_pred2, _ = self.q_func2(states, pred_actions, hidden_states)
            p_q_pred = torch.min(p_q_pred, p_q_pred2)
        p_loss = self.temperature * pred_log_probs - p_q_pred
        return p_loss, pred_log_probs
| [
"torch.no_grad",
"torch.min"
] | [((1522, 1537), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1535, 1537), False, 'import torch\n'), ((2758, 2773), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2771, 2773), False, 'import torch\n'), ((4670, 4700), 'torch.min', 'torch.min', (['p_q_pred', 'p_q_pred2'], {}), '(p_q_pred, p_q_pred2)\n', (4679, 4700), False, 'import torch\n'), ((3266, 3302), 'torch.min', 'torch.min', (['q_targ_pred', 'q_targ_pred2'], {}), '(q_targ_pred, q_targ_pred2)\n', (3275, 3302), False, 'import torch\n')] |
import uuid
import json
from .position import mock_latitude, mock_longitude
from ..config import Config
from ..util import util
from kafka import KafkaProducer
from threading import Thread
from random import randint
from time import sleep
KAFKA_BROKER_URL = Config().kafka_broker_url
KAFKA_TAXI_POSITION_TOPIC = Config().taxi_position_tp
KAFKA_PASSENGER_POSITION_TOPIC = Config().psgr_position_tp
TOTAL_TAXI = Config().total_taxi
PASSENGER_PER_TIME_SlOT = Config().psgr_per_time_slot
DURATION = Config().duration
finish = False
taxi_pos_producer = KafkaProducer(
bootstrap_servers = [KAFKA_BROKER_URL],
value_serializer = lambda m: json.dumps(m).encode('ascii')
)
psgr_pos_producer = KafkaProducer(
bootstrap_servers = [KAFKA_BROKER_URL],
value_serializer = lambda m: json.dumps(m).encode('ascii')
)
def _mock_taxi_message(i):
    return {
        'taxi_id': util.mock_taxi_id(i),
        'position': {
            'latitude': mock_latitude(),
            'longitude': mock_longitude()
        }
    }
def _mock_passenger_message():
    return {
        'passenger_id': str(uuid.uuid4()),
        'position': {
            'latitude': mock_latitude(),
            'longitude': mock_longitude()
        }
    }
def _send_taxi_message():
    while True:
        if finish:
            break
        else:
            for i in range(TOTAL_TAXI):
                # Send messages in JSON format
                taxi_pos_producer.send(KAFKA_TAXI_POSITION_TOPIC, _mock_taxi_message(i))
            # Update taxi position every 10s
            sleep(10)
def _send_passenger_message():
    while True:
        if finish:
            break
        else:
            for _ in range(PASSENGER_PER_TIME_SlOT):
                # Send messages in JSON format
                psgr_pos_producer.send(KAFKA_PASSENGER_POSITION_TOPIC, _mock_passenger_message())
            # Random number of requests per second
            sleep_time = randint(0, 5)
            sleep(sleep_time)
if __name__ == '__main__':
    thread1 = Thread(target = _send_taxi_message)
    thread2 = Thread(target = _send_passenger_message)
    # The threads will be killed if the main thread exits
    # Do this in case we want to terminate the program suddenly like Ctrl+C
    thread1.daemon = True
    thread2.daemon = True
    thread1.start()
    thread2.start()
    sleep(DURATION)
    finish = True
    thread1.join()
    thread2.join() | [
"json.dumps",
"time.sleep",
"uuid.uuid4",
"threading.Thread",
"random.randint"
] | [((2030, 2063), 'threading.Thread', 'Thread', ([], {'target': '_send_taxi_message'}), '(target=_send_taxi_message)\n', (2036, 2063), False, 'from threading import Thread\n'), ((2080, 2118), 'threading.Thread', 'Thread', ([], {'target': '_send_passenger_message'}), '(target=_send_passenger_message)\n', (2086, 2118), False, 'from threading import Thread\n'), ((2354, 2369), 'time.sleep', 'sleep', (['DURATION'], {}), '(DURATION)\n', (2359, 2369), False, 'from time import sleep\n'), ((1094, 1106), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1104, 1106), False, 'import uuid\n'), ((1561, 1570), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (1566, 1570), False, 'from time import sleep\n'), ((1944, 1957), 'random.randint', 'randint', (['(0)', '(5)'], {}), '(0, 5)\n', (1951, 1957), False, 'from random import randint\n'), ((1970, 1987), 'time.sleep', 'sleep', (['sleep_time'], {}), '(sleep_time)\n', (1975, 1987), False, 'from time import sleep\n'), ((642, 655), 'json.dumps', 'json.dumps', (['m'], {}), '(m)\n', (652, 655), False, 'import json\n'), ((786, 799), 'json.dumps', 'json.dumps', (['m'], {}), '(m)\n', (796, 799), False, 'import json\n')] |
# Stdlib imports
import os
import json
# Django imports
from django.conf import settings
from django.core.files import File
# Pip imports
from web3 import Web3
# App imports
from .contract import Contract
class VotingManagerContract(Contract):
    def __init__(self, client, abi=None, address=None):
        """
        :param client: (EthClient)
        :param abi: contract abi
        :param address: contract address
        """
        if not abi:
            abi = self.load_default_abi()
        if not address:
            address = settings.VOTING_MANAGER_CONTRACT_ADDRESS
        super().__init__(client, abi, address)
    @classmethod
    def voting_details_log_parser(cls, log):
        args = log.get('args')
        return {
            "proposal_id": args["proposalId"],
            "is_voting_open": args["isVotingOpen"],
            "block_number": log["blockNumber"]
        }
    @classmethod
    def votes_log_parser(cls, log):
        args = log.get('args')
        return {
            "proposal_id": args["proposalId"],
            "voter": Web3.toChecksumAddress(args["voter"]),
            "selected_option": args["selectedOption"],
            "block_number": log["blockNumber"]
        }
    def load_voting_details_logs(self, from_block):
        logs = super().fetch_events('VotingDetails', from_block)
        return map(lambda l: self.voting_details_log_parser(l), logs)
    def load_votes_logs(self, from_block):
        logs = super().fetch_events('Vote', from_block)
        return map(lambda l: self.votes_log_parser(l), logs)
    def load_default_abi(self):
        artifacts_path = os.path.join(settings.STATIC_ROOT, 'contracts/VotingManager.json')
        artifacts = json.load(open(artifacts_path, 'rb'))
        return artifacts.get('abi') | [
"web3.Web3.toChecksumAddress",
"os.path.join"
] | [((1632, 1698), 'os.path.join', 'os.path.join', (['settings.STATIC_ROOT', '"""contracts/VotingManager.json"""'], {}), "(settings.STATIC_ROOT, 'contracts/VotingManager.json')\n", (1644, 1698), False, 'import os\n'), ((1074, 1111), 'web3.Web3.toChecksumAddress', 'Web3.toChecksumAddress', (["args['voter']"], {}), "(args['voter'])\n", (1096, 1111), False, 'from web3 import Web3\n')] |
#!/usr/bin/env python
# coding:utf-8
import rospy
import serial
import struct
from std_msgs.msg import Int32
from std_msgs.msg import Float32
def motor_callback(msg):
    if not isinstance(msg, Int32): return
    # Data received from another node
    pwm = msg.data
    # Send the command to the lower-level controller
    # Drive the motor
    pack = struct.pack('h', pwm)
    data = bytearray([0x03, pack[0], pack[1]])
    ser.write(data)
if __name__ == '__main__':
    # Create the node
    rospy.init_node('my_driver_node')
    # Open the serial port
    # Retry mechanism
    count = 0
    while count < 10:
        count += 1
        try:
            ser = serial.Serial(port='/dev/ttyUSB0', baudrate=115200)
            # If the open failed, the lines below are not executed,
            # so reaching this point means the connection succeeded
            break
        except Exception as e:
            print(e)
    # Create a subscriber for motor commands
    motor_topic_name = '/motor'
    rospy.Subscriber(motor_topic_name, Int32, motor_callback)
    # Encoder
    encoder_topic_name = '/rpm'
    rpm_publisher = rospy.Publisher(encoder_topic_name, Float32, queue_size=100)
    # Communicate with the lower-level controller
    while not rospy.is_shutdown():
        # Blocking read
        read = ser.read(2)
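        # Two-byte payload: assumed to be a signed short ('h') carrying RPM * 100 from the MCU (see the /100.0 below).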
        data = bytearray([])
        data.extend(read)
        # bytearray data -> numeric value
        data = struct.unpack('h', data)[0]
        rpm = data / 100.0
        # Publish the reading
        msg = Float32()
        msg.data = rpm
        rpm_publisher.publish(msg)
    rospy.spin() | [
"rospy.Subscriber",
"rospy.is_shutdown",
"std_msgs.msg.Float32",
"rospy.init_node",
"struct.pack",
"struct.unpack",
"serial.Serial",
"rospy.spin",
"rospy.Publisher"
] | [((289, 310), 'struct.pack', 'struct.pack', (['"""h"""', 'pwm'], {}), "('h', pwm)\n", (300, 310), False, 'import struct\n'), ((423, 456), 'rospy.init_node', 'rospy.init_node', (['"""my_driver_node"""'], {}), "('my_driver_node')\n", (438, 456), False, 'import rospy\n'), ((804, 861), 'rospy.Subscriber', 'rospy.Subscriber', (['motor_topic_name', 'Int32', 'motor_callback'], {}), '(motor_topic_name, Int32, motor_callback)\n', (820, 861), False, 'import rospy\n'), ((925, 985), 'rospy.Publisher', 'rospy.Publisher', (['encoder_topic_name', 'Float32'], {'queue_size': '(100)'}), '(encoder_topic_name, Float32, queue_size=100)\n', (940, 985), False, 'import rospy\n'), ((1345, 1357), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (1355, 1357), False, 'import rospy\n'), ((1016, 1035), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (1033, 1035), False, 'import rospy\n'), ((1272, 1281), 'std_msgs.msg.Float32', 'Float32', ([], {}), '()\n', (1279, 1281), False, 'from std_msgs.msg import Float32\n'), ((566, 617), 'serial.Serial', 'serial.Serial', ([], {'port': '"""/dev/ttyUSB0"""', 'baudrate': '(115200)'}), "(port='/dev/ttyUSB0', baudrate=115200)\n", (579, 617), False, 'import serial\n'), ((1184, 1208), 'struct.unpack', 'struct.unpack', (['"""h"""', 'data'], {}), "('h', data)\n", (1197, 1208), False, 'import struct\n')] |
import sys
from utils.definitions import ROOT_DIR
from recommenders.script.main.top_pop_p import Top_pop_p
import scipy.sparse as sps
arg = sys.argv[1:]
mode = arg[0]
t = Top_pop_p()
eurm = t.get_top_pop_album(mode)
sps.save_npz(ROOT_DIR+"/recommenders/script/creative/"+mode+"_npz/top_pop_2_album_"+mode+".npz", eurm)
| [
"scipy.sparse.save_npz",
"recommenders.script.main.top_pop_p.Top_pop_p"
] | [((174, 185), 'recommenders.script.main.top_pop_p.Top_pop_p', 'Top_pop_p', ([], {}), '()\n', (183, 185), False, 'from recommenders.script.main.top_pop_p import Top_pop_p\n'), ((219, 335), 'scipy.sparse.save_npz', 'sps.save_npz', (["(ROOT_DIR + '/recommenders/script/creative/' + mode +\n '_npz/top_pop_2_album_' + mode + '.npz')", 'eurm'], {}), "(ROOT_DIR + '/recommenders/script/creative/' + mode +\n '_npz/top_pop_2_album_' + mode + '.npz', eurm)\n", (231, 335), True, 'import scipy.sparse as sps\n')] |
import json
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from funfactory.helpers import urlparams
from mock import patch
from nose.tools import eq_, ok_
from waffle.models import Flag
import snippets.base.models
from snippets.base import views
from snippets.base.models import Client
from snippets.base.tests import (JSONSnippetFactory, SnippetFactory,
SnippetTemplateFactory, TestCase)
snippets.base.models.CHANNELS = ('release', 'beta', 'aurora', 'nightly')
snippets.base.models.FIREFOX_STARTPAGE_VERSIONS = ('1', '2', '3', '4')
class FetchRenderSnippetsTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.client_items = [
('startpage_version', '4'),
('name', 'Firefox'),
('version', '23.0a1'),
('appbuildid', '20130510041606'),
('build_target', 'Darwin_Universal-gcc3'),
('locale', 'en-US'),
('channel', 'nightly'),
('os_version', 'Darwin 10.8.0'),
('distribution', 'default'),
('distribution_version', 'default_version'),
]
self.client_params = [v[1] for v in self.client_items]
self.client_kwargs = dict(self.client_items)
# Ensure the render-immediately view is used.
Flag.objects.create(name='serve_pregenerated_snippets', everyone=False)
def test_base(self):
# Matching snippets.
snippet_1 = SnippetFactory.create(on_nightly=True)
# Matching but disabled snippet.
SnippetFactory.create(on_nightly=True, disabled=True)
# Snippet that doesn't match.
SnippetFactory.create(on_nightly=False),
snippets_ok = [snippet_1]
params = self.client_params
response = self.client.get('/{0}/'.format('/'.join(params)))
eq_(set(snippets_ok), set(response.context['snippets']))
eq_(response.context['locale'], 'en-US')
@patch('snippets.base.views.Client', wraps=Client)
def test_client_construction(self, ClientMock):
"""
Ensure that the client object is constructed correctly from the URL
arguments.
"""
params = self.client_params
self.client.get('/{0}/'.format('/'.join(params)))
ClientMock.assert_called_with(**self.client_kwargs)
@override_settings(SNIPPET_HTTP_MAX_AGE=75)
def test_cache_headers(self):
"""
fetch_snippets should always have Cache-control set to
'public, max-age={settings.SNIPPET_HTTP_MAX_AGE}' and a Vary
header for 'If-None-Match'.
"""
params = self.client_params
response = self.client.get('/{0}/'.format('/'.join(params)))
eq_(response['Cache-control'], 'public, max-age=75')
eq_(response['Vary'], 'If-None-Match')
def test_etag(self):
"""
The response returned by fetch_snippets should have a ETag set
to the sha256 hash of the response content.
"""
request = self.factory.get('/')
with patch.object(views, 'render') as mock_render:
mock_render.return_value = HttpResponse('asdf')
response = views.fetch_snippets(request, **self.client_kwargs)
# sha256 of 'asdf'
expected = 'f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b'
eq_(response['ETag'], expected)
class JSONSnippetsTests(TestCase):
def test_base(self):
# Matching snippets.
snippet_1 = JSONSnippetFactory.create(on_nightly=True, weight=66)
# Matching but disabled snippet.
JSONSnippetFactory.create(on_nightly=True, disabled=True)
# Snippet that doesn't match.
JSONSnippetFactory.create(on_nightly=False),
params = ('4', 'Fennec', '23.0a1', '20130510041606',
'Darwin_Universal-gcc3', 'en-US', 'nightly',
'Darwin%2010.8.0', 'default', 'default_version')
response = self.client.get('/json/{0}/'.format('/'.join(params)))
data = json.loads(response.content)
eq_(len(data), 1)
eq_(data[0]['id'], snippet_1.id)
eq_(data[0]['weight'], 66)
@patch('snippets.base.views.Client', wraps=Client)
def test_client_construction(self, ClientMock):
"""
Ensure that the client object is constructed correctly from the URL
arguments.
"""
params = ('4', 'Fennec', '23.0a1', '20130510041606',
'Darwin_Universal-gcc3', 'en-US', 'nightly',
'Darwin%2010.8.0', 'default', 'default_version')
self.client.get('/json/{0}/'.format('/'.join(params)))
ClientMock.assert_called_with(startpage_version='4',
name='Fennec',
version='23.0a1',
appbuildid='20130510041606',
build_target='Darwin_Universal-gcc3',
locale='en-US',
channel='nightly',
os_version='Darwin 10.8.0',
distribution='default',
distribution_version='default_version')
@override_settings(SNIPPET_HTTP_MAX_AGE=75)
def test_cache_headers(self):
"""
view_snippets should always have Cache-control set to
'public, max-age={settings.SNIPPET_HTTP_MAX_AGE}' and no Vary header,
even after middleware is executed.
"""
params = ('1', 'Fennec', '23.0a1', '20130510041606',
'Darwin_Universal-gcc3', 'en-US', 'nightly',
'Darwin%2010.8.0', 'default', 'default_version')
response = self.client.get('/json/{0}/'.format('/'.join(params)))
eq_(response['Cache-control'], 'public, max-age=75')
ok_('Vary' not in response)
def test_response(self):
params = ('1', 'Fennec', '23.0a1', '20130510041606',
'Darwin_Universal-gcc3', 'en-US', 'nightly',
'Darwin%2010.8.0', 'default', 'default_version')
response = self.client.get('/json/{0}/'.format('/'.join(params)))
eq_(response['Content-Type'], 'application/json')
class PreviewSnippetTests(TestCase):
def setUp(self):
self.user = User.objects.create_superuser('admin', '<EMAIL>', 'asdf')
self.client.login(username='admin', password='<PASSWORD>')
def _preview_snippet(self, **kwargs):
return self.client.post(reverse('base.preview'), kwargs)
def test_invalid_template(self):
"""If template_id is missing or invalid, return a 400 Bad Request."""
response = self._preview_snippet()
eq_(response.status_code, 400)
response = self._preview_snippet(template_id=99999999999999999999)
eq_(response.status_code, 400)
response = self._preview_snippet(template_id='')
eq_(response.status_code, 400)
def test_invalid_data(self):
"""If data is missing or invalid, return a 400 Bad Request."""
template = SnippetTemplateFactory.create()
response = self._preview_snippet(template_id=template.id)
eq_(response.status_code, 400)
response = self._preview_snippet(template_id=template.id,
data='{invalid."json]')
eq_(response.status_code, 400)
def test_valid_args(self):
"""If template_id and data are both valid, return the preview page."""
template = SnippetTemplateFactory.create()
data = '{"a": "b"}'
response = self._preview_snippet(template_id=template.id, data=data)
eq_(response.status_code, 200)
snippet = response.context['snippet']
eq_(snippet.template, template)
eq_(snippet.data, data)
class ShowSnippetTests(TestCase):
def test_valid_snippet(self):
"""Test show of snippet."""
snippet = SnippetFactory.create()
response = self.client.get(reverse('base.show', kwargs={'snippet_id': snippet.id}))
eq_(response.status_code, 200)
def test_invalid_snippet(self):
"""Test invalid snippet returns 404."""
response = self.client.get(reverse('base.show', kwargs={'snippet_id': '100'}))
eq_(response.status_code, 404)
def test_valid_disabled_snippet_unauthenticated(self):
"""Test disabled snippet returns 404 to unauthenticated users."""
snippet = SnippetFactory.create(disabled=True)
response = self.client.get(reverse('base.show', kwargs={'snippet_id': snippet.id}))
eq_(response.status_code, 404)
def test_valid_disabled_snippet_authenticated(self):
"""Test disabled snippet returns 200 to authenticated users."""
snippet = SnippetFactory.create(disabled=True)
User.objects.create_superuser('admin', '<EMAIL>', 'asdf')
self.client.login(username='admin', password='<PASSWORD>')
response = self.client.get(reverse('base.show', kwargs={'snippet_id': snippet.id}))
eq_(response.status_code, 200)
@patch('snippets.base.views.SNIPPETS_PER_PAGE', 1)
class JSONIndexSnippetsTests(TestCase):
def setUp(self):
for i in range(10):
JSONSnippetFactory.create()
def test_base(self):
response = self.client.get(reverse('base.index_json'))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 1)
def test_second_page(self):
response = self.client.get(urlparams(reverse('base.index_json'), page=2))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 2)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_empty_page_number(self):
"""Test that empty page number returns the last page."""
response = self.client.get(urlparams(reverse('base.index_json'), page=20))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 10)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_non_integer_page_number(self):
"""Test that a non integer page number returns the first page."""
response = self.client.get(urlparams(reverse('base.index_json'), page='k'))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 1)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_filter(self):
JSONSnippetFactory.create(on_nightly=True)
response = self.client.get(urlparams(reverse('base.index_json'), on_nightly=2))
eq_(response.status_code, 200)
eq_(response.context['snippets'].paginator.count, 1)
def test_pagination_range_first_page(self):
response = self.client.get(reverse('base.index_json'))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 1)
eq_(pagination_range[-1], 3)
eq_(len(pagination_range), 3)
def test_pagination_range_last_page(self):
response = self.client.get(urlparams(reverse('base.index_json'), page=10))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 8)
eq_(pagination_range[-1], 10)
eq_(len(pagination_range), 3)
def test_pagination_range_middle_page(self):
response = self.client.get(urlparams(reverse('base.index_json'), page=5))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 3)
eq_(pagination_range[-1], 7)
eq_(len(pagination_range), 5)
@patch('snippets.base.views.SNIPPETS_PER_PAGE', 1)
class IndexSnippetsTests(TestCase):
def setUp(self):
for i in range(10):
SnippetFactory.create()
def test_base(self):
response = self.client.get(reverse('base.index'))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 1)
def test_second_page(self):
response = self.client.get(urlparams(reverse('base.index'), page=2))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 2)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_empty_page_number(self):
"""Test that empty page number returns the last page."""
response = self.client.get(urlparams(reverse('base.index'), page=20))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 10)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_non_integer_page_number(self):
"""Test that a non integer page number returns the first page."""
response = self.client.get(urlparams(reverse('base.index'), page='k'))
eq_(response.status_code, 200)
eq_(response.context['snippets'].number, 1)
eq_(response.context['snippets'].paginator.num_pages, 10)
def test_filter(self):
SnippetFactory.create(on_nightly=True)
response = self.client.get(urlparams(reverse('base.index'), on_nightly=2))
eq_(response.status_code, 200)
eq_(response.context['snippets'].paginator.count, 1)
def test_pagination_range_first_page(self):
response = self.client.get(reverse('base.index'))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 1)
eq_(pagination_range[-1], 3)
eq_(len(pagination_range), 3)
def test_pagination_range_last_page(self):
response = self.client.get(urlparams(reverse('base.index'), page=10))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 8)
eq_(pagination_range[-1], 10)
eq_(len(pagination_range), 3)
def test_pagination_range_middle_page(self):
response = self.client.get(urlparams(reverse('base.index'), page=5))
pagination_range = response.context['pagination_range']
eq_(pagination_range[0], 3)
eq_(pagination_range[-1], 7)
eq_(len(pagination_range), 5)
class FetchPregeneratedSnippetsTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.request = self.factory.get('/')
self.client_kwargs = {
'startpage_version': '4',
'name': 'Firefox',
'version': '23.0a1',
'appbuildid': '20130510041606',
'build_target': 'Darwin_Universal-gcc3',
'locale': 'en-US',
'channel': 'nightly',
'os_version': 'Darwin 10.8.0',
'distribution': 'default',
'distribution_version': 'default_version',
}
def test_normal(self):
with patch.object(views, 'SnippetBundle') as SnippetBundle:
bundle = SnippetBundle.return_value
bundle.url = '/foo/bar'
bundle.expired = False
response = views.fetch_pregenerated_snippets(self.request, **self.client_kwargs)
eq_(response.status_code, 302)
eq_(response['Location'], '/foo/bar')
# Check for correct client.
eq_(SnippetBundle.call_args[0][0].locale, 'en-US')
# Do not generate bundle when not expired.
ok_(not SnippetBundle.return_value.generate.called)
def test_regenerate(self):
"""If the bundle has expired, re-generate it."""
with patch.object(views, 'SnippetBundle') as SnippetBundle:
bundle = SnippetBundle.return_value
bundle.url = '/foo/bar'
bundle.expired = True
response = views.fetch_pregenerated_snippets(self.request, **self.client_kwargs)
eq_(response.status_code, 302)
eq_(response['Location'], '/foo/bar')
# Since the bundle was expired, ensure it was re-generated.
ok_(SnippetBundle.return_value.generate.called)
class FetchSnippetsTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.request = self.factory.get('/')
def test_flag_off(self):
Flag.objects.create(name='serve_pregenerated_snippets', everyone=False)
with patch.object(views, 'fetch_render_snippets') as fetch_render_snippets:
eq_(views.fetch_snippets(self.request, foo='bar'), fetch_render_snippets.return_value)
fetch_render_snippets.assert_called_with(self.request, foo='bar')
def test_flag_on(self):
Flag.objects.create(name='serve_pregenerated_snippets', everyone=True)
with patch.object(views, 'fetch_pregenerated_snippets') as fetch_pregenerated_snippets:
eq_(views.fetch_snippets(self.request, foo='bar'),
fetch_pregenerated_snippets.return_value)
fetch_pregenerated_snippets.assert_called_with(self.request, foo='bar')
class ActiveSnippetsViewTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.request = self.factory.get('/')
def test_base(self):
snippets = SnippetFactory.create_batch(2)
jsonsnippets = JSONSnippetFactory.create_batch(2)
SnippetFactory.create(disabled=True)
JSONSnippetFactory.create(disabled=True)
response = views.ActiveSnippetsView.as_view()(self.request)
eq_(response.get('content-type'), 'application/json')
data = json.loads(response.content)
eq_(set([snippets[0].id, snippets[1].id,
jsonsnippets[0].id, jsonsnippets[1].id]),
set([x['id'] for x in data]))
| [
"nose.tools.eq_",
"snippets.base.tests.JSONSnippetFactory.create",
"snippets.base.tests.JSONSnippetFactory.create_batch",
"django.core.urlresolvers.reverse",
"snippets.base.views.ActiveSnippetsView.as_view",
"waffle.models.Flag.objects.create",
"mock.patch",
"snippets.base.tests.SnippetFactory.create",
"django.http.HttpResponse",
"snippets.base.tests.SnippetTemplateFactory.create",
"django.contrib.auth.models.User.objects.create_superuser",
"json.loads",
"snippets.base.tests.SnippetFactory.create_batch",
"snippets.base.views.fetch_snippets",
"snippets.base.views.fetch_pregenerated_snippets",
"django.test.utils.override_settings",
"mock.patch.object",
"nose.tools.ok_",
"django.test.client.RequestFactory"
] | [((9302, 9351), 'mock.patch', 'patch', (['"""snippets.base.views.SNIPPETS_PER_PAGE"""', '(1)'], {}), "('snippets.base.views.SNIPPETS_PER_PAGE', 1)\n", (9307, 9351), False, 'from mock import patch\n'), ((11809, 11858), 'mock.patch', 'patch', (['"""snippets.base.views.SNIPPETS_PER_PAGE"""', '(1)'], {}), "('snippets.base.views.SNIPPETS_PER_PAGE', 1)\n", (11814, 11858), False, 'from mock import patch\n'), ((2127, 2176), 'mock.patch', 'patch', (['"""snippets.base.views.Client"""'], {'wraps': 'Client'}), "('snippets.base.views.Client', wraps=Client)\n", (2132, 2176), False, 'from mock import patch\n'), ((2509, 2551), 'django.test.utils.override_settings', 'override_settings', ([], {'SNIPPET_HTTP_MAX_AGE': '(75)'}), '(SNIPPET_HTTP_MAX_AGE=75)\n', (2526, 2551), False, 'from django.test.utils import override_settings\n'), ((4348, 4397), 'mock.patch', 'patch', (['"""snippets.base.views.Client"""'], {'wraps': 'Client'}), "('snippets.base.views.Client', wraps=Client)\n", (4353, 4397), False, 'from mock import patch\n'), ((5460, 5502), 'django.test.utils.override_settings', 'override_settings', ([], {'SNIPPET_HTTP_MAX_AGE': '(75)'}), '(SNIPPET_HTTP_MAX_AGE=75)\n', (5477, 5502), False, 'from django.test.utils import override_settings\n'), ((831, 847), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (845, 847), False, 'from django.test.client import RequestFactory\n'), ((1488, 1559), 'waffle.models.Flag.objects.create', 'Flag.objects.create', ([], {'name': '"""serve_pregenerated_snippets"""', 'everyone': '(False)'}), "(name='serve_pregenerated_snippets', everyone=False)\n", (1507, 1559), False, 'from waffle.models import Flag\n'), ((1635, 1673), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'on_nightly': '(True)'}), '(on_nightly=True)\n', (1656, 1673), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((1724, 1777), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'on_nightly': '(True)', 'disabled': '(True)'}), '(on_nightly=True, disabled=True)\n', (1745, 1777), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((2080, 2120), 'nose.tools.eq_', 'eq_', (["response.context['locale']", '"""en-US"""'], {}), "(response.context['locale'], 'en-US')\n", (2083, 2120), False, 'from nose.tools import eq_, ok_\n'), ((2891, 2943), 'nose.tools.eq_', 'eq_', (["response['Cache-control']", '"""public, max-age=75"""'], {}), "(response['Cache-control'], 'public, max-age=75')\n", (2894, 2943), False, 'from nose.tools import eq_, ok_\n'), ((2952, 2990), 'nose.tools.eq_', 'eq_', (["response['Vary']", '"""If-None-Match"""'], {}), "(response['Vary'], 'If-None-Match')\n", (2955, 2990), False, 'from nose.tools import eq_, ok_\n'), ((3676, 3729), 'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {'on_nightly': '(True)', 'weight': '(66)'}), '(on_nightly=True, weight=66)\n', (3701, 3729), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((3780, 3837), 'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {'on_nightly': '(True)', 'disabled': '(True)'}), '(on_nightly=True, disabled=True)\n', (3805, 3837), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((4211, 4239), 'json.loads', 'json.loads', (['response.content'], {}), 
'(response.content)\n', (4221, 4239), False, 'import json\n'), ((4274, 4306), 'nose.tools.eq_', 'eq_', (["data[0]['id']", 'snippet_1.id'], {}), "(data[0]['id'], snippet_1.id)\n", (4277, 4306), False, 'from nose.tools import eq_, ok_\n'), ((4315, 4341), 'nose.tools.eq_', 'eq_', (["data[0]['weight']", '(66)'], {}), "(data[0]['weight'], 66)\n", (4318, 4341), False, 'from nose.tools import eq_, ok_\n'), ((6017, 6069), 'nose.tools.eq_', 'eq_', (["response['Cache-control']", '"""public, max-age=75"""'], {}), "(response['Cache-control'], 'public, max-age=75')\n", (6020, 6069), False, 'from nose.tools import eq_, ok_\n'), ((6078, 6105), 'nose.tools.ok_', 'ok_', (["('Vary' not in response)"], {}), "('Vary' not in response)\n", (6081, 6105), False, 'from nose.tools import eq_, ok_\n'), ((6409, 6458), 'nose.tools.eq_', 'eq_', (["response['Content-Type']", '"""application/json"""'], {}), "(response['Content-Type'], 'application/json')\n", (6412, 6458), False, 'from nose.tools import eq_, ok_\n'), ((6539, 6596), 'django.contrib.auth.models.User.objects.create_superuser', 'User.objects.create_superuser', (['"""admin"""', '"""<EMAIL>"""', '"""asdf"""'], {}), "('admin', '<EMAIL>', 'asdf')\n", (6568, 6596), False, 'from django.contrib.auth.models import User\n'), ((6939, 6969), 'nose.tools.eq_', 'eq_', (['response.status_code', '(400)'], {}), '(response.status_code, 400)\n', (6942, 6969), False, 'from nose.tools import eq_, ok_\n'), ((7054, 7084), 'nose.tools.eq_', 'eq_', (['response.status_code', '(400)'], {}), '(response.status_code, 400)\n', (7057, 7084), False, 'from nose.tools import eq_, ok_\n'), ((7151, 7181), 'nose.tools.eq_', 'eq_', (['response.status_code', '(400)'], {}), '(response.status_code, 400)\n', (7154, 7181), False, 'from nose.tools import eq_, ok_\n'), ((7306, 7337), 'snippets.base.tests.SnippetTemplateFactory.create', 'SnippetTemplateFactory.create', ([], {}), '()\n', (7335, 7337), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((7412, 7442), 'nose.tools.eq_', 'eq_', (['response.status_code', '(400)'], {}), '(response.status_code, 400)\n', (7415, 7442), False, 'from nose.tools import eq_, ok_\n'), ((7583, 7613), 'nose.tools.eq_', 'eq_', (['response.status_code', '(400)'], {}), '(response.status_code, 400)\n', (7586, 7613), False, 'from nose.tools import eq_, ok_\n'), ((7744, 7775), 'snippets.base.tests.SnippetTemplateFactory.create', 'SnippetTemplateFactory.create', ([], {}), '()\n', (7773, 7775), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((7890, 7920), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (7893, 7920), False, 'from nose.tools import eq_, ok_\n'), ((7976, 8007), 'nose.tools.eq_', 'eq_', (['snippet.template', 'template'], {}), '(snippet.template, template)\n', (7979, 8007), False, 'from nose.tools import eq_, ok_\n'), ((8016, 8039), 'nose.tools.eq_', 'eq_', (['snippet.data', 'data'], {}), '(snippet.data, data)\n', (8019, 8039), False, 'from nose.tools import eq_, ok_\n'), ((8164, 8187), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {}), '()\n', (8185, 8187), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((8288, 8318), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (8291, 8318), False, 'from nose.tools import eq_, ok_\n'), ((8499, 8529), 
'nose.tools.eq_', 'eq_', (['response.status_code', '(404)'], {}), '(response.status_code, 404)\n', (8502, 8529), False, 'from nose.tools import eq_, ok_\n'), ((8682, 8718), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'disabled': '(True)'}), '(disabled=True)\n', (8703, 8718), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((8819, 8849), 'nose.tools.eq_', 'eq_', (['response.status_code', '(404)'], {}), '(response.status_code, 404)\n', (8822, 8849), False, 'from nose.tools import eq_, ok_\n'), ((8998, 9034), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'disabled': '(True)'}), '(disabled=True)\n', (9019, 9034), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((9043, 9100), 'django.contrib.auth.models.User.objects.create_superuser', 'User.objects.create_superuser', (['"""admin"""', '"""<EMAIL>"""', '"""asdf"""'], {}), "('admin', '<EMAIL>', 'asdf')\n", (9072, 9100), False, 'from django.contrib.auth.models import User\n'), ((9268, 9298), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (9271, 9298), False, 'from nose.tools import eq_, ok_\n'), ((9578, 9608), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (9581, 9608), False, 'from nose.tools import eq_, ok_\n'), ((9617, 9660), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(1)'], {}), "(response.context['snippets'].number, 1)\n", (9620, 9660), False, 'from nose.tools import eq_, ok_\n'), ((9784, 9814), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (9787, 9814), False, 'from nose.tools import eq_, ok_\n'), ((9823, 9866), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(2)'], {}), "(response.context['snippets'].number, 2)\n", (9826, 9866), False, 'from nose.tools import eq_, ok_\n'), ((9875, 9932), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (9878, 9932), False, 'from nose.tools import eq_, ok_\n'), ((10128, 10158), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (10131, 10158), False, 'from nose.tools import eq_, ok_\n'), ((10167, 10211), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(10)'], {}), "(response.context['snippets'].number, 10)\n", (10170, 10211), False, 'from nose.tools import eq_, ok_\n'), ((10220, 10277), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (10223, 10277), False, 'from nose.tools import eq_, ok_\n'), ((10489, 10519), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (10492, 10519), False, 'from nose.tools import eq_, ok_\n'), ((10528, 10571), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(1)'], {}), "(response.context['snippets'].number, 1)\n", (10531, 10571), False, 'from nose.tools import eq_, ok_\n'), ((10580, 10637), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (10583, 10637), False, 'from nose.tools import eq_, ok_\n'), ((10674, 10716), 
'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {'on_nightly': '(True)'}), '(on_nightly=True)\n', (10699, 10716), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((10813, 10843), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (10816, 10843), False, 'from nose.tools import eq_, ok_\n'), ((10852, 10904), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.count", '(1)'], {}), "(response.context['snippets'].paginator.count, 1)\n", (10855, 10904), False, 'from nose.tools import eq_, ok_\n'), ((11089, 11116), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(1)'], {}), '(pagination_range[0], 1)\n', (11092, 11116), False, 'from nose.tools import eq_, ok_\n'), ((11125, 11153), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(3)'], {}), '(pagination_range[-1], 3)\n', (11128, 11153), False, 'from nose.tools import eq_, ok_\n'), ((11395, 11422), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(8)'], {}), '(pagination_range[0], 8)\n', (11398, 11422), False, 'from nose.tools import eq_, ok_\n'), ((11431, 11460), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(10)'], {}), '(pagination_range[-1], 10)\n', (11434, 11460), False, 'from nose.tools import eq_, ok_\n'), ((11703, 11730), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(3)'], {}), '(pagination_range[0], 3)\n', (11706, 11730), False, 'from nose.tools import eq_, ok_\n'), ((11739, 11767), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(7)'], {}), '(pagination_range[-1], 7)\n', (11742, 11767), False, 'from nose.tools import eq_, ok_\n'), ((12072, 12102), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (12075, 12102), False, 'from nose.tools import eq_, ok_\n'), ((12111, 12154), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(1)'], {}), "(response.context['snippets'].number, 1)\n", (12114, 12154), False, 'from nose.tools import eq_, ok_\n'), ((12273, 12303), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (12276, 12303), False, 'from nose.tools import eq_, ok_\n'), ((12312, 12355), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(2)'], {}), "(response.context['snippets'].number, 2)\n", (12315, 12355), False, 'from nose.tools import eq_, ok_\n'), ((12364, 12421), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (12367, 12421), False, 'from nose.tools import eq_, ok_\n'), ((12612, 12642), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (12615, 12642), False, 'from nose.tools import eq_, ok_\n'), ((12651, 12695), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].number", '(10)'], {}), "(response.context['snippets'].number, 10)\n", (12654, 12695), False, 'from nose.tools import eq_, ok_\n'), ((12704, 12761), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (12707, 12761), False, 'from nose.tools import eq_, ok_\n'), ((12968, 12998), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (12971, 12998), False, 'from nose.tools import eq_, ok_\n'), ((13007, 13050), 'nose.tools.eq_', 'eq_', 
(["response.context['snippets'].number", '(1)'], {}), "(response.context['snippets'].number, 1)\n", (13010, 13050), False, 'from nose.tools import eq_, ok_\n'), ((13059, 13116), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.num_pages", '(10)'], {}), "(response.context['snippets'].paginator.num_pages, 10)\n", (13062, 13116), False, 'from nose.tools import eq_, ok_\n'), ((13153, 13191), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'on_nightly': '(True)'}), '(on_nightly=True)\n', (13174, 13191), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((13283, 13313), 'nose.tools.eq_', 'eq_', (['response.status_code', '(200)'], {}), '(response.status_code, 200)\n', (13286, 13313), False, 'from nose.tools import eq_, ok_\n'), ((13322, 13374), 'nose.tools.eq_', 'eq_', (["response.context['snippets'].paginator.count", '(1)'], {}), "(response.context['snippets'].paginator.count, 1)\n", (13325, 13374), False, 'from nose.tools import eq_, ok_\n'), ((13554, 13581), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(1)'], {}), '(pagination_range[0], 1)\n', (13557, 13581), False, 'from nose.tools import eq_, ok_\n'), ((13590, 13618), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(3)'], {}), '(pagination_range[-1], 3)\n', (13593, 13618), False, 'from nose.tools import eq_, ok_\n'), ((13855, 13882), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(8)'], {}), '(pagination_range[0], 8)\n', (13858, 13882), False, 'from nose.tools import eq_, ok_\n'), ((13891, 13920), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(10)'], {}), '(pagination_range[-1], 10)\n', (13894, 13920), False, 'from nose.tools import eq_, ok_\n'), ((14158, 14185), 'nose.tools.eq_', 'eq_', (['pagination_range[0]', '(3)'], {}), '(pagination_range[0], 3)\n', (14161, 14185), False, 'from nose.tools import eq_, ok_\n'), ((14194, 14222), 'nose.tools.eq_', 'eq_', (['pagination_range[-1]', '(7)'], {}), '(pagination_range[-1], 7)\n', (14197, 14222), False, 'from nose.tools import eq_, ok_\n'), ((14355, 14371), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (14369, 14371), False, 'from django.test.client import RequestFactory\n'), ((15176, 15206), 'nose.tools.eq_', 'eq_', (['response.status_code', '(302)'], {}), '(response.status_code, 302)\n', (15179, 15206), False, 'from nose.tools import eq_, ok_\n'), ((15215, 15252), 'nose.tools.eq_', 'eq_', (["response['Location']", '"""/foo/bar"""'], {}), "(response['Location'], '/foo/bar')\n", (15218, 15252), False, 'from nose.tools import eq_, ok_\n'), ((15298, 15348), 'nose.tools.eq_', 'eq_', (['SnippetBundle.call_args[0][0].locale', '"""en-US"""'], {}), "(SnippetBundle.call_args[0][0].locale, 'en-US')\n", (15301, 15348), False, 'from nose.tools import eq_, ok_\n'), ((15409, 15460), 'nose.tools.ok_', 'ok_', (['(not SnippetBundle.return_value.generate.called)'], {}), '(not SnippetBundle.return_value.generate.called)\n', (15412, 15460), False, 'from nose.tools import eq_, ok_\n'), ((15838, 15868), 'nose.tools.eq_', 'eq_', (['response.status_code', '(302)'], {}), '(response.status_code, 302)\n', (15841, 15868), False, 'from nose.tools import eq_, ok_\n'), ((15877, 15914), 'nose.tools.eq_', 'eq_', (["response['Location']", '"""/foo/bar"""'], {}), "(response['Location'], '/foo/bar')\n", (15880, 15914), False, 'from nose.tools import eq_, ok_\n'), ((15992, 16039), 'nose.tools.ok_', 'ok_', (['SnippetBundle.return_value.generate.called'], {}), 
'(SnippetBundle.return_value.generate.called)\n', (15995, 16039), False, 'from nose.tools import eq_, ok_\n'), ((16122, 16138), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (16136, 16138), False, 'from django.test.client import RequestFactory\n'), ((16222, 16293), 'waffle.models.Flag.objects.create', 'Flag.objects.create', ([], {'name': '"""serve_pregenerated_snippets"""', 'everyone': '(False)'}), "(name='serve_pregenerated_snippets', everyone=False)\n", (16241, 16293), False, 'from waffle.models import Flag\n'), ((16593, 16663), 'waffle.models.Flag.objects.create', 'Flag.objects.create', ([], {'name': '"""serve_pregenerated_snippets"""', 'everyone': '(True)'}), "(name='serve_pregenerated_snippets', everyone=True)\n", (16612, 16663), False, 'from waffle.models import Flag\n'), ((17053, 17069), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (17067, 17069), False, 'from django.test.client import RequestFactory\n'), ((17160, 17190), 'snippets.base.tests.SnippetFactory.create_batch', 'SnippetFactory.create_batch', (['(2)'], {}), '(2)\n', (17187, 17190), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((17214, 17248), 'snippets.base.tests.JSONSnippetFactory.create_batch', 'JSONSnippetFactory.create_batch', (['(2)'], {}), '(2)\n', (17245, 17248), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((17257, 17293), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'disabled': '(True)'}), '(disabled=True)\n', (17278, 17293), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((17302, 17342), 'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {'disabled': '(True)'}), '(disabled=True)\n', (17327, 17342), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((17488, 17516), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (17498, 17516), False, 'import json\n'), ((1825, 1864), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {'on_nightly': '(False)'}), '(on_nightly=False)\n', (1846, 1864), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((3218, 3247), 'mock.patch.object', 'patch.object', (['views', '"""render"""'], {}), "(views, 'render')\n", (3230, 3247), False, 'from mock import patch\n'), ((3303, 3323), 'django.http.HttpResponse', 'HttpResponse', (['"""asdf"""'], {}), "('asdf')\n", (3315, 3323), False, 'from django.http import HttpResponse\n'), ((3347, 3398), 'snippets.base.views.fetch_snippets', 'views.fetch_snippets', (['request'], {}), '(request, **self.client_kwargs)\n', (3367, 3398), False, 'from snippets.base import views\n'), ((3533, 3564), 'nose.tools.eq_', 'eq_', (["response['ETag']", 'expected'], {}), "(response['ETag'], expected)\n", (3536, 3564), False, 'from nose.tools import eq_, ok_\n'), ((3885, 3928), 'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {'on_nightly': '(False)'}), '(on_nightly=False)\n', (3910, 3928), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((6739, 6762), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.preview"""'], {}), "('base.preview')\n", (6746, 6762), False, 'from 
django.core.urlresolvers import reverse\n'), ((8223, 8278), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.show"""'], {'kwargs': "{'snippet_id': snippet.id}"}), "('base.show', kwargs={'snippet_id': snippet.id})\n", (8230, 8278), False, 'from django.core.urlresolvers import reverse\n'), ((8439, 8489), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.show"""'], {'kwargs': "{'snippet_id': '100'}"}), "('base.show', kwargs={'snippet_id': '100'})\n", (8446, 8489), False, 'from django.core.urlresolvers import reverse\n'), ((8754, 8809), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.show"""'], {'kwargs': "{'snippet_id': snippet.id}"}), "('base.show', kwargs={'snippet_id': snippet.id})\n", (8761, 8809), False, 'from django.core.urlresolvers import reverse\n'), ((9203, 9258), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.show"""'], {'kwargs': "{'snippet_id': snippet.id}"}), "('base.show', kwargs={'snippet_id': snippet.id})\n", (9210, 9258), False, 'from django.core.urlresolvers import reverse\n'), ((9453, 9480), 'snippets.base.tests.JSONSnippetFactory.create', 'JSONSnippetFactory.create', ([], {}), '()\n', (9478, 9480), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((9542, 9568), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (9549, 9568), False, 'from django.core.urlresolvers import reverse\n'), ((10989, 11015), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (10996, 11015), False, 'from django.core.urlresolvers import reverse\n'), ((11956, 11979), 'snippets.base.tests.SnippetFactory.create', 'SnippetFactory.create', ([], {}), '()\n', (11977, 11979), False, 'from snippets.base.tests import JSONSnippetFactory, SnippetFactory, SnippetTemplateFactory, TestCase\n'), ((12041, 12062), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (12048, 12062), False, 'from django.core.urlresolvers import reverse\n'), ((13459, 13480), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (13466, 13480), False, 'from django.core.urlresolvers import reverse\n'), ((14900, 14936), 'mock.patch.object', 'patch.object', (['views', '"""SnippetBundle"""'], {}), "(views, 'SnippetBundle')\n", (14912, 14936), False, 'from mock import patch\n'), ((15097, 15166), 'snippets.base.views.fetch_pregenerated_snippets', 'views.fetch_pregenerated_snippets', (['self.request'], {}), '(self.request, **self.client_kwargs)\n', (15130, 15166), False, 'from snippets.base import views\n'), ((15563, 15599), 'mock.patch.object', 'patch.object', (['views', '"""SnippetBundle"""'], {}), "(views, 'SnippetBundle')\n", (15575, 15599), False, 'from mock import patch\n'), ((15759, 15828), 'snippets.base.views.fetch_pregenerated_snippets', 'views.fetch_pregenerated_snippets', (['self.request'], {}), '(self.request, **self.client_kwargs)\n', (15792, 15828), False, 'from snippets.base import views\n'), ((16308, 16352), 'mock.patch.object', 'patch.object', (['views', '"""fetch_render_snippets"""'], {}), "(views, 'fetch_render_snippets')\n", (16320, 16352), False, 'from mock import patch\n'), ((16678, 16728), 'mock.patch.object', 'patch.object', (['views', '"""fetch_pregenerated_snippets"""'], {}), "(views, 'fetch_pregenerated_snippets')\n", (16690, 16728), False, 'from mock import patch\n'), ((17362, 17396), 
'snippets.base.views.ActiveSnippetsView.as_view', 'views.ActiveSnippetsView.as_view', ([], {}), '()\n', (17394, 17396), False, 'from snippets.base import views\n'), ((9739, 9765), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (9746, 9765), False, 'from django.core.urlresolvers import reverse\n'), ((10082, 10108), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (10089, 10108), False, 'from django.core.urlresolvers import reverse\n'), ((10442, 10468), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (10449, 10468), False, 'from django.core.urlresolvers import reverse\n'), ((10762, 10788), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (10769, 10788), False, 'from django.core.urlresolvers import reverse\n'), ((11285, 11311), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (11292, 11311), False, 'from django.core.urlresolvers import reverse\n'), ((11594, 11620), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index_json"""'], {}), "('base.index_json')\n", (11601, 11620), False, 'from django.core.urlresolvers import reverse\n'), ((12233, 12254), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (12240, 12254), False, 'from django.core.urlresolvers import reverse\n'), ((12571, 12592), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (12578, 12592), False, 'from django.core.urlresolvers import reverse\n'), ((12926, 12947), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (12933, 12947), False, 'from django.core.urlresolvers import reverse\n'), ((13237, 13258), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (13244, 13258), False, 'from django.core.urlresolvers import reverse\n'), ((13750, 13771), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (13757, 13771), False, 'from django.core.urlresolvers import reverse\n'), ((14054, 14075), 'django.core.urlresolvers.reverse', 'reverse', (['"""base.index"""'], {}), "('base.index')\n", (14061, 14075), False, 'from django.core.urlresolvers import reverse\n'), ((16395, 16440), 'snippets.base.views.fetch_snippets', 'views.fetch_snippets', (['self.request'], {'foo': '"""bar"""'}), "(self.request, foo='bar')\n", (16415, 16440), False, 'from snippets.base import views\n'), ((16777, 16822), 'snippets.base.views.fetch_snippets', 'views.fetch_snippets', (['self.request'], {'foo': '"""bar"""'}), "(self.request, foo='bar')\n", (16797, 16822), False, 'from snippets.base import views\n')] |
import logging as lg
from contextlib import contextmanager
from pywhdfs.client import create_client
from ..filesystem import IFileSystem
_logger = lg.getLogger(__name__)
class HDFSFileSystem(IFileSystem):
def __init__(self, auth_mechanism, **params):
self.client = create_client(auth_mechanism, **params)
def resolve_path(self, path):
return self.client.resolvepath(path)
def _list(self, path, status=False, glob=False):
return self.client.list(path, status, glob)
def _status(self, path, strict=True):
return self.client.status(path, strict)
def _content(self, path, strict=True):
c = self.client.content(path, strict)
if c is None:
return None
else:
return {
"length": c["length"],
"fileCount": c["fileCount"],
"directoryCount": c["directoryCount"],
}
def _delete(self, path, recursive=False):
return self.client.delete(path, recursive)
def _copy(self, src_path, dst_path):
# HDFS does not support copy natively : ugly implementation
with self.client.read(src_path, chunk_size=1024) as reader:
self.client.write(dst_path, reader)
def rename(self, src_path, dst_path):
return self.client.rename(src_path, dst_path)
def _set_owner(self, path, owner=None, group=None):
return self.client.set_owner(path, owner, group)
def _set_permission(self, path, permission):
return self.client.set_permission(path, permission)
def _mkdir(self, path, permission=None):
return self.client.makedirs(path, permission)
def _open(self, path, mode, buffer_size=-1, encoding=None, **kwargs):
# HDFS library does not implement open method
raise NotImplementedError
@contextmanager
def read(
self,
path,
offset=0,
buffer_size=1024,
encoding=None,
chunk_size=None,
delimiter=None,
**kwargs
):
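        """Reads a file from HDFS; yields a file-like reader as a context manager."""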
with self.client.read(
path,
offset=offset,
buffer_size=buffer_size,
encoding=encoding,
chunk_size=chunk_size,
delimiter=delimiter,
**kwargs
) as reader:
yield reader
def write(
self,
path,
data=None,
overwrite=False,
permission=None,
buffer_size=1024,
append=False,
encoding=None,
**kwargs
):
return self.client.write(
path,
data=data,
overwrite=overwrite,
permission=permission,
buffersize=buffer_size,
append=append,
encoding=encoding,
**kwargs
)
| [
"logging.getLogger",
"pywhdfs.client.create_client"
] | [((150, 172), 'logging.getLogger', 'lg.getLogger', (['__name__'], {}), '(__name__)\n', (162, 172), True, 'import logging as lg\n'), ((282, 321), 'pywhdfs.client.create_client', 'create_client', (['auth_mechanism'], {}), '(auth_mechanism, **params)\n', (295, 321), False, 'from pywhdfs.client import create_client\n')] |
# -*- coding: utf-8 -*-
import os
import random
from multiprocessing import cpu_count
import numpy as np
import paddle
from PIL import Image
import cv2 as cv
# Preprocessing for training images
def train_mapper(sample):
img_path, label, crop_size, resize_size = sample
try:
img = Image.open(img_path)
        # Resize the image to a uniform size
img = img.resize((resize_size, resize_size), Image.ANTIALIAS)
        # Convert the image to a numpy array
img = np.array(img).astype(np.float32)
img = cv.cvtColor(img, cv.COLOR_GRAY2BGR)
        # Convert to CHW layout
img = img.transpose((2, 0, 1))
        # Reorder channels to BGR and scale to [0, 1]
img = img[(2, 1, 0), :, :] / 255.0
return img, label
except:
print("%s 该图片错误,请删除该图片并重新创建图像数据列表" % img_path)
# Get the training reader
def train_reader(train_list_path, crop_size, resize_size):
father_path = os.path.dirname(train_list_path)
def reader():
with open(train_list_path, 'r') as f:
lines = f.readlines()
            # Shuffle the image list
np.random.shuffle(lines)
            # Fetch each image path and its label
for line in lines:
img, label = line.split('\t')
img = os.path.join(father_path, img)
yield img, label, crop_size, resize_size
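    # Apply train_mapper to the samples in parallel (one worker per CPU core, buffer size 102400).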
return paddle.reader.xmap_readers(train_mapper, reader, cpu_count(), 102400)
# Preprocessing for test images
def test_mapper(sample):
img, label, crop_size = sample
img = Image.open(img)
    # Resize the image to a uniform size
img = img.resize((crop_size, crop_size), Image.ANTIALIAS)
    # Convert to a numpy array
img = np.array(img).astype(np.float32)
img = cv.cvtColor(img, cv.COLOR_GRAY2BGR)
    # Convert to CHW layout
img = img.transpose((2, 0, 1))
    # Reorder channels to BGR and scale to [0, 1]
img = img[(2, 1, 0), :, :] / 255.0
return img, label
# Reader for test images
def test_reader(test_list_path, crop_size):
father_path = os.path.dirname(test_list_path)
def reader():
with open(test_list_path, 'r') as f:
lines = f.readlines()
for line in lines:
img, label = line.split('\t')
img = os.path.join(father_path, img)
yield img, label, crop_size
return paddle.reader.xmap_readers(test_mapper, reader, cpu_count(), 1024)
| [
"PIL.Image.open",
"os.path.join",
"multiprocessing.cpu_count",
"os.path.dirname",
"numpy.array",
"cv2.cvtColor",
"numpy.random.shuffle"
] | [((803, 835), 'os.path.dirname', 'os.path.dirname', (['train_list_path'], {}), '(train_list_path)\n', (818, 835), False, 'import os\n'), ((1370, 1385), 'PIL.Image.open', 'Image.open', (['img'], {}), '(img)\n', (1380, 1385), False, 'from PIL import Image\n'), ((1530, 1565), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_GRAY2BGR'], {}), '(img, cv.COLOR_GRAY2BGR)\n', (1541, 1565), True, 'import cv2 as cv\n'), ((1765, 1796), 'os.path.dirname', 'os.path.dirname', (['test_list_path'], {}), '(test_list_path)\n', (1780, 1796), False, 'import os\n'), ((273, 293), 'PIL.Image.open', 'Image.open', (['img_path'], {}), '(img_path)\n', (283, 293), False, 'from PIL import Image\n'), ((465, 500), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_GRAY2BGR'], {}), '(img, cv.COLOR_GRAY2BGR)\n', (476, 500), True, 'import cv2 as cv\n'), ((1267, 1278), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (1276, 1278), False, 'from multiprocessing import cpu_count\n'), ((2129, 2140), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (2138, 2140), False, 'from multiprocessing import cpu_count\n'), ((968, 992), 'numpy.random.shuffle', 'np.random.shuffle', (['lines'], {}), '(lines)\n', (985, 992), True, 'import numpy as np\n'), ((1487, 1500), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (1495, 1500), True, 'import numpy as np\n'), ((418, 431), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (426, 431), True, 'import numpy as np\n'), ((1118, 1148), 'os.path.join', 'os.path.join', (['father_path', 'img'], {}), '(father_path, img)\n', (1130, 1148), False, 'import os\n'), ((1994, 2024), 'os.path.join', 'os.path.join', (['father_path', 'img'], {}), '(father_path, img)\n', (2006, 2024), False, 'import os\n')] |
from biotea_clustering_ws import app
app.run(host='0.0.0.0', port=5353, debug=True, threaded=True) | [
"biotea_clustering_ws.app.run"
] | [((39, 100), 'biotea_clustering_ws.app.run', 'app.run', ([], {'host': '"""0.0.0.0"""', 'port': '(5353)', 'debug': '(True)', 'threaded': '(True)'}), "(host='0.0.0.0', port=5353, debug=True, threaded=True)\n", (46, 100), False, 'from biotea_clustering_ws import app\n')] |
# -*- coding: utf-8 -*-
'''Functional tests using WebTest.
See: http://webtest.readthedocs.org/
'''
from flask import url_for
from flask.ext.webtest import TestApp
from nose.tools import * # PEP8 asserts
from ..user.models import User
from .base import DbTestCase
from .factories import UserFactory
class TestLoggingIn(DbTestCase):
def setUp(self):
self.w = TestApp(self.app)
self.user = UserFactory(password="<PASSWORD>")
self.user.save()
def test_can_log_in(self):
# Goes to homepage
res = self.w.get("/")
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = self.user.username
form['password'] = '<PASSWORD>'
# Submits
res = form.submit().maybe_follow()
assert_equal(res.status_code, 200)
def _login(self, username, password):
res = self.w.get("/")
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = username
form['password'] = password
# Submits
res = form.submit().maybe_follow()
return res
def test_sees_alert_on_log_out(self):
res = self._login(self.user.username, 'myprecious')
res = self.w.get(url_for('public.logout')).maybe_follow()
# sees alert
assert_in('You are logged out.', res)
def test_sees_error_message_if_password_is_incorrect(self):
# Goes to homepage
res = self.w.get("/")
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = self.user.username
form['password'] = '<PASSWORD>'
# Submits
res = form.submit()
# sees error
assert_in("Invalid password", res)
def test_sees_error_message_if_username_doesnt_exist(self):
# Goes to homepage
res = self.w.get("/")
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = 'unknown'
form['password'] = '<PASSWORD>'
# Submits
res = form.submit()
# sees error
assert_in("Unknown user", res)
class TestRegistering(DbTestCase):
def setUp(self):
self.w = TestApp(self.app)
def test_can_register(self):
# Goes to homepage
res = self.w.get("/")
# Clicks Create Account button
res = res.click("Create account")
# Fills out the form
form = res.forms["registerForm"]
form['username'] = 'foobar'
form['email'] = '<EMAIL>'
form['password'] = '<PASSWORD>'
form['confirm'] = '<PASSWORD>'
# Submits
res = form.submit().maybe_follow()
assert_equal(res.status_code, 200)
# A new user was created
assert_equal(len(User.query.all()), 1)
def test_sees_error_message_if_passwords_dont_match(self):
# Goes to registration page
res = self.w.get(url_for("public.register"))
# Fills out form, but passwords don't match
form = res.forms["registerForm"]
form['username'] = 'foobar'
form['email'] = '<EMAIL>'
form['password'] = '<PASSWORD>'
form['confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert_in("Passwords must match", res)
def test_sees_error_message_if_user_already_registered(self):
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = self.w.get(url_for("public.register"))
# Fills out form, but username is already registered
form = res.forms["registerForm"]
form['username'] = user.username
form['email'] = '<EMAIL>'
form['password'] = '<PASSWORD>'
form['confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert_in("Username already registered", res) | [
"flask.ext.webtest.TestApp",
"flask.url_for"
] | [((376, 393), 'flask.ext.webtest.TestApp', 'TestApp', (['self.app'], {}), '(self.app)\n', (383, 393), False, 'from flask.ext.webtest import TestApp\n'), ((2249, 2266), 'flask.ext.webtest.TestApp', 'TestApp', (['self.app'], {}), '(self.app)\n', (2256, 2266), False, 'from flask.ext.webtest import TestApp\n'), ((2967, 2993), 'flask.url_for', 'url_for', (['"""public.register"""'], {}), "('public.register')\n", (2974, 2993), False, 'from flask import url_for\n'), ((3565, 3591), 'flask.url_for', 'url_for', (['"""public.register"""'], {}), "('public.register')\n", (3572, 3591), False, 'from flask import url_for\n'), ((1264, 1288), 'flask.url_for', 'url_for', (['"""public.logout"""'], {}), "('public.logout')\n", (1271, 1288), False, 'from flask import url_for\n')] |
import json
import logging
from tf_tpu_models.official.mask_rcnn.coco_utils import generate_segmentation_from_masks
from tf_tpu_models.official.mask_rcnn.evaluation import process_prediction_for_eval
import six
import numpy as np
from PIL import Image
import os
from tensorflow.python.platform import gfile
from tensorflow.python.summary import summary_iterator
SUBMISSION_IMAGE_SIZE = 1024
def generate_submission(eval_estimator,
input_fn,
checkpoint_path,
num_attributes):
"""Runs COCO evaluation once."""
predictor = eval_estimator.predict(input_fn=input_fn, yield_single_examples=False, checkpoint_path=checkpoint_path)
# Every predictor.next() gets a batch of prediction (a dictionary).
# get attribute thresholds
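    # The global step is the numeric suffix of the checkpoint path (e.g. ".../model.ckpt-1234").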
step = int(checkpoint_path.rsplit('-', 1)[-1])
attr_thresholds = get_attribute_thresholds(eval_estimator.model_dir, step, num_attributes)
# load image IDs
with open('/workspace/project/data/test_coco.json') as f:
test_annotations = json.load(f)
image_filenames = {int(image['id']): image['file_name'] for image in test_annotations['images']}
batch_idx = 0
rows = []
while True:
try:
batch_predictions = six.next(predictor)
logging.info('Running inference on batch %d...', (batch_idx + 1))
except StopIteration:
logging.info('Finished the eval set at %d batch.', (batch_idx + 1))
break
batch_predictions = process_prediction_for_eval(batch_predictions)
rows += _generate_submission_rows(batch_predictions, attr_thresholds, image_filenames)
batch_idx += 1
return rows
def _generate_submission_rows(predictions, attr_thresholds, image_filenames):
rows = []
for i, image_id in enumerate(predictions['source_id']):
if (i + 1) % 100 == 0:
logging.info(' loading image %d/%d...' % (i + 1, len(predictions['source_id'])))
image_height = int(predictions['image_info'][i][3])
image_width = int(predictions['image_info'][i][4])
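        # Scale the longer image side to SUBMISSION_IMAGE_SIZE while keeping the aspect ratio.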
if image_width > image_height:
new_width = SUBMISSION_IMAGE_SIZE
new_height = int(image_height / (image_width / new_width))
else:
new_height = SUBMISSION_IMAGE_SIZE
new_width = int(image_width / (image_height / new_height))
for box_index in range(int(predictions['num_detections'][i])):
mask = generate_segmentation_from_masks(predictions['detection_masks'][i][box_index:(box_index + 1)],
predictions['detection_boxes'][i][box_index:(box_index + 1)],
image_height,
image_width,
is_image_mask=False)[0]
pil_image = Image.fromarray(mask.astype(np.uint8))
pil_image = pil_image.resize((new_width, new_height), Image.NEAREST)
resized_binary_mask = np.asarray(pil_image)
encoded_mask = rle_encode(resized_binary_mask)
# get attributes
attr_predictions = predictions['detection_attributes'][i][box_index]
attr_ids = np.argwhere(attr_predictions >= attr_thresholds).flatten()
bbox_x, bbox_y, bbox_w, bbox_h = predictions['detection_boxes'][i][box_index]
row = {
'ImageId': image_filenames[int(image_id)].split('.')[0],
'EncodedPixels': ' '.join(str(x) for x in encoded_mask),
'ClassId': int(predictions['detection_classes'][i][box_index]) - 1,
'AttributesIds': ','.join(str(attr_id) for attr_id in attr_ids),
'image_width': new_width,
'image_height': new_height,
'mask_area': resized_binary_mask.sum(),
'bbox_x': bbox_x,
'bbox_y': bbox_y,
'bbox_width': bbox_w,
'bbox_height': bbox_h,
'score': predictions['detection_scores'][i][box_index],
}
rows.append(row)
return rows
def rle_encode(mask):
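    """Run-length encodes a binary mask in column-major order as alternating (1-based start, run length) values."""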
pixels = mask.T.flatten()
# We need to allow for cases where there is a '1' at either end of the sequence.
# We do this by padding with a zero at each end when needed.
use_padding = False
if pixels[0] or pixels[-1]:
use_padding = True
pixel_padded = np.zeros([len(pixels) + 2], dtype=pixels.dtype)
pixel_padded[1:-1] = pixels
pixels = pixel_padded
rle = np.where(pixels[1:] != pixels[:-1])[0] + 2
if use_padding:
rle = rle - 1
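    # Turn the alternating run-boundary positions into (start, run length) pairs.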
rle[1::2] = rle[1::2] - rle[:-1:2]
return rle
def get_attribute_thresholds(model_dir: str, step: int, num_attributes: int):
"""Returns the best evaluation result based on the compare function."""
eval_result = None
for event_file in gfile.Glob(os.path.join(model_dir, 'eval', '*.tfevents.*')):
for event in summary_iterator.summary_iterator(event_file):
if event.step == step:
assert event.HasField('summary')
eval_result = {}
for value in event.summary.value:
if value.HasField('simple_value'):
eval_result[value.tag] = value.simple_value
break
thresholds = np.zeros(num_attributes, dtype=np.float32)
for metric_name, value in eval_result.items():
if metric_name.startswith('attribute_threshold/attr_'):
attr_id = int(metric_name.rsplit('_', 1)[-1])
thresholds[attr_id] = value
return thresholds
| [
"numpy.where",
"tf_tpu_models.official.mask_rcnn.evaluation.process_prediction_for_eval",
"os.path.join",
"numpy.asarray",
"tensorflow.python.summary.summary_iterator.summary_iterator",
"numpy.zeros",
"numpy.argwhere",
"json.load",
"tf_tpu_models.official.mask_rcnn.coco_utils.generate_segmentation_from_masks",
"logging.info",
"six.next"
] | [((5445, 5487), 'numpy.zeros', 'np.zeros', (['num_attributes'], {'dtype': 'np.float32'}), '(num_attributes, dtype=np.float32)\n', (5453, 5487), True, 'import numpy as np\n'), ((1069, 1081), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1078, 1081), False, 'import json\n'), ((1533, 1579), 'tf_tpu_models.official.mask_rcnn.evaluation.process_prediction_for_eval', 'process_prediction_for_eval', (['batch_predictions'], {}), '(batch_predictions)\n', (1560, 1579), False, 'from tf_tpu_models.official.mask_rcnn.evaluation import process_prediction_for_eval\n'), ((4995, 5042), 'os.path.join', 'os.path.join', (['model_dir', '"""eval"""', '"""*.tfevents.*"""'], {}), "(model_dir, 'eval', '*.tfevents.*')\n", (5007, 5042), False, 'import os\n'), ((5066, 5111), 'tensorflow.python.summary.summary_iterator.summary_iterator', 'summary_iterator.summary_iterator', (['event_file'], {}), '(event_file)\n', (5099, 5111), False, 'from tensorflow.python.summary import summary_iterator\n'), ((1278, 1297), 'six.next', 'six.next', (['predictor'], {}), '(predictor)\n', (1286, 1297), False, 'import six\n'), ((1310, 1373), 'logging.info', 'logging.info', (['"""Running inference on batch %d..."""', '(batch_idx + 1)'], {}), "('Running inference on batch %d...', batch_idx + 1)\n", (1322, 1373), False, 'import logging\n'), ((3089, 3110), 'numpy.asarray', 'np.asarray', (['pil_image'], {}), '(pil_image)\n', (3099, 3110), True, 'import numpy as np\n'), ((4642, 4677), 'numpy.where', 'np.where', (['(pixels[1:] != pixels[:-1])'], {}), '(pixels[1:] != pixels[:-1])\n', (4650, 4677), True, 'import numpy as np\n'), ((1418, 1483), 'logging.info', 'logging.info', (['"""Finished the eval set at %d batch."""', '(batch_idx + 1)'], {}), "('Finished the eval set at %d batch.', batch_idx + 1)\n", (1430, 1483), False, 'import logging\n'), ((2494, 2704), 'tf_tpu_models.official.mask_rcnn.coco_utils.generate_segmentation_from_masks', 'generate_segmentation_from_masks', (["predictions['detection_masks'][i][box_index:box_index + 1]", "predictions['detection_boxes'][i][box_index:box_index + 1]", 'image_height', 'image_width'], {'is_image_mask': '(False)'}), "(predictions['detection_masks'][i][\n box_index:box_index + 1], predictions['detection_boxes'][i][box_index:\n box_index + 1], image_height, image_width, is_image_mask=False)\n", (2526, 2704), False, 'from tf_tpu_models.official.mask_rcnn.coco_utils import generate_segmentation_from_masks\n'), ((3304, 3352), 'numpy.argwhere', 'np.argwhere', (['(attr_predictions >= attr_thresholds)'], {}), '(attr_predictions >= attr_thresholds)\n', (3315, 3352), True, 'import numpy as np\n')] |
from setuptools import setup, find_packages
setup(
name='redis-tools',
version='0.1.0',
    description='Simple Python Redis tools, mostly used for key synchronization between two Redis instances',
author='<NAME>',
install_requires=[
'redis',
],
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'redis-sync = redistools.tools:sync',
'redis-monitor = redistools.tools:monitor',
]
}
)
| [
"setuptools.find_packages"
] | [((277, 292), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (290, 292), False, 'from setuptools import setup, find_packages\n')] |
import tempfile
import unittest
from hamcrest import *
from tests.functional import command
from tests.functional.http_stub import HttpStub
class TestRetries(unittest.TestCase):
def setUp(self):
HttpStub.start()
def tearDown(self):
HttpStub.stop()
def test_it_passes_after_default_number_of_retries(self):
HttpStub.set_response_codes(500, 500, 200)
cmd = command.do("./backdrop-send "
"--url http://localhost:8000/data_set "
"--sleep 0 "
"--token data_set-auth-token", stdin='{"key": "value"}')
assert_that(cmd.exit_status, is_(0))
def test_it_passes_after_specified_number_of_retries(self):
HttpStub.set_response_codes(500, 500, 500, 200)
cmd = command.do("./backdrop-send "
"--url http://localhost:8000/data_set "
"--token data_set-auth-token "
"--sleep 0 "
"--attempts 4", stdin='{"key": "value"}')
assert_that(cmd.exit_status, is_(0))
def test_it_fails_after_specified_number_of_retries(self):
HttpStub.set_response_codes(500, 500, 200)
cmd = command.do("./backdrop-send "
"--url http://localhost:8000/data_set "
"--token data_set-auth-token "
"--sleep 0 "
"--attempts 2", stdin='{"key": "value"}')
assert_that(cmd.exit_status, is_not(0))
| [
"tests.functional.http_stub.HttpStub.start",
"tests.functional.command.do",
"tests.functional.http_stub.HttpStub.set_response_codes",
"tests.functional.http_stub.HttpStub.stop"
] | [((209, 225), 'tests.functional.http_stub.HttpStub.start', 'HttpStub.start', ([], {}), '()\n', (223, 225), False, 'from tests.functional.http_stub import HttpStub\n'), ((259, 274), 'tests.functional.http_stub.HttpStub.stop', 'HttpStub.stop', ([], {}), '()\n', (272, 274), False, 'from tests.functional.http_stub import HttpStub\n'), ((346, 388), 'tests.functional.http_stub.HttpStub.set_response_codes', 'HttpStub.set_response_codes', (['(500)', '(500)', '(200)'], {}), '(500, 500, 200)\n', (373, 388), False, 'from tests.functional.http_stub import HttpStub\n'), ((403, 543), 'tests.functional.command.do', 'command.do', (['"""./backdrop-send --url http://localhost:8000/data_set --sleep 0 --token data_set-auth-token"""'], {'stdin': '"""{"key": "value"}"""'}), '(\n \'./backdrop-send --url http://localhost:8000/data_set --sleep 0 --token data_set-auth-token\'\n , stdin=\'{"key": "value"}\')\n', (413, 543), False, 'from tests.functional import command\n'), ((737, 784), 'tests.functional.http_stub.HttpStub.set_response_codes', 'HttpStub.set_response_codes', (['(500)', '(500)', '(500)', '(200)'], {}), '(500, 500, 500, 200)\n', (764, 784), False, 'from tests.functional.http_stub import HttpStub\n'), ((799, 952), 'tests.functional.command.do', 'command.do', (['"""./backdrop-send --url http://localhost:8000/data_set --token data_set-auth-token --sleep 0 --attempts 4"""'], {'stdin': '"""{"key": "value"}"""'}), '(\n \'./backdrop-send --url http://localhost:8000/data_set --token data_set-auth-token --sleep 0 --attempts 4\'\n , stdin=\'{"key": "value"}\')\n', (809, 952), False, 'from tests.functional import command\n'), ((1173, 1215), 'tests.functional.http_stub.HttpStub.set_response_codes', 'HttpStub.set_response_codes', (['(500)', '(500)', '(200)'], {}), '(500, 500, 200)\n', (1200, 1215), False, 'from tests.functional.http_stub import HttpStub\n'), ((1230, 1383), 'tests.functional.command.do', 'command.do', (['"""./backdrop-send --url http://localhost:8000/data_set --token data_set-auth-token --sleep 0 --attempts 2"""'], {'stdin': '"""{"key": "value"}"""'}), '(\n \'./backdrop-send --url http://localhost:8000/data_set --token data_set-auth-token --sleep 0 --attempts 2\'\n , stdin=\'{"key": "value"}\')\n', (1240, 1383), False, 'from tests.functional import command\n')] |
from numpy import *
import networkx as nx
class element():
""" Electrocinetic element. Define an element type between two nodes n1 and n2"""
def __init__(self,type,id, n1,n2, value):
self.type = type
self.id = id
self.n1 = n1
self.n2 = n2
self.value = value
def spice(self):
return('{0}{1} {2} {3} {4}'.format(self.type,self.id,self.n1, self.n2, self.value))
class circuit():
""" The circuit is a collection of component """
    def __init__(self, name, dict_component=None):
        # Avoid a mutable default argument: a shared dict would leak elements between circuits.
        self.dict_component = dict_component if dict_component is not None else {}
self.name = name
def add_element(self, name, element):
""" add an element bewteen node_1 and node 2"""
self.dict_component[name] = element
def C(self,id,n1,n2,value):
""" add a Capacitor between node n1 and node n2. """
self.dict_component['C{0}'.format(id)] = element('C',id,n1,n2,value)
def L(self,id,n1,n2,value):
""" add a inductor between node n1 and node n2. """
self.dict_component['L{0}'.format(id)] = element('L',id,n1,n2,value)
def R(self,id,n1,n2,value):
""" add a resistor between node n1 and node n2. """
self.dict_component['R{0}'.format(id)] = element('R',id,n1,n2,value)
def spice_print(self):
print(self.name)
for e in self.dict_component.values():
print(e.spice())
print('.end')
def save(self,filename):
f = open(filename,'w')
f.write(self.name + '\n')
for e in self.dict_component.values():
f.write(e.spice()+'\n' )
f.write('.end')
f.close()
def capacitor(self):
""" return the list of all the capacitor of the circuit """
return [ e for e in self.dict_component.values() if e.type=='C']
def capacitor_graph(self):
""" return the capacitor graph of the circuit """
G = nx.Graph()
for cap in self.capacitor():
G.add_edge(cap.n1, cap.n2, name = cap.type + str(cap.id), value = cap.value )
return G
def inductor_graph(self):
""" return the capacitor graph of the circuit """
G = nx.Graph()
for cap in self.inductor():
G.add_edge(cap.n1, cap.n2, name = cap.type + str(cap.id), value = cap.value )
return G
def inductor(self):
""" return the list of all the inductor of the circuit """
return [e for e in self.dict_component.values() if e.type=='L']
def load_circuit(filename):
with open(filename) as f:
lines = f.read().splitlines()
c = circuit(lines[0])
for i in range(1, len(lines) - 1):
u = lines[i].split(' ')
name = u[0]
        e = element(u[0][0], u[0][1:], u[1], u[2], u[3])  # keep the full id (e.g. 'R12' -> '12')
c.add_element(u[0],e)
return c
if __name__=='__main__':
e = element('R',1, 0,1,1e3)
print(e.spice())
c = circuit('Example of circuit')
c.C(1,0,1,350e-6)
c.L(1,0,1,2)
c.R(1,0,1,1)
c.C(2,0,2,350e-6)
c.C(3,1,2,50e-6)
c.spice_print()
c.save('test.txt')
C_graph = c.capacitor_graph()
| [
"networkx.Graph"
] | [((1940, 1950), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (1948, 1950), True, 'import networkx as nx\n'), ((2195, 2205), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (2203, 2205), True, 'import networkx as nx\n')] |
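# Illustrative sketch only: a save/load round-trip with the circuit/element classes above.
# The component values and the file name here are made up for the example.
def _example_round_trip():
    c = circuit('RC ladder example', {})
    c.R(1, 0, 1, 1e3)
    c.C(1, 1, 2, 100e-6)
    c.C(2, 2, 0, 47e-6)
    c.save('rc_ladder.txt')
    loaded = load_circuit('rc_ladder.txt')
    loaded.spice_print()
    # Capacitor-graph edges carry the component name and value as attributes.
    for n1, n2, attrs in loaded.capacitor_graph().edges(data=True):
        print(n1, n2, attrs['name'], attrs['value'])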
"""
sentry.web.forms
~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import forms
from django.contrib.auth.models import User
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from sentry.models import Project, ProjectMember
from sentry.interfaces import Http
class RadioFieldRenderer(forms.widgets.RadioFieldRenderer):
"""
This is identical to Django's builtin widget, except that
it renders as <ul.inputs-list>. Would be great if we didn't
have to create this stupid code, but Django widgets are not
flexible.
"""
def render(self):
"""Outputs a <ul> for this set of radio fields."""
return mark_safe(u'<ul class="inputs-list">\n%s\n</ul>' % u'\n'.join([u'<li>%s</li>'
% force_unicode(w) for w in self]))
class UserField(forms.CharField):
class widget(forms.widgets.TextInput):
def render(self, name, value, attrs=None):
if not attrs:
attrs = {}
if 'placeholder' not in attrs:
attrs['placeholder'] = 'username'
if isinstance(value, int):
value = unicode(User.objects.get(pk=value))
return super(UserField.widget, self).render(name, value, attrs)
def clean(self, value):
value = super(UserField, self).clean(value)
if not value:
return None
try:
return User.objects.get(username=value)
except User.DoesNotExist:
raise forms.ValidationError(u'invalid user name')
class RemoveProjectForm(forms.Form):
removal_type = forms.ChoiceField(choices=(
('1', 'Remove all attached events.'),
('2', 'Migrate events to another project.'),
('3', 'Hide this project.'),
), widget=forms.RadioSelect(renderer=RadioFieldRenderer))
project = forms.ChoiceField(choices=(), required=False)
def __init__(self, project_list, *args, **kwargs):
super(RemoveProjectForm, self).__init__(*args, **kwargs)
if not project_list:
del self.fields['project']
            self.fields['removal_type'].choices = filter(lambda x: x[0] != '2', self.fields['removal_type'].choices)
else:
self.fields['project'].choices = [(p.pk, p.name) for p in project_list]
self.fields['project'].widget.choices = self.fields['project'].choices
def clean(self):
data = self.cleaned_data
        if data.get('removal_type') == '2' and not data.get('project'):
raise forms.ValidationError('You must select a project to migrate data')
return data
def clean_project(self):
project_id = self.cleaned_data['project']
return Project.objects.get(id=project_id)
class NewProjectForm(forms.ModelForm):
class Meta:
fields = ('name',)
model = Project
class EditProjectForm(forms.ModelForm):
class Meta:
fields = ('name', 'status', 'public')
model = Project
class BaseProjectMemberForm(forms.ModelForm):
class Meta:
fields = ('type',)
model = ProjectMember
def __init__(self, project, *args, **kwargs):
self.project = project
super(BaseProjectMemberForm, self).__init__(*args, **kwargs)
EditProjectMemberForm = BaseProjectMemberForm
class NewProjectMemberForm(BaseProjectMemberForm):
user = UserField()
class Meta:
fields = ('user', 'type')
model = ProjectMember
def clean_user(self):
value = self.cleaned_data['user']
if not value:
return None
if self.project.member_set.filter(user=value).exists():
raise forms.ValidationError('User already a member of project')
return value
class ReplayForm(forms.Form):
url = forms.URLField(widget=forms.TextInput(attrs={'class': 'span8'}))
method = forms.ChoiceField(choices=((k, k) for k in Http.METHODS))
data = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'span8'}))
headers = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'span8'}))
def clean_headers(self):
value = self.cleaned_data.get('headers')
if not value:
return
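        # Expects one "Name: value" pair per line; a line without ": " will make dict() raise.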
return dict(line.split(': ') for line in value.split('\n'))
| [
"django.forms.ValidationError",
"django.forms.ChoiceField",
"sentry.models.Project.objects.get",
"django.forms.Textarea",
"django.forms.TextInput",
"django.forms.RadioSelect",
"django.utils.encoding.force_unicode",
"django.contrib.auth.models.User.objects.get"
] | [((1952, 1997), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': '()', 'required': '(False)'}), '(choices=(), required=False)\n', (1969, 1997), False, 'from django import forms\n'), ((3954, 4011), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': '((k, k) for k in Http.METHODS)'}), '(choices=((k, k) for k in Http.METHODS))\n', (3971, 4011), False, 'from django import forms\n'), ((2808, 2842), 'sentry.models.Project.objects.get', 'Project.objects.get', ([], {'id': 'project_id'}), '(id=project_id)\n', (2827, 2842), False, 'from sentry.models import Project, ProjectMember\n'), ((1525, 1557), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'value'}), '(username=value)\n', (1541, 1557), False, 'from django.contrib.auth.models import User\n'), ((1890, 1936), 'django.forms.RadioSelect', 'forms.RadioSelect', ([], {'renderer': 'RadioFieldRenderer'}), '(renderer=RadioFieldRenderer)\n', (1907, 1936), False, 'from django import forms\n'), ((2626, 2692), 'django.forms.ValidationError', 'forms.ValidationError', (['"""You must select a project to migrate data"""'], {}), "('You must select a project to migrate data')\n", (2647, 2692), False, 'from django import forms\n'), ((3754, 3811), 'django.forms.ValidationError', 'forms.ValidationError', (['"""User already a member of project"""'], {}), "('User already a member of project')\n", (3775, 3811), False, 'from django import forms\n'), ((3898, 3939), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'span8'}"}), "(attrs={'class': 'span8'})\n", (3913, 3939), False, 'from django import forms\n'), ((4062, 4102), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'class': 'span8'}"}), "(attrs={'class': 'span8'})\n", (4076, 4102), False, 'from django import forms\n'), ((4157, 4197), 'django.forms.Textarea', 'forms.Textarea', ([], {'attrs': "{'class': 'span8'}"}), "(attrs={'class': 'span8'})\n", (4171, 4197), False, 'from django import forms\n'), ((1610, 1653), 'django.forms.ValidationError', 'forms.ValidationError', (['u"""invalid user name"""'], {}), "(u'invalid user name')\n", (1631, 1653), False, 'from django import forms\n'), ((1262, 1288), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'value'}), '(pk=value)\n', (1278, 1288), False, 'from django.contrib.auth.models import User\n'), ((881, 897), 'django.utils.encoding.force_unicode', 'force_unicode', (['w'], {}), '(w)\n', (894, 897), False, 'from django.utils.encoding import force_unicode\n')] |
from expects import be_empty, contain_exactly, expect, have_keys, have_len
from doublex import ANY_ARG, Stub
from basket_consumer import BasketConsumer
from kinds import ITEM_ADDED, CHECKOUT_STARTED, PAY_ORDER
from tests.builders import (
create_basket_created_event, create_add_item_command,
create_item_added_event, create_checkout_command)
with description('BasketConsumer'):
with it('disregards non related events'):
event = {
'sequence': 1,
'ts': 'a_timestamp',
'kind': 'irrelevant_event',
}
consumer = BasketConsumer(
events_repository=Stub(),
items_repository={})
next_events = consumer.process(event)
expect(next_events).to(be_empty)
with context('When processing an add_item command'):
with it('generates an item_added event'):
basket_id = 'a_basket_id'
item_id = 'an_item_id'
add_item_command = create_add_item_command(basket_id, item_id)
items_repository = {
item_id: {
'price': 9.99,
'name': 'An Item',
}
}
with Stub() as events_repository:
events_repository.get_by_basket_id(ANY_ARG).returns([
create_basket_created_event(basket_id),
])
consumer = BasketConsumer(
events_repository=events_repository,
items_repository=items_repository)
next_events = consumer.process(add_item_command)
expect(next_events).to(have_len(1))
expect(next_events[0]).to(have_keys({
'kind': ITEM_ADDED,
'payload': {
'basket_id': basket_id,
'item_id': item_id,
'item_name': 'An Item',
'item_price': 9.99,
}
}))
with context('When processing a checkout command'):
with it('generates a pay_order command and a checkout_started event'):
basket_id = 'a_basket_id'
item_id = 'an_item_id'
item_name = 'Irrelevant Item Name'
item_price = 9.99
checkout_command = create_checkout_command(basket_id)
with Stub() as events_repository:
events_repository.get_by_basket_id(ANY_ARG).returns([
create_basket_created_event(basket_id),
create_item_added_event(basket_id, item_id, item_price),
create_item_added_event(basket_id, item_id, item_price),
])
consumer = BasketConsumer(
events_repository=events_repository,
items_repository=Stub())
next_events = consumer.process(checkout_command)
expect(next_events).to(have_len(2))
expect(next_events).to(contain_exactly(
have_keys({
'kind': CHECKOUT_STARTED,
'payload': {
'basket_id': basket_id,
'total_price': 2*item_price,
'num_unique_items': 1,
'num_items': 2,
}
}),
have_keys({
'kind': PAY_ORDER,
'payload': {
'basket_id': basket_id,
'total_price': 2*item_price,
}
}),
))
| [
"doublex.Stub",
"tests.builders.create_checkout_command",
"expects.have_len",
"tests.builders.create_item_added_event",
"expects.expect",
"expects.have_keys",
"tests.builders.create_basket_created_event",
"tests.builders.create_add_item_command",
"basket_consumer.BasketConsumer"
] | [((976, 1019), 'tests.builders.create_add_item_command', 'create_add_item_command', (['basket_id', 'item_id'], {}), '(basket_id, item_id)\n', (999, 1019), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n'), ((1406, 1497), 'basket_consumer.BasketConsumer', 'BasketConsumer', ([], {'events_repository': 'events_repository', 'items_repository': 'items_repository'}), '(events_repository=events_repository, items_repository=\n items_repository)\n', (1420, 1497), False, 'from basket_consumer import BasketConsumer\n'), ((2272, 2306), 'tests.builders.create_checkout_command', 'create_checkout_command', (['basket_id'], {}), '(basket_id)\n', (2295, 2306), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n'), ((633, 639), 'doublex.Stub', 'Stub', ([], {}), '()\n', (637, 639), False, 'from doublex import ANY_ARG, Stub\n'), ((730, 749), 'expects.expect', 'expect', (['next_events'], {}), '(next_events)\n', (736, 749), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((1204, 1210), 'doublex.Stub', 'Stub', ([], {}), '()\n', (1208, 1210), False, 'from doublex import ANY_ARG, Stub\n'), ((1624, 1635), 'expects.have_len', 'have_len', (['(1)'], {}), '(1)\n', (1632, 1635), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((1675, 1811), 'expects.have_keys', 'have_keys', (["{'kind': ITEM_ADDED, 'payload': {'basket_id': basket_id, 'item_id': item_id,\n 'item_name': 'An Item', 'item_price': 9.99}}"], {}), "({'kind': ITEM_ADDED, 'payload': {'basket_id': basket_id,\n 'item_id': item_id, 'item_name': 'An Item', 'item_price': 9.99}})\n", (1684, 1811), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((2325, 2331), 'doublex.Stub', 'Stub', ([], {}), '()\n', (2329, 2331), False, 'from doublex import ANY_ARG, Stub\n'), ((2889, 2900), 'expects.have_len', 'have_len', (['(2)'], {}), '(2)\n', (2897, 2900), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((1601, 1620), 'expects.expect', 'expect', (['next_events'], {}), '(next_events)\n', (1607, 1620), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((1649, 1671), 'expects.expect', 'expect', (['next_events[0]'], {}), '(next_events[0])\n', (1655, 1671), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((2783, 2789), 'doublex.Stub', 'Stub', ([], {}), '()\n', (2787, 2789), False, 'from doublex import ANY_ARG, Stub\n'), ((2866, 2885), 'expects.expect', 'expect', (['next_events'], {}), '(next_events)\n', (2872, 2885), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((2914, 2933), 'expects.expect', 'expect', (['next_events'], {}), '(next_events)\n', (2920, 2933), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((2970, 3118), 'expects.have_keys', 'have_keys', (["{'kind': CHECKOUT_STARTED, 'payload': {'basket_id': basket_id,\n 'total_price': 2 * item_price, 'num_unique_items': 1, 'num_items': 2}}"], {}), "({'kind': CHECKOUT_STARTED, 'payload': {'basket_id': basket_id,\n 'total_price': 2 * item_price, 'num_unique_items': 1, 'num_items': 2}})\n", (2979, 3118), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((3307, 3409), 'expects.have_keys', 'have_keys', 
(["{'kind': PAY_ORDER, 'payload': {'basket_id': basket_id, 'total_price': 2 *\n item_price}}"], {}), "({'kind': PAY_ORDER, 'payload': {'basket_id': basket_id,\n 'total_price': 2 * item_price}})\n", (3316, 3409), False, 'from expects import be_empty, contain_exactly, expect, have_keys, have_len\n'), ((1323, 1361), 'tests.builders.create_basket_created_event', 'create_basket_created_event', (['basket_id'], {}), '(basket_id)\n', (1350, 1361), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n'), ((2444, 2482), 'tests.builders.create_basket_created_event', 'create_basket_created_event', (['basket_id'], {}), '(basket_id)\n', (2471, 2482), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n'), ((2504, 2559), 'tests.builders.create_item_added_event', 'create_item_added_event', (['basket_id', 'item_id', 'item_price'], {}), '(basket_id, item_id, item_price)\n', (2527, 2559), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n'), ((2581, 2636), 'tests.builders.create_item_added_event', 'create_item_added_event', (['basket_id', 'item_id', 'item_price'], {}), '(basket_id, item_id, item_price)\n', (2604, 2636), False, 'from tests.builders import create_basket_created_event, create_add_item_command, create_item_added_event, create_checkout_command\n')] |
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for working with actuator models."""
import ctypes
from makani.control import actuator_util
from makani.lib.python import ctype_util
from makani.lib.python.autogen import autogen_util
import numpy as np
_thrust_moment_keys = ['thrust', 'moment']
def _PythonToCtype(data, c_type):
"""Populate a ctypes data type with a Python structure."""
if c_type is actuator_util.Vec3:
# Handle Vec3.
assert len(data) == 3
c_data = c_type()
c_data.x = data[0]
c_data.y = data[1]
c_data.z = data[2]
return c_data
elif hasattr(c_type, '_length_'):
# Handle arrays.
length = getattr(c_type, '_length_')
assert len(data) == length
c_data = c_type()
for i in range(length):
c_data[i] = _PythonToCtype(data[i], getattr(c_type, '_type_'))
elif hasattr(c_type, '_fields_'):
# Handle structures.
fields = autogen_util.GetCFields(c_type)
assert set(data.keys()) == {field for field, _ in fields}
c_data = c_type()
for field, field_type in fields:
setattr(c_data, field, _PythonToCtype(data[field], field_type))
else:
c_data = c_type(data)
return c_data
def _ThrustMomentToArray(thrust_moment):
"""Convert a ThrustMoment dictionary into an array of thrust and moments.
Args:
thrust_moment: A ThrustMoment dictionary to be converted.
Returns:
A 4-by-1 numpy.matrix version of thrust_moment in array form.
"""
  assert set(thrust_moment.keys()) == set(_thrust_moment_keys)
return np.matrix([thrust_moment['thrust'],
thrust_moment['moment'][0],
thrust_moment['moment'][1],
thrust_moment['moment'][2]])
def _ArrayToThrustMoment(array):
"""Convert a 4-by-1 array into a ThrustMoment dictionary.
Args:
array: A 4-by-1 numpy.matrix to be converted.
Returns:
A ThrustMoment dictionary.
"""
assert np.size(array) == 4
return {'thrust': array[0],
'moment': [array[1], array[2], array[3]]}
def _AddThrustMoment(thrust_moment_0, thrust_moment_1):
"""Add two ThrustMoment dictionaries."""
  assert set(thrust_moment_0.keys()) == set(_thrust_moment_keys)
  assert set(thrust_moment_1.keys()) == set(_thrust_moment_keys)
thrust_moment = {}
for k in _thrust_moment_keys:
thrust_moment[k] = (np.asarray(thrust_moment_0[k])
+ np.asarray(thrust_moment_1[k]))
return thrust_moment
def MixRotors(thrust_moment, weights,
v_app, pqr, stacking_state,
hover_flight_mode, air_density,
rotor_params,
rotor_control_params):
"""Wrapper around MixRotors function.
See MixRotors in control/actuator_util.c.
Args:
thrust_moment: Dict with keys 'thrust', whose value is a float, and
'moment', whose value is an array of three floats.
weights: Dict with keys 'thrust', whose value is a float, and
'moment', whose value is an array of three floats.
v_app: Float storing the airspeed [m/s].
pqr: Array of 3 floats representing the body rates [rad/s].
stacking_state: Integer (see the enum StackingState).
hover_flight_mode: Bool indicating if we are in a hover flight mode.
air_density: Float storing the air density [kg/m^3].
rotor_params: Array of kNumMotors dicts storing the contents of RotorParams
structures.
rotor_control_params: Dict storing the contents of the RotorControlParams
structure.
Returns:
An 8-by-1 np.matrix containing the rotor speeds [rad/s].
"""
assert len(rotor_params) == actuator_util.kNumMotors
c_rotor_params = [
_PythonToCtype(r, actuator_util.RotorParams) for r in rotor_params
]
c_rotor_params_pointers = (
ctypes.POINTER(actuator_util.RotorParams) * len(rotor_params))()
for i, c_r in enumerate(c_rotor_params):
c_rotor_params_pointers[i] = ctypes.pointer(c_r)
c_rotors = (ctypes.c_double * actuator_util.kNumMotors)()
c_available_thrust_moment = actuator_util.ThrustMoment()
c_v_app_locals = (ctypes.c_double * actuator_util.kNumMotors)()
actuator_util.MixRotors(
ctypes.pointer(_PythonToCtype(thrust_moment, actuator_util.ThrustMoment)),
ctypes.pointer(_PythonToCtype(weights, actuator_util.ThrustMoment)),
v_app,
ctypes.pointer(_PythonToCtype(pqr, actuator_util.Vec3)),
stacking_state,
hover_flight_mode,
air_density,
ctype_util.SizelessArray(c_rotor_params_pointers),
ctypes.pointer(_PythonToCtype(rotor_control_params,
actuator_util.RotorControlParams)),
c_rotors,
ctypes.pointer(c_available_thrust_moment),
c_v_app_locals)
return np.matrix([[c_rotors[i]] for i in range(actuator_util.kNumMotors)])
def LinearizeMixRotors(thrust_moment, params, h=1e-6):
"""Calculate a Jacobian matrix for the MixRotors function.
Produces a linearized model:
MixRotors(thrust_moment + delta_thrust_moment)
~ MixRotors(thrust_moment) + A * delta_thrust_moment
Args:
thrust_moment: A ThrustMoment dictionary around which to linearize.
params: A parameters structure from mconfig.
h: Step-size used in finite difference.
Returns:
A numpy.matrix of Jacobian values of units rad/s/N and rad/s/(N-m).
"""
num_inputs = 4
num_outputs = len(params['system']['rotors'])
dfdu = np.matrix(np.zeros((num_outputs, num_inputs)))
for i in range(num_inputs):
e = np.zeros(num_inputs)
e[i] = h
delta_thrust_moment = _ArrayToThrustMoment(e)
dfdu[:, i] = (
MixRotors(
_AddThrustMoment(thrust_moment, delta_thrust_moment),
params['control']['hover']['output']['weights'],
0.0,
[0.0, 0.0, 0.0],
actuator_util.kStackingStateNormal,
True,
params['system']['phys']['rho'],
params['system']['rotors'],
params['control']['rotor_control'])
- MixRotors(
thrust_moment,
params['control']['hover']['output']['weights'],
0.0,
[0.0, 0.0, 0.0],
actuator_util.kStackingStateNormal,
True,
params['system']['phys']['rho'],
params['system']['rotors'],
params['control']['rotor_control'])) / (2.0 * h)
return dfdu
| [
"makani.control.actuator_util.ThrustMoment",
"ctypes.POINTER",
"numpy.size",
"numpy.asarray",
"makani.lib.python.autogen.autogen_util.GetCFields",
"numpy.zeros",
"ctypes.pointer",
"numpy.matrix",
"makani.lib.python.ctype_util.SizelessArray"
] | [((2075, 2199), 'numpy.matrix', 'np.matrix', (["[thrust_moment['thrust'], thrust_moment['moment'][0], thrust_moment[\n 'moment'][1], thrust_moment['moment'][2]]"], {}), "([thrust_moment['thrust'], thrust_moment['moment'][0],\n thrust_moment['moment'][1], thrust_moment['moment'][2]])\n", (2084, 2199), True, 'import numpy as np\n'), ((4531, 4559), 'makani.control.actuator_util.ThrustMoment', 'actuator_util.ThrustMoment', ([], {}), '()\n', (4557, 4559), False, 'from makani.control import actuator_util\n'), ((2468, 2482), 'numpy.size', 'np.size', (['array'], {}), '(array)\n', (2475, 2482), True, 'import numpy as np\n'), ((4420, 4439), 'ctypes.pointer', 'ctypes.pointer', (['c_r'], {}), '(c_r)\n', (4434, 4439), False, 'import ctypes\n'), ((4959, 5008), 'makani.lib.python.ctype_util.SizelessArray', 'ctype_util.SizelessArray', (['c_rotor_params_pointers'], {}), '(c_rotor_params_pointers)\n', (4983, 5008), False, 'from makani.lib.python import ctype_util\n'), ((5162, 5203), 'ctypes.pointer', 'ctypes.pointer', (['c_available_thrust_moment'], {}), '(c_available_thrust_moment)\n', (5176, 5203), False, 'import ctypes\n'), ((5914, 5949), 'numpy.zeros', 'np.zeros', (['(num_outputs, num_inputs)'], {}), '((num_outputs, num_inputs))\n', (5922, 5949), True, 'import numpy as np\n'), ((5990, 6010), 'numpy.zeros', 'np.zeros', (['num_inputs'], {}), '(num_inputs)\n', (5998, 6010), True, 'import numpy as np\n'), ((2861, 2891), 'numpy.asarray', 'np.asarray', (['thrust_moment_0[k]'], {}), '(thrust_moment_0[k])\n', (2871, 2891), True, 'import numpy as np\n'), ((2918, 2948), 'numpy.asarray', 'np.asarray', (['thrust_moment_1[k]'], {}), '(thrust_moment_1[k])\n', (2928, 2948), True, 'import numpy as np\n'), ((4279, 4320), 'ctypes.POINTER', 'ctypes.POINTER', (['actuator_util.RotorParams'], {}), '(actuator_util.RotorParams)\n', (4293, 4320), False, 'import ctypes\n'), ((1462, 1493), 'makani.lib.python.autogen.autogen_util.GetCFields', 'autogen_util.GetCFields', (['c_type'], {}), '(c_type)\n', (1485, 1493), False, 'from makani.lib.python.autogen import autogen_util\n')] |
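# Illustrative sketch only: the same central-difference pattern LinearizeMixRotors uses,
# written for a generic f mapping a length-4 input to a vector, so it can be sanity-checked
# without the makani parameter structures.
import numpy as np

def numerical_jacobian(f, u0, h=1e-6):
  """Approximates df/du at u0, one input component at a time."""
  u0 = np.asarray(u0, dtype=float)
  f0 = np.asarray(f(u0), dtype=float).ravel()
  jac = np.zeros((f0.size, u0.size))
  for i in range(u0.size):
    e = np.zeros(u0.size)
    e[i] = h
    diff = (np.asarray(f(u0 + e), dtype=float).ravel()
            - np.asarray(f(u0 - e), dtype=float).ravel())
    jac[:, i] = diff / (2.0 * h)
  return jac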
import requests
status_endpt = "https://api.gdc.cancer.gov/status"
response = requests.get(status_endpt)
# OUTPUT METHOD 1: Write to a file.
file = open("api_status.json", "w")
file.write(response.text)
file.close()
# OUTPUT METHOD 2: View on screen.
print(response.content) | [
"requests.get"
] | [((78, 104), 'requests.get', 'requests.get', (['status_endpt'], {}), '(status_endpt)\n', (90, 104), False, 'import requests\n')] |
from dataclasses import dataclass
from typing import List, Optional, Tuple
from melon.types.blockchain_format.sized_bytes import bytes32
from melon.util.ints import uint64
from melon.util.streamable import streamable, Streamable
from melon.wallet.lineage_proof import LineageProof
from melon.types.blockchain_format.program import Program
from melon.types.blockchain_format.coin import Coin
@dataclass(frozen=True)
@streamable
class DIDInfo(Streamable):
origin_coin: Optional[Coin] # puzzlehash of this coin is our DID
backup_ids: List[bytes]
num_of_backup_ids_needed: uint64
parent_info: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof}
current_inner: Optional[Program] # represents a Program as bytes
temp_coin: Optional[Coin] # partially recovered wallet uses these to hold info
temp_puzhash: Optional[bytes32]
temp_pubkey: Optional[bytes]
| [
"dataclasses.dataclass"
] | [((395, 417), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (404, 417), False, 'from dataclasses import dataclass\n')] |
#ref: https://atcoder.jp/contests/agc033/submissions/5260580
import sys
import numpy as np
def main():
input = sys.stdin.readline
H, W = map(int, input().split())
INF = H * W
dp = [[INF if c == '.' else 0 for c in input().strip()] for _ in range(H)]
dp = np.array(dp)
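    # The four relaxation sweeps below turn dp into the Manhattan (L1) distance from each
    # cell to its nearest '#': column passes first, then row passes; the separable kernel
    # |di| + |dj| makes the two 1-D passes exact.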
for i in range(1, H):
dp[i, :] = np.minimum(dp[i, :], dp[i-1, :] + 1)
for i in range(H-2, -1, -1):
dp[i, :] = np.minimum(dp[i, :], dp[i+1, :] + 1)
for i in range(1, W):
dp[:, i] = np.minimum(dp[:, i], dp[:, i-1] + 1)
for i in range(W-2, -1, -1):
dp[:, i] = np.minimum(dp[:, i], dp[:, i+1] + 1)
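    # The answer is the largest such distance anywhere in the grid.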
return np.max(dp)
if __name__ == '__main__':
print(main())
| [
"numpy.array",
"numpy.minimum",
"numpy.max"
] | [((277, 289), 'numpy.array', 'np.array', (['dp'], {}), '(dp)\n', (285, 289), True, 'import numpy as np\n'), ((648, 658), 'numpy.max', 'np.max', (['dp'], {}), '(dp)\n', (654, 658), True, 'import numpy as np\n'), ((336, 374), 'numpy.minimum', 'np.minimum', (['dp[i, :]', '(dp[i - 1, :] + 1)'], {}), '(dp[i, :], dp[i - 1, :] + 1)\n', (346, 374), True, 'import numpy as np\n'), ((426, 464), 'numpy.minimum', 'np.minimum', (['dp[i, :]', '(dp[i + 1, :] + 1)'], {}), '(dp[i, :], dp[i + 1, :] + 1)\n', (436, 464), True, 'import numpy as np\n'), ((509, 547), 'numpy.minimum', 'np.minimum', (['dp[:, i]', '(dp[:, i - 1] + 1)'], {}), '(dp[:, i], dp[:, i - 1] + 1)\n', (519, 547), True, 'import numpy as np\n'), ((599, 637), 'numpy.minimum', 'np.minimum', (['dp[:, i]', '(dp[:, i + 1] + 1)'], {}), '(dp[:, i], dp[:, i + 1] + 1)\n', (609, 637), True, 'import numpy as np\n')] |
import os
from flask import (
Flask, request, session, g, redirect, url_for,
abort, render_template, flash, Markup, make_response, jsonify,
Response)
from jinja2 import FileSystemLoader
def prepare_env(env):
env.loader = FileSystemLoader(os.path.join(os.path.dirname(__file__), 'html'))
app = Flask(__name__)
prepare_env(app.jinja_env)
DATABASE = None
DEBUG = True
SECRETKEY = '<KEY>'
SERVER = None
@app.route("/")
def index():
return render_template("index.templ")
@app.route("/run", methods=["POST"])
def run_search():
mass_error_tolerance = float(request.form["mass_error_tolerance"])
max_charge = -abs(int(request.form['max_charge']))
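    # The parsed tolerance and charge are not used yet; the handler currently just redirects home.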
return redirect("/")
if __name__ == '__main__':
app.run(port=5000, use_reloader=True)
| [
"flask.render_template",
"os.path.dirname",
"flask.redirect",
"flask.Flask"
] | [((316, 331), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (321, 331), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, Markup, make_response, jsonify, Response\n'), ((466, 496), 'flask.render_template', 'render_template', (['"""index.templ"""'], {}), "('index.templ')\n", (481, 496), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, Markup, make_response, jsonify, Response\n'), ((691, 704), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (699, 704), False, 'from flask import Flask, request, session, g, redirect, url_for, abort, render_template, flash, Markup, make_response, jsonify, Response\n'), ((273, 298), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (288, 298), False, 'import os\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from collections import deque
import numpy as np
import numpy.testing as npt
from gym import spaces
from reinforceflow.envs import ObservationStackWrap
from reinforceflow.envs import Vectorize
def _compare_recursively(sample1, sample2):
for elem1, elem2 in zip(sample1, sample2):
if isinstance(elem1, (list, tuple)):
_compare_recursively(elem1, elem2)
else:
npt.assert_equal(elem1, elem2)
class TestConverters(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestConverters, self).__init__(*args, **kwargs)
self.space_d = spaces.Discrete(4)
self.gym_out_d = 2
self.rf_out_d = [0, 0, 1, 0]
self.space_c = spaces.Box(-1, 1, [2, 4])
self.gym_out_c = np.random.uniform(low=-1, high=1, size=(2, 4))
self.rf_out_c = self.gym_out_c
self.space_b = spaces.MultiBinary(4)
self.gym_out_b = [0, 1, 0, 1]
self.rf_out_b = [[1, 0], [0, 1], [1, 0], [0, 1]]
self.space_t = spaces.Tuple((self.space_d,
self.space_c,
self.space_b,
spaces.Tuple((self.space_d, self.space_c))
))
self.gym_out_t = tuple([self.gym_out_d, self.gym_out_c, self.gym_out_b,
tuple([self.gym_out_d, self.gym_out_c])])
self.rf_out_t = tuple([self.rf_out_d, self.rf_out_c, self.rf_out_b,
tuple([self.rf_out_d, self.rf_out_c])])
def test_gym2vec_converter_discrete(self):
converter = Vectorize.make_gym2vec_converter(self.space_d)
npt.assert_equal(converter(self.gym_out_d), self.rf_out_d)
def test_gym2vec_converter_box(self):
converter = Vectorize.make_gym2vec_converter(self.space_c)
npt.assert_equal(converter(self.gym_out_c), self.rf_out_c)
def test_gym2vec_converter_binary(self):
converter = Vectorize.make_gym2vec_converter(self.space_b)
npt.assert_equal(converter(self.gym_out_b), self.rf_out_b)
def test_gym2vec_converter_tuple(self):
converter = Vectorize.make_gym2vec_converter(self.space_t)
_compare_recursively(converter(self.gym_out_t), self.rf_out_t)
def test_vec2gym_converter_discrete(self):
converter = Vectorize.make_vec2gym_converter(self.space_d)
assert converter(self.rf_out_d) == self.gym_out_d
def test_vec2gym_converter_box(self):
converter = Vectorize.make_vec2gym_converter(self.space_c)
npt.assert_equal(converter(self.rf_out_c), self.gym_out_c)
def test_vec2gym_converter_binary(self):
converter = Vectorize.make_vec2gym_converter(self.space_b)
npt.assert_equal(converter(self.rf_out_b), self.gym_out_b)
def test_vec2gym_converter_tuple(self):
converter = Vectorize.make_vec2gym_converter(self.space_t)
_compare_recursively(converter(self.rf_out_t), self.gym_out_t)
def test_stack_initial_observation_image_gray():
ones = np.ones((84, 84, 1))
stack_len = 4
desired = np.ones((84, 84, stack_len))
result = ObservationStackWrap.stack_observations(ones, stack_len, None)
npt.assert_equal(result, desired)
def test_stack_observation_image_gray():
stack_obs_test(shape=(50, 30, 1), stack_len=5, num_stacks=10)
def test_stack_observation_with_len_equals_1():
stack_obs_test(shape=(30, 30, 1), stack_len=1, num_stacks=8)
def test_stack_observation_image_rgb():
stack_obs_test(shape=(84, 84, 3), stack_len=4, num_stacks=12)
def test_stack_observation_exotic_shape():
stack_obs_test(shape=(4, 4, 4, 2), stack_len=5, num_stacks=22)
def stack_obs_test(shape, stack_len, num_stacks):
stack_axis = len(shape)-1
desired = deque(maxlen=stack_len)
for _ in range(stack_len):
desired.append(np.ones(shape))
current_stack = np.concatenate(desired, stack_axis)
stack_len = stack_len
for i in range(num_stacks):
new_obs = np.ones(shape) * i
desired.append(new_obs)
current_stack = ObservationStackWrap.stack_observations(new_obs, stack_len, current_stack)
npt.assert_equal(current_stack, np.concatenate(desired, stack_axis))
| [
"reinforceflow.envs.ObservationStackWrap.stack_observations",
"collections.deque",
"numpy.ones",
"numpy.testing.assert_equal",
"reinforceflow.envs.Vectorize.make_gym2vec_converter",
"gym.spaces.Tuple",
"gym.spaces.Discrete",
"gym.spaces.Box",
"gym.spaces.MultiBinary",
"reinforceflow.envs.Vectorize.make_vec2gym_converter",
"numpy.concatenate",
"numpy.random.uniform"
] | [((3189, 3209), 'numpy.ones', 'np.ones', (['(84, 84, 1)'], {}), '((84, 84, 1))\n', (3196, 3209), True, 'import numpy as np\n'), ((3242, 3270), 'numpy.ones', 'np.ones', (['(84, 84, stack_len)'], {}), '((84, 84, stack_len))\n', (3249, 3270), True, 'import numpy as np\n'), ((3284, 3346), 'reinforceflow.envs.ObservationStackWrap.stack_observations', 'ObservationStackWrap.stack_observations', (['ones', 'stack_len', 'None'], {}), '(ones, stack_len, None)\n', (3323, 3346), False, 'from reinforceflow.envs import ObservationStackWrap\n'), ((3351, 3384), 'numpy.testing.assert_equal', 'npt.assert_equal', (['result', 'desired'], {}), '(result, desired)\n', (3367, 3384), True, 'import numpy.testing as npt\n'), ((3925, 3948), 'collections.deque', 'deque', ([], {'maxlen': 'stack_len'}), '(maxlen=stack_len)\n', (3930, 3948), False, 'from collections import deque\n'), ((4039, 4074), 'numpy.concatenate', 'np.concatenate', (['desired', 'stack_axis'], {}), '(desired, stack_axis)\n', (4053, 4074), True, 'import numpy as np\n'), ((732, 750), 'gym.spaces.Discrete', 'spaces.Discrete', (['(4)'], {}), '(4)\n', (747, 750), False, 'from gym import spaces\n'), ((839, 864), 'gym.spaces.Box', 'spaces.Box', (['(-1)', '(1)', '[2, 4]'], {}), '(-1, 1, [2, 4])\n', (849, 864), False, 'from gym import spaces\n'), ((890, 936), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-1)', 'high': '(1)', 'size': '(2, 4)'}), '(low=-1, high=1, size=(2, 4))\n', (907, 936), True, 'import numpy as np\n'), ((1000, 1021), 'gym.spaces.MultiBinary', 'spaces.MultiBinary', (['(4)'], {}), '(4)\n', (1018, 1021), False, 'from gym import spaces\n'), ((1760, 1806), 'reinforceflow.envs.Vectorize.make_gym2vec_converter', 'Vectorize.make_gym2vec_converter', (['self.space_d'], {}), '(self.space_d)\n', (1792, 1806), False, 'from reinforceflow.envs import Vectorize\n'), ((1937, 1983), 'reinforceflow.envs.Vectorize.make_gym2vec_converter', 'Vectorize.make_gym2vec_converter', (['self.space_c'], {}), '(self.space_c)\n', (1969, 1983), False, 'from reinforceflow.envs import Vectorize\n'), ((2117, 2163), 'reinforceflow.envs.Vectorize.make_gym2vec_converter', 'Vectorize.make_gym2vec_converter', (['self.space_b'], {}), '(self.space_b)\n', (2149, 2163), False, 'from reinforceflow.envs import Vectorize\n'), ((2296, 2342), 'reinforceflow.envs.Vectorize.make_gym2vec_converter', 'Vectorize.make_gym2vec_converter', (['self.space_t'], {}), '(self.space_t)\n', (2328, 2342), False, 'from reinforceflow.envs import Vectorize\n'), ((2482, 2528), 'reinforceflow.envs.Vectorize.make_vec2gym_converter', 'Vectorize.make_vec2gym_converter', (['self.space_d'], {}), '(self.space_d)\n', (2514, 2528), False, 'from reinforceflow.envs import Vectorize\n'), ((2650, 2696), 'reinforceflow.envs.Vectorize.make_vec2gym_converter', 'Vectorize.make_vec2gym_converter', (['self.space_c'], {}), '(self.space_c)\n', (2682, 2696), False, 'from reinforceflow.envs import Vectorize\n'), ((2830, 2876), 'reinforceflow.envs.Vectorize.make_vec2gym_converter', 'Vectorize.make_vec2gym_converter', (['self.space_b'], {}), '(self.space_b)\n', (2862, 2876), False, 'from reinforceflow.envs import Vectorize\n'), ((3009, 3055), 'reinforceflow.envs.Vectorize.make_vec2gym_converter', 'Vectorize.make_vec2gym_converter', (['self.space_t'], {}), '(self.space_t)\n', (3041, 3055), False, 'from reinforceflow.envs import Vectorize\n'), ((4227, 4301), 'reinforceflow.envs.ObservationStackWrap.stack_observations', 'ObservationStackWrap.stack_observations', (['new_obs', 'stack_len', 'current_stack'], {}), '(new_obs, 
stack_len, current_stack)\n', (4266, 4301), False, 'from reinforceflow.envs import ObservationStackWrap\n'), ((532, 562), 'numpy.testing.assert_equal', 'npt.assert_equal', (['elem1', 'elem2'], {}), '(elem1, elem2)\n', (548, 562), True, 'import numpy.testing as npt\n'), ((4003, 4017), 'numpy.ones', 'np.ones', (['shape'], {}), '(shape)\n', (4010, 4017), True, 'import numpy as np\n'), ((4152, 4166), 'numpy.ones', 'np.ones', (['shape'], {}), '(shape)\n', (4159, 4166), True, 'import numpy as np\n'), ((4342, 4377), 'numpy.concatenate', 'np.concatenate', (['desired', 'stack_axis'], {}), '(desired, stack_axis)\n', (4356, 4377), True, 'import numpy as np\n'), ((1308, 1350), 'gym.spaces.Tuple', 'spaces.Tuple', (['(self.space_d, self.space_c)'], {}), '((self.space_d, self.space_c))\n', (1320, 1350), False, 'from gym import spaces\n')] |
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
from . import usb_pcap_endpoint_number
from . import usbd_status_windows
class Usbpcap(KaitaiStruct):
"""A native pcap header of [usbpcap](https://github.com/desowin/usbpcap) - an app to capture USB frames in Windows OSes.
.. seealso::
Source - https://desowin.org/usbpcap/captureformat.html
"""
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header = Usbpcap.Header(self._io, self, self._root)
self.data = self._io.read_bytes(self.header.header_main.data_size)
class Header(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.header_size = self._io.read_u2le()
self._raw_header_main = self._io.read_bytes((self.header_size - 2))
_io__raw_header_main = KaitaiStream(BytesIO(self._raw_header_main))
self.header_main = Usbpcap.Header.HeaderMain(_io__raw_header_main, self, self._root)
class HeaderMain(KaitaiStruct):
class TransferType(Enum):
isochronous = 0
interrupt = 1
control = 2
bulk = 3
irp_info = 254
unknown = 255
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.io_request_packet_id = self._io.read_u8le()
self.usbd_status_windows_code = usbd_status_windows.UsbdStatusWindows(self._io)
self.urb_function = self._io.read_u2le()
self.io_request_info = Usbpcap.Header.HeaderMain.Info(self._io, self, self._root)
self.bus = self._io.read_u2le()
self.device_address = self._io.read_u2le()
self.endpoint_number = usb_pcap_endpoint_number.UsbPcapEndpointNumber(self._io)
self.transfer_type = KaitaiStream.resolve_enum(Usbpcap.Header.HeaderMain.TransferType, self._io.read_u1())
self.data_size = self._io.read_u4le()
_on = self.transfer_type
if _on == Usbpcap.Header.HeaderMain.TransferType.isochronous:
self._raw_additional_header = self._io.read_bytes_full()
_io__raw_additional_header = KaitaiStream(BytesIO(self._raw_additional_header))
self.additional_header = Usbpcap.Header.HeaderMain.IsochHeader(_io__raw_additional_header, self, self._root)
elif _on == Usbpcap.Header.HeaderMain.TransferType.control:
self._raw_additional_header = self._io.read_bytes_full()
_io__raw_additional_header = KaitaiStream(BytesIO(self._raw_additional_header))
self.additional_header = Usbpcap.Header.HeaderMain.ControlHeader(_io__raw_additional_header, self, self._root)
else:
self.additional_header = self._io.read_bytes_full()
class Info(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.reserved = self._io.read_bits_int_be(7)
self.pdo_to_fdo = self._io.read_bits_int_be(1) != 0
class IsochHeader(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.start_frame = self._io.read_u8le()
self.packet_count = self._io.read_u8le()
self.error_count = self._io.read_u8le()
self.packet = Usbpcap.Header.HeaderMain.IsochHeader.IsochPacket(self._io, self, self._root)
class IsochPacket(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.offset = self._io.read_u8le()
self.size = self._io.read_u8le()
self.status = usbd_status_windows.UsbdStatusWindows(self._io)
class ControlHeader(KaitaiStruct):
class Stage(Enum):
setup = 0
data = 1
status = 2
complete = 3
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.stage = KaitaiStream.resolve_enum(Usbpcap.Header.HeaderMain.ControlHeader.Stage, self._io.read_u1())
| [
"pkg_resources.parse_version",
"kaitaistruct.BytesIO"
] | [((248, 287), 'pkg_resources.parse_version', 'parse_version', (['kaitaistruct.__version__'], {}), '(kaitaistruct.__version__)\n', (261, 287), False, 'from pkg_resources import parse_version\n'), ((290, 310), 'pkg_resources.parse_version', 'parse_version', (['"""0.9"""'], {}), "('0.9')\n", (303, 310), False, 'from pkg_resources import parse_version\n'), ((1539, 1569), 'kaitaistruct.BytesIO', 'BytesIO', (['self._raw_header_main'], {}), '(self._raw_header_main)\n', (1546, 1569), False, 'from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO\n'), ((3124, 3160), 'kaitaistruct.BytesIO', 'BytesIO', (['self._raw_additional_header'], {}), '(self._raw_additional_header)\n', (3131, 3160), False, 'from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO\n'), ((3506, 3542), 'kaitaistruct.BytesIO', 'BytesIO', (['self._raw_additional_header'], {}), '(self._raw_additional_header)\n', (3513, 3542), False, 'from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO\n')] |
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
"""Create a kops cluster with the name <username>.facebook360.dep.k8s.local.
Takes paths that correspond to credentials for the user's AWS account
and parameters desired for the kops cluster (e.g. instance type and number of workers)
and creates a corresponding kops cluster. If executed when a kops cluster is
already present, the script will terminate without any effect.
Example:
To run create manually, simply execute:
$ python create.py \
--csv_path=/path/to/credentials.csv \
--key_dir=/path/to/keys/ \
--key_name=key \
--ec2_file=/path/to/aws/ec2.txt \
--cluster_size=2
This will create a cluster with two worker nodes (with the default instance type
of c4.xlarge) and a master node (with type c4.large).
Attributes:
FLAGS (absl.flags._flagvalues.FlagValues): Globally defined flags for create.py.
"""
import os
import stat
import sys
import time
from base64 import b64decode
from pathlib import Path
from shutil import copyfile
import botocore
import docker
from absl import app, flags
from fabric import Connection
import patchwork.transfers
dir_scripts = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
dir_root = os.path.dirname(dir_scripts)
sys.path.append(dir_root)
sys.path.append(os.path.join(dir_scripts, "util"))
import scripts.render.config as config
from scripts.util.system_util import run_command
from util import AWSUtil
FLAGS = flags.FLAGS
def sync_files(key_fn, ip_staging):
"""Syncs all the local files aside from the input, output, and build roots to
the root of the remote machine.
Args:
key_fn (str): Path to the .pem file for the staging instance.
ip_staging (str): IP address of the staging instance.
"""
# rsync through patchwork has issues with non-escaped key paths, but fabric wants non-escaped
# Solution: create fabric connection without key and pass it escaped in rsync options
with Connection(host=ip_staging, user="ubuntu") as c:
ssh_opts = f'-i "{key_fn}"'
patchwork.transfers.rsync(
c,
config.DOCKER_ROOT + "/",
"/home/ubuntu/",
ssh_opts=ssh_opts,
exclude=(
config.INPUT_ROOT_NAME,
config.OUTPUT_ROOT_NAME,
config.BUILD_ROOT_NAME,
),
strict_host_keys=False,
)
def run_ssh_command(key_fn, ip_staging, cmd, hide=False, ignore_env=False):
"""Executes a command over SSH on the remote machine.
Args:
key_fn (str): Path to the .pem file for the staging instance.
ip_staging (str): IP address of the staging instance.
cmd (str): Command to be executed on the instance.
hide (bool, optional): Whether or not to show stdout.
ignore_env (bool, optional): Whether or not to set up environment.
Returns:
str: Output from stdout of executing the command.
"""
with Connection(
host=ip_staging,
user="ubuntu",
connect_kwargs={"key_filename": [key_fn]},
inline_ssh_env=True,
) as c:
if not ignore_env:
c.config.run.env = {"PATH": "/home/ubuntu/.local/bin:$PATH"}
result = c.run(cmd, hide=hide)
return result.stdout.strip()
def run_detached_ssh_command(key_fn, ip_staging, cmd, output_fn=None):
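    """Runs a command on the remote machine without blocking on its completion.

    The command is wrapped in `nohup bash -c '...' &` so it keeps running after the
    SSH call returns; its combined output is captured in output_fn when one is given.
    """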
redirection = f">> {output_fn}" if output_fn is not None else ""
detached_cmd = (
f"(nohup bash -c '{cmd} {redirection}' >& {output_fn} < /dev/null &) && sleep 1"
)
print(detached_cmd)
run_ssh_command(key_fn, ip_staging, detached_cmd)
def configure_remote_shell(aws_util, key_fn, ip_staging):
"""Given a key and IP address of a machine and an AWSUtil configured
to it, configures the shell with the access key ID, secret access key, and
region if present in the AWSUtil instance.
Args:
aws_util (AWSUtil): AWSUtil configured with credentials for the staging instance.
key_fn (str): Path to the .pem file for the staging instance.
ip_staging (str): IP address of the staging instance.
"""
run_ssh_command(
key_fn,
ip_staging,
f"aws configure set aws_access_key_id {aws_util.aws_access_key_id}",
)
run_ssh_command(
key_fn,
ip_staging,
f"aws configure set aws_secret_access_key {aws_util.aws_secret_access_key}",
)
if aws_util.region_name:
run_ssh_command(
key_fn,
ip_staging,
f"aws configure set default.region {aws_util.region_name}",
)
def get_staging_info(aws_util, ec2_file, start_instance=True):
instance_id = None
ip_staging = None
if os.path.exists(ec2_file):
with open(ec2_file) as f:
instance_id = f.readline().strip()
if aws_util.ec2_instance_exists(instance_id):
state = aws_util.get_instance_state(instance_id)
if state != "terminated":
if state == "running":
ip_staging = aws_util.wait_for_ping(instance_id)
elif start_instance:
print(f"Starting instance {instance_id}...")
aws_util.ec2_instance_start(instance_id)
ip_staging = aws_util.wait_for_ping(instance_id)
return instance_id, ip_staging
def create_instance(aws_util, key_fn, ec2_file=None):
"""Creates and sets up an instance for rendering. If an instance was previously
created, that instance is started and set up is not re-run.
Args:
aws_util (AWSUtil): AWSUtil configured with credentials for the staging instance.
key_fn (str): Path to the .pem file for the staging instance.
Returns:
tuple (str, str): Tuple with the instance ID name and corresponding IP.
"""
if ec2_file is None:
ec2_file = os.path.expanduser(FLAGS.ec2_file)
if os.path.exists(ec2_file):
instance_id, ip_staging = get_staging_info(aws_util, ec2_file)
if instance_id and ip_staging:
return instance_id, ip_staging
print("Creating instance...")
ec2 = aws_util.session.resource("ec2")
# We open permissions for ingress and egress to ensure no communication issues w/ k8s
try:
security_group = ec2.create_security_group(
GroupName=FLAGS.security_group, Description="Facebook360_dep security group"
)
security_group.authorize_ingress(
IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=0, ToPort=65535
)
except botocore.exceptions.ClientError:
pass
instances = ec2.create_instances(
BlockDeviceMappings=[{"DeviceName": "/dev/sda1", "Ebs": {"VolumeSize": 128}}],
ImageId=FLAGS.ami,
InstanceType=FLAGS.instance_type_staging,
MinCount=1,
MaxCount=1,
KeyName=FLAGS.key_name,
SecurityGroups=[FLAGS.security_group],
TagSpecifications=[
{
"ResourceType": "instance",
"Tags": [{"Key": "Name", "Value": f"{aws_util.username}.{FLAGS.name}"}],
}
],
)
print("Waiting for initialization...")
instance = instances[0]
instance.wait_until_running()
aws_util.wait_for_ip(instance.id)
print(f"Spawned instance {instance.id}! Waiting to be reachable...")
ip_staging = aws_util.wait_for_ping(instance.id)
print(f"Running setup on instance ({ip_staging})...")
sync_files(key_fn, ip_staging)
run_ssh_command(key_fn, ip_staging, "~/scripts/aws/setup.sh")
return instance.id, ip_staging
def setup_instance(
aws_util, key_fn, ip_staging, master_ip, repo_uri, wait_to_init=True
):
"""Creates a kops cluster and deploys kubernetes cluster to it. If a kops cluster already
exists, this step is skipped over and the kubernetes cluster is deployed to the
existing cluster. If the kubernetes cluster already exists, the cluster is updated.
Args:
aws_util (AWSUtil): AWSUtil configured with credentials for the staging instance.
key_fn (str): Path to the .pem file for the staging instance.
ip_staging (str): IP address of the staging instance.
master_ip (str): IP address of the kubernetes master node instance.
repo_uri (str): URI for remote Docker registry.
wait_to_init (bool): Whether or not the function should wait until at least one
container is running to complete.
"""
# Send source code to EC2 instance
sync_files(key_fn, ip_staging)
if FLAGS.cluster_size > 0:
config_fn = os.path.join(config.DOCKER_AWS_ROOT, "config.txt")
cluster_config = f"""--cluster_size={str(FLAGS.cluster_size)}
--instance_type={FLAGS.instance_type}
"""
cached_config = None
if os.path.exists(config_fn):
with open(config_fn) as f:
cached_config = "".join(f.readlines())
delete_cluster = cached_config != cluster_config
if delete_cluster:
with open(config_fn, "w") as f:
f.write(cluster_config)
run_ssh_command(
key_fn,
ip_staging,
f"""~/scripts/aws/create_cluster.sh \
{aws_util.aws_access_key_id} \
{aws_util.aws_secret_access_key} \
{aws_util.region_name} \
{str(FLAGS.cluster_size)} \
{FLAGS.instance_type} \
{aws_util.username} \
{str(delete_cluster).lower()}""",
)
push_docker_to_aws(repo_uri, aws_util)
run_ssh_command(
key_fn,
ip_staging,
f"~/scripts/aws/deploy.sh {repo_uri} {master_ip} {str(FLAGS.cluster_size)}",
)
remote_logs_dir = "~/logs"
try:
run_ssh_command(key_fn, ip_staging, f"mkdir {remote_logs_dir}")
except Exception:
pass # occurs if the directory already exists
while wait_to_init:
kubectl_pods = run_ssh_command(
key_fn, ip_staging, "kubectl get pods", hide=True
).split("\n")[1:]
# kubectl lines are of the format:
# facebook360-dep-588bdc5ff5-7d572 1/1 Running 0 29m
for kubectl_pod in kubectl_pods:
kubectl_pod_status = kubectl_pod.split()[2].strip()
if kubectl_pod_status == "Running":
wait_to_init = False
if wait_to_init:
print("Waiting for Kubernetes pods to initialize...")
time.sleep(10)
vpc_ips_to_id = aws_util.ec2_get_kube_workers()
get_pods_result = run_ssh_command(
key_fn, ip_staging, "kubectl get pods -o wide", hide=True
)
pod_properties = get_pods_result.split("\n")[1:]
pod_names = []
for pod_property in pod_properties:
# kops pod names are of the form ip-172-20-40-140.us-west-2.compute.internal
# where the ip-a-b-c-d correspond to the private IP a.b.c.d
if "ip-" not in pod_property:
continue
pod_property_attrs = pod_property.split()
node_name = pod_property_attrs[0]
pod_name = [attr for attr in pod_property_attrs if "ip-" in attr][0]
pod_names.append(pod_name)
hyphenated_ip = pod_name.split(".")[0].split("-", 1)[1]
private_ip = hyphenated_ip.replace("-", ".")
instance_id = vpc_ips_to_id[private_ip]
run_ssh_command(
key_fn,
ip_staging,
f"nohup kubectl logs -f {node_name} >> {remote_logs_dir}/Worker-{instance_id}.txt &",
hide=True,
ignore_env=True,
)
with open(config.DOCKER_AWS_WORKERS, "w") as f:
f.write("\n".join(pod_names))
def push_docker_to_aws(repo_uri, aws_util):
"""Pushes Docker image to the specified URI.
Args:
repo_uri (str): URI for remote Docker registry.
aws_util (AWSUtil): AWSUtil configured with credentials for the staging instance.
"""
local_img = f"localhost:5000/{config.DOCKER_IMAGE}"
remote_img = f"{repo_uri}:{config.DOCKER_IMAGE}"
ecr = aws_util.session.client("ecr")
auth = ecr.get_authorization_token()
token = auth["authorizationData"][0]["authorizationToken"]
username, password = b64decode(token).split(b":")
auth_config_payload = {
"username": username.decode("utf8"),
"password": password.decode("utf8"),
}
local_img = "localhost:5000/worker"
remote_img = f"{repo_uri}:worker"
client = docker.APIClient()
client.tag(local_img, remote_img)
for line in client.push(
remote_img, stream=True, auth_config=auth_config_payload, decode=True
):
if "status" in line:
if "progress" in line:
print(f"{line['status']}: {line['progress']}")
else:
print(line["status"])
def get_render_info(key_fn, ip_staging):
render_jobs_raw = run_ssh_command(key_fn, ip_staging, "ps aux | grep render.py")
return render_jobs_raw.split("\n")
def has_render_flag(key_fn, ip_staging, flag, value):
render_jobs = get_render_info(key_fn, ip_staging)
CMD_IDX = 10
for render_job in render_jobs:
# ps aux lines are of the format: ubuntu PID ... cmd
render_job_info = render_job.split()
cmd = " ".join(render_job_info[CMD_IDX:])
if cmd.startswith("python3"):
return any(f"{flag}={value}" in info for info in render_job_info)
return None
def get_render_pid(key_fn, ip_staging):
render_jobs = get_render_info(key_fn, ip_staging)
PID_IDX = 1
CMD_IDX = 10
for render_job in render_jobs:
# ps aux lines are of the format: ubuntu PID ... cmd
render_job_info = render_job.split()
cmd = " ".join(render_job_info[CMD_IDX:])
print(f"COMMAND: {cmd}")
if cmd.startswith("python3"):
pid = render_job_info[PID_IDX]
return pid
return None
def run_render(key_fn, ip_staging, master_ip):
"""Runs render on the deployed kubernetes cluster.
Args:
key_fn (str): Path to the .pem file for the staging instance.
ip_staging (str): IP address of the staging instance.
master_ip (str): IP address of the kubernetes master node instance.
"""
render_path = "~/scripts/render/render.py"
render_flags = os.path.join(
str(Path(os.path.abspath(__file__)).parents[2]),
"project",
"flags",
f"render_{FLAGS.tag}.flags",
)
with open(render_flags) as f:
render_flags = f.readlines()
render_flags_combined = " ".join(render_flags).replace("\n", "")
try:
run_ssh_command(key_fn, ip_staging, "rm render.out")
except Exception:
pass # There may be no previous runs on this staging machine or it was manually cleaned
run_detached_ssh_command(
key_fn,
ip_staging,
f"python3 -u {render_path} {render_flags_combined} --master={master_ip} --cloud=aws",
output_fn="render.out",
)
def create_key(aws_util):
key_dir = os.path.expanduser(FLAGS.key_dir)
key_file = f"{FLAGS.key_name}.pem"
key_fn_mount = os.path.join(key_dir, key_file)
aws_util.ec2_keypair_setup(key_fn_mount)
# Copy key to local path. Windows doesn't allow us to change permissions in bound paths
key_fn = f"/{key_file}"
copyfile(key_fn_mount, key_fn)
os.chmod(key_fn, stat.S_IREAD)
return key_fn
def get_repo_uri(key_fn, ip_staging, ecr_registry_name):
return run_ssh_command(
key_fn,
ip_staging,
f"""aws ecr describe-repositories \
--repository-names {ecr_registry_name} | jq -r '.repositories[0].repositoryUri'""",
hide=True,
)
def main(argv):
"""Creates a kops cluster, deploys a kubernetes cluster atop it, and runs render
    with the kubernetes nodes as workers. The cluster remains upon completion and must
be externally terminated (re: clean.py).
Args:
        argv (list[str]): List of arguments (used internally by abseil).
"""
aws_util = AWSUtil(FLAGS.csv_path, region_name=FLAGS.region)
key_fn = create_key(aws_util)
instance_id, ip_staging = create_instance(aws_util, key_fn)
ec2_file = os.path.expanduser(FLAGS.ec2_file)
with open(ec2_file, "w") as f:
f.write(instance_id)
configure_remote_shell(aws_util, key_fn, ip_staging)
master_ip = run_ssh_command(
key_fn,
ip_staging,
"""aws ec2 describe-instances \
--instance-ids $(ec2metadata --instance-id) \
--query 'Reservations[*].Instances[*].PublicIpAddress' \
--output text""",
hide=True,
)
ecr_registry_name = f"fb360-{aws_util.username}"
repo_uri = get_repo_uri(key_fn, ip_staging, ecr_registry_name)
if not repo_uri.strip():
run_ssh_command(
key_fn,
ip_staging,
f"aws ecr create-repository --repository-name {ecr_registry_name}",
)
repo_uri = get_repo_uri(key_fn, ip_staging, ecr_registry_name)
render_pid = get_render_pid(key_fn, ip_staging)
if render_pid is None:
setup_instance(aws_util, key_fn, ip_staging, master_ip, repo_uri)
run_render(key_fn, ip_staging, master_ip)
render_pid = get_render_pid(key_fn, ip_staging)
sync_logs = f"""while true; do \
rsync -avz -e 'ssh -i {key_fn}' \
ubuntu@{ip_staging}:/home/ubuntu/logs/ {config.DOCKER_INPUT_ROOT}/logs/; \
sleep 10; \
done &"""
run_command(sync_logs, run_async=True)
display_render_progress = f"""tail --pid {render_pid} -n +1 -f render.out"""
run_ssh_command(key_fn, ip_staging, display_render_progress)
if __name__ == "__main__":
# Abseil entry point app.run() expects all flags to be already defined
flags.DEFINE_string(
"ami",
"ami-005bdb005fb00e791",
"ID of the AMI to use (defaults to Ubuntu 18.04)",
)
flags.DEFINE_integer(
"cluster_size", 0, "size of Kubernetes cluster (0 = no cluster)"
)
flags.DEFINE_string("csv_path", None, "path to AWS credentials CSV")
flags.DEFINE_string("ec2_file", "~/ec2_info.txt", "file to save EC2 info to")
flags.DEFINE_string(
"instance_type", "c4.xlarge", "AWS instance type for worker nodes"
)
flags.DEFINE_string(
"instance_type_staging",
"c4.xlarge",
"AWS instance type for the staging machine",
)
flags.DEFINE_string(
"key_dir", "~/aws_keys", "directory where AWS .pem files are stored"
)
flags.DEFINE_string("key_name", "ec2-keypair", "name of the .pem keypair")
flags.DEFINE_string(
"name", "facebook360-dep", "name of the instance to be loaded/created"
)
flags.DEFINE_string("region", "us-west-2", "region where instance will spawn")
flags.DEFINE_string(
"security_group", "facebook360-dep", "name of the security group"
)
flags.DEFINE_string(
"tag", "", "tag of the type of render (either 'depth' or 'export')"
)
# Required FLAGS.
flags.mark_flag_as_required("csv_path")
app.run(main)
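# Illustrative invocation (script name and paths are placeholders; the flags are the ones
# defined above, and --csv_path is required via flags.mark_flag_as_required):
#   python run_aws_render.py --csv_path=/path/to/aws_credentials.csv --cluster_size=2 \
#       --instance_type=c4.xlarge --region=us-west-2 --tag=depth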
| [
"os.path.exists",
"docker.APIClient",
"scripts.util.system_util.run_command",
"util.AWSUtil",
"absl.flags.DEFINE_integer",
"os.path.join",
"base64.b64decode",
"absl.app.run",
"os.chmod",
"absl.flags.mark_flag_as_required",
"os.path.dirname",
"shutil.copyfile",
"time.sleep",
"absl.flags.DEFINE_string",
"os.path.abspath",
"fabric.Connection",
"sys.path.append",
"os.path.expanduser"
] | [((1430, 1458), 'os.path.dirname', 'os.path.dirname', (['dir_scripts'], {}), '(dir_scripts)\n', (1445, 1458), False, 'import os\n'), ((1459, 1484), 'sys.path.append', 'sys.path.append', (['dir_root'], {}), '(dir_root)\n', (1474, 1484), False, 'import sys\n'), ((1501, 1534), 'os.path.join', 'os.path.join', (['dir_scripts', '"""util"""'], {}), "(dir_scripts, 'util')\n", (1513, 1534), False, 'import os\n'), ((4921, 4945), 'os.path.exists', 'os.path.exists', (['ec2_file'], {}), '(ec2_file)\n', (4935, 4945), False, 'import os\n'), ((6155, 6179), 'os.path.exists', 'os.path.exists', (['ec2_file'], {}), '(ec2_file)\n', (6169, 6179), False, 'import os\n'), ((12919, 12937), 'docker.APIClient', 'docker.APIClient', ([], {}), '()\n', (12935, 12937), False, 'import docker\n'), ((15484, 15517), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.key_dir'], {}), '(FLAGS.key_dir)\n', (15502, 15517), False, 'import os\n'), ((15576, 15607), 'os.path.join', 'os.path.join', (['key_dir', 'key_file'], {}), '(key_dir, key_file)\n', (15588, 15607), False, 'import os\n'), ((15778, 15808), 'shutil.copyfile', 'copyfile', (['key_fn_mount', 'key_fn'], {}), '(key_fn_mount, key_fn)\n', (15786, 15808), False, 'from shutil import copyfile\n'), ((15813, 15843), 'os.chmod', 'os.chmod', (['key_fn', 'stat.S_IREAD'], {}), '(key_fn, stat.S_IREAD)\n', (15821, 15843), False, 'import os\n'), ((16486, 16535), 'util.AWSUtil', 'AWSUtil', (['FLAGS.csv_path'], {'region_name': 'FLAGS.region'}), '(FLAGS.csv_path, region_name=FLAGS.region)\n', (16493, 16535), False, 'from util import AWSUtil\n'), ((16650, 16684), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.ec2_file'], {}), '(FLAGS.ec2_file)\n', (16668, 16684), False, 'import os\n'), ((17964, 18002), 'scripts.util.system_util.run_command', 'run_command', (['sync_logs'], {'run_async': '(True)'}), '(sync_logs, run_async=True)\n', (17975, 18002), False, 'from scripts.util.system_util import run_command\n'), ((18258, 18364), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""ami"""', '"""ami-005bdb005fb00e791"""', '"""ID of the AMI to use (defaults to Ubuntu 18.04)"""'], {}), "('ami', 'ami-005bdb005fb00e791',\n 'ID of the AMI to use (defaults to Ubuntu 18.04)')\n", (18277, 18364), False, 'from absl import app, flags\n'), ((18396, 18486), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""cluster_size"""', '(0)', '"""size of Kubernetes cluster (0 = no cluster)"""'], {}), "('cluster_size', 0,\n 'size of Kubernetes cluster (0 = no cluster)')\n", (18416, 18486), False, 'from absl import app, flags\n'), ((18501, 18569), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""csv_path"""', 'None', '"""path to AWS credentials CSV"""'], {}), "('csv_path', None, 'path to AWS credentials CSV')\n", (18520, 18569), False, 'from absl import app, flags\n'), ((18574, 18651), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""ec2_file"""', '"""~/ec2_info.txt"""', '"""file to save EC2 info to"""'], {}), "('ec2_file', '~/ec2_info.txt', 'file to save EC2 info to')\n", (18593, 18651), False, 'from absl import app, flags\n'), ((18656, 18747), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""instance_type"""', '"""c4.xlarge"""', '"""AWS instance type for worker nodes"""'], {}), "('instance_type', 'c4.xlarge',\n 'AWS instance type for worker nodes')\n", (18675, 18747), False, 'from absl import app, flags\n'), ((18762, 18868), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""instance_type_staging"""', '"""c4.xlarge"""', '"""AWS instance type for the staging 
machine"""'], {}), "('instance_type_staging', 'c4.xlarge',\n 'AWS instance type for the staging machine')\n", (18781, 18868), False, 'from absl import app, flags\n'), ((18900, 18993), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""key_dir"""', '"""~/aws_keys"""', '"""directory where AWS .pem files are stored"""'], {}), "('key_dir', '~/aws_keys',\n 'directory where AWS .pem files are stored')\n", (18919, 18993), False, 'from absl import app, flags\n'), ((19008, 19082), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""key_name"""', '"""ec2-keypair"""', '"""name of the .pem keypair"""'], {}), "('key_name', 'ec2-keypair', 'name of the .pem keypair')\n", (19027, 19082), False, 'from absl import app, flags\n'), ((19087, 19182), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""name"""', '"""facebook360-dep"""', '"""name of the instance to be loaded/created"""'], {}), "('name', 'facebook360-dep',\n 'name of the instance to be loaded/created')\n", (19106, 19182), False, 'from absl import app, flags\n'), ((19197, 19275), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""region"""', '"""us-west-2"""', '"""region where instance will spawn"""'], {}), "('region', 'us-west-2', 'region where instance will spawn')\n", (19216, 19275), False, 'from absl import app, flags\n'), ((19280, 19370), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""security_group"""', '"""facebook360-dep"""', '"""name of the security group"""'], {}), "('security_group', 'facebook360-dep',\n 'name of the security group')\n", (19299, 19370), False, 'from absl import app, flags\n'), ((19385, 19477), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""tag"""', '""""""', '"""tag of the type of render (either \'depth\' or \'export\')"""'], {}), '(\'tag\', \'\',\n "tag of the type of render (either \'depth\' or \'export\')")\n', (19404, 19477), False, 'from absl import app, flags\n'), ((19515, 19554), 'absl.flags.mark_flag_as_required', 'flags.mark_flag_as_required', (['"""csv_path"""'], {}), "('csv_path')\n", (19542, 19554), False, 'from absl import app, flags\n'), ((19559, 19572), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (19566, 19572), False, 'from absl import app, flags\n'), ((1391, 1416), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1406, 1416), False, 'import os\n'), ((2175, 2217), 'fabric.Connection', 'Connection', ([], {'host': 'ip_staging', 'user': '"""ubuntu"""'}), "(host=ip_staging, user='ubuntu')\n", (2185, 2217), False, 'from fabric import Connection\n'), ((3176, 3286), 'fabric.Connection', 'Connection', ([], {'host': 'ip_staging', 'user': '"""ubuntu"""', 'connect_kwargs': "{'key_filename': [key_fn]}", 'inline_ssh_env': '(True)'}), "(host=ip_staging, user='ubuntu', connect_kwargs={'key_filename':\n [key_fn]}, inline_ssh_env=True)\n", (3186, 3286), False, 'from fabric import Connection\n'), ((6113, 6147), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.ec2_file'], {}), '(FLAGS.ec2_file)\n', (6131, 6147), False, 'import os\n'), ((8838, 8888), 'os.path.join', 'os.path.join', (['config.DOCKER_AWS_ROOT', '"""config.txt"""'], {}), "(config.DOCKER_AWS_ROOT, 'config.txt')\n", (8850, 8888), False, 'import os\n'), ((9057, 9082), 'os.path.exists', 'os.path.exists', (['config_fn'], {}), '(config_fn)\n', (9071, 9082), False, 'import os\n'), ((12673, 12689), 'base64.b64decode', 'b64decode', (['token'], {}), '(token)\n', (12682, 12689), False, 'from base64 import b64decode\n'), ((10833, 10847), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', 
(10843, 10847), False, 'import time\n'), ((14795, 14820), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (14810, 14820), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import logging
import time
from apscheduler.schedulers.background import BackgroundScheduler
from zvdata import IntervalLevel
from zvt import init_log
from zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder import JqChinaStockKdataRecorder
logger = logging.getLogger(__name__)
sched = BackgroundScheduler()
# Name                       dataschema                   provider                     comments                                  download
# Stock profile              Stock                        eastmoney,sina               stocks and blocks (sectors) have a many-to-many relationship
# Block profile              Index                        eastmoney,sina               blocks are classified along three dimensions (industry, concept, region); classifications differ per provider; stocks and blocks are many-to-many
# Stock kdata                Stock{level}Kdata            joinquant,netease,eastmoney  supports 1, 5, 15, 30, 60 minute plus daily and weekly levels
# Index daily kdata          Index1DKdata                 eastmoney,sina,joinquant     an index is essentially a kind of block; indices map to targets in the block profile
# Stock money flow           MoneyFlow                    eastmoney,sina,joinquant
# Block money flow           MoneyFlow                    eastmoney,sina,joinquant     maps to targets in the block profile
# Dividend and financing     DividendFinancing            eastmoney                    a company's bottom line is whether it makes money for investors; annual statistics
# Dividend details           DividendDetail               eastmoney
# SPO details                SPODetail                    eastmoney
# Rights issue details       RightsIssueDetail            eastmoney
# Key financial indicators   FinanceFactor                eastmoney
# Balance sheet              BalanceSheet                 eastmoney
# Income statement           IncomeStatement              eastmoney
# Cash flow statement        CashFlowStatement            eastmoney
# Top ten holders            TopTenHolder                 eastmoney
# Top ten tradable holders   TopTenTradableHolder         eastmoney
# Institutional holdings     InstitutionalInvestorHolder  eastmoney
# Executive trading          ManagerTrading               eastmoney
# Major holder trading       HolderTrading                eastmoney
# Block trades               BigDealTrading               eastmoney
# Margin trading             MarginTrading                eastmoney
# Dragon and Tiger list      DragonAndTiger               eastmoney
@sched.scheduled_job('cron', hour=1, minute=10, day_of_week='tue-sat')
def record_day_kdata():
loop = 9
while loop >= 0:
try:
JqChinaStockKdataRecorder(level=IntervalLevel.LEVEL_1DAY).run()
break
except Exception as e:
loop -= 1
logger.exception('joinquant_run_recorder joinquant day_kdata runner error:{}'.format(e))
time.sleep(60 * 2)
# Fetch weekly and monthly kdata every Saturday
@sched.scheduled_job('cron', day_of_week=6, hour=2, minute=30)
def record_wk_kdata():
loop = 8
while loop >= 0:
try:
JqChinaStockKdataRecorder(level=IntervalLevel.LEVEL_1WEEK).run()
break
except Exception as e:
loop -= 1
logger.exception('joinquant_run_recorder joinquant wk_kdata runner error:{}'.format(e))
time.sleep(60 * 2)
# Fetch weekly and monthly kdata every Saturday
@sched.scheduled_job('cron', day_of_week=0, hour=2, minute=30)
def record_month_kdata():
loop = 8
while loop >= 0:
try:
JqChinaStockKdataRecorder(level=IntervalLevel.LEVEL_1MON).run()
break
except Exception as e:
loop -= 1
logger.exception('joinquant_run_recorder joinquant month_kdata runner error:{}'.format(e))
time.sleep(60 * 2)
if __name__ == '__main__':
init_log('joinquant_run_recorder.log')
# record_day_kdata()
# record_wk_kdata()
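    # BackgroundScheduler.start() returns immediately and runs jobs on a background
    # thread, so join the scheduler's worker thread to keep the process alive and
    # let the cron jobs above fire.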
sched.start()
sched._thread.join()
| [
"logging.getLogger",
"time.sleep",
"zvt.init_log",
"zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder.JqChinaStockKdataRecorder",
"apscheduler.schedulers.background.BackgroundScheduler"
] | [((280, 307), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (297, 307), False, 'import logging\n'), ((317, 338), 'apscheduler.schedulers.background.BackgroundScheduler', 'BackgroundScheduler', ([], {}), '()\n', (336, 338), False, 'from apscheduler.schedulers.background import BackgroundScheduler\n'), ((2638, 2676), 'zvt.init_log', 'init_log', (['"""joinquant_run_recorder.log"""'], {}), "('joinquant_run_recorder.log')\n", (2646, 2676), False, 'from zvt import init_log\n'), ((1723, 1741), 'time.sleep', 'time.sleep', (['(60 * 2)'], {}), '(60 * 2)\n', (1733, 1741), False, 'import time\n'), ((2152, 2170), 'time.sleep', 'time.sleep', (['(60 * 2)'], {}), '(60 * 2)\n', (2162, 2170), False, 'import time\n'), ((2586, 2604), 'time.sleep', 'time.sleep', (['(60 * 2)'], {}), '(60 * 2)\n', (2596, 2604), False, 'import time\n'), ((1475, 1532), 'zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder.JqChinaStockKdataRecorder', 'JqChinaStockKdataRecorder', ([], {'level': 'IntervalLevel.LEVEL_1DAY'}), '(level=IntervalLevel.LEVEL_1DAY)\n', (1500, 1532), False, 'from zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder import JqChinaStockKdataRecorder\n'), ((1904, 1962), 'zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder.JqChinaStockKdataRecorder', 'JqChinaStockKdataRecorder', ([], {'level': 'IntervalLevel.LEVEL_1WEEK'}), '(level=IntervalLevel.LEVEL_1WEEK)\n', (1929, 1962), False, 'from zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder import JqChinaStockKdataRecorder\n'), ((2336, 2393), 'zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder.JqChinaStockKdataRecorder', 'JqChinaStockKdataRecorder', ([], {'level': 'IntervalLevel.LEVEL_1MON'}), '(level=IntervalLevel.LEVEL_1MON)\n', (2361, 2393), False, 'from zvt.recorders.joinquant.quotes.jq_stock_kdata_recorder import JqChinaStockKdataRecorder\n')] |
from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.python import PythonOperator
from airflow.exceptions import AirflowSensorTimeout
from airflow.operators.dummy import DummyOperator
from airflow.contrib.sensors.file_sensor import FileSensor
from airflow.dags.jobs.extract_blob import launch_blob
from airflow.dags.jobs.check_stuff import check_variables
from airflow.dags.jobs.import_db import insert_db
from airflow.dags.jobs.extract_db import extract_db
FILE_PATH = "/opt/airflow/data"
default_args = {
"owner": "airflow",
"depends_on_past": False,
"start_date": datetime.now(),
"email": ["<EMAIL>"],
"email_on_failure": False,
"email_on_retry": False,
"retries": 0,
"retry_delay": timedelta(minutes=2),
}
# Operators configuration
def _failure_callback(context):
if isinstance(context["exception"], AirflowSensorTimeout):
print(context)
print("Sensor timed out")
dag = DAG(
"pipeline_lombardia",
start_date=datetime.now(),
schedule_interval="@daily",
catchup=False,
default_args=default_args,
)
run_variables_check = PythonOperator(
task_id="variable_check",
python_callable=check_variables,
dag=dag,
op_kwargs={"state": "state_test"},
)
run_ospedali_extractor = BashOperator(
task_id="ospedali_extractor",
bash_command="python /opt/airflow/dags/jobs/extract_csv.py launch_ospedali",
dag=dag,
)
run_popolazione_extractor = PythonOperator(
task_id="popolazione_extractor", python_callable=launch_blob, dag=dag
)
run_performance_extractor = PythonOperator(
task_id="performance_extractor", python_callable=extract_db, dag=dag
)
save_result_db = PythonOperator(
task_id="save_result_db", python_callable=insert_db, dag=dag
)
## REMEMBER TO CREATE A file_check (fs) connection on admin > connections
sensor_extract_ospedali = FileSensor(
task_id="sensor_extract_ospedali",
mode="reschedule",
on_failure_callback=_failure_callback,
filepath="/opt/airflow/data/ospedali.csv",
poke_interval=15,
timeout=15 * 60,
fs_conn_id="file_check",
)
sensor_extract_popolazione = FileSensor(
task_id="sensor_extract_popolazione",
mode="reschedule",
on_failure_callback=_failure_callback,
filepath="/opt/airflow/data/popolazione.csv",
poke_interval=15,
timeout=15 * 60,
fs_conn_id="file_check",
)
sensor_extract_performance = FileSensor(
task_id="sensor_extract_performance",
mode="reschedule",
on_failure_callback=_failure_callback,
filepath="/opt/airflow/data/performance.csv",
poke_interval=15,
timeout=15 * 60,
fs_conn_id="file_check",
)
start_op = DummyOperator(task_id="start_task", dag=dag)
mid_op = DummyOperator(task_id="mid_task", dag=dag)
last_op = DummyOperator(task_id="last_task", dag=dag)
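# Task graph: each extractor fans out from the variable check, its FileSensor waits for
# the extracted CSV, and all branches join at mid_task before the single DB load and last_task.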
start_op >> run_variables_check >> run_ospedali_extractor >> sensor_extract_ospedali >> mid_op >> save_result_db >> last_op
start_op >> run_variables_check >> run_popolazione_extractor >> sensor_extract_popolazione >> mid_op >> save_result_db >> last_op
start_op >> run_variables_check >> run_performance_extractor >> sensor_extract_performance >> mid_op >> save_result_db >> last_op | [
"airflow.operators.dummy.DummyOperator",
"airflow.operators.bash.BashOperator",
"airflow.contrib.sensors.file_sensor.FileSensor",
"datetime.datetime.now",
"airflow.operators.python.PythonOperator",
"datetime.timedelta"
] | [((1181, 1302), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""variable_check"""', 'python_callable': 'check_variables', 'dag': 'dag', 'op_kwargs': "{'state': 'state_test'}"}), "(task_id='variable_check', python_callable=check_variables,\n dag=dag, op_kwargs={'state': 'state_test'})\n", (1195, 1302), False, 'from airflow.operators.python import PythonOperator\n'), ((1344, 1477), 'airflow.operators.bash.BashOperator', 'BashOperator', ([], {'task_id': '"""ospedali_extractor"""', 'bash_command': '"""python /opt/airflow/dags/jobs/extract_csv.py launch_ospedali"""', 'dag': 'dag'}), "(task_id='ospedali_extractor', bash_command=\n 'python /opt/airflow/dags/jobs/extract_csv.py launch_ospedali', dag=dag)\n", (1356, 1477), False, 'from airflow.operators.bash import BashOperator\n'), ((1517, 1606), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""popolazione_extractor"""', 'python_callable': 'launch_blob', 'dag': 'dag'}), "(task_id='popolazione_extractor', python_callable=launch_blob,\n dag=dag)\n", (1531, 1606), False, 'from airflow.operators.python import PythonOperator\n'), ((1638, 1726), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""performance_extractor"""', 'python_callable': 'extract_db', 'dag': 'dag'}), "(task_id='performance_extractor', python_callable=extract_db,\n dag=dag)\n", (1652, 1726), False, 'from airflow.operators.python import PythonOperator\n'), ((1748, 1824), 'airflow.operators.python.PythonOperator', 'PythonOperator', ([], {'task_id': '"""save_result_db"""', 'python_callable': 'insert_db', 'dag': 'dag'}), "(task_id='save_result_db', python_callable=insert_db, dag=dag)\n", (1762, 1824), False, 'from airflow.operators.python import PythonOperator\n'), ((1932, 2151), 'airflow.contrib.sensors.file_sensor.FileSensor', 'FileSensor', ([], {'task_id': '"""sensor_extract_ospedali"""', 'mode': '"""reschedule"""', 'on_failure_callback': '_failure_callback', 'filepath': '"""/opt/airflow/data/ospedali.csv"""', 'poke_interval': '(15)', 'timeout': '(15 * 60)', 'fs_conn_id': '"""file_check"""'}), "(task_id='sensor_extract_ospedali', mode='reschedule',\n on_failure_callback=_failure_callback, filepath=\n '/opt/airflow/data/ospedali.csv', poke_interval=15, timeout=15 * 60,\n fs_conn_id='file_check')\n", (1942, 2151), False, 'from airflow.contrib.sensors.file_sensor import FileSensor\n'), ((2199, 2424), 'airflow.contrib.sensors.file_sensor.FileSensor', 'FileSensor', ([], {'task_id': '"""sensor_extract_popolazione"""', 'mode': '"""reschedule"""', 'on_failure_callback': '_failure_callback', 'filepath': '"""/opt/airflow/data/popolazione.csv"""', 'poke_interval': '(15)', 'timeout': '(15 * 60)', 'fs_conn_id': '"""file_check"""'}), "(task_id='sensor_extract_popolazione', mode='reschedule',\n on_failure_callback=_failure_callback, filepath=\n '/opt/airflow/data/popolazione.csv', poke_interval=15, timeout=15 * 60,\n fs_conn_id='file_check')\n", (2209, 2424), False, 'from airflow.contrib.sensors.file_sensor import FileSensor\n'), ((2473, 2698), 'airflow.contrib.sensors.file_sensor.FileSensor', 'FileSensor', ([], {'task_id': '"""sensor_extract_performance"""', 'mode': '"""reschedule"""', 'on_failure_callback': '_failure_callback', 'filepath': '"""/opt/airflow/data/performance.csv"""', 'poke_interval': '(15)', 'timeout': '(15 * 60)', 'fs_conn_id': '"""file_check"""'}), "(task_id='sensor_extract_performance', mode='reschedule',\n on_failure_callback=_failure_callback, filepath=\n 
'/opt/airflow/data/performance.csv', poke_interval=15, timeout=15 * 60,\n fs_conn_id='file_check')\n", (2483, 2698), False, 'from airflow.contrib.sensors.file_sensor import FileSensor\n'), ((2729, 2773), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""start_task"""', 'dag': 'dag'}), "(task_id='start_task', dag=dag)\n", (2742, 2773), False, 'from airflow.operators.dummy import DummyOperator\n'), ((2783, 2825), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""mid_task"""', 'dag': 'dag'}), "(task_id='mid_task', dag=dag)\n", (2796, 2825), False, 'from airflow.operators.dummy import DummyOperator\n'), ((2836, 2879), 'airflow.operators.dummy.DummyOperator', 'DummyOperator', ([], {'task_id': '"""last_task"""', 'dag': 'dag'}), "(task_id='last_task', dag=dag)\n", (2849, 2879), False, 'from airflow.operators.dummy import DummyOperator\n'), ((665, 679), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (677, 679), False, 'from datetime import datetime, timedelta\n'), ((804, 824), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(2)'}), '(minutes=2)\n', (813, 824), False, 'from datetime import datetime, timedelta\n'), ((1058, 1072), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1070, 1072), False, 'from datetime import datetime, timedelta\n')] |
#!/usr/bin/python3
from botbase import *
_hersfeld_c = re.compile(r"Infektionen\sinsgesamt:\s([0-9.]+)\s(?:\(\+?(-?\s*[0-9.]+)\szum\s)?")
_hersfeld_d = re.compile(r"Todesfälle:\s([0-9.]+)\s(?:\(\+?(-?\s*[0-9.]+)\szum\s)?")
_hersfeld_g = re.compile(r"Genesungen:\s([0-9.]+)\s(?:\(\+?(-?\s*[0-9.]+)\szum\s)?")
_hersfeld_s = re.compile(r"([0-9.]+)\sinfizierte(?:\sPersone?n?)?\sin\sBehandlung,\sdavon\s([0-9.]+)(?:\sPersone?n?)?\sauf\s[Ii]ntensiv")
_hersfeld_q = re.compile(r"Quarantäne:\s([0-9.]+)")
def hersfeld(sheets):
import locale
locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")
soup = get_soup("https://www.hef-rof.de/")
main = soup.find(id="fav-slide")
ps = [p.text for p in main.findAll("p")]
#for p in ps: print(p)
if not any([today().strftime("Stand: %d.%m.%Y") in p for p in ps]): raise NotYetAvailableException("Hersfeld noch alt: " + next(p for p in ps if "Stand:" in p))
c, cc, d, dd, g, gg, s, i, q = None, None, None, None, None, None, None, None, None
for p in ps:
m = _hersfeld_c.search(p)
if m: c, cc = force_int(m.group(1)), force_int(m.group(2))
#if m: c = force_int(m.group(1))
m = _hersfeld_d.search(p)
if m: d, dd = force_int(m.group(1)), force_int(m.group(2))
m = _hersfeld_g.search(p)
#if m: g = force_int(m.group(1))
if m: g, gg = force_int(m.group(1)), force_int(m.group(2))
m = _hersfeld_s.search(p)
if m: s, i = force_int(m.group(1)), force_int(m.group(2))
m = _hersfeld_q.search(p)
if m: q = force_int(m.group(1))
update(sheets, 6632, c=c, cc=cc, d=d, dd=dd, g=g, gg=gg, q=q, s=s, i=i, ignore_delta="mon")
return True
schedule.append(Task(13, 0, 16, 35, 360, hersfeld, 6632))
if __name__ == '__main__': hersfeld(googlesheets())
| [
"locale.setlocale"
] | [((544, 591), 'locale.setlocale', 'locale.setlocale', (['locale.LC_TIME', '"""de_DE.UTF-8"""'], {}), "(locale.LC_TIME, 'de_DE.UTF-8')\n", (560, 591), False, 'import locale\n')] |
import datetime
import pytz
import inspect
import logging
from saml2 import samlp
# to load state from a cookie
# from satosa.base import SATOSABase
# from satosa.wsgi import satosa_config
# sb = SATOSABase(satosa_config)
# sb._load_state(context)
logger = logging.getLogger(__name__)
_ERROR_TROUBLESHOOT = " Contattare il supporto tecnico per eventuali chiarimenti"
class SPIDValidatorException(Exception):
def __init__(self, message, errors=""):
super().__init__(message)
logger.error(message)
self.errors = errors
class Saml2ResponseValidator(object):
def __init__(
self,
authn_response="",
issuer="",
nameid_formats=["urn:oasis:names:tc:SAML:2.0:nameid-format:transient"],
recipient="spidSaml2/acs/post",
accepted_time_diff=1,
in_response_to="",
authn_context_class_ref="https://www.spid.gov.it/SpidL2",
return_addrs=[],
allowed_acrs=[],
):
self.response = samlp.response_from_string(authn_response)
self.nameid_formats = nameid_formats
self.recipient = recipient
self.accepted_time_diff = accepted_time_diff
self.authn_context_class_ref = authn_context_class_ref
self.in_response_to = in_response_to
self.return_addrs = return_addrs
self.issuer = issuer
self.allowed_acrs = allowed_acrs
# handled adding authn req arguments in the session state (cookie)
def validate_in_response_to(self):
"""spid test 18"""
if self.in_response_to != self.response.in_response_to:
raise SPIDValidatorException(
"In response To not valid: "
f"{self.in_response_to} != {self.response.in_response_to}."
f"{_ERROR_TROUBLESHOOT}"
)
def validate_destination(self):
"""spid test 19 e 20
inutile se disabiliti gli unsolicited
"""
if (
not self.response.destination
or self.response.destination not in self.return_addrs
):
_msg = (
f'Destination is not valid: {self.response.destination or ""} not in {self.return_addrs}.'
f"{_ERROR_TROUBLESHOOT}"
)
raise SPIDValidatorException(_msg)
def validate_issuer(self):
"""spid saml check 30, 70, 71, 72
<saml:Issuer Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">http://localhost:8080</saml:Issuer>
"""
# 30
# check that this issuer is in the metadata...
if self.response.issuer.format:
if (
self.response.issuer.format
!= "urn:oasis:names:tc:SAML:2.0:nameid-format:entity"
):
raise SPIDValidatorException(
f"Issuer NameFormat is invalid: {self.response.issuer.format} "
'!= "urn:oasis:names:tc:SAML:2.0:nameid-format:entity"'
)
msg = "Issuer format is not valid: {}. {}"
# 70, 71
assiss = self.response.assertion[0].issuer
if not hasattr(assiss, "format") or not getattr(assiss, "format", None):
raise SPIDValidatorException(
msg.format(self.response.issuer.format, _ERROR_TROUBLESHOOT)
)
# 72
for i in self.response.assertion:
if i.issuer.format != "urn:oasis:names:tc:SAML:2.0:nameid-format:entity":
raise SPIDValidatorException(
msg.format(self.response.issuer.format,
_ERROR_TROUBLESHOOT)
)
def validate_assertion_version(self):
"""spid saml check 35"""
for i in self.response.assertion:
if i.version != "2.0":
msg = (
f'validate_assertion_version failed on: "{i.version}".'
f"{_ERROR_TROUBLESHOOT}"
)
raise SPIDValidatorException(msg)
def validate_issueinstant(self):
"""spid saml check 39, 40"""
# Spid dt standard format
for i in self.response.assertion:
try:
issueinstant_naive = datetime.datetime.strptime(
i.issue_instant, "%Y-%m-%dT%H:%M:%SZ"
)
except Exception:
issueinstant_naive = datetime.datetime.strptime(
i.issue_instant, "%Y-%m-%dT%H:%M:%S.%fZ"
)
issuerinstant_aware = pytz.utc.localize(issueinstant_naive)
now = pytz.utc.localize(datetime.datetime.utcnow())
if now < issuerinstant_aware:
seconds = (issuerinstant_aware - now).seconds
else:
seconds = (now - issuerinstant_aware).seconds
if seconds > self.accepted_time_diff:
msg = (
f"Not a valid issue_instant: {issueinstant_naive}"
f"{_ERROR_TROUBLESHOOT}"
)
raise SPIDValidatorException(msg)
def validate_name_qualifier(self):
"""spid saml check 43, 45, 46, 47, 48, 49"""
for i in self.response.assertion:
if (
not hasattr(i.subject.name_id, "name_qualifier")
or not i.subject.name_id.name_qualifier
):
raise SPIDValidatorException(
"Not a valid subject.name_id.name_qualifier"
f"{_ERROR_TROUBLESHOOT}"
)
if not i.subject.name_id.format:
raise SPIDValidatorException(
"Not a valid subject.name_id.format" f"{_ERROR_TROUBLESHOOT}"
)
if i.subject.name_id.format not in self.nameid_formats:
msg = (
f"Not a valid subject.name_id.format: {i.subject.name_id.format}"
f"{_ERROR_TROUBLESHOOT}"
)
raise SPIDValidatorException(msg)
def validate_subject_confirmation_data(self):
"""spid saml check 59, 61, 62, 63, 64
saml_response.assertion[0].subject.subject_confirmation[0].subject_confirmation_data.__dict__
"""
for i in self.response.assertion:
for subject_confirmation in i.subject.subject_confirmation:
# 61
if not hasattr(
subject_confirmation, "subject_confirmation_data"
) or not getattr(
subject_confirmation, "subject_confirmation_data", None
):
msg = "subject_confirmation_data not present"
raise SPIDValidatorException(
f"{msg}. {_ERROR_TROUBLESHOOT}")
# 60
if not subject_confirmation.subject_confirmation_data.in_response_to:
raise SPIDValidatorException(
"subject.subject_confirmation_data in response -> null data."
f"{_ERROR_TROUBLESHOOT}"
)
# 62 avoided with allow_unsolicited set to false
# (XML parse error: Unsolicited response: id-OsoMQGYzX4HGLsfL7)
if self.in_response_to:
if (
subject_confirmation.subject_confirmation_data.in_response_to
!= self.in_response_to
):
raise Exception(
"subject.subject_confirmation_data in response to not valid"
)
# 50
if (
self.recipient
!= subject_confirmation.subject_confirmation_data.recipient
):
msg = (
"subject_confirmation.subject_confirmation_data.recipient not valid:"
f" {subject_confirmation.subject_confirmation_data.recipient}."
)
raise SPIDValidatorException(f"{msg}{_ERROR_TROUBLESHOOT}")
# 63 ,64
if not hasattr(
subject_confirmation.subject_confirmation_data, "not_on_or_after"
) or not getattr(
subject_confirmation.subject_confirmation_data,
"not_on_or_after",
None,
):
raise SPIDValidatorException(
"subject.subject_confirmation_data not_on_or_after not valid. "
f"{_ERROR_TROUBLESHOOT}"
)
if not hasattr(
subject_confirmation.subject_confirmation_data, "in_response_to"
) or not getattr(
subject_confirmation.subject_confirmation_data,
"in_response_to",
None,
):
raise SPIDValidatorException(
"subject.subject_confirmation_data in response to not valid. "
f"{_ERROR_TROUBLESHOOT}"
)
def validate_assertion_conditions(self):
"""spid saml check 73, 74, 75, 76, 79, 80, 84, 85
saml_response.assertion[0].conditions
"""
for i in self.response.assertion:
# 73, 74
if not hasattr(i, "conditions") or not getattr(i, "conditions", None):
# or not i.conditions.text.strip(' ').strip('\n'):
raise SPIDValidatorException(
"Assertion conditions not present. " f"{_ERROR_TROUBLESHOOT}"
)
# 75, 76
if not hasattr(i.conditions, "not_before") or not getattr(
i.conditions, "not_before", None
):
# or not i.conditions.text.strip(' ').strip('\n'):
raise SPIDValidatorException(
"Assertion conditions not_before not valid. "
f"{_ERROR_TROUBLESHOOT}"
)
# 79, 80
if not hasattr(i.conditions, "not_on_or_after") or not getattr(
i.conditions, "not_on_or_after", None
):
# or not i.conditions.text.strip(' ').strip('\n'):
raise SPIDValidatorException(
"Assertion conditions not_on_or_after not valid. "
f"{_ERROR_TROUBLESHOOT}"
)
# 84
if not hasattr(i.conditions, "audience_restriction") or not getattr(
i.conditions, "audience_restriction", None
):
raise SPIDValidatorException(
"Assertion conditions without audience_restriction. "
f"{_ERROR_TROUBLESHOOT}"
)
# 85
# already filtered by pysaml2: AttributeError: 'NoneType' object has no attribute 'strip'
for aud in i.conditions.audience_restriction:
if not getattr(aud, "audience", None):
raise SPIDValidatorException(
"Assertion conditions audience_restriction without audience."
f"{_ERROR_TROUBLESHOOT}"
)
if not aud.audience[0].text:
raise SPIDValidatorException(
"Assertion conditions audience_restriction without audience. "
f"{_ERROR_TROUBLESHOOT}"
)
def validate_assertion_authn_statement(self):
"""spid saml check 90, 92, 97, 98"""
for i in self.response.assertion:
if not hasattr(i, "authn_statement") or not getattr(
i, "authn_statement", None
):
raise SPIDValidatorException(
"Assertion authn_statement is missing/invalid. "
f"{_ERROR_TROUBLESHOOT}"
)
for authns in i.authn_statement:
# 90, 92, 93
if (
not hasattr(authns, "authn_context")
or not getattr(authns, "authn_context", None)
or not hasattr(authns.authn_context, "authn_context_class_ref")
or not getattr(
authns.authn_context, "authn_context_class_ref", None
)
):
raise SPIDValidatorException(
"Assertion authn_statement.authn_context_class_ref is missing/invalid. "
f"{_ERROR_TROUBLESHOOT}"
)
# 94, 95, 96
if (
authns.authn_context.authn_context_class_ref.text
!= self.authn_context_class_ref
):
_msg = (
"Invalid Spid authn_context_class_ref, requested: "
f"{self.authn_context_class_ref}, got {authns.authn_context.authn_context_class_ref.text}"
)
try:
level_sp = int(self.authn_context_class_ref[-1])
level_idp = int(
authns.authn_context.authn_context_class_ref.text.strip().replace(
"\n", ""
)[
-1
]
)
if level_idp < level_sp:
raise SPIDValidatorException(_msg)
except Exception:
raise SPIDValidatorException(_msg)
# 97
if (
authns.authn_context.authn_context_class_ref.text
not in self.allowed_acrs
):
raise SPIDValidatorException(
"Assertion authn_statement.authn_context.authn_context_class_ref is missing/invalid. "
f"{_ERROR_TROUBLESHOOT}"
)
# 98
if not hasattr(i, "attribute_statement") or not getattr(
i, "attribute_statement", None
):
raise SPIDValidatorException(
"Assertion attribute_statement is missing/invalid. "
f"{_ERROR_TROUBLESHOOT}"
)
for attri in i.attribute_statement:
if not attri.attribute:
raise SPIDValidatorException(
"Assertion attribute_statement.attribute is missing/invalid. "
f"{_ERROR_TROUBLESHOOT}"
)
def run(self, tests=[]):
"""run all tests/methods"""
if not tests:
tests = [
i[0]
for i in inspect.getmembers(self, predicate=inspect.ismethod)
if not i[0].startswith("_")
]
tests.remove("run")
# tests.remove('validate_issuer')
for element in tests:
getattr(self, element)()
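# Minimal usage sketch (values are placeholders, not taken from a real IdP response):
#   validator = Saml2ResponseValidator(
#       authn_response=saml_response_xml,   # decoded <Response> XML string
#       in_response_to=authn_request_id,
#       recipient="https://sp.example.org/spidSaml2/acs/post",
#       return_addrs=["https://sp.example.org/spidSaml2/acs/post"],
#       allowed_acrs=["https://www.spid.gov.it/SpidL2"],
#   )
#   validator.run()  # raises SPIDValidatorException on a failed check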
| [
"logging.getLogger",
"saml2.samlp.response_from_string",
"pytz.utc.localize",
"inspect.getmembers",
"datetime.datetime.utcnow",
"datetime.datetime.strptime"
] | [((260, 287), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (277, 287), False, 'import logging\n'), ((993, 1035), 'saml2.samlp.response_from_string', 'samlp.response_from_string', (['authn_response'], {}), '(authn_response)\n', (1019, 1035), False, 'from saml2 import samlp\n'), ((4510, 4547), 'pytz.utc.localize', 'pytz.utc.localize', (['issueinstant_naive'], {}), '(issueinstant_naive)\n', (4527, 4547), False, 'import pytz\n'), ((4197, 4262), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['i.issue_instant', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(i.issue_instant, '%Y-%m-%dT%H:%M:%SZ')\n", (4223, 4262), False, 'import datetime\n'), ((4584, 4610), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4608, 4610), False, 'import datetime\n'), ((4368, 4436), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['i.issue_instant', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(i.issue_instant, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (4394, 4436), False, 'import datetime\n'), ((14915, 14967), 'inspect.getmembers', 'inspect.getmembers', (['self'], {'predicate': 'inspect.ismethod'}), '(self, predicate=inspect.ismethod)\n', (14933, 14967), False, 'import inspect\n')] |
import torch
import torch.nn as nn
LATENT_CODE_SIZE = 128 # size of the Z vector
SDF_NET_BREADTH = 256 # size of the w vector
amcm = 24 # Autoencoder Model Complexity Multiplier
class Lambda(nn.Module):
def __init__(self, function):
super(Lambda, self).__init__()
self.function = function
def forward(self, x):
return self.function(x)
class ModelD(nn.Module):
def __init__(self):
super(ModelD, self).__init__()
self.add_module('discriminator', nn.Sequential(
# accepts w
nn.Linear(in_features =SDF_NET_BREADTH, out_features = SDF_NET_BREADTH),
nn.ReLU(inplace=True),
nn.Linear(in_features = SDF_NET_BREADTH, out_features = SDF_NET_BREADTH),
nn.ReLU(inplace=True),
nn.Linear(in_features = SDF_NET_BREADTH, out_features = SDF_NET_BREADTH),
nn.ReLU(inplace=True),
# TODO: change output function to P(w = true)
nn.Linear(in_features = SDF_NET_BREADTH, out_features = 1),
nn.ReLU(inplace=True)
))
self.cuda()
def forward(self, w):
w = w.cuda()
return self.discriminator(w)
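# Usage sketch (illustrative): score a batch of w vectors with the discriminator.
# Requires a CUDA device because __init__ calls self.cuda().
#   model = ModelD()
#   w = torch.rand(16, SDF_NET_BREADTH)   # batch of 16 feature vectors
#   scores = model(w)                     # shape (16, 1), non-negative due to the final ReLU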
| [
"torch.nn.ReLU",
"torch.nn.Linear"
] | [((552, 620), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'SDF_NET_BREADTH', 'out_features': 'SDF_NET_BREADTH'}), '(in_features=SDF_NET_BREADTH, out_features=SDF_NET_BREADTH)\n', (561, 620), True, 'import torch.nn as nn\n'), ((637, 658), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (644, 658), True, 'import torch.nn as nn\n'), ((673, 741), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'SDF_NET_BREADTH', 'out_features': 'SDF_NET_BREADTH'}), '(in_features=SDF_NET_BREADTH, out_features=SDF_NET_BREADTH)\n', (682, 741), True, 'import torch.nn as nn\n'), ((759, 780), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (766, 780), True, 'import torch.nn as nn\n'), ((795, 863), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'SDF_NET_BREADTH', 'out_features': 'SDF_NET_BREADTH'}), '(in_features=SDF_NET_BREADTH, out_features=SDF_NET_BREADTH)\n', (804, 863), True, 'import torch.nn as nn\n'), ((881, 902), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (888, 902), True, 'import torch.nn as nn\n'), ((975, 1029), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'SDF_NET_BREADTH', 'out_features': '(1)'}), '(in_features=SDF_NET_BREADTH, out_features=1)\n', (984, 1029), True, 'import torch.nn as nn\n'), ((1047, 1068), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1054, 1068), True, 'import torch.nn as nn\n')] |
'''
A databear API simulator
Use this to develop the display without running databear
'''
import threading
import socket
import selectors
import json
class simAPI:
#Define dummy sensors
sensors = ['tph1','wsd1']
measurements = {
'tph1':[
('air_temperature','C'),
('relative_humidity','%'),
('barometric_pressure','mb')
],
'wsd1':[
('speed','m/s'),
('direction','degrees'),
('speed_2m','m/s'),
('direction_2m','degrees'),
('speed_10m','m/s'),
('direction_10m','degrees')
]
}
data_tph = {
'air_temperature':('2020-12-17 11:05',34.33),
'relative_humidity':('2020-12-17 11:05',15),
'barometric_pressure':('2020-12-17 11:05',800.55)
}
data_wind = {
'speed':('2020-12-17 11:10',22),
'direction':('2020-12-17 11:10',150),
'speed_2m':('2020-12-17 11:10',22),
'direction_2m':('2020-12-17 11:10',150),
'speed_10m':('2020-12-17 11:10',22),
'direction_10m':('2020-12-17 11:10',150)
}
data = {'tph1':data_tph,'wsd1':data_wind}
runflag = True
def __init__(self):
#Set up socket and select
self.udpsocket = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
self.udpsocket.bind(('localhost',62000))
self.udpsocket.setblocking(False)
self.sel = selectors.DefaultSelector()
self.sel.register(self.udpsocket,selectors.EVENT_READ)
self.listen = True #A flag to determine when to stop
def listenUDP(self):
'''
Listen on UDP socket
'''
while self.runflag:
try:
#Check for UDP comm
event = self.sel.select(timeout=1)
if event:
self.readUDP()
except KeyboardInterrupt:
self.udpsocket.close()
break
def readUDP(self):
'''
Read message, respond, add any messages
to the message queue
Message should be JSON
{'command': <cmd> , 'arg': <optional argument>}
'''
msgraw, address = self.udpsocket.recvfrom(1024)
#Decode message
msg = json.loads(msgraw)
#Respond
response = self.getResponse(msg['command'],msg.get('arg',None))
#Send a response
self.udpsocket.sendto(json.dumps(response).encode('utf-8'),address)
def getResponse(self,cmd,arg):
'''
Generate response for a particular command and argument
Commands
- status
- getdata
'''
if cmd == 'status':
response = {
'status':'running',
'sensors':self.sensors,
}
elif cmd == 'getsensor':
response = {
'measurements':self.measurements[arg]
}
elif cmd == 'getdata':
response = self.data[arg]
elif cmd == 'shutdown':
response = {'response':'OK'}
self.runflag = False
else:
response = {'error':'Invalid Command'}
return response
if __name__ == "__main__":
'''
Run the simulator
'''
#Instantiate API
api = simAPI()
#Start listening
api.listenUDP()
print('Shutting down')
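# Example client (illustrative), run from another process while the simulator listens:
#   import socket, json
#   s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   s.sendto(json.dumps({'command': 'getdata', 'arg': 'tph1'}).encode('utf-8'), ('localhost', 62000))
#   reply, _ = s.recvfrom(1024)
#   print(json.loads(reply))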
| [
"selectors.DefaultSelector",
"json.dumps",
"json.loads",
"socket.socket"
] | [((1281, 1329), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (1294, 1329), False, 'import socket\n'), ((1439, 1466), 'selectors.DefaultSelector', 'selectors.DefaultSelector', ([], {}), '()\n', (1464, 1466), False, 'import selectors\n'), ((2269, 2287), 'json.loads', 'json.loads', (['msgraw'], {}), '(msgraw)\n', (2279, 2287), False, 'import json\n'), ((2446, 2466), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (2456, 2466), False, 'import json\n')] |
from google.cloud import vision
def detect_text(image):
"""Detects text in the file located in Google Cloud Storage or on the Web.
"""
client = vision.ImageAnnotatorClient()
response = client.text_detection(image=image)
texts = response.text_annotations
print('Texts:')
for text in texts:
print('\n"{}"'.format(text.description))
vertices = (['({},{})'.format(vertex.x, vertex.y)
for vertex in text.bounding_poly.vertices])
print('bounds: {}'.format(','.join(vertices)))
if response.error.message:
raise Exception(
'{}\nFor more info on error messages, check: '
'https://cloud.google.com/apis/design/errors'.format(
response.error.message))
if __name__ == "__main__":
import io
import os
# Imports the Google Cloud client library
from google.cloud import vision
from google.cloud.vision import types
# Instantiates a client
client = vision.ImageAnnotatorClient()
# The name of the image file to annotate
file_name = os.path.abspath('testimg.jpg')
# Loads the image into memory
with io.open(file_name, 'rb') as image_file:
content = image_file.read()
image = types.Image(content=content)
# Performs label detection on the image file
response = client.label_detection(image=image)
labels = response.label_annotations
detect_text(image)
print('Labels:')
for label in labels:
print(label.description) | [
"os.path.abspath",
"google.cloud.vision.ImageAnnotatorClient",
"google.cloud.vision.types.Image",
"io.open"
] | [((157, 186), 'google.cloud.vision.ImageAnnotatorClient', 'vision.ImageAnnotatorClient', ([], {}), '()\n', (184, 186), False, 'from google.cloud import vision\n'), ((999, 1028), 'google.cloud.vision.ImageAnnotatorClient', 'vision.ImageAnnotatorClient', ([], {}), '()\n', (1026, 1028), False, 'from google.cloud import vision\n'), ((1091, 1121), 'os.path.abspath', 'os.path.abspath', (['"""testimg.jpg"""'], {}), "('testimg.jpg')\n", (1106, 1121), False, 'import os\n'), ((1255, 1283), 'google.cloud.vision.types.Image', 'types.Image', ([], {'content': 'content'}), '(content=content)\n', (1266, 1283), False, 'from google.cloud.vision import types\n'), ((1166, 1190), 'io.open', 'io.open', (['file_name', '"""rb"""'], {}), "(file_name, 'rb')\n", (1173, 1190), False, 'import io\n')] |
"""
Return config on servers to start for bokeh
See https://jupyter-server-proxy.readthedocs.io/en/latest/server-process.html
for more information.
"""
import os
import sys
serverfile = os.path.join(os.path.dirname(__file__), "server.py")
def launch_server():
return {"command": [sys.executable, serverfile, "{port}"], "timeout": 20}
| [
"os.path.dirname"
] | [((201, 226), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (216, 226), False, 'import os\n')] |
from tkinter import *
from PIL import ImageTk, Image
import sqlite3
def billing():
pass
def inventory():
pass
def analytics():
pass
root=Tk()
# #setting tkinter window size
# width= root.winfo_screenwidth()
# height= root.winfo_screenheight()
# root.geometry("%dx%d" % (width, height))
# root.attributes('-zoomed', True)
# root.geometry("1366x768")
bg = ImageTk.PhotoImage(Image.open("bg.jpg"))
label = Label(root,image=bg)
label.place(x=-10, y=-10)
# root.configure(background='green')
root.wm_state('zoomed')
# root.attributes('-fullscreen', True)
root.title("myStore")
button1=Button(root,text="Billing",padx=2,pady=2,width=30,height=3,command=billing,bg='white',font=10).pack(pady=(140,10))
button2=Button(root,text="Inventory",width=30,height=3,padx=2,pady=2,command=inventory,bg='white',font=10).pack(pady=10)
button2=Button(root,text="Analytics",width=30,height=3,padx=2,pady=2,command=analytics,bg='white',font=10).pack(pady=10)
root=mainloop() | [
"PIL.Image.open"
] | [((412, 432), 'PIL.Image.open', 'Image.open', (['"""bg.jpg"""'], {}), "('bg.jpg')\n", (422, 432), False, 'from PIL import ImageTk, Image\n')] |
from functools import reduce
from operator import mul
def max_product(lst: list, n_largest_elements: int) -> int:
largest_elements = sorted(lst)[-n_largest_elements:]
return reduce(mul, largest_elements)
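# Example (illustrative): the two largest elements of [3, 1, 7, 5] are 7 and 5, so
# max_product([3, 1, 7, 5], 2) == 35.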
| [
"functools.reduce"
] | [((192, 221), 'functools.reduce', 'reduce', (['mul', 'largest_elements'], {}), '(mul, largest_elements)\n', (198, 221), False, 'from functools import reduce\n')] |
import os
import urllib3
import shutil
import subprocess
import xml.etree.ElementTree as et
from distutils.version import StrictVersion
from typing import Optional
THUNDRA_AGENT_METADATA: str = 'https://repo.thundra.io/service/local/repositories/thundra-releases/content/io/thundra/agent/thundra-agent-bootstrap/maven-metadata.xml'
MAVEN_INSTRUMENTATION_METADATA: str = 'https://repo1.maven.org/maven2/io/thundra/plugin/thundra-agent-maven-test-instrumentation/maven-metadata.xml'
def get_latest_version(repository: str, version: str = None) -> str:
http = urllib3.PoolManager()
response = http.request('GET', repository)
xml = et.fromstring(response.data)
available_versions = xml.findall('./versioning/versions/version')
available_versions_set = set(map(lambda x: x.text, available_versions))
latest_version = xml.find('./versioning/latest').text
if (version and (version in available_versions_set)):
return version
else:
return latest_version
def instrument(instrumenter_version: str = None, agent_version: str = None):
agent_path: str
maven_instrumenter_path: str
http = urllib3.PoolManager()
maven_instrumenter_version: Optional[str] = get_latest_version(
MAVEN_INSTRUMENTATION_METADATA,
instrumenter_version,
)
if not maven_instrumenter_version:
print(
"> Couldn't find an available version for Thundra Maven Instrumentation script")
print("> Instrumentation failed!")
return
maven_instrumenter_url = f'https://repo1.maven.org/maven2/io/thundra/plugin/thundra-agent-maven-test-instrumentation/{maven_instrumenter_version}/thundra-agent-maven-test-instrumentation-{maven_instrumenter_version}.jar'
thundra_agent_version: Optional[str] = get_latest_version(
THUNDRA_AGENT_METADATA,
agent_version,
)
if not thundra_agent_version:
print("> Couldn't find an available version for Thundra Agent")
print("> Instrumentation failed!")
return
thundra_agent_url = f'https://repo.thundra.io/service/local/repositories/thundra-releases/content/io/thundra/agent/thundra-agent-bootstrap/{thundra_agent_version}/thundra-agent-bootstrap-{thundra_agent_version}.jar'
if os.environ.get('LOCAL_AGENT_PATH'):
agent_path = os.environ.get('LOCAL_AGENT_PATH')
print(f'> Using the local agent at {agent_path}')
else:
print("> Downloading the agent...")
agent_path = '/tmp/thundra-agent-bootstrap.jar'
with open(agent_path, 'wb') as out:
r = http.request('GET', thundra_agent_url, preload_content=False)
shutil.copyfileobj(r, out)
print(f'> Successfully downloaded the agent to {agent_path}')
print("> Downloading the maven instrumentater")
maven_instrumenter_path = f'/tmp/thundra-agent-maven-test-instrumentation-{maven_instrumenter_version}.jar'
with open(maven_instrumenter_path, 'wb') as out:
r = http.request('GET', maven_instrumenter_url, preload_content=False)
shutil.copyfileobj(r, out)
print(f'> Successfully downloaded the agent to {maven_instrumenter_path}')
print("> Updating pom.xml...")
poms = subprocess.run(
['sh', '-c', f'find . -name "pom.xml" -exec echo \'{{}}\' +'], capture_output=True)
if poms.stdout:
subprocess.call(['java', '-jar', maven_instrumenter_path,
agent_path, str(poms.stdout.strip(), 'utf-8')])
print("> Update to pom.xml is done")
else:
print("> Couldn't find any pom.xml files. Exiting the instrumentation step.")
api_key_env_name = os.environ.get("THUNDRA_APIKEY_ENV_NAME")
if not os.environ.get(api_key_env_name):
print('> Thundra API Key is not present. Exiting early...')
print('> Instrumentation failed.')
exit(0)
project_id_env_name = os.environ.get("THUNDRA_AGENT_TEST_PROJECT_ID_ENV_NAME")
if not os.environ.get(project_id_env_name):
print('> Thundra Project ID is not present. Exiting early...')
print('> Instrumentation failed.')
exit(0)
if os.environ.get('THUNDRA_AGENT_VERSION') and (StrictVersion(os.environ.get('THUNDRA_AGENT_VERSION') < StrictVersion("2.7.0"))):
print(f'Thundra Java Agent prior to 2.7.0 doesn\'t work with this action')
exit(0)
def run():
print(f'> [Thundra] Initializing the Thundra Action...')
print(f'> Instrumenting the application')
instrument(os.environ.get('THUNDRA_INSTRUMENTER_VERSION'),
os.environ.get('THUNDRA_AGENT_VERSION'))
run()
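# Environment variables read above (summary):
#   THUNDRA_APIKEY_ENV_NAME / THUNDRA_AGENT_TEST_PROJECT_ID_ENV_NAME - names of the env vars
#       holding the API key and project id; both must resolve to non-empty values,
#   THUNDRA_INSTRUMENTER_VERSION / THUNDRA_AGENT_VERSION - optional version pins,
#   LOCAL_AGENT_PATH - skips the agent download and uses a local bootstrap jar.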
| [
"shutil.copyfileobj",
"distutils.version.StrictVersion",
"subprocess.run",
"os.environ.get",
"urllib3.PoolManager",
"xml.etree.ElementTree.fromstring"
] | [((3637, 3678), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_APIKEY_ENV_NAME"""'], {}), "('THUNDRA_APIKEY_ENV_NAME')\n", (3651, 3678), False, 'import os\n'), ((3859, 3915), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_AGENT_TEST_PROJECT_ID_ENV_NAME"""'], {}), "('THUNDRA_AGENT_TEST_PROJECT_ID_ENV_NAME')\n", (3873, 3915), False, 'import os\n'), ((565, 586), 'urllib3.PoolManager', 'urllib3.PoolManager', ([], {}), '()\n', (584, 586), False, 'import urllib3\n'), ((644, 672), 'xml.etree.ElementTree.fromstring', 'et.fromstring', (['response.data'], {}), '(response.data)\n', (657, 672), True, 'import xml.etree.ElementTree as et\n'), ((1144, 1165), 'urllib3.PoolManager', 'urllib3.PoolManager', ([], {}), '()\n', (1163, 1165), False, 'import urllib3\n'), ((2258, 2292), 'os.environ.get', 'os.environ.get', (['"""LOCAL_AGENT_PATH"""'], {}), "('LOCAL_AGENT_PATH')\n", (2272, 2292), False, 'import os\n'), ((3207, 3310), 'subprocess.run', 'subprocess.run', (['[\'sh\', \'-c\', f\'find . -name "pom.xml" -exec echo \\\'{{}}\\\' +\']'], {'capture_output': '(True)'}), '([\'sh\', \'-c\', f\'find . -name "pom.xml" -exec echo \\\'{{}}\\\' +\'\n ], capture_output=True)\n', (3221, 3310), False, 'import subprocess\n'), ((3686, 3718), 'os.environ.get', 'os.environ.get', (['api_key_env_name'], {}), '(api_key_env_name)\n', (3700, 3718), False, 'import os\n'), ((3923, 3958), 'os.environ.get', 'os.environ.get', (['project_id_env_name'], {}), '(project_id_env_name)\n', (3937, 3958), False, 'import os\n'), ((4083, 4122), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_AGENT_VERSION"""'], {}), "('THUNDRA_AGENT_VERSION')\n", (4097, 4122), False, 'import os\n'), ((2315, 2349), 'os.environ.get', 'os.environ.get', (['"""LOCAL_AGENT_PATH"""'], {}), "('LOCAL_AGENT_PATH')\n", (2329, 2349), False, 'import os\n'), ((3054, 3080), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r', 'out'], {}), '(r, out)\n', (3072, 3080), False, 'import shutil\n'), ((4436, 4482), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_INSTRUMENTER_VERSION"""'], {}), "('THUNDRA_INSTRUMENTER_VERSION')\n", (4450, 4482), False, 'import os\n'), ((4499, 4538), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_AGENT_VERSION"""'], {}), "('THUNDRA_AGENT_VERSION')\n", (4513, 4538), False, 'import os\n'), ((2652, 2678), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r', 'out'], {}), '(r, out)\n', (2670, 2678), False, 'import shutil\n'), ((4142, 4181), 'os.environ.get', 'os.environ.get', (['"""THUNDRA_AGENT_VERSION"""'], {}), "('THUNDRA_AGENT_VERSION')\n", (4156, 4181), False, 'import os\n'), ((4184, 4206), 'distutils.version.StrictVersion', 'StrictVersion', (['"""2.7.0"""'], {}), "('2.7.0')\n", (4197, 4206), False, 'from distutils.version import StrictVersion\n')] |
import pytest
from simobility.routers import LinearRouter
from simobility.core import Position
from simobility.core.clock import Clock
def test_router2d():
speed_kmph = 25
nyc_pos = Position(-73.935242, 40.730610)
nyc_pos_shift = Position(-73.935, 40.7306)
    # minutes
clock = Clock(time_step=1, time_unit="m")
router = LinearRouter(speed=speed_kmph, clock=clock)
assert (router.estimate_duration(nyc_pos, nyc_pos) == 0)
route = router.calculate_route(nyc_pos, nyc_pos)
assert (route.duration == 0)
assert (route.distance == 0)
assert (len(route.coordinates) == 1)
assert (route.approximate_position(clock.clock_time) == nyc_pos)
assert (route.approximate_position(clock.clock_time + 1) == nyc_pos)
assert (router.estimate_duration(nyc_pos, nyc_pos_shift) == 1)
for i in range(10):
clock.tick()
assert (router.estimate_duration(nyc_pos, nyc_pos) == 0)
# seconds
clock = Clock(time_step=1, time_unit="s")
router = LinearRouter(speed=speed_kmph, clock=clock)
assert (router.estimate_duration(nyc_pos, nyc_pos_shift) == 3)
route = router.calculate_route(nyc_pos, nyc_pos_shift)
assert route.duration == 3
assert (pytest.approx(route.distance, 3) == 0.02)
assert len(route.coordinates) == 4
assert route.approximate_position(clock.clock_time) == nyc_pos
assert route.approximate_position(clock.clock_time + 1) != nyc_pos
assert (route.approximate_position(clock.clock_time + 1) == route.coordinates[1])
assert (route.approximate_position(clock.clock_time + 3) == nyc_pos_shift)
assert (
route.approximate_position(clock.clock_time + 3) == route.coordinates[-1]
)
def test_router2d_2():
speed_kmph = 17
nyc_pos = Position(-73.935242, 40.730610)
nyc_pos_shift = Position(-73.935, 40.730610)
clock = Clock(time_step=1, time_unit="s")
router = LinearRouter(speed=speed_kmph, clock=clock)
assert (router.estimate_duration(nyc_pos, nyc_pos_shift) == 5)
route = router.calculate_route(nyc_pos, nyc_pos_shift)
assert (len(route.coordinates) == 6)
for p in route.coordinates:
assert (p.lat == nyc_pos.lat)
def test_map_match():
clock = Clock(time_step=1, time_unit="m")
router = LinearRouter(speed=12, clock=clock)
pos1 = Position(-0.39376, 39.5145)
pos2 = Position(-0.38874, 39.503)
for pos in [pos1, pos2]:
pos_m = router.map_match(pos)
assert pos == pos_m
assert pos.id != pos_m.id
| [
"simobility.core.Position",
"simobility.routers.LinearRouter",
"pytest.approx",
"simobility.core.clock.Clock"
] | [((192, 222), 'simobility.core.Position', 'Position', (['(-73.935242)', '(40.73061)'], {}), '(-73.935242, 40.73061)\n', (200, 222), False, 'from simobility.core import Position\n'), ((244, 270), 'simobility.core.Position', 'Position', (['(-73.935)', '(40.7306)'], {}), '(-73.935, 40.7306)\n', (252, 270), False, 'from simobility.core import Position\n'), ((298, 331), 'simobility.core.clock.Clock', 'Clock', ([], {'time_step': '(1)', 'time_unit': '"""m"""'}), "(time_step=1, time_unit='m')\n", (303, 331), False, 'from simobility.core.clock import Clock\n'), ((345, 388), 'simobility.routers.LinearRouter', 'LinearRouter', ([], {'speed': 'speed_kmph', 'clock': 'clock'}), '(speed=speed_kmph, clock=clock)\n', (357, 388), False, 'from simobility.routers import LinearRouter\n'), ((958, 991), 'simobility.core.clock.Clock', 'Clock', ([], {'time_step': '(1)', 'time_unit': '"""s"""'}), "(time_step=1, time_unit='s')\n", (963, 991), False, 'from simobility.core.clock import Clock\n'), ((1005, 1048), 'simobility.routers.LinearRouter', 'LinearRouter', ([], {'speed': 'speed_kmph', 'clock': 'clock'}), '(speed=speed_kmph, clock=clock)\n', (1017, 1048), False, 'from simobility.routers import LinearRouter\n'), ((1766, 1796), 'simobility.core.Position', 'Position', (['(-73.935242)', '(40.73061)'], {}), '(-73.935242, 40.73061)\n', (1774, 1796), False, 'from simobility.core import Position\n'), ((1818, 1845), 'simobility.core.Position', 'Position', (['(-73.935)', '(40.73061)'], {}), '(-73.935, 40.73061)\n', (1826, 1845), False, 'from simobility.core import Position\n'), ((1860, 1893), 'simobility.core.clock.Clock', 'Clock', ([], {'time_step': '(1)', 'time_unit': '"""s"""'}), "(time_step=1, time_unit='s')\n", (1865, 1893), False, 'from simobility.core.clock import Clock\n'), ((1907, 1950), 'simobility.routers.LinearRouter', 'LinearRouter', ([], {'speed': 'speed_kmph', 'clock': 'clock'}), '(speed=speed_kmph, clock=clock)\n', (1919, 1950), False, 'from simobility.routers import LinearRouter\n'), ((2227, 2260), 'simobility.core.clock.Clock', 'Clock', ([], {'time_step': '(1)', 'time_unit': '"""m"""'}), "(time_step=1, time_unit='m')\n", (2232, 2260), False, 'from simobility.core.clock import Clock\n'), ((2274, 2309), 'simobility.routers.LinearRouter', 'LinearRouter', ([], {'speed': '(12)', 'clock': 'clock'}), '(speed=12, clock=clock)\n', (2286, 2309), False, 'from simobility.routers import LinearRouter\n'), ((2322, 2349), 'simobility.core.Position', 'Position', (['(-0.39376)', '(39.5145)'], {}), '(-0.39376, 39.5145)\n', (2330, 2349), False, 'from simobility.core import Position\n'), ((2361, 2387), 'simobility.core.Position', 'Position', (['(-0.38874)', '(39.503)'], {}), '(-0.38874, 39.503)\n', (2369, 2387), False, 'from simobility.core import Position\n'), ((1219, 1251), 'pytest.approx', 'pytest.approx', (['route.distance', '(3)'], {}), '(route.distance, 3)\n', (1232, 1251), False, 'import pytest\n')] |
import logging
import random
from typing import Literal
import discord
from dislash import * # pylint:disable=unused-wildcard-import
from redbot.core import commands
from redbot.core.bot import Red
from redbot.core.config import Config
log = logging.getLogger("red.yamicogs.rps")
RequestType = Literal["discord_deleted_user", "owner", "user", "user_strict"]
ICONS_RPS = {
"rock": "\U0001faa8",
"paper": "\N{NEWSPAPER}",
"scissors": "\N{BLACK SCISSORS}\N{VARIATION SELECTOR-16}",
}
ICONS_RPSLS = {
"rock": "\U0001faa8",
"paper": "\N{NEWSPAPER}",
"scissors": "\N{BLACK SCISSORS}\N{VARIATION SELECTOR-16}",
"lizard": "\N{LIZARD}",
"spock": "\N{RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS}",
}
class RPS(commands.Cog):
"""
Rock, Paper, Scissors (Lizard, Spock)
More detailed docs: <https://cogs.yamikaitou.dev/rps.html>
"""
def __init__(self, bot: Red) -> None:
self.bot = bot
self.config = Config.get_conf(
self,
identifier=582650109,
force_registration=True,
)
@commands.command(name="rpsrules", aliases=["rpslsrules"])
async def _rps_rules(self, ctx):
"""Rules of Rock, Paper, Scissors (Lizard, Spock)"""
embed = discord.Embed()
embed.title = "Rock, Paper, Scissors (Lizard, Spock)"
embed.color = await ctx.embed_color()
embed.description = (
f"A game of skill (chance).\n"
f"Simply select your choice and see if you can defeat the computer\n\n"
f"2 versions are included, the rules are below\n"
)
embed.add_field(
name="Rock, Paper, Scissors",
inline=False,
value=(
f"Rock {ICONS_RPS['rock']} beats Scissors {ICONS_RPS['scissors']}\n"
f"Scissors {ICONS_RPS['scissors']} beats Paper {ICONS_RPS['paper']}\n"
f"Paper {ICONS_RPS['paper']} beats Rock {ICONS_RPS['rock']}\n\n"
f"Play with `{ctx.prefix}rps`\n"
),
)
embed.add_field(
name="Rock, Paper, Scissors, Lizard, Spock",
inline=False,
value=(
f"Rock {ICONS_RPSLS['rock']} beats Scissors {ICONS_RPSLS['scissors']} and Lizard {ICONS_RPSLS['lizard']}\n"
f"Paper {ICONS_RPSLS['paper']} beats Rock {ICONS_RPSLS['rock']} and Spock {ICONS_RPSLS['spock']}\n"
f"Scissors {ICONS_RPSLS['scissors']} beats Paper {ICONS_RPSLS['paper']} and Lizard {ICONS_RPSLS['lizard']}\n"
f"Lizard {ICONS_RPSLS['lizard']} beats Paper {ICONS_RPSLS['paper']} and Spock {ICONS_RPSLS['spock']}\n"
f"Spock {ICONS_RPSLS['spock']} beats Rock {ICONS_RPSLS['rock']} and Scissors {ICONS_RPSLS['scissors']}\n\n"
f"Play with `{ctx.prefix}rpsls`\n"
),
)
await ctx.send(embed=embed)
@commands.command(name="rps")
async def _rps(self, ctx):
"""Play a game of Rock, Paper, Scissors"""
row_of_buttons = [
ActionRow(
Button(
style=ButtonStyle.blurple,
label="Rock",
emoji=discord.PartialEmoji(name="\U0001faa8"),
custom_id="rock",
),
Button(
style=ButtonStyle.blurple,
label="Paper",
emoji=discord.PartialEmoji(name="\N{NEWSPAPER}"),
custom_id="paper",
),
Button(
style=ButtonStyle.blurple,
label="Scissors",
emoji=discord.PartialEmoji(name="\N{BLACK SCISSORS}\N{VARIATION SELECTOR-16}"),
custom_id="scissors",
),
),
ActionRow(
Button(
style=ButtonStyle.red,
label="Cancel",
custom_id="cancel",
),
Button(
style=ButtonStyle.gray,
label="Rules",
emoji=discord.PartialEmoji(name="\N{MEMO}"),
custom_id="rules",
),
),
]
msg = await ctx.reply(
"Let's play!",
components=row_of_buttons,
mention_author=False,
)
computer = random.choice(["rock", "paper", "scissors"])
on_click = msg.create_click_listener(timeout=60)
dead = False
def is_not_author(inter):
# Note that this check must take only 1 arg
return inter.author != ctx.author
@on_click.matching_condition(is_not_author, cancel_others=True)
async def on_wrong_user(inter):
# Reply with a hidden message
await inter.reply(
f"Sorry, this is not your game to play, try launching your own with `{ctx.prefix}rps`",
ephemeral=True,
)
@on_click.matching_id("rock", cancel_others=True)
async def on_rock(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPS['rock']} - {ICONS_RPS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPS['rock']} - {ICONS_RPS[computer]} Me",
components=None,
)
else:
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPS['rock']} - {ICONS_RPS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("paper", cancel_others=True)
async def on_paper(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPS['paper']} - {ICONS_RPS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPS['paper']} - {ICONS_RPS[computer]} Me",
components=None,
)
else:
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPS['paper']} - {ICONS_RPS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("scissors", cancel_others=True)
async def on_scissors(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPS['scissors']} - {ICONS_RPS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPS['scissors']} - {ICONS_RPS[computer]} Me",
components=None,
)
else:
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPS['scissors']} - {ICONS_RPS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("rules", cancel_others=False, reset_timeout=True)
async def on_rules(inter):
embed = discord.Embed()
embed.title = "Rock, Paper, Scissors"
embed.color = await ctx.embed_color()
embed.description = (
f"A game of skill (chance).\n"
f"Simply select your choice and see if you can defeat the computer\n\n\n"
f"Rock {ICONS_RPS['rock']} beats Scissors {ICONS_RPS['scissors']}\n"
f"Paper {ICONS_RPS['paper']} beats Rock {ICONS_RPS['rock']}\n"
f"Scissors {ICONS_RPS['scissors']} beats Paper {ICONS_RPS['paper']}\n"
)
await inter.reply(embed=embed, ephemeral=True)
@on_click.matching_id("cancel", cancel_others=False)
async def on_cancel(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
await msg.edit(content="Very well, maybe later", components=None)
@on_click.timeout
async def on_timeout():
            nonlocal dead
if not dead:
await msg.edit(content="Okay then, maybe later", components=None)
@commands.command(name="rpsls")
async def _rpsls(self, ctx):
"""Play a game of Rock, Paper, Scissors, Lizard, Spock"""
row_of_buttons = [
ActionRow(
Button(
style=ButtonStyle.blurple,
label="Rock",
emoji=discord.PartialEmoji(name="\U0001faa8"),
custom_id="rock",
),
Button(
style=ButtonStyle.blurple,
label="Paper",
emoji=discord.PartialEmoji(name="\N{NEWSPAPER}"),
custom_id="paper",
),
Button(
style=ButtonStyle.blurple,
label="Scissors",
emoji=discord.PartialEmoji(name="\N{BLACK SCISSORS}\N{VARIATION SELECTOR-16}"),
custom_id="scissors",
),
Button(
style=ButtonStyle.blurple,
label="Lizard",
emoji=discord.PartialEmoji(name="\N{LIZARD}"),
custom_id="lizard",
),
Button(
style=ButtonStyle.blurple,
label="Spock",
emoji=discord.PartialEmoji(
name="\N{RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS}"
),
custom_id="spock",
),
),
ActionRow(
Button(
style=ButtonStyle.red,
label="Cancel",
custom_id="cancel",
),
Button(
style=ButtonStyle.gray,
label="Rules",
emoji=discord.PartialEmoji(name="\N{MEMO}"),
custom_id="rules",
),
),
]
msg = await ctx.reply(
"Let's play!",
components=row_of_buttons,
mention_author=False,
)
computer = random.choice(["rock", "paper", "scissors", "lizard", "spock"])
on_click = msg.create_click_listener(timeout=60)
dead = False
def is_not_author(inter):
# Note that this check must take only 1 arg
return inter.author != ctx.author
@on_click.matching_condition(is_not_author, cancel_others=True)
async def on_wrong_user(inter):
# Reply with a hidden message
await inter.reply(
f"Sorry, this is not your game to play, try launching your own with `{ctx.prefix}rpsls`",
ephemeral=True,
)
@on_click.matching_id("rock", cancel_others=True)
async def on_rock(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPSLS['rock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['rock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "scissors":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['rock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "lizard":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['rock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
else: # computer == 'spock'
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['rock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("paper", cancel_others=True)
async def on_paper(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['paper']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPSLS['paper']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "scissors":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['paper']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "lizard":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['paper']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
else: # computer == 'spock'
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['paper']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("scissors", cancel_others=True)
async def on_scissors(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['scissors']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['scissors']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "scissors":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPSLS['scissors']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "lizard":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['scissors']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
else: # computer == 'spock'
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['scissors']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("lizard", cancel_others=True)
async def on_lizard(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['lizard']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['lizard']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "scissors":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['lizard']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "lizard":
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPSLS['lizard']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
else: # computer == 'spock'
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['lizard']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("spock", cancel_others=True)
async def on_spock(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
if computer == "rock":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['spock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "paper":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['spock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "scissors":
await msg.edit(
content=f"Congrats, you win!\n\nYou {ICONS_RPSLS['spock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
elif computer == "lizard":
await msg.edit(
content=f"Look at that, I win!\n\nYou {ICONS_RPSLS['spock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
else: # computer == 'spock'
await msg.edit(
content=f"Well, we must be mind-readers!\n\nYou {ICONS_RPSLS['spock']} - {ICONS_RPSLS[computer]} Me",
components=None,
)
            nonlocal dead
dead = True
@on_click.matching_id("rules", cancel_others=False, reset_timeout=True)
async def on_rules(inter):
embed = discord.Embed()
embed.title = "Rock, Paper, Scissors"
embed.color = await ctx.embed_color()
embed.description = (
f"A game of skill (chance).\n"
f"Simply select your choice and see if you can defeat the computer\n\n\n"
f"Rock {ICONS_RPSLS['rock']} beats Scissors {ICONS_RPSLS['scissors']} and Lizard {ICONS_RPSLS['lizard']}\n"
f"Paper {ICONS_RPSLS['paper']} beats Rock {ICONS_RPSLS['rock']} and Spock {ICONS_RPSLS['spock']}\n"
f"Scissors {ICONS_RPSLS['scissors']} beats Paper {ICONS_RPSLS['paper']} and Lizard {ICONS_RPSLS['lizard']}\n"
f"Lizard {ICONS_RPSLS['lizard']} beats Paper {ICONS_RPSLS['paper']} and Spock {ICONS_RPSLS['spock']}\n"
f"Spock {ICONS_RPSLS['spock']} beats Rock {ICONS_RPSLS['rock']} and Scissors {ICONS_RPSLS['scissors']}\n"
)
await inter.reply(embed=embed, ephemeral=True)
@on_click.matching_id("cancel", cancel_others=False)
async def on_cancel(inter):
await inter.reply(type=ResponseType.DeferredUpdateMessage)
await msg.edit(content="Very well, maybe later", components=None)
@on_click.timeout
async def on_timeout():
            nonlocal dead
if not dead:
await msg.edit(content="Okay then, maybe later", components=None)
async def red_delete_data_for_user(self, *, requester: RequestType, user_id: int) -> None:
# this cog does not store any user data
pass
| [
"logging.getLogger",
"random.choice",
"discord.PartialEmoji",
"redbot.core.commands.command",
"redbot.core.config.Config.get_conf",
"discord.Embed"
] | [((245, 282), 'logging.getLogger', 'logging.getLogger', (['"""red.yamicogs.rps"""'], {}), "('red.yamicogs.rps')\n", (262, 282), False, 'import logging\n'), ((1096, 1153), 'redbot.core.commands.command', 'commands.command', ([], {'name': '"""rpsrules"""', 'aliases': "['rpslsrules']"}), "(name='rpsrules', aliases=['rpslsrules'])\n", (1112, 1153), False, 'from redbot.core import commands\n'), ((2919, 2947), 'redbot.core.commands.command', 'commands.command', ([], {'name': '"""rps"""'}), "(name='rps')\n", (2935, 2947), False, 'from redbot.core import commands\n'), ((8915, 8945), 'redbot.core.commands.command', 'commands.command', ([], {'name': '"""rpsls"""'}), "(name='rpsls')\n", (8931, 8945), False, 'from redbot.core import commands\n'), ((974, 1042), 'redbot.core.config.Config.get_conf', 'Config.get_conf', (['self'], {'identifier': '(582650109)', 'force_registration': '(True)'}), '(self, identifier=582650109, force_registration=True)\n', (989, 1042), False, 'from redbot.core.config import Config\n'), ((1269, 1284), 'discord.Embed', 'discord.Embed', ([], {}), '()\n', (1282, 1284), False, 'import discord\n'), ((4424, 4468), 'random.choice', 'random.choice', (["['rock', 'paper', 'scissors']"], {}), "(['rock', 'paper', 'scissors'])\n", (4437, 4468), False, 'import random\n'), ((10996, 11059), 'random.choice', 'random.choice', (["['rock', 'paper', 'scissors', 'lizard', 'spock']"], {}), "(['rock', 'paper', 'scissors', 'lizard', 'spock'])\n", (11009, 11059), False, 'import random\n'), ((7861, 7876), 'discord.Embed', 'discord.Embed', ([], {}), '()\n', (7874, 7876), False, 'import discord\n'), ((18816, 18831), 'discord.Embed', 'discord.Embed', ([], {}), '()\n', (18829, 18831), False, 'import discord\n'), ((3211, 3241), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""🪨"""'}), "(name='🪨')\n", (3231, 3241), False, 'import discord\n'), ((3441, 3471), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""📰"""'}), "(name='📰')\n", (3461, 3471), False, 'import discord\n'), ((3678, 3709), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""✂️"""'}), "(name='✂️')\n", (3698, 3709), False, 'import discord\n'), ((4142, 4172), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""📝"""'}), "(name='📝')\n", (4162, 4172), False, 'import discord\n'), ((9226, 9256), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""🪨"""'}), "(name='🪨')\n", (9246, 9256), False, 'import discord\n'), ((9456, 9486), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""📰"""'}), "(name='📰')\n", (9476, 9486), False, 'import discord\n'), ((9693, 9724), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""✂️"""'}), "(name='✂️')\n", (9713, 9724), False, 'import discord\n'), ((9961, 9991), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""🦎"""'}), "(name='🦎')\n", (9981, 9991), False, 'import discord\n'), ((10193, 10223), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""🖖"""'}), "(name='🖖')\n", (10213, 10223), False, 'import discord\n'), ((10714, 10744), 'discord.PartialEmoji', 'discord.PartialEmoji', ([], {'name': '"""📝"""'}), "(name='📝')\n", (10734, 10744), False, 'import discord\n')] |
from Animations import Display
from Animations.animation import Animation
from Animations.default_colours import *
import time
import random
import math
BRIGHTNESS_CUTOFF = 0.1
class Chaser:
def __init__(self, display: Display, led_num, max_brightness, colour, trailer):
self.display: Display = display
self.brightness = max_brightness
self.colour: Colour = colour
self.led_num = led_num
self.trailer: Chaser = trailer
def set_brightness(self, new_brightness):
# brightness is a fraction of 1
self.brightness = new_brightness
def divide(self, division_amount):
        # Guard with truthiness so both None and the 0 placeholder used for
        # chasers without a trailer are handled safely.
        if self.trailer:
            self.trailer.divide(division_amount)
            if self.trailer.get_brightness() < BRIGHTNESS_CUTOFF:
                self.trailer.set_colour((0, 0, 0))
                self.trailer.push_to_led()
                # Clear the reference instead of deleting the attribute, so a
                # later push_to_led() on this chaser does not raise AttributeError.
                self.trailer = None
self.brightness /= division_amount
def set_colour(self, new_colour):
self.colour = list(new_colour)
def set_trailer(self, trailer):
self.trailer = trailer
def push_to_led(self):
for i in range(3):
self.colour[i] = math.floor(self.colour[i] * self.brightness)
        if self.trailer:
self.trailer.push_to_led()
        # Only draw when the led index is actually inside the strip.
        if 0 <= self.led_num < self.display.get_num_pixels():
self.display.set_pixel_colour(self.led_num, Colour(self.colour[1],
self.colour[0], self.colour[2]))
def get_led_num(self):
return self.led_num
def get_colour(self):
return self.colour
def get_brightness(self):
return self.brightness
class ChaseLight(Animation):
def __init__(self, display: Display, running_time: int, sleep_time: float, chaser_chance: float, regular: bool,
colours, reverse: bool):
super().__init__(display)
self.running_time = running_time
self.sleep_time = sleep_time
self.chaser_chance = chaser_chance
self.regular = regular
self.colours = colours
self.reverse = reverse
def run(self):
# Currently overlapping flashes, need to change
start_time = time.time()
if self.colours == "random":
colour_list = all_colours
else:
colour_list = self.colours
if self.reverse:
next = -1
insert_pos = self.display.get_num_pixels()
else:
next = 1
insert_pos = 0
# chaser setup
chaser_list = [Chaser(self.display, insert_pos,
1, random.choice(colour_list), 0)]
num_steps_gone = 0
while (time.time() - start_time) < self.running_time:
if num_steps_gone > 10:
if self.regular:
chaser_list.append(Chaser(self.display, insert_pos,
1, random.choice(colour_list), 0))
else:
if random.randint(0, 100) <= self.chaser_chance:
chaser_list.append(Chaser(self.display, insert_pos,
1, random.choice(colour_list), 0))
num_steps_gone = 0
for chaser in chaser_list:
chaser.push_to_led()
chaser.divide(1.25)
self.display.update()
time.sleep(self.sleep_time)
try:
for i in range(len(chaser_list)):
if (chaser_list[i].get_led_num() + 1) > self.display.get_num_pixels() + 10 or (
chaser_list[i].get_led_num() - 1) < -10:
del chaser_list[i]
else:
chaser_list.insert(0, Chaser(self.display, chaser_list[i].get_led_num() + next, 1,
chaser_list[i].get_colour(), chaser_list[i]))
del chaser_list[i + 1] # because inserted a new element before
except IndexError:
continue
num_steps_gone += 1
| [
"random.choice",
"math.floor",
"time.sleep",
"time.time",
"random.randint"
] | [((2268, 2279), 'time.time', 'time.time', ([], {}), '()\n', (2277, 2279), False, 'import time\n'), ((1187, 1231), 'math.floor', 'math.floor', (['(self.colour[i] * self.brightness)'], {}), '(self.colour[i] * self.brightness)\n', (1197, 1231), False, 'import math\n'), ((3477, 3504), 'time.sleep', 'time.sleep', (['self.sleep_time'], {}), '(self.sleep_time)\n', (3487, 3504), False, 'import time\n'), ((2687, 2713), 'random.choice', 'random.choice', (['colour_list'], {}), '(colour_list)\n', (2700, 2713), False, 'import random\n'), ((2762, 2773), 'time.time', 'time.time', ([], {}), '()\n', (2771, 2773), False, 'import time\n'), ((3076, 3098), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (3090, 3098), False, 'import random\n'), ((2999, 3025), 'random.choice', 'random.choice', (['colour_list'], {}), '(colour_list)\n', (3012, 3025), False, 'import random\n'), ((3251, 3277), 'random.choice', 'random.choice', (['colour_list'], {}), '(colour_list)\n', (3264, 3277), False, 'import random\n')] |
import re
import sys
import logging
import urllib.parse
import json
import plugin
from utils import url_parser, auto_requests, str_utils
class Titlegiver(plugin.Plugin):
TITLE_REGEX = re.compile(r"<title[^>]*>(.*?)</title>", re.IGNORECASE | re.DOTALL)
WHITESPACE_REGEX = re.compile(r"\s+")
MAX_CONTENT_LENGTH = 64 * 1024
USER_AGENT = (
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/43.0.2357.37 Safari/537.36"
)
MAX_LINE_COUNT = 16
def __init__(self):
plugin.Plugin.__init__(self, "titlegiver")
@staticmethod
def get_title_from_url(url):
# Fetch page (no need to verfiy SSL certs for titles)
response = auto_requests.get(
url,
verify=False,
headers={"User-Agent": Titlegiver.USER_AGENT, "Accept-Language": "en_US"},
)
content = response.text[: Titlegiver.MAX_CONTENT_LENGTH]
# Avoid leaving dangling redirects when we've got the content
response.connection.close()
return Titlegiver.find_title_in_content(content).strip()
@staticmethod
def find_title_in_content(text):
try:
title = Titlegiver.WHITESPACE_REGEX.sub(
" ", Titlegiver.TITLE_REGEX.search(text).group(1)
)
return str_utils.unescape_entities(title)
except:
logging.exception("Regexp or unescape failed")
return None
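    # Illustrative example (not part of the original plugin), assuming the HTML
    # snippet below: TITLE_REGEX grabs the title tag across newlines and the
    # whitespace is collapsed before entity unescaping, so roughly:
    #   Titlegiver.find_title_in_content('<html><title>Hello\n  World</title></html>')
    #   -> 'Hello World'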
@staticmethod
# Split a given string and remove empty lines
def split_strip_and_slice(text, limit=0):
return [line.strip() for line in text.splitlines() if line.strip()][0:limit]
def started(self, settings):
self.settings = json.loads(settings)
def process(self, url, server, channel):
parts = urllib.parse.urlparse(url)
if parts.netloc in self.settings["blacklist"]:
logging.info("Blacklisted %s", url)
return
title = Titlegiver.get_title_from_url(url)
for line in Titlegiver.split_strip_and_slice(title, Titlegiver.MAX_LINE_COUNT):
self.privmsg(server, channel, line)
def on_pubmsg(self, server, user, channel, message):
for url in url_parser.find_urls(message):
try:
self._thread(self.process, url, server, channel)
except:
logging.exception("Unable to find title for: %s", url)
if __name__ == "__main__":
sys.exit(Titlegiver.run())
| [
"json.loads",
"urllib.parse.urlparse",
"re.compile",
"utils.str_utils.unescape_entities",
"utils.url_parser.find_urls",
"logging.exception",
"plugin.Plugin.__init__",
"utils.auto_requests.get",
"logging.info"
] | [((186, 252), 're.compile', 're.compile', (['"""<title[^>]*>(.*?)</title>"""', '(re.IGNORECASE | re.DOTALL)'], {}), "('<title[^>]*>(.*?)</title>', re.IGNORECASE | re.DOTALL)\n", (196, 252), False, 'import re\n'), ((277, 295), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (287, 295), False, 'import re\n'), ((540, 582), 'plugin.Plugin.__init__', 'plugin.Plugin.__init__', (['self', '"""titlegiver"""'], {}), "(self, 'titlegiver')\n", (562, 582), False, 'import plugin\n'), ((716, 832), 'utils.auto_requests.get', 'auto_requests.get', (['url'], {'verify': '(False)', 'headers': "{'User-Agent': Titlegiver.USER_AGENT, 'Accept-Language': 'en_US'}"}), "(url, verify=False, headers={'User-Agent': Titlegiver.\n USER_AGENT, 'Accept-Language': 'en_US'})\n", (733, 832), False, 'from utils import url_parser, auto_requests, str_utils\n'), ((1726, 1746), 'json.loads', 'json.loads', (['settings'], {}), '(settings)\n', (1736, 1746), False, 'import json\n'), ((1810, 1836), 'urllib.parse.urlparse', 'urllib.parse.urlparse', (['url'], {}), '(url)\n', (1831, 1836), False, 'import urllib\n'), ((2224, 2253), 'utils.url_parser.find_urls', 'url_parser.find_urls', (['message'], {}), '(message)\n', (2244, 2253), False, 'from utils import url_parser, auto_requests, str_utils\n'), ((1334, 1368), 'utils.str_utils.unescape_entities', 'str_utils.unescape_entities', (['title'], {}), '(title)\n', (1361, 1368), False, 'from utils import url_parser, auto_requests, str_utils\n'), ((1904, 1939), 'logging.info', 'logging.info', (['"""Blacklisted %s"""', 'url'], {}), "('Blacklisted %s', url)\n", (1916, 1939), False, 'import logging\n'), ((1397, 1443), 'logging.exception', 'logging.exception', (['"""Regexp or unescape failed"""'], {}), "('Regexp or unescape failed')\n", (1414, 1443), False, 'import logging\n'), ((2373, 2427), 'logging.exception', 'logging.exception', (['"""Unable to find title for: %s"""', 'url'], {}), "('Unable to find title for: %s', url)\n", (2390, 2427), False, 'import logging\n')] |
from contextlib import contextmanager
import sys
from io import StringIO
import logging
@contextmanager
def captured_output():
# https://stackoverflow.com/questions/4219717/how-to-assert-output-with-nosetest-unittest-in-python
no, ne = StringIO(), StringIO()
oo, oe = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = no, ne
yield lambda: no.getvalue(), lambda: ne.getvalue()
finally:
sys.stdout, sys.stderr = oo, oe
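# Illustrative usage sketch (not part of the original module): the context
# manager yields two callables that return whatever was written to stdout and
# stderr while the block was active.
def _example_captured_output():
    with captured_output() as (out, err):
        print("hello")
    assert out().strip() == "hello"
    assert err() == ""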
| [
"io.StringIO"
] | [((246, 256), 'io.StringIO', 'StringIO', ([], {}), '()\n', (254, 256), False, 'from io import StringIO\n'), ((258, 268), 'io.StringIO', 'StringIO', ([], {}), '()\n', (266, 268), False, 'from io import StringIO\n')] |
import numpy as np
import nxsdk.api.n2a as nx
from nxsdk.graph.processes.phase_enums import Phase
import os
from snn_loihi.oculumotor_snn import RobotHeadNet
def setup_full_head_snn():
"""
Setup Full Head SNN on Loihi for testing
Returns:
net: Loihi network object
input_conn_dict: dictionary of input fake connections
"""
joint_name_list = ['eye_pan', 'eye_left_tilt', 'eye_right_tilt']
ebn_2_eye_motor_conn_mask_dict = {'eye_pan': np.array([[1, 0], [0, 1], [0, 0],
[0, 0], [0, 0], [0, 0]]),
'eye_left_tilt': np.array([[0, 0], [0, 0], [1, 0],
[0, 1], [0, 0], [0, 0]]),
'eye_right_tilt': np.array([[0, 0], [0, 0], [0, 0],
[0, 0], [1, 0], [0, 1]])}
llbn_2_neck_motor_conn_mask_dict = {'eye_pan': np.array([[1, 0], [0, 1], [0, 0], [0, 0]]),
'eye_left_tilt': np.array([[0, 0], [0, 0], [1, 0], [0, 0]]),
'eye_right_tilt': np.array([[0, 0], [0, 0], [0, 0], [0, 1]])}
ebn_2_coupling_conn_mask_dict = {'eye_left_tilt': np.array([[1, 0], [-1, 0], [0, 1], [0, -1]]),
'eye_right_tilt': np.array([[-1, 0], [1, 0], [0, -1], [0, 1]])}
net = nx.NxNet()
eye_motor_neuron, neck_motor_neuron = RobotHeadNet.motor_neurons(net)
input_neuron_dict, input_conn_dict = RobotHeadNet.online_input_neurons(net, joint_name_list)
ebn_dict, llbn_dict = RobotHeadNet.control_core_module_all_joints(net, input_neuron_dict, joint_name_list)
RobotHeadNet.eye_joints_control(eye_motor_neuron, ebn_dict, ebn_2_eye_motor_conn_mask_dict)
RobotHeadNet.neck_joints_control(neck_motor_neuron, ebn_dict, llbn_2_neck_motor_conn_mask_dict)
RobotHeadNet.eye_coupling_control(net, eye_motor_neuron, ebn_dict, ebn_2_coupling_conn_mask_dict)
RobotHeadNet.online_motor_neurons_spike_probe(eye_motor_neuron, neck_motor_neuron)
return net, input_conn_dict
def compile_single_joint_head_snn(net, input_conn_dict, snip_path="./snn_loihi/snips"):
"""
Compile Loihi network with online encoding and decoding
Args:
net (NxNet): Loihi network object
input_conn_dict (dict): dictionary of input fake connections
snip_path (str): directory for snip
Returns:
board: Loihi compiled network
encoder_channel: encoder channel
decoder_channel: decoder channel
"""
compiler = nx.N2Compiler()
board = compiler.compile(net)
input_neuron_id = RobotHeadNet.online_get_fake_input_connection_axon_id(net, input_conn_dict)
print("Input Neuron Axon Id: ", input_neuron_id)
include_dir = os.path.abspath(snip_path)
encoder_snip = board.createSnip(
Phase.EMBEDDED_SPIKING,
name="encoder",
includeDir=include_dir,
cFilePath=include_dir + "/encoder.c",
funcName="run_encoder",
guardName="do_encoder"
)
decoder_snip = board.createSnip(
Phase.EMBEDDED_MGMT,
name="decoder",
includeDir=include_dir,
cFilePath=include_dir + "/decoder.c",
funcName="run_decoder",
guardName="do_decoder"
)
encoder_channel = board.createChannel(b'encodeinput', "int", 6)
encoder_channel.connect(None, encoder_snip)
decoder_channel = board.createChannel(b'decodeoutput', "int", 10)
decoder_channel.connect(decoder_snip, None)
return board, encoder_channel, decoder_channel
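# Illustrative usage sketch (not part of the original module). It assumes access
# to a Loihi system and that ./snn_loihi/snips holds the encoder/decoder snips
# referenced above; the returned channels are then used for online IO.
if __name__ == '__main__':
    head_net, fake_input_conns = setup_full_head_snn()
    board, encoder_channel, decoder_channel = compile_single_joint_head_snn(head_net, fake_input_conns)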
| [
"snn_loihi.oculumotor_snn.RobotHeadNet.motor_neurons",
"nxsdk.api.n2a.NxNet",
"snn_loihi.oculumotor_snn.RobotHeadNet.eye_joints_control",
"snn_loihi.oculumotor_snn.RobotHeadNet.eye_coupling_control",
"snn_loihi.oculumotor_snn.RobotHeadNet.online_motor_neurons_spike_probe",
"snn_loihi.oculumotor_snn.RobotHeadNet.control_core_module_all_joints",
"snn_loihi.oculumotor_snn.RobotHeadNet.neck_joints_control",
"snn_loihi.oculumotor_snn.RobotHeadNet.online_get_fake_input_connection_axon_id",
"numpy.array",
"nxsdk.api.n2a.N2Compiler",
"snn_loihi.oculumotor_snn.RobotHeadNet.online_input_neurons",
"os.path.abspath"
] | [((1471, 1481), 'nxsdk.api.n2a.NxNet', 'nx.NxNet', ([], {}), '()\n', (1479, 1481), True, 'import nxsdk.api.n2a as nx\n'), ((1524, 1555), 'snn_loihi.oculumotor_snn.RobotHeadNet.motor_neurons', 'RobotHeadNet.motor_neurons', (['net'], {}), '(net)\n', (1550, 1555), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((1597, 1652), 'snn_loihi.oculumotor_snn.RobotHeadNet.online_input_neurons', 'RobotHeadNet.online_input_neurons', (['net', 'joint_name_list'], {}), '(net, joint_name_list)\n', (1630, 1652), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((1679, 1767), 'snn_loihi.oculumotor_snn.RobotHeadNet.control_core_module_all_joints', 'RobotHeadNet.control_core_module_all_joints', (['net', 'input_neuron_dict', 'joint_name_list'], {}), '(net, input_neuron_dict,\n joint_name_list)\n', (1722, 1767), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((1768, 1863), 'snn_loihi.oculumotor_snn.RobotHeadNet.eye_joints_control', 'RobotHeadNet.eye_joints_control', (['eye_motor_neuron', 'ebn_dict', 'ebn_2_eye_motor_conn_mask_dict'], {}), '(eye_motor_neuron, ebn_dict,\n ebn_2_eye_motor_conn_mask_dict)\n', (1799, 1863), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((1864, 1963), 'snn_loihi.oculumotor_snn.RobotHeadNet.neck_joints_control', 'RobotHeadNet.neck_joints_control', (['neck_motor_neuron', 'ebn_dict', 'llbn_2_neck_motor_conn_mask_dict'], {}), '(neck_motor_neuron, ebn_dict,\n llbn_2_neck_motor_conn_mask_dict)\n', (1896, 1963), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((1964, 2065), 'snn_loihi.oculumotor_snn.RobotHeadNet.eye_coupling_control', 'RobotHeadNet.eye_coupling_control', (['net', 'eye_motor_neuron', 'ebn_dict', 'ebn_2_coupling_conn_mask_dict'], {}), '(net, eye_motor_neuron, ebn_dict,\n ebn_2_coupling_conn_mask_dict)\n', (1997, 2065), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((2066, 2152), 'snn_loihi.oculumotor_snn.RobotHeadNet.online_motor_neurons_spike_probe', 'RobotHeadNet.online_motor_neurons_spike_probe', (['eye_motor_neuron', 'neck_motor_neuron'], {}), '(eye_motor_neuron,\n neck_motor_neuron)\n', (2111, 2152), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((2664, 2679), 'nxsdk.api.n2a.N2Compiler', 'nx.N2Compiler', ([], {}), '()\n', (2677, 2679), True, 'import nxsdk.api.n2a as nx\n'), ((2736, 2811), 'snn_loihi.oculumotor_snn.RobotHeadNet.online_get_fake_input_connection_axon_id', 'RobotHeadNet.online_get_fake_input_connection_axon_id', (['net', 'input_conn_dict'], {}), '(net, input_conn_dict)\n', (2789, 2811), False, 'from snn_loihi.oculumotor_snn import RobotHeadNet\n'), ((2883, 2909), 'os.path.abspath', 'os.path.abspath', (['snip_path'], {}), '(snip_path)\n', (2898, 2909), False, 'import os\n'), ((478, 536), 'numpy.array', 'np.array', (['[[1, 0], [0, 1], [0, 0], [0, 0], [0, 0], [0, 0]]'], {}), '([[1, 0], [0, 1], [0, 0], [0, 0], [0, 0], [0, 0]])\n', (486, 536), True, 'import numpy as np\n'), ((652, 710), 'numpy.array', 'np.array', (['[[0, 0], [0, 0], [1, 0], [0, 1], [0, 0], [0, 0]]'], {}), '([[0, 0], [0, 0], [1, 0], [0, 1], [0, 0], [0, 0]])\n', (660, 710), True, 'import numpy as np\n'), ((833, 891), 'numpy.array', 'np.array', (['[[0, 0], [0, 0], [0, 0], [0, 0], [1, 0], [0, 1]]'], {}), '([[0, 0], [0, 0], [0, 0], [0, 0], [1, 0], [0, 1]])\n', (841, 891), True, 'import numpy as np\n'), ((1011, 1053), 'numpy.array', 'np.array', (['[[1, 0], [0, 1], [0, 0], [0, 0]]'], {}), '([[1, 0], [0, 1], [0, 0], [0, 0]])\n', (1019, 1053), True, 'import numpy as np\n'), ((1112, 1154), 
'numpy.array', 'np.array', (['[[0, 0], [0, 0], [1, 0], [0, 0]]'], {}), '([[0, 0], [0, 0], [1, 0], [0, 0]])\n', (1120, 1154), True, 'import numpy as np\n'), ((1214, 1256), 'numpy.array', 'np.array', (['[[0, 0], [0, 0], [0, 0], [0, 1]]'], {}), '([[0, 0], [0, 0], [0, 0], [0, 1]])\n', (1222, 1256), True, 'import numpy as np\n'), ((1313, 1357), 'numpy.array', 'np.array', (['[[1, 0], [-1, 0], [0, 1], [0, -1]]'], {}), '([[1, 0], [-1, 0], [0, 1], [0, -1]])\n', (1321, 1357), True, 'import numpy as np\n'), ((1414, 1458), 'numpy.array', 'np.array', (['[[-1, 0], [1, 0], [0, -1], [0, 1]]'], {}), '([[-1, 0], [1, 0], [0, -1], [0, 1]])\n', (1422, 1458), True, 'import numpy as np\n')] |
import numpy as np
import cv2
import scipy.io
import argparse
from tqdm import tqdm
from os import listdir
from os.path import isfile, join
import sys
import dlib
from moviepy.editor import *
def warp_im(im, M, dshape):
output_im = np.zeros(dshape, dtype=im.dtype)
cv2.warpAffine(im,
M[:2],
(dshape[1], dshape[0]),
dst=output_im,
borderMode=cv2.BORDER_TRANSPARENT,
flags=cv2.WARP_INVERSE_MAP)
return output_im
def transformation_from_points(points1, points2):
"""
Return an affine transformation [s * R | T] such that:
sum ||s*R*p1,i + T - p2,i||^2
is minimized.
"""
# Solve the procrustes problem by subtracting centroids, scaling by the
# standard deviation, and then using the SVD to calculate the rotation. See
# the following for more details:
# https://en.wikipedia.org/wiki/Orthogonal_Procrustes_problem
points1 = points1.astype(np.float64)
points2 = points2.astype(np.float64)
c1 = np.mean(points1, axis=0)
c2 = np.mean(points2, axis=0)
points1 -= c1
points2 -= c2
s1 = np.std(points1)
s2 = np.std(points2)
points1 /= s1
points2 /= s2
U, S, Vt = np.linalg.svd(points1.T * points2)
# The R we seek is in fact the transpose of the one given by U * Vt. This
# is because the above formulation assumes the matrix goes on the right
# (with row vectors) where as our solution requires the matrix to be on the
# left (with column vectors).
R = (U * Vt).T
return np.vstack([np.hstack(((s2 / s1) * R,
c2.T - (s2 / s1) * R * c1.T)),
np.matrix([0., 0., 1.])])
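# Illustrative sketch (not part of the original script) of how the Procrustes
# transform above combines with warp_im; the landmark matrices and image are
# hypothetical stand-ins for real dlib detections.
def _alignment_example():
    ref_points = np.matrix(np.random.rand(68, 2) * 100.0)  # hypothetical reference landmarks
    img_points = np.matrix(np.random.rand(68, 2) * 100.0)  # hypothetical detected landmarks
    img = np.zeros((128, 128, 3), dtype=np.uint8)           # hypothetical input image
    M = transformation_from_points(ref_points, img_points)  # 3x3 affine [s * R | T]
    return warp_im(img, M, (128, 128, 3))  # image warped into the reference frame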
def get_landmarks(im,detector,predictor):
rects = detector(im, 1)
return np.matrix([[p.x, p.y] for p in predictor(im, rects[0]).parts()])
def get_args():
parser = argparse.ArgumentParser(description="This script cleans-up noisy labels "
"and creates database for training.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--output", "-o", type=str,
help="path to output database mat file")
parser.add_argument("--img_size", type=int, default=64,
help="output image size")
args = parser.parse_args()
return args
def main():
args = get_args()
output_path = args.output
img_size = args.img_size
mypath = './morph2'
isPlot = False
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor("landmarks/shape_predictor_68_face_landmarks.dat")
ref_img = cv2.imread(mypath+'/009055_1M54.JPG')
landmark_ref = get_landmarks(ref_img,detector,predictor)
FACE_POINTS = list(range(17, 68))
MOUTH_POINTS = list(range(48, 61))
RIGHT_BROW_POINTS = list(range(17, 22))
LEFT_BROW_POINTS = list(range(22, 27))
RIGHT_EYE_POINTS = list(range(36, 42))
LEFT_EYE_POINTS = list(range(42, 48))
NOSE_POINTS = list(range(27, 35))
JAW_POINTS = list(range(0, 17))
# Points used to line up the images.
ALIGN_POINTS = (LEFT_BROW_POINTS + RIGHT_EYE_POINTS + LEFT_EYE_POINTS +
RIGHT_BROW_POINTS + NOSE_POINTS + MOUTH_POINTS)
out_genders = []
out_ages = []
out_imgs = []
for i in tqdm(range(len(onlyfiles))):
img_name = onlyfiles[i]
temp_name = img_name.split('_')
temp_name = temp_name[1].split('.')
isMale = temp_name[0].find('M')
isFemale = temp_name[0].find('F')
if isMale > -1:
gender = 0
age = temp_name[0].split('M')
age = age[1]
elif isFemale > -1:
gender = 1
age = temp_name[0].split('F')
age = age[1]
age = int(float(age))
input_img = cv2.imread(mypath+'/'+img_name)
img_h, img_w, _ = np.shape(input_img)
detected = detector(input_img,1)
if len(detected) == 1:
#---------------------------------------------------------------------------------------------
# Face align
landmark = get_landmarks(input_img,detector,predictor)
M = transformation_from_points(landmark_ref[ALIGN_POINTS], landmark[ALIGN_POINTS])
input_img = warp_im(input_img, M, ref_img.shape)
#---------------------------------------------------------------------------------------------
detected = detector(input_img, 1)
if len(detected) == 1:
faces = np.empty((len(detected), img_size, img_size, 3))
for i, d in enumerate(detected):
x1, y1, x2, y2, w, h = d.left(), d.top(), d.right() + 1, d.bottom() + 1, d.width(), d.height()
xw1 = max(int(x1 - 0.4 * w), 0)
yw1 = max(int(y1 - 0.4 * h), 0)
xw2 = min(int(x2 + 0.4 * w), img_w - 1)
yw2 = min(int(y2 + 0.4 * h), img_h - 1)
faces[i,:,:,:] = cv2.resize(input_img[yw1:yw2 + 1, xw1:xw2 + 1, :], (img_size, img_size))
if isPlot:
cv2.rectangle(input_img, (x1, y1), (x2, y2), (255, 0, 0), 2)
cv2.rectangle(input_img, (xw1, yw1), (xw2, yw2), (0, 255, 0), 2)
img_clip = ImageClip(input_img)
img_clip.show()
key = cv2.waitKey(1000)
#only add to the list when faces is detected
out_imgs.append(faces[0,:,:,:])
out_genders.append(int(gender))
out_ages.append(int(age))
np.savez(output_path,image=np.array(out_imgs), gender=np.array(out_genders), age=np.array(out_ages), img_size=img_size)
if __name__ == '__main__':
main()
| [
"cv2.rectangle",
"numpy.mean",
"cv2.warpAffine",
"os.listdir",
"cv2.resize",
"argparse.ArgumentParser",
"numpy.hstack",
"os.path.join",
"dlib.shape_predictor",
"dlib.get_frontal_face_detector",
"numpy.zeros",
"numpy.array",
"cv2.waitKey",
"numpy.std",
"numpy.linalg.svd",
"numpy.shape",
"numpy.matrix",
"cv2.imread"
] | [((237, 269), 'numpy.zeros', 'np.zeros', (['dshape'], {'dtype': 'im.dtype'}), '(dshape, dtype=im.dtype)\n', (245, 269), True, 'import numpy as np\n'), ((274, 406), 'cv2.warpAffine', 'cv2.warpAffine', (['im', 'M[:2]', '(dshape[1], dshape[0])'], {'dst': 'output_im', 'borderMode': 'cv2.BORDER_TRANSPARENT', 'flags': 'cv2.WARP_INVERSE_MAP'}), '(im, M[:2], (dshape[1], dshape[0]), dst=output_im, borderMode\n =cv2.BORDER_TRANSPARENT, flags=cv2.WARP_INVERSE_MAP)\n', (288, 406), False, 'import cv2\n'), ((1055, 1079), 'numpy.mean', 'np.mean', (['points1'], {'axis': '(0)'}), '(points1, axis=0)\n', (1062, 1079), True, 'import numpy as np\n'), ((1089, 1113), 'numpy.mean', 'np.mean', (['points2'], {'axis': '(0)'}), '(points2, axis=0)\n', (1096, 1113), True, 'import numpy as np\n'), ((1160, 1175), 'numpy.std', 'np.std', (['points1'], {}), '(points1)\n', (1166, 1175), True, 'import numpy as np\n'), ((1185, 1200), 'numpy.std', 'np.std', (['points2'], {}), '(points2)\n', (1191, 1200), True, 'import numpy as np\n'), ((1253, 1287), 'numpy.linalg.svd', 'np.linalg.svd', (['(points1.T * points2)'], {}), '(points1.T * points2)\n', (1266, 1287), True, 'import numpy as np\n'), ((1925, 2098), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""This script cleans-up noisy labels and creates database for training."""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description=\n 'This script cleans-up noisy labels and creates database for training.',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (1948, 2098), False, 'import argparse\n'), ((2689, 2721), 'dlib.get_frontal_face_detector', 'dlib.get_frontal_face_detector', ([], {}), '()\n', (2719, 2721), False, 'import dlib\n'), ((2738, 2809), 'dlib.shape_predictor', 'dlib.shape_predictor', (['"""landmarks/shape_predictor_68_face_landmarks.dat"""'], {}), "('landmarks/shape_predictor_68_face_landmarks.dat')\n", (2758, 2809), False, 'import dlib\n'), ((2829, 2868), 'cv2.imread', 'cv2.imread', (["(mypath + '/009055_1M54.JPG')"], {}), "(mypath + '/009055_1M54.JPG')\n", (2839, 2868), False, 'import cv2\n'), ((4063, 4098), 'cv2.imread', 'cv2.imread', (["(mypath + '/' + img_name)"], {}), "(mypath + '/' + img_name)\n", (4073, 4098), False, 'import cv2\n'), ((4121, 4140), 'numpy.shape', 'np.shape', (['input_img'], {}), '(input_img)\n', (4129, 4140), True, 'import numpy as np\n'), ((1599, 1650), 'numpy.hstack', 'np.hstack', (['(s2 / s1 * R, c2.T - s2 / s1 * R * c1.T)'], {}), '((s2 / s1 * R, c2.T - s2 / s1 * R * c1.T))\n', (1608, 1650), True, 'import numpy as np\n'), ((1720, 1746), 'numpy.matrix', 'np.matrix', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (1729, 1746), True, 'import numpy as np\n'), ((2625, 2640), 'os.listdir', 'listdir', (['mypath'], {}), '(mypath)\n', (2632, 2640), False, 'from os import listdir\n'), ((5959, 5977), 'numpy.array', 'np.array', (['out_imgs'], {}), '(out_imgs)\n', (5967, 5977), True, 'import numpy as np\n'), ((5986, 6007), 'numpy.array', 'np.array', (['out_genders'], {}), '(out_genders)\n', (5994, 6007), True, 'import numpy as np\n'), ((6013, 6031), 'numpy.array', 'np.array', (['out_ages'], {}), '(out_ages)\n', (6021, 6031), True, 'import numpy as np\n'), ((2651, 2666), 'os.path.join', 'join', (['mypath', 'f'], {}), '(mypath, f)\n', (2655, 2666), False, 'from os.path import isfile, join\n'), ((5268, 5340), 'cv2.resize', 'cv2.resize', (['input_img[yw1:yw2 + 1, xw1:xw2 + 1, :]', '(img_size, img_size)'], {}), '(input_img[yw1:yw2 + 1, xw1:xw2 + 1, :], (img_size, img_size))\n', (5278, 
5340), False, 'import cv2\n'), ((5413, 5473), 'cv2.rectangle', 'cv2.rectangle', (['input_img', '(x1, y1)', '(x2, y2)', '(255, 0, 0)', '(2)'], {}), '(input_img, (x1, y1), (x2, y2), (255, 0, 0), 2)\n', (5426, 5473), False, 'import cv2\n'), ((5498, 5562), 'cv2.rectangle', 'cv2.rectangle', (['input_img', '(xw1, yw1)', '(xw2, yw2)', '(0, 255, 0)', '(2)'], {}), '(input_img, (xw1, yw1), (xw2, yw2), (0, 255, 0), 2)\n', (5511, 5562), False, 'import cv2\n'), ((5689, 5706), 'cv2.waitKey', 'cv2.waitKey', (['(1000)'], {}), '(1000)\n', (5700, 5706), False, 'import cv2\n')] |
"""
read.py
Auxiliary functions for reading of seq files and seq file entries.
"""
from typing import Union
from pathlib import Path
from pypulseq.Sequence.sequence import Sequence
def get_minor_version(seq_file: Union[str, Path]) -> int:
"""
Reads minor version from a seq file.
:param seq_file: path to the sequence file to read into the Sequence object
:return version: version from the sequence file
"""
with open(seq_file) as file:
for line in file:
if line.startswith('minor'):
return int(line[len('minor '):])
def read_any_version(seq_file: Union[str, Path],
seq: Sequence = None) \
-> Sequence:
"""
Reads a sequence file (seq_file) independent of the (py)pulseq version.
:param seq_file: path to the sequence file to read into the Sequence object
:param seq: the sequence to read the seq file into. If not provided, a new Sequence object is instantiated
:return seq: Sequence object
"""
version = get_minor_version(seq_file)
if not seq:
seq = Sequence()
if version in [2, 3]:
seq.read(seq_file)
else:
raise ValueError('Version', version, 'can not be converted.')
return seq
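# Illustrative usage sketch (not part of the original module); 'example.seq' is a
# hypothetical pulseq sequence file path.
if __name__ == '__main__':
    print(get_minor_version('example.seq'))   # e.g. 2 or 3
    seq = read_any_version('example.seq')     # works for either supported minor version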
| [
"pypulseq.Sequence.sequence.Sequence"
] | [((1088, 1098), 'pypulseq.Sequence.sequence.Sequence', 'Sequence', ([], {}), '()\n', (1096, 1098), False, 'from pypulseq.Sequence.sequence import Sequence\n')] |
"""A soundboard made with streamlit."""
import chime
import streamlit as st
"""
# `chime` soundboard 🎶
This soundboard allows you to listen to the sounds available for each theme in the Python
[`chime`](https://github.com/MaxHalford/chime/) package. Feel welcome to
[open a pull request](https://github.com/MaxHalford/chime/compare) if you wish to contribute a new
theme. 🤗
"""
for i, theme in enumerate(chime.themes()):
if i:
st.markdown('---')
st.header(theme)
for event in ['success', 'warning', 'error', 'info']:
st.subheader(event)
with open(chime.themes_dir().joinpath(f'{theme}/{event}.wav'), 'rb') as wav:
st.audio(wav.read())
| [
"chime.themes",
"streamlit.markdown",
"chime.themes_dir",
"streamlit.subheader",
"streamlit.header"
] | [((408, 422), 'chime.themes', 'chime.themes', ([], {}), '()\n', (420, 422), False, 'import chime\n'), ((466, 482), 'streamlit.header', 'st.header', (['theme'], {}), '(theme)\n', (475, 482), True, 'import streamlit as st\n'), ((443, 461), 'streamlit.markdown', 'st.markdown', (['"""---"""'], {}), "('---')\n", (454, 461), True, 'import streamlit as st\n'), ((550, 569), 'streamlit.subheader', 'st.subheader', (['event'], {}), '(event)\n', (562, 569), True, 'import streamlit as st\n'), ((588, 606), 'chime.themes_dir', 'chime.themes_dir', ([], {}), '()\n', (604, 606), False, 'import chime\n')] |
import unittest
import sys
import os
import shutil
from StringIO import StringIO
sys.exit = lambda x: x
OUTPUT = StringIO()
sys.stdout = OUTPUT
class CommandTest(unittest.TestCase):
def setUp(self):
OUTPUT.truncate(0)
class InitCommandTest(CommandTest):
def setUp(self):
super(InitCommandTest, self).setUp()
self.arguments = {
'--handlers': None,
'--help': False,
'--version': False,
'<args>': [],
'<name>': 'example42',
'<port>': None,
'init': True,
'list': False,
'panel': False,
'watch': False
}
def test_init_with_name_empty_dir(self):
from easydojo.commands import DojoCommand
path = 'example42'
os.mkdir(path)
os.chdir(path)
cmd = DojoCommand.make(self.arguments)
cmd.run()
self.assertTrue(os.path.exists('.easydojo.yaml'))
self.assertTrue(os.path.exists('example42.py'))
self.assertTrue(os.path.exists('test_example42.py'))
self.assertIn("Initialize example42", OUTPUT.getvalue())
os.chdir('..')
shutil.rmtree(path)
def test_init_with_name_already_easydojo_dir(self):
from easydojo.commands import DojoCommand
path = 'example42'
os.mkdir(path)
os.chdir(path)
cmd = DojoCommand.make(self.arguments)
cmd.run()
self.assertIn("Initialize example42", OUTPUT.getvalue())
cmd.run()
self.assertIn("EasyDojo already exists", OUTPUT.getvalue())
os.chdir('..')
shutil.rmtree(path)
def test_init_with_name_not_empty_dir(self):
from easydojo.commands import DojoCommand
cmd = DojoCommand.make(self.arguments)
cmd.run()
self.assertIn("This dir isn't empty", OUTPUT.getvalue())
class ListCommandTest(CommandTest):
def setUp(self):
super(ListCommandTest, self).setUp()
self.arguments = {
'--handlers': None,
'--help': False,
'--version': False,
'<args>': [],
'<name>': None,
'<port>': None,
'init': False,
'list': True,
'panel': False,
'watch': False
}
def test_list(self):
from easydojo.commands import DojoCommand
cmd = DojoCommand.make(self.arguments)
cmd.run()
self.assertIn('List of all handlers:', OUTPUT.getvalue())
if __name__ == '__main__':
unittest.main()
| [
"StringIO.StringIO",
"os.path.exists",
"easydojo.commands.DojoCommand.make",
"os.chdir",
"os.mkdir",
"shutil.rmtree",
"unittest.main"
] | [((114, 124), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (122, 124), False, 'from StringIO import StringIO\n'), ((2533, 2548), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2546, 2548), False, 'import unittest\n'), ((797, 811), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (805, 811), False, 'import os\n'), ((820, 834), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (828, 834), False, 'import os\n'), ((849, 881), 'easydojo.commands.DojoCommand.make', 'DojoCommand.make', (['self.arguments'], {}), '(self.arguments)\n', (865, 881), False, 'from easydojo.commands import DojoCommand\n'), ((1148, 1162), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (1156, 1162), False, 'import os\n'), ((1171, 1190), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (1184, 1190), False, 'import shutil\n'), ((1333, 1347), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (1341, 1347), False, 'import os\n'), ((1356, 1370), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (1364, 1370), False, 'import os\n'), ((1385, 1417), 'easydojo.commands.DojoCommand.make', 'DojoCommand.make', (['self.arguments'], {}), '(self.arguments)\n', (1401, 1417), False, 'from easydojo.commands import DojoCommand\n'), ((1595, 1609), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (1603, 1609), False, 'import os\n'), ((1618, 1637), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (1631, 1637), False, 'import shutil\n'), ((1752, 1784), 'easydojo.commands.DojoCommand.make', 'DojoCommand.make', (['self.arguments'], {}), '(self.arguments)\n', (1768, 1784), False, 'from easydojo.commands import DojoCommand\n'), ((2383, 2415), 'easydojo.commands.DojoCommand.make', 'DojoCommand.make', (['self.arguments'], {}), '(self.arguments)\n', (2399, 2415), False, 'from easydojo.commands import DojoCommand\n'), ((924, 956), 'os.path.exists', 'os.path.exists', (['""".easydojo.yaml"""'], {}), "('.easydojo.yaml')\n", (938, 956), False, 'import os\n'), ((982, 1012), 'os.path.exists', 'os.path.exists', (['"""example42.py"""'], {}), "('example42.py')\n", (996, 1012), False, 'import os\n'), ((1038, 1073), 'os.path.exists', 'os.path.exists', (['"""test_example42.py"""'], {}), "('test_example42.py')\n", (1052, 1073), False, 'import os\n')] |
from github_trending.__init__ import __version__
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
description='View Github Trending and repository README from the command line',
long_description=long_description,
long_description_content_type='text/markdown',
author='<NAME>',
url='https://github.com/blue-9/github-trending',
download_url='https://pypi.python.org/pypi/github-trending-cli',
author_email='<EMAIL>',
version=__version__,
license='Apache License 2.0',
install_requires=[
'click>=5.1,<7.0',
'colorama>=0.3.3,<1.0.0',
'requests>=2.4.3,<3.0.0',
'pygments>=2.0.2,<3.0.0',
'prompt-toolkit>=1.0.0,<1.1.0',
'six>=1.9.0,<2.0.0',
],
extras_require={
'testing': [
'mock>=1.0.1,<2.0.0',
'tox>=1.9.2,<2.0.0'
],
},
entry_points={
'console_scripts': [
'github-trending = github_trending.main:cli',
'gt = github_trending.main_cli:cli'
]
},
packages=find_packages(),
scripts=[],
name='github-trending-cli',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| [
"setuptools.find_packages"
] | [((1178, 1193), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1191, 1193), False, 'from setuptools import setup, find_packages\n')] |
import abc
import logging
from collections import OrderedDict
import regex as re
from multipledispatch import dispatch
from bn.values.value import Value
dispatch_namespace = dict()
class Graph:
"""
Representation of a relational structure as a directed acyclic graph. The two
parameters V and R respectively express the types of values associated with each
node and each relation. In addition to its main content, each node can also
express a set of attributes (such as POS tags, named entities, timing information,
etc.).
The string representation of the graph is similar to the one used in the Stanford
NLP package. For instance, the string [loves subject>John object>Mary] represents
a graph with three nodes (loves, John and Mary), with a relation labelled
"subject" from "loves" to "John", and a relation labelled "object" between "loves"
and "Mary". Brackets are used to construct embedded graphs, such as for instance
[eats subject>Pierre object>[apple attribute>red]], which is a graph with four
nodes, where the node "apple" is itself the governor of the node "red".
The optional attributes are indicated via a | bar followed by a key:value pair
right after the node content. For instance, "loves|pos:VB" indicates that the pos
attribute for the node has the value "VB". To incorporate several attributes, you
can simply add additional | bars, like this: loves|pos:VB|index:1.
Finally, it is also possible to construct graph with more than one root by
including several brackets at the top level, such as [eats subject>Pierre][drinks
subject>Milen].
The class is abstract, and its extension requires the definition of three methods
that define how the values V and R can be created from string, and how values V
can be copied.
"""
__metaclass__ = abc.ABCMeta
# logger
log = logging.getLogger('PyOpenDial')
_value_regex = r'([^\[\]\s\|]+)((?:\|\w+:[^\[\]\s\|]+)*)'
_value_pattern = re.compile(_value_regex)
_graph_pattern = re.compile(r"\[" + _value_regex + r"((\s+\S+>" + _value_regex + r")*)\]")
def __init__(self, arg1=None):
if arg1 == None:
"""
Constructs an empty graph
"""
self._roots = []
self._nodes = []
self._str = ''
elif isinstance(arg1, str):
str_representation = arg1
"""
Constructs a graph from a string
:param str_representation: the string representation for the graph
"""
self._roots = []
self._nodes = []
str_representation = str_representation.strip().replace('> ', '>')
bracket_cnt = 0
tmp = []
for c in str_representation:
tmp.append(c)
if c == '[':
bracket_cnt += 1
elif c == ']':
bracket_cnt -= 1
                    if bracket_cnt == 0:
                        node = self._create_node(''.join(tmp))
                        self._roots.append(node)
                        # Reset the buffer so additional top-level roots
                        # (e.g. "[a ...][b ...]") are parsed independently.
                        tmp = []
self._str = ''.join([str(root) for root in self._roots])
else:
raise NotImplementedError()
def __hash__(self):
"""
Returns the hashcode for the graph
"""
return self._str.__hash__()
def __eq__(self, other):
"""
Returns true if the object is a graph with the same content.
"""
if not isinstance(other, Graph):
return False
return self._str == str(other)
def __str__(self):
"""
Returns the string representation for the graph
"""
return self._str
@staticmethod
@dispatch(str, namespace=dispatch_namespace)
def is_relational(str_representation):
"""
Returns true if the string represents a relational structure, else false.
:param str_representation: the string to check
:return: true if the string encodes a graph, else false
"""
if not str_representation.startswith('['):
return False
if not str_representation.endswith(']'):
return False
if '>' not in str_representation:
return False
return Graph._graph_pattern.search(str_representation) is not None
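    # Illustrative examples (not part of the original class) of strings accepted
    # by the pattern above:
    #   Graph.is_relational('[loves subject>John object>Mary]')       -> True
    #   Graph.is_relational('[eats subject>[apple attribute>red]]')   -> True
    #   Graph.is_relational('loves John')                             -> False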
@dispatch(str)
@abc.abstractmethod
def create_value(self, str_representation):
"""
Creates a value of type V from a string
:param str_representation: the string
:return: the corresponding value
"""
raise NotImplementedError()
@dispatch(str)
@abc.abstractmethod
def create_relation(self, str_representation):
"""
Creates a value of type R from a string
:param str_representation: the string
:return: the corresponding value
"""
raise NotImplementedError()
@dispatch(Value)
@abc.abstractmethod
def copy_value(self, value):
"""
Copies the value
:param value: the value to copy
:return: the copied value
"""
raise NotImplementedError()
@dispatch()
def get_nodes(self):
"""
Returns the nodes of the graph
:return: the nodes
"""
return self._nodes
@dispatch()
def get_roots(self):
"""
Returns the roots of the graph
:return: the roots
"""
return self._roots
@dispatch(str)
def _create_node(self, str_representation):
"""
Creates a new node from the string representation. This method is called
recursively to build the full graph structure.
:param str_representation: the string
:return: the corresponding node
"""
searcher = Graph._graph_pattern.search(str_representation)
while searcher is not None:
if searcher.start() > 0 or searcher.end() < len(str_representation):
node = self._create_node(searcher.group(0))
str_representation = str_representation[:searcher.start()] + str(id(node)) + str_representation[searcher.end():]
searcher = Graph._graph_pattern.search(str_representation)
else:
content = searcher.group(1)
node = Node(self.create_value(content))
attributes = searcher.group(2)
attributes = attributes[1:] if len(attributes) > 0 else attributes
for attribute in attributes.split('|'):
if len(attribute) == 0:
continue
attribute = attribute.split(':')
node.add_attributes(attribute[0], self.create_value(attribute[1]))
relations = searcher.group(3).split(' ')
for relation in relations:
if len(relation) == 0 or '>' not in relation:
continue
relation = relation.split('>')
relation_key = self.create_relation(relation[0])
relation_content = relation[1]
child_node = None
for _node in self._nodes:
if str(id(_node)) == relation_content:
child_node = _node
break
node.add_child(relation_key, self._create_node("[%s]" % relation_content) if child_node is None else child_node)
self._nodes.insert(0, node)
return node
raise ValueError()
class NodeWrapper:
pass
class Node(NodeWrapper):
"""
Representation of an individual node, with a content, optional attributes and
outgoing relations.
"""
def __init__(self, arg1):
if isinstance(arg1, object):
content = arg1
"""
Creates a new node with the given content
:param content: the content
"""
self._content = content
self._children = OrderedDict()
self._attributes = dict()
else:
raise NotImplementedError()
def __str__(self):
"""
Returns a string representation of the node and its descendants
"""
result = str(self._content) + ''.join(['|%s:%s' % (key, str(value)) for key, value in self._attributes.items()])
if len(self._children) == 0:
return result
return '[' + result + ''.join([' %s>%s' % (key, str(value)) for key, value in self._children.items()]) + ']'
def __eq__(self, other):
"""
Returns true if the object is a graph with the same content.
"""
if not isinstance(other, Node):
return False
if self._content != other._content:
return False
if self._children != other._children:
return False
if self._attributes != other._attributes:
return False
return True
def __hash__(self):
"""
Returns the hashcode for the node
"""
return hash(self._content) - hash(frozenset(self._children.items())) + hash(frozenset(self._attributes.items()))
@dispatch(str, object)
def add_attributes(self, key, value):
"""
Adds a new attribute to the node
:param key: the attribute label
:param value: the corresponding value
"""
self._attributes[key] = value
@dispatch(object, NodeWrapper)
def add_child(self, relation, node):
"""
Adds a new outgoing relation to the node. Throws an exception if a cycle
is found.
:param relation: the relation label
:param node: the dependent node
"""
if self in node.get_descendants():
raise ValueError()
self._children[relation] = node
self._children = OrderedDict(sorted(self._children.items(), key=lambda t: t[0]))
@dispatch()
def get_content(self):
"""
Returns the node content
"""
return self._content
@dispatch()
def get_relations(self):
"""
returns the relation labels going out of the node
"""
return sorted(set(self._children.keys()))
@dispatch(object)
def get_child(self, relation):
"""
Returns the node that is a child of the current node through the given
relation. Returns null if the child cannot be found.
:param relation: the labelled relation
:return: the corresponding child node
"""
return self._children[relation]
@dispatch()
def get_attributes(self):
"""
Returns the set of attribute keys.
:return: the keys
"""
return set(self._attributes.keys())
@dispatch(str)
def get_attr_value(self, key):
"""
Returns the attribute value for the given key, if it exists. Else returns null.
"""
return self._attributes[key]
@dispatch()
def get_children(self):
"""
Returns the set of children nodes
:return: the children nodes
"""
return list(self._children.values())
@dispatch()
def get_descendants(self):
"""
Returns the set of all descendant nodes.
:return: the descendant nodes
"""
descendants = set()
to_process = list(self._children.values())
while len(to_process) > 0:
node = to_process.pop(0)
descendants.add(node)
to_process.extend(node.get_children())
return descendants
@dispatch()
def copy(self):
"""
Copies the node
:return: the copy
"""
node = Node(self._copy_value_func(self._content), self._copy_value_func)
for attr_key in self._attributes.keys():
node.add_attributes(attr_key, self._copy_value_func(self._attributes[attr_key]))
for child_key in self._children.keys():
node.add_child(child_key, self._children[child_key].copy())
return node
@dispatch(NodeWrapper)
def merge(self, other):
"""
Merges the node with another one (if two values are incompatible, the
content of the other node takes precedence).
:param other: otherGraphNode the other node
:return: the merged node
"""
merged_node = other.copy()
for attr_key in self._attributes:
            merged_node.add_attributes(attr_key, self._copy_value_func(self._attributes[attr_key]))
for child_key in self._children.keys():
if child_key in merged_node._children:
merged_child_node = merged_node._children[child_key]
merged_child_node = self._children[child_key].merge(merged_child_node)
merged_node.add_child(child_key, merged_child_node)
else:
merged_node.add_child(child_key, self._children[child_key])
return merged_node
| [
"logging.getLogger",
"collections.OrderedDict",
"multipledispatch.dispatch",
"regex.compile"
] | [((1901, 1932), 'logging.getLogger', 'logging.getLogger', (['"""PyOpenDial"""'], {}), "('PyOpenDial')\n", (1918, 1932), False, 'import logging\n'), ((2017, 2041), 'regex.compile', 're.compile', (['_value_regex'], {}), '(_value_regex)\n', (2027, 2041), True, 'import regex as re\n'), ((2063, 2137), 'regex.compile', 're.compile', (["('\\\\[' + _value_regex + '((\\\\s+\\\\S+>' + _value_regex + ')*)\\\\]')"], {}), "('\\\\[' + _value_regex + '((\\\\s+\\\\S+>' + _value_regex + ')*)\\\\]')\n", (2073, 2137), True, 'import regex as re\n'), ((3773, 3816), 'multipledispatch.dispatch', 'dispatch', (['str'], {'namespace': 'dispatch_namespace'}), '(str, namespace=dispatch_namespace)\n', (3781, 3816), False, 'from multipledispatch import dispatch\n'), ((4387, 4400), 'multipledispatch.dispatch', 'dispatch', (['str'], {}), '(str)\n', (4395, 4400), False, 'from multipledispatch import dispatch\n'), ((4675, 4688), 'multipledispatch.dispatch', 'dispatch', (['str'], {}), '(str)\n', (4683, 4688), False, 'from multipledispatch import dispatch\n'), ((4966, 4981), 'multipledispatch.dispatch', 'dispatch', (['Value'], {}), '(Value)\n', (4974, 4981), False, 'from multipledispatch import dispatch\n'), ((5204, 5214), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (5212, 5214), False, 'from multipledispatch import dispatch\n'), ((5364, 5374), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (5372, 5374), False, 'from multipledispatch import dispatch\n'), ((5524, 5537), 'multipledispatch.dispatch', 'dispatch', (['str'], {}), '(str)\n', (5532, 5537), False, 'from multipledispatch import dispatch\n'), ((9282, 9303), 'multipledispatch.dispatch', 'dispatch', (['str', 'object'], {}), '(str, object)\n', (9290, 9303), False, 'from multipledispatch import dispatch\n'), ((9542, 9571), 'multipledispatch.dispatch', 'dispatch', (['object', 'NodeWrapper'], {}), '(object, NodeWrapper)\n', (9550, 9571), False, 'from multipledispatch import dispatch\n'), ((10031, 10041), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (10039, 10041), False, 'from multipledispatch import dispatch\n'), ((10161, 10171), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (10169, 10171), False, 'from multipledispatch import dispatch\n'), ((10339, 10355), 'multipledispatch.dispatch', 'dispatch', (['object'], {}), '(object)\n', (10347, 10355), False, 'from multipledispatch import dispatch\n'), ((10695, 10705), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (10703, 10705), False, 'from multipledispatch import dispatch\n'), ((10879, 10892), 'multipledispatch.dispatch', 'dispatch', (['str'], {}), '(str)\n', (10887, 10892), False, 'from multipledispatch import dispatch\n'), ((11083, 11093), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (11091, 11093), False, 'from multipledispatch import dispatch\n'), ((11276, 11286), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (11284, 11286), False, 'from multipledispatch import dispatch\n'), ((11701, 11711), 'multipledispatch.dispatch', 'dispatch', ([], {}), '()\n', (11709, 11711), False, 'from multipledispatch import dispatch\n'), ((12178, 12199), 'multipledispatch.dispatch', 'dispatch', (['NodeWrapper'], {}), '(NodeWrapper)\n', (12186, 12199), False, 'from multipledispatch import dispatch\n'), ((8108, 8121), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8119, 8121), False, 'from collections import OrderedDict\n')] |
# package imports
import dash
import dash_bootstrap_components as dbc
from flask_caching import Cache
import os
# local imports
from plotting.layout.layout import layout
cwd = os.getcwd()
assets_path = os.path.join(
cwd, 'src', 'plotting', 'assets'
)
# create app
app = dash.Dash(
__name__,
external_stylesheets=[dbc.icons.FONT_AWESOME],
suppress_callback_exceptions=True,
title='CmyPlot',
assets_folder=assets_path
)
# set up cache
cache = Cache(app.server, config={
'CACHE_TYPE': 'filesystem',
'CACHE_DIR': 'cache-directory'
})
# set initial layout
app.layout = layout
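# Hedged example (not in the original module): with this cache object, expensive data
# loads inside callbacks are typically wrapped with flask_caching's memoize decorator:
#
#   @cache.memoize(timeout=300)
#   def load_data(path):
#       ...
#
# and the app would be started locally with app.run_server(debug=True)
# (or app.run in newer Dash releases).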
| [
"flask_caching.Cache",
"os.path.join",
"dash.Dash",
"os.getcwd"
] | [((178, 189), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (187, 189), False, 'import os\n'), ((204, 250), 'os.path.join', 'os.path.join', (['cwd', '"""src"""', '"""plotting"""', '"""assets"""'], {}), "(cwd, 'src', 'plotting', 'assets')\n", (216, 250), False, 'import os\n'), ((277, 431), 'dash.Dash', 'dash.Dash', (['__name__'], {'external_stylesheets': '[dbc.icons.FONT_AWESOME]', 'suppress_callback_exceptions': '(True)', 'title': '"""CmyPlot"""', 'assets_folder': 'assets_path'}), "(__name__, external_stylesheets=[dbc.icons.FONT_AWESOME],\n suppress_callback_exceptions=True, title='CmyPlot', assets_folder=\n assets_path)\n", (286, 431), False, 'import dash\n'), ((469, 559), 'flask_caching.Cache', 'Cache', (['app.server'], {'config': "{'CACHE_TYPE': 'filesystem', 'CACHE_DIR': 'cache-directory'}"}), "(app.server, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR':\n 'cache-directory'})\n", (474, 559), False, 'from flask_caching import Cache\n')] |
from .. import headers, user_lang
from . import pre_check
import requests
import json
lolicon_token = pre_check('lolicon_token', False)
def loli_img(keyword: str = ''):
try:
res = requests.get(
'https://api.lolicon.app/setu/', headers=headers,
params={
'apikey': lolicon_token if lolicon_token else '',
'r18': '2',
'keyword': keyword,
'num': 1,
'proxy': 'disable'
}
)
except Exception as e:
return False, repr(e), False
else:
if res.status_code != requests.codes.ok:
return False, ("Get Data Failed" if user_lang != 'zh' else "获取图源数据失败"), False
data = json.loads(res.text)
return data['code'] == 0, data['msg'], data['data']
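# Usage sketch: loli_img returns a (success, message, payload) tuple, where payload is
# the "data" field of the JSON response (or False when the request itself failed), e.g.
#   ok, msg, images = loli_img('landscape')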
def magnet2torrent(hash: str):
try:
from ..NetTools import get_fileinfo
from ..NetTools.NormalDL import normal_dl
infos = get_fileinfo('https://m2t.lolicon.app/m/' + hash)
normal_dl(infos[0], set_name=f'{infos[-1].headers["torrent-name"]}.torrent')
return True, 'Success'
except Exception as e:
return False, repr(e)
| [
"json.loads",
"requests.get"
] | [((195, 387), 'requests.get', 'requests.get', (['"""https://api.lolicon.app/setu/"""'], {'headers': 'headers', 'params': "{'apikey': lolicon_token if lolicon_token else '', 'r18': '2', 'keyword':\n keyword, 'num': 1, 'proxy': 'disable'}"}), "('https://api.lolicon.app/setu/', headers=headers, params={\n 'apikey': lolicon_token if lolicon_token else '', 'r18': '2', 'keyword':\n keyword, 'num': 1, 'proxy': 'disable'})\n", (207, 387), False, 'import requests\n'), ((735, 755), 'json.loads', 'json.loads', (['res.text'], {}), '(res.text)\n', (745, 755), False, 'import json\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from abc import abstractmethod
from dataclasses import dataclass
from plc4py.api.PlcConnection import PlcConnection
from plc4py.api.authentication.PlcAuthentication import PlcAuthentication
from plc4py.api.exceptions.exceptions import PlcNotImplementedException
from plc4py.api.messages.PlcDiscovery import PlcDiscoveryRequestBuilder
@dataclass
class PlcDriverMetaData:
"""
Information about driver capabilities
"""
"""Indicates that the driver supports discovery"""
can_discover: bool = False
class PlcDriver:
"""
General interface defining the minimal methods required for adding a new type of driver to the PLC4PY system.
<b>Note that each driver has to add a setuptools entrypoint as plc4x.driver in order to be loaded by pluggy</b>
"""
def __init__(self):
self.protocol_code: str
self.protocol_name: str
@property
def metadata(self):
return PlcDriverMetaData()
@abstractmethod
def get_connection(
self, url: str, authentication: PlcAuthentication = PlcAuthentication()
) -> PlcConnection:
"""
Connects to a PLC using the given plc connection string.
:param url: plc connection string
:param authentication: authentication credentials.
:return PlcConnection: PLC Connection object
"""
pass
def discovery_request_builder(self) -> PlcDiscoveryRequestBuilder:
"""
Discovery Request Builder aids in generating a discovery request for this protocol
:return builder: Discovery request builder
"""
raise PlcNotImplementedException(f"Not implemented for {self.protocol_name}")
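# Illustrative registration sketch (hypothetical project metadata, not part of this
# module): as noted in the class docstring, a concrete driver is exposed to pluggy via
# a setuptools entry point in the "plc4x.driver" group, e.g. in setup.cfg:
#
#   [options.entry_points]
#   plc4x.driver =
#       modbus = plc4py.drivers.modbus:ModbusDriver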
| [
"plc4py.api.exceptions.exceptions.PlcNotImplementedException",
"plc4py.api.authentication.PlcAuthentication.PlcAuthentication"
] | [((1844, 1863), 'plc4py.api.authentication.PlcAuthentication.PlcAuthentication', 'PlcAuthentication', ([], {}), '()\n', (1861, 1863), False, 'from plc4py.api.authentication.PlcAuthentication import PlcAuthentication\n'), ((2396, 2467), 'plc4py.api.exceptions.exceptions.PlcNotImplementedException', 'PlcNotImplementedException', (['f"""Not implemented for {self.protocol_name}"""'], {}), "(f'Not implemented for {self.protocol_name}')\n", (2422, 2467), False, 'from plc4py.api.exceptions.exceptions import PlcNotImplementedException\n')] |
# Copyright <NAME> 2019
import requests
import time
import database_interface as db
def message_group(msg, group_id):
# GroupMe limits the number of characters to post at ~1000, so if we exceed 990 chars, the message
# needs to be broken up
i = 0
messages = []
while (i < len(msg)):
messages.append(msg[i:i + 990])
i += 990
# Identify the id to send the query to
bot_id = db.execute_query('SELECT bot_id FROM dining.groups WHERE group_id = %s', values=group_id, results=True)
if len(bot_id) > 0:
bot_id = bot_id[0][0]
if len(messages) == 1:
sleep_time = 0
else:
sleep_time = 0.5
for message in messages:
data = {
"bot_id": bot_id,
"text": message
}
response = requests.post("https://api.groupme.com/v3/bots/post", data=data)
if response.status_code < 400:
print("Bot successfully posted message!")
else:
print("Error, message wasn't sent")
time.sleep(sleep_time)
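# Example (hypothetical group id): a 2000-character message is split into three posts
# of at most 990 characters each, sent 0.5 seconds apart.
#   message_group("x" * 2000, "12345678")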
| [
"requests.post",
"time.sleep",
"database_interface.execute_query"
] | [((419, 526), 'database_interface.execute_query', 'db.execute_query', (['"""SELECT bot_id FROM dining.groups WHERE group_id = %s"""'], {'values': 'group_id', 'results': '(True)'}), "('SELECT bot_id FROM dining.groups WHERE group_id = %s',\n values=group_id, results=True)\n", (435, 526), True, 'import database_interface as db\n'), ((797, 861), 'requests.post', 'requests.post', (['"""https://api.groupme.com/v3/bots/post"""'], {'data': 'data'}), "('https://api.groupme.com/v3/bots/post', data=data)\n", (810, 861), False, 'import requests\n'), ((1025, 1047), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (1035, 1047), False, 'import time\n')] |
from pyaudiogame.keymap import KeyMap
# The default keymap. If there is no quit mapping, then you will need to go to the command prompt and hit ctrl+c to exit the window.
# the key mapping object
global_keymap = KeyMap([
{'key': 'f4', 'mods': ['alt'], 'event': 'quit'},
{'key': 'escape', 'event':'quit'}
])
from pyaudiogame.app import App
from pyaudiogame.app import event_queue
from pyaudiogame.speech import speak
import pyaudiogame.storage | [
"pyaudiogame.keymap.KeyMap"
] | [((213, 310), 'pyaudiogame.keymap.KeyMap', 'KeyMap', (["[{'key': 'f4', 'mods': ['alt'], 'event': 'quit'}, {'key': 'escape', 'event':\n 'quit'}]"], {}), "([{'key': 'f4', 'mods': ['alt'], 'event': 'quit'}, {'key': 'escape',\n 'event': 'quit'}])\n", (219, 310), False, 'from pyaudiogame.keymap import KeyMap\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 27 09:25:38 2019
@author: amandaash
"""
import numpy as np
import matplotlib.pyplot as plt
import numpy.random as rand
import math
def x2(x):
y = x**2
return y
def sinx(x):
y = np.sin(x)
return y
def g(x):
y = 10*(np.cos(0.25*x))**3
return y
def probability_dist_integral(function, N, a, b):
accepted= []
rejected = []
#A_square = (np.abs(a) + np.abs(b))*(np.abs(function(a)) + np.abs(function(b)))
A_square = (np.abs(b-a))*(np.abs(function(b)-function(a)))
print(A_square)
x_sample = rand.uniform(a, b, N)
y_sample = rand.uniform(np.min(function(x_sample)), np.max(function(x_sample)), N)
points = np.column_stack((x_sample, y_sample))
for coordinate in points:
if coordinate[1] > 0:
if coordinate[1] > 0 and coordinate[1] <= function(coordinate[0]):
accepted.append(coordinate)
else:
rejected.append(coordinate)
else:
if coordinate[1] < 0 and coordinate[1] >= function(coordinate[0]):
accepted.append(coordinate)
else:
rejected.append(coordinate)
N_accepted = len(accepted)
accepted = np.array(accepted)
rejected = np.array(rejected)
plt.plot(accepted[:,0], accepted[:,1], '.')
plt.plot(rejected[:,0], rejected[:,1], '.')
plt.show()
numeric_area = (N_accepted/N)*A_square
return accepted, rejected, numeric_area
plt.title('$x^2$')
accepted1, rejected1, area1 = probability_dist_integral(x2, 100000, 0, 10)
print('I(x^2)~{0}'.format(area1))
plt.title('sin(x)')
accepted2, rejected2, area2 = probability_dist_integral(sinx, 100000, 0, 2*np.pi)
print('I(sin(x))~{0}'.format(area2))
plt.title('$(10*cos^2(\\frac{x}{4}))^3$')
accepted3, rejected3, area3 = probability_dist_integral(g, 100000, 1, 50)
print('I((10*cos^2(0.25*x))^3)~{0}'.format(area3))
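# Cross-check sketch (not part of the original exercise): the same integrals can also be
# estimated with the mean-value Monte Carlo method, I ~ (b - a) * mean(f(x)), x ~ U(a, b).
def mean_value_integral(function, N, a, b):
    x_sample = rand.uniform(a, b, N)
    return (b - a)*np.mean(function(x_sample))

print('mean-value check: I(x^2) ~ {0}'.format(mean_value_integral(x2, 100000, 0, 10)))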
| [
"numpy.abs",
"matplotlib.pyplot.plot",
"numpy.column_stack",
"numpy.array",
"numpy.cos",
"numpy.random.uniform",
"numpy.sin",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] | [((1536, 1554), 'matplotlib.pyplot.title', 'plt.title', (['"""$x^2$"""'], {}), "('$x^2$')\n", (1545, 1554), True, 'import matplotlib.pyplot as plt\n'), ((1664, 1683), 'matplotlib.pyplot.title', 'plt.title', (['"""sin(x)"""'], {}), "('sin(x)')\n", (1673, 1683), True, 'import matplotlib.pyplot as plt\n'), ((1803, 1844), 'matplotlib.pyplot.title', 'plt.title', (['"""$(10*cos^2(\\\\frac{x}{4}))^3$"""'], {}), "('$(10*cos^2(\\\\frac{x}{4}))^3$')\n", (1812, 1844), True, 'import matplotlib.pyplot as plt\n'), ((263, 272), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (269, 272), True, 'import numpy as np\n'), ((615, 636), 'numpy.random.uniform', 'rand.uniform', (['a', 'b', 'N'], {}), '(a, b, N)\n', (627, 636), True, 'import numpy.random as rand\n'), ((737, 774), 'numpy.column_stack', 'np.column_stack', (['(x_sample, y_sample)'], {}), '((x_sample, y_sample))\n', (752, 774), True, 'import numpy as np\n'), ((1278, 1296), 'numpy.array', 'np.array', (['accepted'], {}), '(accepted)\n', (1286, 1296), True, 'import numpy as np\n'), ((1312, 1330), 'numpy.array', 'np.array', (['rejected'], {}), '(rejected)\n', (1320, 1330), True, 'import numpy as np\n'), ((1335, 1380), 'matplotlib.pyplot.plot', 'plt.plot', (['accepted[:, 0]', 'accepted[:, 1]', '"""."""'], {}), "(accepted[:, 0], accepted[:, 1], '.')\n", (1343, 1380), True, 'import matplotlib.pyplot as plt\n'), ((1383, 1428), 'matplotlib.pyplot.plot', 'plt.plot', (['rejected[:, 0]', 'rejected[:, 1]', '"""."""'], {}), "(rejected[:, 0], rejected[:, 1], '.')\n", (1391, 1428), True, 'import matplotlib.pyplot as plt\n'), ((1431, 1441), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1439, 1441), True, 'import matplotlib.pyplot as plt\n'), ((533, 546), 'numpy.abs', 'np.abs', (['(b - a)'], {}), '(b - a)\n', (539, 546), True, 'import numpy as np\n'), ((309, 325), 'numpy.cos', 'np.cos', (['(0.25 * x)'], {}), '(0.25 * x)\n', (315, 325), True, 'import numpy as np\n')] |
"""View for bot usage stats."""
from concurrent.futures.thread import ThreadPoolExecutor
from django.utils.translation import gettext_lazy as _
from django.views import View
from django.views.generic.base import TemplateResponseMixin
from django.utils.timezone import get_current_timezone
from JellyBot.views import render_template
from JellyBot.utils import get_root_oid
from JellyBot.components.mixin import ChannelOidRequiredMixin
from extutils import safe_cast
from mongodb.factory import BotFeatureUsageDataManager
from mongodb.helper import BotUsageStatsDataProcessor
KEY_HR_FLOW = "usage_hr_data"
KEY_TOTAL_USAGE = "total_usage"
KEY_MEMBER_USAGE = "member_usage"
def _hr_flow(channel_oid, hours_within, tzinfo):
return KEY_HR_FLOW, BotFeatureUsageDataManager.get_channel_hourly_avg(
channel_oid, hours_within=hours_within, incl_not_used=True, tzinfo_=tzinfo)
def _total_usage(channel_oid, hours_within):
return KEY_TOTAL_USAGE, BotFeatureUsageDataManager.get_channel_usage(
channel_oid, hours_within=hours_within, incl_not_used=False)
def _member_usage(channel_oid, hours_within):
return KEY_MEMBER_USAGE, BotUsageStatsDataProcessor.get_per_user_bot_usage(
channel_oid, hours_within=hours_within)
def get_bot_stats_data_package(channel_data, hours_within, tzinfo) -> dict:
"""
Get the bot usage stats asynchronously and return these as a package.
:param channel_data: channel model of the bot stats
:param hours_within: time range to get the stats
:param tzinfo: timezone info to be used when getting the stats
:return: a `dict` containing the bot stats
"""
ret = {}
with ThreadPoolExecutor(max_workers=4, thread_name_prefix="BotStats") as executor:
futures = [executor.submit(_hr_flow, channel_data.id, hours_within, tzinfo),
executor.submit(_total_usage, channel_data.id, hours_within),
executor.submit(_member_usage, channel_data, hours_within)]
for completed in futures:
key, result = completed.result()
ret[key] = result
return ret
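# The returned package is keyed by the constants above, i.e. roughly:
#   {"usage_hr_data": <hourly average result>,
#    "total_usage": <feature usage result>,
#    "member_usage": <per-member usage result>}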
class ChannelBotUsageStatsView(ChannelOidRequiredMixin, TemplateResponseMixin, View):
"""View of the page to see the bot usage stats."""
# noinspection PyUnusedLocal, DuplicatedCode
def get(self, request, *args, **kwargs):
"""
Page to view the bot usage stats.
There is an optional keyword ``hours_within`` for limiting the time range of the bot usage stats.
"""
channel_data = self.get_channel_data(*args, **kwargs)
hours_within = safe_cast(request.GET.get("hours_within"), int)
# channel_members = ProfileManager.get_channel_members(channel_oid) # Reserved for per member analysis
channel_name = channel_data.model.get_channel_name(get_root_oid(request))
pkg = get_bot_stats_data_package(channel_data.model, hours_within, get_current_timezone())
ctxt = {
"channel_name": channel_name,
"channel_data": channel_data.model,
"hr_range": hours_within or pkg[KEY_HR_FLOW].hr_range
}
ctxt.update(pkg)
return render_template(
self.request, _("Bot Usage Stats - {}").format(channel_name),
"info/botstats/main.html", ctxt, nav_param=kwargs)
| [
"mongodb.factory.BotFeatureUsageDataManager.get_channel_hourly_avg",
"mongodb.factory.BotFeatureUsageDataManager.get_channel_usage",
"django.utils.translation.gettext_lazy",
"django.utils.timezone.get_current_timezone",
"mongodb.helper.BotUsageStatsDataProcessor.get_per_user_bot_usage",
"JellyBot.utils.get_root_oid",
"concurrent.futures.thread.ThreadPoolExecutor"
] | [((748, 878), 'mongodb.factory.BotFeatureUsageDataManager.get_channel_hourly_avg', 'BotFeatureUsageDataManager.get_channel_hourly_avg', (['channel_oid'], {'hours_within': 'hours_within', 'incl_not_used': '(True)', 'tzinfo_': 'tzinfo'}), '(channel_oid, hours_within\n =hours_within, incl_not_used=True, tzinfo_=tzinfo)\n', (797, 878), False, 'from mongodb.factory import BotFeatureUsageDataManager\n'), ((958, 1068), 'mongodb.factory.BotFeatureUsageDataManager.get_channel_usage', 'BotFeatureUsageDataManager.get_channel_usage', (['channel_oid'], {'hours_within': 'hours_within', 'incl_not_used': '(False)'}), '(channel_oid, hours_within=\n hours_within, incl_not_used=False)\n', (1002, 1068), False, 'from mongodb.factory import BotFeatureUsageDataManager\n'), ((1150, 1244), 'mongodb.helper.BotUsageStatsDataProcessor.get_per_user_bot_usage', 'BotUsageStatsDataProcessor.get_per_user_bot_usage', (['channel_oid'], {'hours_within': 'hours_within'}), '(channel_oid, hours_within\n =hours_within)\n', (1199, 1244), False, 'from mongodb.helper import BotUsageStatsDataProcessor\n'), ((1664, 1728), 'concurrent.futures.thread.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(4)', 'thread_name_prefix': '"""BotStats"""'}), "(max_workers=4, thread_name_prefix='BotStats')\n", (1682, 1728), False, 'from concurrent.futures.thread import ThreadPoolExecutor\n'), ((2831, 2852), 'JellyBot.utils.get_root_oid', 'get_root_oid', (['request'], {}), '(request)\n', (2843, 2852), False, 'from JellyBot.utils import get_root_oid\n'), ((2930, 2952), 'django.utils.timezone.get_current_timezone', 'get_current_timezone', ([], {}), '()\n', (2950, 2952), False, 'from django.utils.timezone import get_current_timezone\n'), ((3222, 3247), 'django.utils.translation.gettext_lazy', '_', (['"""Bot Usage Stats - {}"""'], {}), "('Bot Usage Stats - {}')\n", (3223, 3247), True, 'from django.utils.translation import gettext_lazy as _\n')] |
import sys
sys.setrecursionlimit(50000)
from collections import deque
# Tree Node
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
# Function to Build Tree
def buildTree(s):
#Corner Case
if(len(s)==0 or s[0]=="N"):
return None
# Creating list of strings from input
# string after spliting by space
ip=list(map(str,s.split()))
# Create the root of the tree
root=Node(int(ip[0]))
size=0
q=deque()
# Push the root to the queue
q.append(root)
size=size+1
# Starting from the second element
i=1
while(size>0 and i<len(ip)):
# Get and remove the front of the queue
currNode=q[0]
q.popleft()
size=size-1
# Get the current node's value from the string
currVal=ip[i]
# If the left child is not null
if(currVal!="N"):
# Create the left child for the current node
currNode.left=Node(int(currVal))
# Push it to the queue
q.append(currNode.left)
size=size+1
# For the right child
i=i+1
if(i>=len(ip)):
break
currVal=ip[i]
# If the right child is not null
if(currVal!="N"):
# Create the right child for the current node
currNode.right=Node(int(currVal))
# Push it to the queue
q.append(currNode.right)
size=size+1
i=i+1
return root
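# Input format: a space-separated level-order traversal where "N" marks a missing child.
# For example (illustrative input, not from the original), "1 2 3 N N 4" builds:
#         1
#        / \
#       2   3
#          /
#         4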
class Solution:
# your task is to complete this function
def longestConsecutive(self, root):
self.maximum = 0
if root.left is not None :
self.recurse(root.left, (2 if root.left.data == (root.data + 1) else 1))
if root.right is not None :
self.recurse(root.right, (2 if root.right.data == (root.data + 1) else 1))
ans = self.maximum
if ans < 2 :
return -1
return ans
def recurse(self, root, value) :
self.maximum = max(self.maximum, value)
if root.left is not None :
self.recurse(root.left, (value + 1 if root.left.data == (root.data + 1) else 1))
if root.right is not None :
self.recurse(root.right, (value + 1 if root.right.data == (root.data + 1) else 1))
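# Illustrative expected outputs (not from the original): "1 2 N 3" (values 1 -> 2 -> 3
# down the left spine) gives 3, "1 2 3" gives 2, and "10 9 8" has no consecutive
# parent->child pair, so the function returns -1.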
if __name__=="__main__":
t=int(input())
for _ in range(0,t):
s=input()
root=buildTree(s)
print(Solution().longestConsecutive(root))
| [
"sys.setrecursionlimit",
"collections.deque"
] | [((11, 39), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(50000)'], {}), '(50000)\n', (32, 39), False, 'import sys\n'), ((551, 558), 'collections.deque', 'deque', ([], {}), '()\n', (556, 558), False, 'from collections import deque\n')] |
# Generated by Django 2.1.2 on 2019-01-06 11:25
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('formprocessor', '0003_delete_question'),
]
operations = [
migrations.CreateModel(
name='SavedFormData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('form_data', models.TextField()),
],
),
]
| [
"django.db.models.AutoField",
"django.db.models.TextField"
] | [((360, 453), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (376, 453), False, 'from django.db import migrations, models\n'), ((482, 500), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (498, 500), False, 'from django.db import migrations, models\n')] |
from talon import Context, actions, ui, Module, app
# from user.knausj_talon.code.snippet_watcher import snippet_watcher
import os
ctx = Context()
ctx.matches = r"""
mode: user.java
mode: command
and code.language: java
"""
# short name -> ide clip name
ctx.lists["user.snippets"] = {
"abstract":"ab",
"assert":"as",
"assert true":"at",
"assert false":"af",
"assert equals":"ae",
"array list":"d.al",
"break":"br",
"block comment":"/*",
"case":"cs",
"catch":"ca",
"class":"cl",
"class extends":"cle",
"full class":"clc",
"constant":"co",
"constructor":"cn",
"constant string":"cos",
"default":"de",
"define":"def",
"else if":"elif",
"else":"el",
"extends":"ext",
"final":"fi",
"field":"f",
"fine print":"printf",
"for":"for",
"for each":"fore",
"for counter":"fori",
"getter":"get",
"get her center":"gs",
"if":"if",
"implements":"imp",
"import":"im",
"interface":"in",
"java beans":"j.b",
"java input":"j.i",
"java math":"j.m",
"java net":"j.n",
"java utility":"j.u",
"list":"list",
"method main":"main",
"map":"map",
"method":"m",
"method throws":"mt",
"method with doc":"md",
"object":"o",
"primitive": "i",
"package":"pa",
"print":"p",
"print array":"printlna",
"print error":"serr",
"print exception":"errf",
"print line":"pl",
"private":"pr",
"protected":"po",
"public class":"pcl",
"public":"pu",
"return":"re",
"setter":"set",
"set variable":"cc",
"singleton":"singlet",
"stack":"d.st",
"set":"set",
"string":"str",
"static":"st",
"switch":"sw",
"test throws":"tt",
"test":"t",
"test case":"tc",
"throw":"th",
"throws":"ths",
"try finally":"tryf",
"try":"try",
"variable":"v",
"while":"wh",
}
def update_list(watch_list):
ctx.lists["user.snippets"] = watch_list
| [
"talon.Context"
] | [((141, 150), 'talon.Context', 'Context', ([], {}), '()\n', (148, 150), False, 'from talon import Context, actions, ui, Module, app\n')] |
# Outline algorithm
import pygame
opx = 2 # outline size in pixels
# Return the list of points on a circle of radius r, using Bresenham's circle algorithm
def _circlepoints(r):
r = int(round(r))
x, y, e = r, 0, 1 - r
points = []
while x >= y:
points.append((x, y))
y += 1
if e < 0:
e += 2 * y - 1
else:
x -= 1
e += 2 * (y - x) - 1
points += [(y, x) for x, y in points if x > y]
points += [(-x, y) for x, y in points if x]
points += [(x, -y) for x, y in points if y]
points.sort()
return points
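# For example, _circlepoints(1) == [(-1, 0), (0, -1), (0, 1), (1, 0)]; the outline below is
# produced by blitting the outline-colour text once at every offset returned for radius opx.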
screen = pygame.display.set_mode((1000, 600))
screen.fill((100, 100, 200))
pygame.font.init()
font = pygame.font.Font(None, 60)
def getsurf(color):
return font.render("hello", True, color).convert_alpha()
def getoutlinesurf(blendmode = None):
surf = pygame.Surface((300, 100)).convert_alpha()
surf.fill((0, 0, 0, 0))
for dx, dy in _circlepoints(opx):
if blendmode is not None:
surf.blit(osurf, (dx + opx, dy + opx), None, blendmode)
else:
surf.blit(osurf, (dx + opx, dy + opx))
surf.blit(tsurf, (opx, opx))
return surf
osurf = getsurf((0, 0, 0, 0))
tsurf = getsurf((255, 255, 255, 0))
for offset, blendmax in [(0, False), (300, True)]:
surf = pygame.Surface((300, 100)).convert_alpha()
surf.fill((0, 0, 0, 0))
for dx, dy in _circlepoints(opx):
if blendmax:
surf.blit(osurf, (dx + opx, dy + opx), None, pygame.BLEND_RGBA_MAX)
else:
surf.blit(osurf, (dx + opx, dy + opx))
surf.blit(tsurf, (opx, opx))
screen.blit(pygame.transform.scale(surf, (300*6, 100*6)), (0, offset))
pygame.display.flip()
while not any(event.type in (pygame.KEYDOWN, pygame.QUIT) for event in pygame.event.get()):
pass
| [
"pygame.Surface",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.font.init",
"pygame.font.Font",
"pygame.transform.scale"
] | [((533, 569), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(1000, 600)'], {}), '((1000, 600))\n', (556, 569), False, 'import pygame\n'), ((599, 617), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (615, 617), False, 'import pygame\n'), ((625, 651), 'pygame.font.Font', 'pygame.font.Font', (['None', '(60)'], {}), '(None, 60)\n', (641, 651), False, 'import pygame\n'), ((1531, 1552), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (1550, 1552), False, 'import pygame\n'), ((1471, 1519), 'pygame.transform.scale', 'pygame.transform.scale', (['surf', '(300 * 6, 100 * 6)'], {}), '(surf, (300 * 6, 100 * 6))\n', (1493, 1519), False, 'import pygame\n'), ((778, 804), 'pygame.Surface', 'pygame.Surface', (['(300, 100)'], {}), '((300, 100))\n', (792, 804), False, 'import pygame\n'), ((1189, 1215), 'pygame.Surface', 'pygame.Surface', (['(300, 100)'], {}), '((300, 100))\n', (1203, 1215), False, 'import pygame\n'), ((1624, 1642), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1640, 1642), False, 'import pygame\n')] |
import os
import json
from behave import given, then, when
from helpers import (
aws_helper,
manifest_comparison_helper,
console_printer,
invoke_lambda,
)
@given("The manifest generation tables have been created")
def step_impl(context):
with open(
os.path.join(context.manifest_templates_local, "drop-table.sql"), "r"
) as f:
base_drop_query = f.read()
with open(
os.path.join(context.manifest_templates_local, "create-parquet-table.sql"), "r"
) as f:
base_create_parquet_query = f.read()
with open(
os.path.join(
context.manifest_templates_local, "create-missing-import-table.sql"
),
"r",
) as f:
base_create_missing_import_query = f.read()
with open(
os.path.join(
context.manifest_templates_local, "create-missing-export-table.sql"
),
"r",
) as f:
base_create_missing_export_query = f.read()
with open(
os.path.join(context.manifest_templates_local, "create-count-table.sql"), "r"
) as f:
base_create_count_query = f.read()
tables = [
[
context.manifest_missing_imports_table_name,
base_create_missing_import_query,
context.manifest_s3_input_parquet_location_missing_import,
],
[
context.manifest_missing_exports_table_name,
base_create_missing_export_query,
context.manifest_s3_input_parquet_location_missing_export,
],
[
context.manifest_counts_table_name,
base_create_count_query,
context.manifest_s3_input_parquet_location_counts,
],
[
context.manifest_mismatched_timestamps_table_name,
base_create_parquet_query,
context.manifest_s3_input_parquet_location_mismatched_timestamps,
],
]
for table_details in tables:
console_printer.print_info(
f"Dropping table named '{table_details[0]}' if exists"
)
drop_query = base_drop_query.replace("[table_name]", table_details[0])
aws_helper.execute_athena_query(
context.manifest_s3_output_location_templates, drop_query
)
console_printer.print_info(
f"Generating table named '{table_details[0]}' from S3 location of '{table_details[2]}'"
)
s3_location = (
table_details[2]
if table_details[2].endswith("/")
else f"{table_details[2]}/"
)
create_query = table_details[1].replace("[table_name]", table_details[0])
create_query = create_query.replace("[s3_input_location]", s3_location)
aws_helper.execute_athena_query(
context.manifest_s3_output_location_templates, create_query
)
@given("The manifest generation tables have been populated")
def step_impl(context):
glue_jobs = (
[context.manifest_etl_glue_job_name]
if "," not in context.manifest_etl_glue_job_name
else context.manifest_etl_glue_job_name.split(",")
)
for glue_job in glue_jobs:
if glue_job:
aws_helper.execute_manifest_glue_job(
glue_job,
context.manifest_cut_off_date_start_epoch,
context.manifest_cut_off_date_end_epoch,
context.manifest_margin_of_error_epoch,
context.manifest_snapshot_type,
context.manifest_import_type,
context.manifest_s3_input_location_import_prefix,
context.manifest_s3_input_location_export_prefix,
)
@when("I generate the manifest comparison queries of type '{query_type}'")
def step_impl(context, query_type):
context.manifest_queries = []
for query_file in os.listdir(
os.path.join(context.manifest_queries_local, query_type)
):
if os.path.splitext(query_file)[1] == ".json":
with open(
os.path.join(context.manifest_queries_local, query_type, query_file),
"r",
) as metadata_file:
metadata = json.loads(metadata_file.read())
with open(
os.path.join(
context.manifest_queries_local, query_type, metadata["query_file"]
),
"r",
) as query_sql_file:
base_query = query_sql_file.read()
query = base_query.replace(
"[parquet_table_name_missing_imports]",
context.manifest_missing_imports_table_name,
)
query = query.replace(
"[parquet_table_name_missing_exports]",
context.manifest_missing_exports_table_name,
)
query = query.replace(
"[parquet_table_name_counts]", context.manifest_counts_table_name
)
query = query.replace(
"[parquet_table_name_mismatched]",
context.manifest_mismatched_timestamps_table_name,
)
query = query.replace(
"[count_of_ids]", str(context.manifest_report_count_of_ids)
)
query = query.replace(
"[specific_id]", "521ee02f-6d75-42da-b02a-560b0bb7cbbc"
)
query = query.replace("[specific_timestamp]", "1585055547016")
query = query.replace(
"[distinct_default_database_collection_list_full]",
context.distinct_default_database_collection_list_full,
)
query = query.replace(
"[distinct_default_database_list]",
context.distinct_default_database_list_full,
)
context.manifest_queries.append([metadata, query])
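# Illustrative shape of one query metadata JSON file (field names taken from the lookups
# above and below; the values themselves are hypothetical):
#   {
#     "order": 1,
#     "enabled": true,
#     "query_type": "main",
#     "query_name": "Missing imports",
#     "query_description": "Ids present in the export but not in the import",
#     "query_file": "missing_imports.sql",
#     "results_file": "missing_imports.csv"
#   }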
@when(
"I run the manifest comparison queries of type '{query_type}' and upload the result to S3"
)
def step_impl(context, query_type):
context.manifest_query_results = []
context.failed_queries = []
for query_number in range(1, len(context.manifest_queries) + 1):
for manifest_query in context.manifest_queries:
if int(manifest_query[0]["order"]) == query_number:
if manifest_query[0]["enabled"] and (
manifest_query[0]["query_type"] == query_type
):
console_printer.print_info(
f"Running query with name of '{manifest_query[0]['query_name']}' "
+ f"and description of '{manifest_query[0]['query_description']}' "
+ f"and order of '{manifest_query[0]['order']}'"
)
try:
aws_helper.clear_session()
results_array = [
manifest_query[0],
aws_helper.execute_athena_query(
context.manifest_s3_output_location_queries,
manifest_query[1],
),
]
context.manifest_query_results.append(results_array)
except Exception as ex:
console_printer.print_warning_text(
f"Error occurred running query named '{manifest_query[0]['query_name']}': '{ex}'"
)
context.failed_queries.append(manifest_query[0]["query_name"])
else:
console_printer.print_info(
f"Not running query with name of '{manifest_query[0]['query_name']}' "
+ f"because 'enabled' value is set to '{manifest_query[0]['enabled']}'"
)
console_printer.print_info("All queries finished execution")
console_printer.print_info("Generating test result")
results_string = manifest_comparison_helper.generate_formatted_results(
context.manifest_query_results
)
console_printer.print_info(f"\n\n\n\n\n{results_string}\n\n\n\n\n")
results_file_name = f"{context.test_run_name}_results.txt"
results_file = os.path.join(context.temp_folder, results_file_name)
with open(results_file, "wt") as open_results_file:
open_results_file.write(console_printer.strip_formatting(results_string))
s3_uploaded_location_txt = os.path.join(
context.manifest_s3_output_prefix_results, results_file_name
)
aws_helper.upload_file_to_s3_and_wait_for_consistency(
results_file,
context.manifest_s3_bucket,
context.timeout,
s3_uploaded_location_txt,
)
console_printer.print_bold_text(
f"Uploaded text results file to S3 bucket with name of '{context.manifest_s3_bucket}' at location '{s3_uploaded_location_txt}'"
)
os.remove(results_file)
console_printer.print_info("Generating json result")
results_json = manifest_comparison_helper.generate_json_formatted_results(
context.manifest_query_results, context.test_run_name
)
json_file_name = f"{context.test_run_name}_results.json"
json_file = os.path.join(context.temp_folder, json_file_name)
with open(json_file, "w") as open_json_file:
json.dump(results_json, open_json_file, indent=4)
s3_uploaded_location_json = os.path.join(
context.manifest_s3_output_prefix_results, json_file_name
)
aws_helper.upload_file_to_s3_and_wait_for_consistency(
json_file,
context.manifest_s3_bucket,
context.timeout,
s3_uploaded_location_json,
)
console_printer.print_bold_text(
f"Uploaded json results file to S3 bucket with name of '{context.manifest_s3_bucket}' at location '{s3_uploaded_location_json}'"
)
os.remove(json_file)
if len(context.failed_queries) > 0:
raise AssertionError(
"The following queries failed to execute: "
+ ", ".join(context.failed_queries)
)
else:
console_printer.print_info(f"All queries executed successfully")
console_printer.print_info(f"Query execution step completed")
@then("The query results match the expected results for queries of type '{query_type}'")
def step_impl(context, query_type):
if context.manifest_verify_results != "true":
console_printer.print_info(
f"Not verifying results as 'context.manifest_verify_results' is set to '{context.manifest_verify_results}' rather than 'true'"
)
return
for manifest_query_result in context.manifest_query_results:
manifest_query_details = manifest_query_result[0]
manifest_query_result_file = os.path.join(
context.manifest_queries_local,
query_type,
manifest_query_details["results_file"],
)
if os.path.exists(manifest_query_result_file):
console_printer.print_info(
f"Verifying results for query with name of '{manifest_query_details['query_name']}' "
+ f"and description of '{manifest_query_details['query_description']}' "
+ f"from results file of '{manifest_query_details['results_file']}'"
)
with open(manifest_query_result_file, "r") as results_file_expected:
results_expected = results_file_expected.read()
results_actual = manifest_comparison_helper.generate_sql_verification_data(
manifest_query_result[1]
)
results_expected_array = results_expected.splitlines()
console_printer.print_info(f"Expected results: '{results_expected_array}'")
results_actual_array = results_actual.splitlines()
console_printer.print_info(f"Actual results: '{results_actual_array}'")
console_printer.print_info("Verifying results")
console_printer.print_info("Asserting results length")
assert len(results_expected_array) == len(results_actual_array)
console_printer.print_info("Asserting result rows")
assert all(
elem in results_expected.splitlines()
for elem in results_actual.splitlines()
)
else:
console_printer.print_info(
f"Not verifying results for query with name of '{manifest_query_details['query_name']}' "
+ f"and description of '{manifest_query_details['query_description']}' "
+ f"as no results file exists at '{manifest_query_details['results_file']}'"
)
@then("The query results are printed")
def step_impl(context):
for manifest_query_result in context.manifest_query_results:
manifest_query_details = manifest_query_result[0]
console_printer.print_info(
f"Printing results for query with name of '{manifest_query_details['query_name']}' "
+ f"and description of '{manifest_query_details['query_description']}'"
)
results_actual = manifest_comparison_helper.generate_sql_verification_data(
manifest_query_result[1]
)
results_actual_array = results_actual.splitlines()
console_printer.print_info(f"Actual results: '{results_actual_array}'")
@when("I start the kafka reconciliation")
def step_impl(context):
payload = {
"detail": {
"jobName": f"{context.test_run_name}",
"jobQueue": "dataworks-behavioural-framework",
"status": "SUCCEEDED",
"ignoreBatchChecks": "true",
}
}
payload_json = json.dumps(payload)
console_printer.print_info(f"Glue launcher lambda payload is: '{payload_json}'")
invoke_lambda.invoke_glue_launcher_lambda(payload_json)
console_printer.print_info(f"Kafka reconciliation started via Glue launcher lambda")
| [
"helpers.aws_helper.execute_manifest_glue_job",
"os.remove",
"os.path.exists",
"helpers.aws_helper.execute_athena_query",
"json.dumps",
"helpers.console_printer.print_info",
"helpers.aws_helper.upload_file_to_s3_and_wait_for_consistency",
"helpers.invoke_lambda.invoke_glue_launcher_lambda",
"helpers.manifest_comparison_helper.generate_sql_verification_data",
"helpers.console_printer.strip_formatting",
"helpers.aws_helper.clear_session",
"behave.given",
"helpers.console_printer.print_bold_text",
"os.path.splitext",
"helpers.manifest_comparison_helper.generate_formatted_results",
"helpers.manifest_comparison_helper.generate_json_formatted_results",
"os.path.join",
"helpers.console_printer.print_warning_text",
"json.dump"
] | [((162, 219), 'behave.given', 'given', (['"""The manifest generation tables have been created"""'], {}), "('The manifest generation tables have been created')\n", (167, 219), False, 'from behave import given\n'), ((2830, 2889), 'behave.given', 'given', (['"""The manifest generation tables have been populated"""'], {}), "('The manifest generation tables have been populated')\n", (2835, 2889), False, 'from behave import given\n'), ((7777, 7837), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""All queries finished execution"""'], {}), "('All queries finished execution')\n", (7803, 7837), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((7843, 7895), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""Generating test result"""'], {}), "('Generating test result')\n", (7869, 7895), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((7917, 8007), 'helpers.manifest_comparison_helper.generate_formatted_results', 'manifest_comparison_helper.generate_formatted_results', (['context.manifest_query_results'], {}), '(context.\n manifest_query_results)\n', (7970, 8007), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8021, 8082), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""\n\n\n\n\n{results_string}\n\n\n\n\n"""'], {}), '(f"""\n\n\n\n\n{results_string}\n\n\n\n\n""")\n', (8047, 8082), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8172, 8224), 'os.path.join', 'os.path.join', (['context.temp_folder', 'results_file_name'], {}), '(context.temp_folder, results_file_name)\n', (8184, 8224), False, 'import os\n'), ((8395, 8469), 'os.path.join', 'os.path.join', (['context.manifest_s3_output_prefix_results', 'results_file_name'], {}), '(context.manifest_s3_output_prefix_results, results_file_name)\n', (8407, 8469), False, 'import os\n'), ((8488, 8631), 'helpers.aws_helper.upload_file_to_s3_and_wait_for_consistency', 'aws_helper.upload_file_to_s3_and_wait_for_consistency', (['results_file', 'context.manifest_s3_bucket', 'context.timeout', 's3_uploaded_location_txt'], {}), '(results_file, context\n .manifest_s3_bucket, context.timeout, s3_uploaded_location_txt)\n', (8541, 8631), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8671, 8841), 'helpers.console_printer.print_bold_text', 'console_printer.print_bold_text', (['f"""Uploaded text results file to S3 bucket with name of \'{context.manifest_s3_bucket}\' at location \'{s3_uploaded_location_txt}\'"""'], {}), '(\n f"Uploaded text results file to S3 bucket with name of \'{context.manifest_s3_bucket}\' at location \'{s3_uploaded_location_txt}\'"\n )\n', (8702, 8841), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8851, 8874), 'os.remove', 'os.remove', (['results_file'], {}), '(results_file)\n', (8860, 8874), False, 'import os\n'), ((8880, 8932), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""Generating json result"""'], {}), "('Generating json result')\n", (8906, 8932), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8952, 9070), 'helpers.manifest_comparison_helper.generate_json_formatted_results', 'manifest_comparison_helper.generate_json_formatted_results', 
(['context.manifest_query_results', 'context.test_run_name'], {}), '(context.\n manifest_query_results, context.test_run_name)\n', (9010, 9070), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((9158, 9207), 'os.path.join', 'os.path.join', (['context.temp_folder', 'json_file_name'], {}), '(context.temp_folder, json_file_name)\n', (9170, 9207), False, 'import os\n'), ((9348, 9419), 'os.path.join', 'os.path.join', (['context.manifest_s3_output_prefix_results', 'json_file_name'], {}), '(context.manifest_s3_output_prefix_results, json_file_name)\n', (9360, 9419), False, 'import os\n'), ((9438, 9579), 'helpers.aws_helper.upload_file_to_s3_and_wait_for_consistency', 'aws_helper.upload_file_to_s3_and_wait_for_consistency', (['json_file', 'context.manifest_s3_bucket', 'context.timeout', 's3_uploaded_location_json'], {}), '(json_file, context.\n manifest_s3_bucket, context.timeout, s3_uploaded_location_json)\n', (9491, 9579), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((9619, 9790), 'helpers.console_printer.print_bold_text', 'console_printer.print_bold_text', (['f"""Uploaded json results file to S3 bucket with name of \'{context.manifest_s3_bucket}\' at location \'{s3_uploaded_location_json}\'"""'], {}), '(\n f"Uploaded json results file to S3 bucket with name of \'{context.manifest_s3_bucket}\' at location \'{s3_uploaded_location_json}\'"\n )\n', (9650, 9790), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((9800, 9820), 'os.remove', 'os.remove', (['json_file'], {}), '(json_file)\n', (9809, 9820), False, 'import os\n'), ((10094, 10155), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Query execution step completed"""'], {}), "(f'Query execution step completed')\n", (10120, 10155), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((13605, 13624), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (13615, 13624), False, 'import json\n'), ((13629, 13714), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Glue launcher lambda payload is: \'{payload_json}\'"""'], {}), '(f"Glue launcher lambda payload is: \'{payload_json}\'"\n )\n', (13655, 13714), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((13714, 13769), 'helpers.invoke_lambda.invoke_glue_launcher_lambda', 'invoke_lambda.invoke_glue_launcher_lambda', (['payload_json'], {}), '(payload_json)\n', (13755, 13769), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((13775, 13864), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Kafka reconciliation started via Glue launcher lambda"""'], {}), "(\n f'Kafka reconciliation started via Glue launcher lambda')\n", (13801, 13864), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((1938, 2025), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Dropping table named \'{table_details[0]}\' if exists"""'], {}), '(\n f"Dropping table named \'{table_details[0]}\' if exists")\n', (1964, 2025), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((2130, 2225), 'helpers.aws_helper.execute_athena_query', 'aws_helper.execute_athena_query', 
(['context.manifest_s3_output_location_templates', 'drop_query'], {}), '(context.\n manifest_s3_output_location_templates, drop_query)\n', (2161, 2225), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((2252, 2377), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Generating table named \'{table_details[0]}\' from S3 location of \'{table_details[2]}\'"""'], {}), '(\n f"Generating table named \'{table_details[0]}\' from S3 location of \'{table_details[2]}\'"\n )\n', (2278, 2377), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((2712, 2809), 'helpers.aws_helper.execute_athena_query', 'aws_helper.execute_athena_query', (['context.manifest_s3_output_location_templates', 'create_query'], {}), '(context.\n manifest_s3_output_location_templates, create_query)\n', (2743, 2809), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((3829, 3885), 'os.path.join', 'os.path.join', (['context.manifest_queries_local', 'query_type'], {}), '(context.manifest_queries_local, query_type)\n', (3841, 3885), False, 'import os\n'), ((9265, 9314), 'json.dump', 'json.dump', (['results_json', 'open_json_file'], {'indent': '(4)'}), '(results_json, open_json_file, indent=4)\n', (9274, 9314), False, 'import json\n'), ((10024, 10088), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""All queries executed successfully"""'], {}), "(f'All queries executed successfully')\n", (10050, 10088), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((10341, 10505), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Not verifying results as \'context.manifest_verify_results\' is set to \'{context.manifest_verify_results}\' rather than \'true\'"""'], {}), '(\n f"Not verifying results as \'context.manifest_verify_results\' is set to \'{context.manifest_verify_results}\' rather than \'true\'"\n )\n', (10367, 10505), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((10694, 10794), 'os.path.join', 'os.path.join', (['context.manifest_queries_local', 'query_type', "manifest_query_details['results_file']"], {}), "(context.manifest_queries_local, query_type,\n manifest_query_details['results_file'])\n", (10706, 10794), False, 'import os\n'), ((10850, 10892), 'os.path.exists', 'os.path.exists', (['manifest_query_result_file'], {}), '(manifest_query_result_file)\n', (10864, 10892), False, 'import os\n'), ((12788, 12982), 'helpers.console_printer.print_info', 'console_printer.print_info', (['(f"Printing results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n + f"and description of \'{manifest_query_details[\'query_description\']}\'")'], {}), '(\n f"Printing results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n + f"and description of \'{manifest_query_details[\'query_description\']}\'")\n', (12814, 12982), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((13033, 13121), 'helpers.manifest_comparison_helper.generate_sql_verification_data', 'manifest_comparison_helper.generate_sql_verification_data', (['manifest_query_result[1]'], {}), '(manifest_query_result\n [1])\n', (13090, 13121), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((13207, 
13278), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Actual results: \'{results_actual_array}\'"""'], {}), '(f"Actual results: \'{results_actual_array}\'")\n', (13233, 13278), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((267, 331), 'os.path.join', 'os.path.join', (['context.manifest_templates_local', '"""drop-table.sql"""'], {}), "(context.manifest_templates_local, 'drop-table.sql')\n", (279, 331), False, 'import os\n'), ((408, 482), 'os.path.join', 'os.path.join', (['context.manifest_templates_local', '"""create-parquet-table.sql"""'], {}), "(context.manifest_templates_local, 'create-parquet-table.sql')\n", (420, 482), False, 'import os\n'), ((569, 654), 'os.path.join', 'os.path.join', (['context.manifest_templates_local', '"""create-missing-import-table.sql"""'], {}), "(context.manifest_templates_local,\n 'create-missing-import-table.sql')\n", (581, 654), False, 'import os\n'), ((775, 860), 'os.path.join', 'os.path.join', (['context.manifest_templates_local', '"""create-missing-export-table.sql"""'], {}), "(context.manifest_templates_local,\n 'create-missing-export-table.sql')\n", (787, 860), False, 'import os\n'), ((981, 1053), 'os.path.join', 'os.path.join', (['context.manifest_templates_local', '"""create-count-table.sql"""'], {}), "(context.manifest_templates_local, 'create-count-table.sql')\n", (993, 1053), False, 'import os\n'), ((3164, 3520), 'helpers.aws_helper.execute_manifest_glue_job', 'aws_helper.execute_manifest_glue_job', (['glue_job', 'context.manifest_cut_off_date_start_epoch', 'context.manifest_cut_off_date_end_epoch', 'context.manifest_margin_of_error_epoch', 'context.manifest_snapshot_type', 'context.manifest_import_type', 'context.manifest_s3_input_location_import_prefix', 'context.manifest_s3_input_location_export_prefix'], {}), '(glue_job, context.\n manifest_cut_off_date_start_epoch, context.\n manifest_cut_off_date_end_epoch, context.manifest_margin_of_error_epoch,\n context.manifest_snapshot_type, context.manifest_import_type, context.\n manifest_s3_input_location_import_prefix, context.\n manifest_s3_input_location_export_prefix)\n', (3200, 3520), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((8313, 8361), 'helpers.console_printer.strip_formatting', 'console_printer.strip_formatting', (['results_string'], {}), '(results_string)\n', (8345, 8361), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((10906, 11179), 'helpers.console_printer.print_info', 'console_printer.print_info', (['(\n f"Verifying results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n +\n f"and description of \'{manifest_query_details[\'query_description\']}\' " +\n f"from results file of \'{manifest_query_details[\'results_file\']}\'")'], {}), '(\n f"Verifying results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n +\n f"and description of \'{manifest_query_details[\'query_description\']}\' " +\n f"from results file of \'{manifest_query_details[\'results_file\']}\'")\n', (10932, 11179), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((11400, 11488), 'helpers.manifest_comparison_helper.generate_sql_verification_data', 'manifest_comparison_helper.generate_sql_verification_data', (['manifest_query_result[1]'], {}), '(manifest_query_result\n [1])\n', (11457, 11488), False, 'from helpers import 
aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((11594, 11669), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Expected results: \'{results_expected_array}\'"""'], {}), '(f"Expected results: \'{results_expected_array}\'")\n', (11620, 11669), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((11746, 11817), 'helpers.console_printer.print_info', 'console_printer.print_info', (['f"""Actual results: \'{results_actual_array}\'"""'], {}), '(f"Actual results: \'{results_actual_array}\'")\n', (11772, 11817), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((11831, 11878), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""Verifying results"""'], {}), "('Verifying results')\n", (11857, 11878), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((11892, 11946), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""Asserting results length"""'], {}), "('Asserting results length')\n", (11918, 11946), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((12035, 12086), 'helpers.console_printer.print_info', 'console_printer.print_info', (['"""Asserting result rows"""'], {}), "('Asserting result rows')\n", (12061, 12086), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((12261, 12546), 'helpers.console_printer.print_info', 'console_printer.print_info', (['(\n f"Not verifying results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n +\n f"and description of \'{manifest_query_details[\'query_description\']}\' " +\n f"as no results file exists at \'{manifest_query_details[\'results_file\']}\'")'], {}), '(\n f"Not verifying results for query with name of \'{manifest_query_details[\'query_name\']}\' "\n +\n f"and description of \'{manifest_query_details[\'query_description\']}\' " +\n f"as no results file exists at \'{manifest_query_details[\'results_file\']}\'")\n', (12287, 12546), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((3904, 3932), 'os.path.splitext', 'os.path.splitext', (['query_file'], {}), '(query_file)\n', (3920, 3932), False, 'import os\n'), ((3987, 4055), 'os.path.join', 'os.path.join', (['context.manifest_queries_local', 'query_type', 'query_file'], {}), '(context.manifest_queries_local, query_type, query_file)\n', (3999, 4055), False, 'import os\n'), ((4210, 4295), 'os.path.join', 'os.path.join', (['context.manifest_queries_local', 'query_type', "metadata['query_file']"], {}), "(context.manifest_queries_local, query_type, metadata['query_file']\n )\n", (4222, 4295), False, 'import os\n'), ((6368, 6592), 'helpers.console_printer.print_info', 'console_printer.print_info', (['(f"Running query with name of \'{manifest_query[0][\'query_name\']}\' " +\n f"and description of \'{manifest_query[0][\'query_description\']}\' " +\n f"and order of \'{manifest_query[0][\'order\']}\'")'], {}), '(\n f"Running query with name of \'{manifest_query[0][\'query_name\']}\' " +\n f"and description of \'{manifest_query[0][\'query_description\']}\' " +\n f"and order of \'{manifest_query[0][\'order\']}\'")\n', (6394, 6592), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((7531, 7710), 
'helpers.console_printer.print_info', 'console_printer.print_info', (['(f"Not running query with name of \'{manifest_query[0][\'query_name\']}\' " +\n f"because \'enabled\' value is set to \'{manifest_query[0][\'enabled\']}\'")'], {}), '(\n f"Not running query with name of \'{manifest_query[0][\'query_name\']}\' " +\n f"because \'enabled\' value is set to \'{manifest_query[0][\'enabled\']}\'")\n', (7557, 7710), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((6723, 6749), 'helpers.aws_helper.clear_session', 'aws_helper.clear_session', ([], {}), '()\n', (6747, 6749), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((6867, 6966), 'helpers.aws_helper.execute_athena_query', 'aws_helper.execute_athena_query', (['context.manifest_s3_output_location_queries', 'manifest_query[1]'], {}), '(context.manifest_s3_output_location_queries,\n manifest_query[1])\n', (6898, 6966), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n'), ((7230, 7357), 'helpers.console_printer.print_warning_text', 'console_printer.print_warning_text', (['f"""Error occurred running query named \'{manifest_query[0][\'query_name\']}\': \'{ex}\'"""'], {}), '(\n f"Error occurred running query named \'{manifest_query[0][\'query_name\']}\': \'{ex}\'"\n )\n', (7264, 7357), False, 'from helpers import aws_helper, manifest_comparison_helper, console_printer, invoke_lambda\n')] |
import unittest
import gym
import numpy as np
from gym_go import govars
class TestGoEnvBasics(unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.env = gym.make('gym_go:go-v0', size=7, reward_method='real')
def setUp(self):
self.env.reset()
def test_state(self):
env = gym.make('gym_go:go-v0', size=7)
state = env.reset()
self.assertIsInstance(state, np.ndarray)
self.assertEqual(state.shape[0], govars.NUM_CHNLS)
env.close()
def test_board_sizes(self):
expected_sizes = [7, 13, 19]
for expec_size in expected_sizes:
env = gym.make('gym_go:go-v0', size=expec_size)
state = env.reset()
self.assertEqual(state.shape[1], expec_size)
self.assertEqual(state.shape[2], expec_size)
env.close()
def test_empty_board(self):
state = self.env.reset()
self.assertEqual(np.count_nonzero(state), 0)
def test_reset(self):
state, reward, done, info = self.env.step((0, 0))
self.assertEqual(np.count_nonzero(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]), 2)
self.assertEqual(np.count_nonzero(state), 51)
state = self.env.reset()
self.assertEqual(np.count_nonzero(state), 0)
def test_preserve_original_state(self):
state = self.env.reset()
original_state = np.copy(state)
self.env.gogame.next_state(state, 0)
assert (original_state == state).all()
def test_black_moves_first(self):
"""
Make a move at 0,0 and assert that a black piece was placed
:return:
"""
next_state, reward, done, info = self.env.step((0, 0))
self.assertEqual(next_state[govars.BLACK, 0, 0], 1)
self.assertEqual(next_state[govars.WHITE, 0, 0], 0)
def test_turns(self):
for i in range(7):
# For the first move at i == 0, black went so now it should be white's turn
state, reward, done, info = self.env.step((i, 0))
self.assertIn('turn', info)
self.assertEqual(info['turn'], 1 if i % 2 == 0 else 0)
def test_multiple_action_formats(self):
for _ in range(10):
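            # Action 49 is the pass move on the 7x7 board; any other flat action maps to a (row, col) pair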
action_1d = np.random.randint(50)
action_2d = None if action_1d == 49 else (action_1d // 7, action_1d % 7)
self.env.reset()
state_from_1d, _, _, _ = self.env.step(action_1d)
self.env.reset()
state_from_2d, _, _, _ = self.env.step(action_2d)
self.assertTrue((state_from_1d == state_from_2d).all())
def test_passing(self):
"""
None indicates pass
:return:
"""
# Pass on first move
state, reward, done, info = self.env.step(None)
# Expect empty board still
self.assertEqual(np.count_nonzero(state[[govars.BLACK, govars.WHITE]]), 0)
# Expect passing layer and turn layer channels to be all ones
self.assertEqual(np.count_nonzero(state), 98, state)
self.assertEqual(np.count_nonzero(state[govars.PASS_CHNL]), 49)
self.assertEqual(np.count_nonzero(state[govars.PASS_CHNL] == 1), 49)
self.assertIn('turn', info)
self.assertEqual(info['turn'], 1)
# Make a move
state, reward, done, info = self.env.step((0, 0))
# Expect the passing layer channel to be empty
self.assertEqual(np.count_nonzero(state), 2)
self.assertEqual(np.count_nonzero(state[govars.WHITE]), 1)
self.assertEqual(np.count_nonzero(state[govars.WHITE] == 1), 1)
self.assertEqual(np.count_nonzero(state[govars.PASS_CHNL]), 0)
# Pass on second move
self.env.reset()
state, reward, done, info = self.env.step((0, 0))
# Expect two pieces (one in the invalid channel)
# Plus turn layer is all ones
self.assertEqual(np.count_nonzero(state), 51, state)
self.assertEqual(np.count_nonzero(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]), 2, state)
self.assertIn('turn', info)
self.assertEqual(info['turn'], 1)
# Pass
state, reward, done, info = self.env.step(None)
# Expect two pieces (one in the invalid channel)
self.assertEqual(np.count_nonzero(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]), 2,
state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]])
self.assertIn('turn', info)
self.assertEqual(info['turn'], 0)
def test_game_ends(self):
state, reward, done, info = self.env.step(None)
self.assertFalse(done)
state, reward, done, info = self.env.step(None)
self.assertTrue(done)
self.env.reset()
state, reward, done, info = self.env.step((0, 0))
self.assertFalse(done)
state, reward, done, info = self.env.step(None)
self.assertFalse(done)
state, reward, done, info = self.env.step(None)
self.assertTrue(done)
def test_game_does_not_end_with_disjoint_passes(self):
state, reward, done, info = self.env.step(None)
self.assertFalse(done)
state, reward, done, info = self.env.step((0, 0))
self.assertFalse(done)
state, reward, done, info = self.env.step(None)
self.assertFalse(done)
def test_num_liberties(self):
env = gym.make('gym_go:go-v0', size=7)
steps = [(0, 0), (0, 1)]
libs = [(2, 0), (1, 2)]
env.reset()
for step, libs in zip(steps, libs):
state, _, _, _ = env.step(step)
blacklibs, whitelibs = env.gogame.num_liberties(state)
self.assertEqual(blacklibs, libs[0], state)
self.assertEqual(whitelibs, libs[1], state)
steps = [(2, 1), None, (1, 2), None, (2, 3), None, (3, 2), None]
libs = [(4, 0), (4, 0), (6, 0), (6, 0), (8, 0), (8, 0), (9, 0), (9, 0)]
env.reset()
for step, libs in zip(steps, libs):
state, _, _, _ = env.step(step)
blacklibs, whitelibs = env.gogame.num_liberties(state)
self.assertEqual(blacklibs, libs[0], state)
self.assertEqual(whitelibs, libs[1], state)
def test_komi(self):
env = gym.make('gym_go:go-v0', size=7, komi=2.5, reward_method='real')
# White win
_ = env.step(None)
state, reward, done, info = env.step(None)
self.assertEqual(-1, reward)
# White still win
env.reset()
_ = env.step(0)
_ = env.step(2)
_ = env.step(1)
_ = env.step(None)
state, reward, done, info = env.step(None)
self.assertEqual(-1, reward)
# Black win
env.reset()
_ = env.step(0)
_ = env.step(None)
_ = env.step(1)
_ = env.step(None)
_ = env.step(2)
_ = env.step(None)
state, reward, done, info = env.step(None)
self.assertEqual(1, reward)
env.close()
def test_children(self):
for canonical in [False, True]:
for _ in range(20):
action = self.env.uniform_random_action()
self.env.step(action)
state = self.env.state()
children = self.env.children(canonical, padded=True)
valid_moves = self.env.valid_moves()
for a in range(len(valid_moves)):
if valid_moves[a]:
child = self.env.gogame.next_state(state, a, canonical)
equal = children[a] == child
self.assertTrue(equal.all(), (canonical, np.argwhere(~equal)))
else:
self.assertTrue((children[a] == 0).all())
def test_real_reward(self):
env = gym.make('gym_go:go-v0', size=7, reward_method='real')
# In game
state, reward, done, info = env.step((0, 0))
self.assertEqual(reward, 0)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
# Win
state, reward, done, info = env.step(None)
self.assertEqual(reward, 1)
# Lose
env.reset()
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
state, reward, done, info = env.step((0, 0))
self.assertEqual(reward, 0)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
state, reward, done, info = env.step(None)
self.assertEqual(reward, -1)
# Tie
env.reset()
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
env.close()
def test_heuristic_reward(self):
env = gym.make('gym_go:go-v0', size=7, reward_method='heuristic')
# In game
state, reward, done, info = env.step((0, 0))
self.assertEqual(reward, 49)
state, reward, done, info = env.step((0, 1))
self.assertEqual(reward, 0)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 0)
state, reward, done, info = env.step((1, 0))
self.assertEqual(reward, -49)
# Lose
state, reward, done, info = env.step(None)
self.assertEqual(reward, -49)
state, reward, done, info = env.step(None)
self.assertEqual(reward, -49)
# Win
env.reset()
state, reward, done, info = env.step((0, 0))
self.assertEqual(reward, 49)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 49)
state, reward, done, info = env.step(None)
self.assertEqual(reward, 49)
env.close()
if __name__ == '__main__':
unittest.main()
| [
"numpy.copy",
"numpy.count_nonzero",
"numpy.random.randint",
"numpy.argwhere",
"unittest.main",
"gym.make"
] | [((9811, 9826), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9824, 9826), False, 'import unittest\n'), ((221, 275), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)', 'reward_method': '"""real"""'}), "('gym_go:go-v0', size=7, reward_method='real')\n", (229, 275), False, 'import gym\n'), ((364, 396), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)'}), "('gym_go:go-v0', size=7)\n", (372, 396), False, 'import gym\n'), ((1446, 1460), 'numpy.copy', 'np.copy', (['state'], {}), '(state)\n', (1453, 1460), True, 'import numpy as np\n'), ((5423, 5455), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)'}), "('gym_go:go-v0', size=7)\n", (5431, 5455), False, 'import gym\n'), ((6292, 6356), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)', 'komi': '(2.5)', 'reward_method': '"""real"""'}), "('gym_go:go-v0', size=7, komi=2.5, reward_method='real')\n", (6300, 6356), False, 'import gym\n'), ((7802, 7856), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)', 'reward_method': '"""real"""'}), "('gym_go:go-v0', size=7, reward_method='real')\n", (7810, 7856), False, 'import gym\n'), ((8825, 8884), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': '(7)', 'reward_method': '"""heuristic"""'}), "('gym_go:go-v0', size=7, reward_method='heuristic')\n", (8833, 8884), False, 'import gym\n'), ((685, 726), 'gym.make', 'gym.make', (['"""gym_go:go-v0"""'], {'size': 'expec_size'}), "('gym_go:go-v0', size=expec_size)\n", (693, 726), False, 'import gym\n'), ((989, 1012), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (1005, 1012), True, 'import numpy as np\n'), ((1127, 1198), 'numpy.count_nonzero', 'np.count_nonzero', (['state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]'], {}), '(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]])\n', (1143, 1198), True, 'import numpy as np\n'), ((1228, 1251), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (1244, 1251), True, 'import numpy as np\n'), ((1315, 1338), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (1331, 1338), True, 'import numpy as np\n'), ((2292, 2313), 'numpy.random.randint', 'np.random.randint', (['(50)'], {}), '(50)\n', (2309, 2313), True, 'import numpy as np\n'), ((2896, 2949), 'numpy.count_nonzero', 'np.count_nonzero', (['state[[govars.BLACK, govars.WHITE]]'], {}), '(state[[govars.BLACK, govars.WHITE]])\n', (2912, 2949), True, 'import numpy as np\n'), ((3049, 3072), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (3065, 3072), True, 'import numpy as np\n'), ((3110, 3151), 'numpy.count_nonzero', 'np.count_nonzero', (['state[govars.PASS_CHNL]'], {}), '(state[govars.PASS_CHNL])\n', (3126, 3151), True, 'import numpy as np\n'), ((3182, 3228), 'numpy.count_nonzero', 'np.count_nonzero', (['(state[govars.PASS_CHNL] == 1)'], {}), '(state[govars.PASS_CHNL] == 1)\n', (3198, 3228), True, 'import numpy as np\n'), ((3475, 3498), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (3491, 3498), True, 'import numpy as np\n'), ((3528, 3565), 'numpy.count_nonzero', 'np.count_nonzero', (['state[govars.WHITE]'], {}), '(state[govars.WHITE])\n', (3544, 3565), True, 'import numpy as np\n'), ((3595, 3637), 'numpy.count_nonzero', 'np.count_nonzero', (['(state[govars.WHITE] == 1)'], {}), '(state[govars.WHITE] == 1)\n', (3611, 3637), True, 'import numpy as np\n'), ((3667, 3708), 'numpy.count_nonzero', 'np.count_nonzero', (['state[govars.PASS_CHNL]'], {}), 
'(state[govars.PASS_CHNL])\n', (3683, 3708), True, 'import numpy as np\n'), ((3947, 3970), 'numpy.count_nonzero', 'np.count_nonzero', (['state'], {}), '(state)\n', (3963, 3970), True, 'import numpy as np\n'), ((4008, 4079), 'numpy.count_nonzero', 'np.count_nonzero', (['state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]'], {}), '(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]])\n', (4024, 4079), True, 'import numpy as np\n'), ((4324, 4395), 'numpy.count_nonzero', 'np.count_nonzero', (['state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]]'], {}), '(state[[govars.BLACK, govars.WHITE, govars.INVD_CHNL]])\n', (4340, 4395), True, 'import numpy as np\n'), ((7649, 7668), 'numpy.argwhere', 'np.argwhere', (['(~equal)'], {}), '(~equal)\n', (7660, 7668), True, 'import numpy as np\n')] |
from ScopeFoundry import Measurement
import time
from qtpy import QtWidgets
from PyQt5.Qt import QFormLayout
class AttoCubeStageControlMeasure(Measurement):
def __init__(self, app, name=None, hw_name='attocube_xyz_stage'):
self.hw_name = hw_name
Measurement.__init__(self, app, name=name)
def setup(self):
self.hw = self.app.hardware[self.hw_name]
S = self.hw.settings
self.ui = QtWidgets.QWidget()
self.ui.setLayout(QtWidgets.QVBoxLayout())
self.ctr_box = QtWidgets.QGroupBox("Attocube ECC 100: {} {}".format(self.name, self.hw_name))
self.ctr_box.setLayout(QtWidgets.QHBoxLayout())
self.ui.layout().addWidget(self.ctr_box, stretch=0)
self.connect_checkBox = QtWidgets.QCheckBox("Connect to Hardware")
self.ctr_box.layout().addWidget(self.connect_checkBox)
S.connected.connect_to_widget(self.connect_checkBox)
self.run_checkBox = QtWidgets.QCheckBox("Live Update")
self.ctr_box.layout().addWidget(self.run_checkBox)
self.settings.activation.connect_to_widget(self.run_checkBox)
self.dev_id_doubleSpinBox = QtWidgets.QDoubleSpinBox()
self.ctr_box.layout().addWidget(self.dev_id_doubleSpinBox)
S.device_id.connect_to_widget(self.dev_id_doubleSpinBox)
self.axes_box = QtWidgets.QGroupBox("Axes")
self.axes_box.setLayout(QtWidgets.QHBoxLayout())
self.ui.layout().addWidget(self.axes_box, stretch=0)
for i,axis in enumerate(self.hw.ax_names):
names = [name for name in S.as_dict().keys() if name.split('_')[0] == axis]
widget = S.New_UI(names)
widget.layout().insertRow(0, "Axis {}".format(i+1), QtWidgets.QLabel("<B>{}</B>".format(axis)))
self.axes_box.layout().addWidget(widget)
self.ui.layout().addWidget(QtWidgets.QWidget(), stretch=1)
def setup_figure(self):
pass
def run(self):
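        # Poll the hardware roughly every 100 ms until the measurement is interrupted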
while not self.interrupt_measurement_called:
time.sleep(0.1)
self.hw.read_from_hardware()
pass
def update_display(self):
pass | [
"qtpy.QtWidgets.QDoubleSpinBox",
"qtpy.QtWidgets.QVBoxLayout",
"time.sleep",
"ScopeFoundry.Measurement.__init__",
"qtpy.QtWidgets.QCheckBox",
"qtpy.QtWidgets.QGroupBox",
"qtpy.QtWidgets.QWidget",
"qtpy.QtWidgets.QHBoxLayout"
] | [((273, 315), 'ScopeFoundry.Measurement.__init__', 'Measurement.__init__', (['self', 'app'], {'name': 'name'}), '(self, app, name=name)\n', (293, 315), False, 'from ScopeFoundry import Measurement\n'), ((457, 476), 'qtpy.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (474, 476), False, 'from qtpy import QtWidgets\n'), ((779, 821), 'qtpy.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['"""Connect to Hardware"""'], {}), "('Connect to Hardware')\n", (798, 821), False, 'from qtpy import QtWidgets\n'), ((983, 1017), 'qtpy.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['"""Live Update"""'], {}), "('Live Update')\n", (1002, 1017), False, 'from qtpy import QtWidgets\n'), ((1192, 1218), 'qtpy.QtWidgets.QDoubleSpinBox', 'QtWidgets.QDoubleSpinBox', ([], {}), '()\n', (1216, 1218), False, 'from qtpy import QtWidgets\n'), ((1377, 1404), 'qtpy.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['"""Axes"""'], {}), "('Axes')\n", (1396, 1404), False, 'from qtpy import QtWidgets\n'), ((503, 526), 'qtpy.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (524, 526), False, 'from qtpy import QtWidgets\n'), ((661, 684), 'qtpy.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (682, 684), False, 'from qtpy import QtWidgets\n'), ((1437, 1460), 'qtpy.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1458, 1460), False, 'from qtpy import QtWidgets\n'), ((1904, 1923), 'qtpy.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (1921, 1923), False, 'from qtpy import QtWidgets\n'), ((2097, 2112), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2107, 2112), False, 'import time\n')] |
import unittest
from tests.test_base import TestBase
from fractions import Fraction
from core import data
# TODO
class TestBasic(TestBase):
def test_arithmetic(self):
r = self.readEvalPrint
self.assertEqual(r('1'), 1)
self.assertEqual(r('2 + 2'), 4)
self.assertEqual(r('1 + 2 * 3'), 7)
self.assertEqual(r('(1 + 2) * 3'), 9)
self.assertEqual(r('1 * (2 + 3)'), 5)
self.assertEqual(r('1 + 2 - 3 * 4 / 5'), Fraction('0.6'))
self.assertEqual(r('1 / 2 / 2'), Fraction(1, 4))
self.assertEqual(r('0.03 * 0.03 * 0.03'), Fraction('2.7e-05'))
self.assertEqual(r('2 ^ 2 ^ 2'), 16)
self.assertEqual(r('1+1i'), data.Complex(1, 1))
self.assertEqual(r('1+2i + 3+4i'), data.Complex(4, 6))
self.assertEqual(r('1i ^ 1i'), 1)
def test_abstraction(self):
xs = [
'λx.x',
'λx.x x',
'λx.x * 2',
'let x := 2 in x + x',
'let x := 2 in let y := 3 in x + y',
'let x := 2 and y := 3 in x + y',
'let x := (λx.x) ((λx.x x) (λx.x)) (λx.x) 2 in x + x',
'(λx.x)(λy.y)',
'(λx.x x)(λx.x)',
'((λx.x)(λx.x))(λx.x)',
'(λx y z. x + y + z) 1 2 3',
'x + x where x := 2',
'x + x where x := y ^ y where y := 2',
'let x := 1 :: 2 :: 3 :: nil in head x',
'let x := 1 :: 2 :: 3 :: nil in head (tail (tail x))'
]
for x in xs:
self.readEvalPrint(x)
def test_condition(self):
r = self.readEvalPrint
self.assertEqual(r('if true then 1 else 0'), 1)
self.assertEqual(r('1 if true else 0'), 1)
self.assertEqual(r('if false then 1 else 0'), 0)
self.assertEqual(r('1 if false else 0'), 0)
self.assertEqual(r('if nil then 1 else 0'), 0)
self.assertEqual(r('1 if nil else 0'), 0)
self.assertEqual(r('if 1 then true else false'), data.TRUE)
self.assertEqual(r('if 1 then true else false'), data.TRUE)
self.assertEqual(r('if 0 then true else false'), data.TRUE)
def test_structures(self):
r = self.readEvalPrint
self.assertEqual(r('()'), [])
self.assertEqual(r('(1, 2)'), [1, 2])
self.assertEqual(r('(1, 2, 3)'), [1, 2, 3])
xs = [
'1 :: []',
'[1 2 3]',
'[1 . [2 . [3 . nil]]]',
'[1 . 2]',
'[[1 2] [3 4]]',
'[(1, 2) (3, 4)]',
'([1 2], [3 4])',
'[(1 + 2) (3 - 4)]',
'(1 + 2, 3 - 4)'
]
for x in xs:
self.readEvalPrint(x)
def test_minmax(self):
r = self.readEvalPrint
self.assertEqual(r('min(3, 1, 2)'), 1)
self.assertEqual(r('max(3, 1, 2)'), 3)
def test_comments(self):
xs = [
'# Line comment',
'#| Block comment |#',
'#| Nested #| Block comment |# |#',
'#| Nested #| Block |# comment |#',
'#| Line comment inside # Block comment |#',
'# Block comment inside #| Line comment |#',
'#',
'#||#'
]
for x in xs:
self.assertIsNone(self.readEvalPrint(x))
def test_string(self):
r = self.readEvalPrint
self.assertEqual(r('""'), '')
self.assertEqual(r('"String"'), 'String')
self.assertEqual(r('""""'), '"')
def test_print(self):
xs = [
'print 1',
'print "a"',
'let x := 1 and y := 2 in print (x, y)'
]
for x in xs:
self.readEvalPrint(x)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"fractions.Fraction",
"core.data.Complex"
] | [((3680, 3695), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3693, 3695), False, 'import unittest\n'), ((465, 480), 'fractions.Fraction', 'Fraction', (['"""0.6"""'], {}), "('0.6')\n", (473, 480), False, 'from fractions import Fraction\n'), ((523, 537), 'fractions.Fraction', 'Fraction', (['(1)', '(4)'], {}), '(1, 4)\n', (531, 537), False, 'from fractions import Fraction\n'), ((589, 608), 'fractions.Fraction', 'Fraction', (['"""2.7e-05"""'], {}), "('2.7e-05')\n", (597, 608), False, 'from fractions import Fraction\n'), ((691, 709), 'core.data.Complex', 'data.Complex', (['(1)', '(1)'], {}), '(1, 1)\n', (703, 709), False, 'from core import data\n'), ((754, 772), 'core.data.Complex', 'data.Complex', (['(4)', '(6)'], {}), '(4, 6)\n', (766, 772), False, 'from core import data\n')] |
# -*- coding: utf-8 -*-
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import sys
import os_net_config
from os_net_config import cli
from os_net_config import impl_ifcfg
from os_net_config.tests import base
import six
REALPATH = os.path.dirname(os.path.realpath(__file__))
SAMPLE_BASE = os.path.join(REALPATH, '../../', 'etc',
'os-net-config', 'samples')
class TestCli(base.TestCase):
def run_cli(self, argstr, exitcodes=(0,)):
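        # Swap in StringIO for stdout/stderr so the CLI output can be captured and asserted on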
orig = sys.stdout
orig_stderr = sys.stderr
sys.stdout = six.StringIO()
sys.stderr = six.StringIO()
ret = cli.main(argstr.split())
self.assertIn(ret, exitcodes)
stdout = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = orig
stderr = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = orig_stderr
return (stdout, stderr)
def test_bond_noop_output(self):
bond_yaml = os.path.join(SAMPLE_BASE, 'bond.yaml')
bond_json = os.path.join(SAMPLE_BASE, 'bond.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % bond_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % bond_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-ctlplane',
'DEVICE=em2',
'DEVICE=em1',
'DEVICE=bond1',
'DEVICETYPE=ovs']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_ivs_noop_output(self):
ivs_yaml = os.path.join(SAMPLE_BASE, 'ivs.yaml')
ivs_json = os.path.join(SAMPLE_BASE, 'ivs.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=nic2',
'DEVICE=nic3',
'DEVICE=api201',
'DEVICE=storage202',
'DEVICETYPE=ivs']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_bridge_noop_output(self):
bridge_yaml = os.path.join(SAMPLE_BASE, 'bridge_dhcp.yaml')
bridge_json = os.path.join(SAMPLE_BASE, 'bridge_dhcp.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=eni --noop -c %s' %
bridge_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=eni --noop -c %s' %
bridge_json)
self.assertEqual('', stderr)
sanity_devices = ['iface br-ctlplane inet dhcp',
'iface em1',
'ovs_type OVSBridge']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_vlan_noop_output(self):
vlan_yaml = os.path.join(SAMPLE_BASE, 'bridge_vlan.yaml')
vlan_json = os.path.join(SAMPLE_BASE, 'bridge_vlan.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% vlan_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% vlan_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-ctlplane',
'DEVICE=em1',
'DEVICE=vlan16',
'DEVICETYPE=ovs']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_interface_noop_output(self):
interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml')
interface_json = os.path.join(SAMPLE_BASE, 'interface.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% interface_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% interface_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=em1',
'BOOTPROTO=static',
'IPADDR=192.0.2.1']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_bridge_noop_rootfs(self):
for provider in ('ifcfg', 'eni'):
bond_yaml = os.path.join(SAMPLE_BASE, 'bridge_dhcp.yaml')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=%s --noop '
'--root-dir=/rootfs '
'-c %s' % (provider, bond_yaml))
self.assertEqual('', stderr)
self.assertIn('File: /rootfs/', stdout_yaml)
def test_interface_noop_detailed_exit_codes(self):
interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s --detailed-exit-codes'
% interface_yaml, exitcodes=(2,))
def test_interface_noop_detailed_exit_codes_no_changes(self):
interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml')
class TestImpl(os_net_config.NetConfig):
def add_interface(self, interface):
pass
def apply(self, cleanup=False, activate=True):
# this fake implementation returns no changes
return {}
self.stubs.Set(impl_ifcfg, 'IfcfgNetConfig', TestImpl)
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s --detailed-exit-codes'
% interface_yaml, exitcodes=(0,))
def test_ovs_dpdk_bond_noop_output(self):
ivs_yaml = os.path.join(SAMPLE_BASE, 'ovs_dpdk_bond.yaml')
ivs_json = os.path.join(SAMPLE_BASE, 'ovs_dpdk_bond.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-link',
'TYPE=OVSUserBridge',
'DEVICE=dpdkbond0',
'TYPE=OVSDPDKBond']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_nfvswitch_noop_output(self):
nfvswitch_yaml = os.path.join(SAMPLE_BASE, 'nfvswitch.yaml')
nfvswitch_json = os.path.join(SAMPLE_BASE, 'nfvswitch.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % nfvswitch_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % nfvswitch_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=nic2',
'DEVICE=nic3',
'DEVICE=api201',
'DEVICE=storage202',
'DEVICETYPE=nfvswitch']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_ovs_dpdk_noop_output(self):
ivs_yaml = os.path.join(SAMPLE_BASE, 'ovs_dpdk.yaml')
ivs_json = os.path.join(SAMPLE_BASE, 'ovs_dpdk.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % ivs_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-link',
'TYPE=OVSUserBridge',
'DEVICE=dpdk0',
'TYPE=OVSDPDKPort']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
| [
"sys.stderr.getvalue",
"sys.stdout.close",
"six.StringIO",
"sys.stdout.getvalue",
"sys.stderr.close"
] | [((1094, 1108), 'six.StringIO', 'six.StringIO', ([], {}), '()\n', (1106, 1108), False, 'import six\n'), ((1130, 1144), 'six.StringIO', 'six.StringIO', ([], {}), '()\n', (1142, 1144), False, 'import six\n'), ((1240, 1261), 'sys.stdout.getvalue', 'sys.stdout.getvalue', ([], {}), '()\n', (1259, 1261), False, 'import sys\n'), ((1270, 1288), 'sys.stdout.close', 'sys.stdout.close', ([], {}), '()\n', (1286, 1288), False, 'import sys\n'), ((1332, 1353), 'sys.stderr.getvalue', 'sys.stderr.getvalue', ([], {}), '()\n', (1351, 1353), False, 'import sys\n'), ((1362, 1380), 'sys.stderr.close', 'sys.stderr.close', ([], {}), '()\n', (1378, 1380), False, 'import sys\n')] |
from datetime import date
'''Crie um programa que leia o ano de nascimento de sete pessoas. No final, mostre quantas pessoas ainda não atingiram a maioridade e quantas já são maiores.
'''
atual = date.today().year
totmaior = 0
totmenor = 0
for c in range(1, 8):
    ano = int(input(' Escreva o ano de nascimento da {}° pessoa: '.format(c)))
    if atual - ano <= 21:
        totmenor += 1
    else:
        totmaior += 1
print(' O total de pessoas maiores de idade é {}'.format(totmaior))
print(' E o total de pessoas menores de idade é {}.'.format(totmenor))
| [
"datetime.date.today"
] | [((241, 253), 'datetime.date.today', 'date.today', ([], {}), '()\n', (251, 253), False, 'from datetime import date\n')] |
from django import forms
from django.forms import ModelForm, Textarea
from .models import Flight, Comment, AviaCompany
class AddCommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = [
"comment_type",
"text",
]
labels = {
"comment_type": "Выберите тип коментария",
"text": "Введите свой комментарий",
}
widgets = {
"text": Textarea(attrs={'cols': 70, 'rows': 10}),
} | [
"django.forms.Textarea"
] | [((470, 510), 'django.forms.Textarea', 'Textarea', ([], {'attrs': "{'cols': 70, 'rows': 10}"}), "(attrs={'cols': 70, 'rows': 10})\n", (478, 510), False, 'from django.forms import ModelForm, Textarea\n')] |
"""
Base IO code for all datasets (borrowing concepts from sklearn.datasets and keras.utils.load_data)
"""
from importlib import resources
from spacekit.extractor.scrape import WebScraper, FileScraper, home_data_base
from spacekit.analyzer.scan import import_dataset, CalScanner, SvmScanner
from spacekit.datasets.meta import spacekit_collections
DATA = "spacekit.datasets.data"
DD = home_data_base()
def import_collection(name, date_key=None):
source = f"{DATA}.{name}"
archives = spacekit_collections[name]["data"]
if date_key is None:
fnames = [archives[date]["fname"] for date in archives.keys()]
else:
fnames = [archives[date_key]["fname"]]
scr = FileScraper(cache_dir=".", clean=False)
for fname in fnames:
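        # Resolve each archive bundled inside the package and queue it for extraction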
with resources.path(source, fname) as archive:
scr.fpaths.append(archive)
fpaths = scr.extract_archives()
return fpaths
def scrape_archives(archives, data_home=DD):
"""Download zip archives of training data iterations (includes datasets, models, and results).
Returns
-------
list
        list of paths to retrieved and extracted dataset collection
"""
# data_home = home_data_base(data_home=data_home)
fpaths = WebScraper(archives["uri"], archives["data"], cache_dir=data_home).scrape()
return fpaths
def download_single_archive(archives, date_key=None, data_home=DD):
uri = archives["uri"]
data = archives["data"]
if date_key is None:
# default to most recent
date_key = sorted(list(data.keys()))[-1]
# limit data download to single archive
dataset = {date_key: data[date_key]}
# data_home = get_data_home(data_home=data_home)
scraper = WebScraper(uri, dataset, cache_dir=data_home).scrape()
fpath = scraper.fpaths[0]
print(fpath)
return fpath
def load_from_archive(archives, fpath=None, date_key=None, scanner=None, data_home=DD):
if fpath is None:
fpath = download_single_archive(
archives, date_key=date_key, data_home=data_home
)
if scanner:
scn = scanner(perimeter=fpath)
df = scn.load_dataframe(kwargs=scn.kwargs, decoder=scn.decoder)
else:
df = import_dataset(filename=fpath)
return df
def load_cal(fpath=None, date_key=None):
cal = spacekit_collections["calcloud"]
df = load_from_archive(cal, fpath=fpath, date_key=date_key, scanner=CalScanner)
return df
def load_svm(fpath=None, date_key=None):
svm = spacekit_collections["svm"]
df = load_from_archive(svm, fpath=fpath, date_key=date_key, scanner=SvmScanner)
return df
def load_k2(fpath=None, date_key=None):
k2 = spacekit_collections["k2"]
    train, test = scrape_archives(k2, data_home=DD)
    return train, test
def load(name="calcloud", date_key=None, fpath=None):
if fpath is None:
fpath = import_collection(name, date_key=date_key)
if name == "calcloud":
scn = CalScanner(perimeter=fpath)
elif name == "svm":
scn = SvmScanner(perimeter=fpath)
df = scn.load_dataframe(kwargs=scn.kwargs, decoder=scn.decoder)
return df
| [
"spacekit.extractor.scrape.WebScraper",
"spacekit.extractor.scrape.home_data_base",
"spacekit.analyzer.scan.CalScanner",
"importlib.resources.path",
"spacekit.analyzer.scan.SvmScanner",
"spacekit.extractor.scrape.FileScraper",
"spacekit.analyzer.scan.import_dataset"
] | [((385, 401), 'spacekit.extractor.scrape.home_data_base', 'home_data_base', ([], {}), '()\n', (399, 401), False, 'from spacekit.extractor.scrape import WebScraper, FileScraper, home_data_base\n'), ((691, 730), 'spacekit.extractor.scrape.FileScraper', 'FileScraper', ([], {'cache_dir': '"""."""', 'clean': '(False)'}), "(cache_dir='.', clean=False)\n", (702, 730), False, 'from spacekit.extractor.scrape import WebScraper, FileScraper, home_data_base\n'), ((2197, 2227), 'spacekit.analyzer.scan.import_dataset', 'import_dataset', ([], {'filename': 'fpath'}), '(filename=fpath)\n', (2211, 2227), False, 'from spacekit.analyzer.scan import import_dataset, CalScanner, SvmScanner\n'), ((2913, 2940), 'spacekit.analyzer.scan.CalScanner', 'CalScanner', ([], {'perimeter': 'fpath'}), '(perimeter=fpath)\n', (2923, 2940), False, 'from spacekit.analyzer.scan import import_dataset, CalScanner, SvmScanner\n'), ((769, 798), 'importlib.resources.path', 'resources.path', (['source', 'fname'], {}), '(source, fname)\n', (783, 798), False, 'from importlib import resources\n'), ((1227, 1293), 'spacekit.extractor.scrape.WebScraper', 'WebScraper', (["archives['uri']", "archives['data']"], {'cache_dir': 'data_home'}), "(archives['uri'], archives['data'], cache_dir=data_home)\n", (1237, 1293), False, 'from spacekit.extractor.scrape import WebScraper, FileScraper, home_data_base\n'), ((1704, 1749), 'spacekit.extractor.scrape.WebScraper', 'WebScraper', (['uri', 'dataset'], {'cache_dir': 'data_home'}), '(uri, dataset, cache_dir=data_home)\n', (1714, 1749), False, 'from spacekit.extractor.scrape import WebScraper, FileScraper, home_data_base\n'), ((2979, 3006), 'spacekit.analyzer.scan.SvmScanner', 'SvmScanner', ([], {'perimeter': 'fpath'}), '(perimeter=fpath)\n', (2989, 3006), False, 'from spacekit.analyzer.scan import import_dataset, CalScanner, SvmScanner\n')] |
import unittest
import numpy as np
from mpi4py import MPI
from mpids.MPInumpy.errors import NotSupportedError
from mpids.MPInumpy.distributions.Replicated import Replicated
from .MPIArray_test import MPIArrayDefaultTest
class MPIArray3DDefaultTest(MPIArrayDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Default distribution
parms['dist'] = 'b'
#Add 1 to avoid divide by zero errors/warnings
np_data = np.arange(16).reshape(4,2,2) + 1
parms['data'] = np_data
local_data_map = {0: np_data[:1],
1: np_data[1:2],
2: np_data[2:3],
3: np_data[3:]}
parms['local_data'] = local_data_map[parms['rank']].tolist()
parms['comm_dims'] = [parms['comm_size']]
parms['comm_coord'] = [parms['rank']]
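        # Block distribution along axis 0: map each rank's local slab to global (start, stop) ranges per axis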
local_to_global_map = {0 : {0 : (0, 1), 1 : (0, 2), 2 : (0, 2)},
1 : {0 : (1, 2), 1 : (0, 2), 2 : (0, 2)},
2 : {0 : (2, 3), 1 : (0, 2), 2 : (0, 2)},
3 : {0 : (3, 4), 1 : (0, 2), 2 : (0, 2)}}
parms['local_to_global'] = local_to_global_map[parms['rank']]
return parms
def test_custom_max_higher_dim_method(self):
#Max along specified axies
self.assertTrue(np.alltrue(self.np_array.max(axis=2) == self.mpi_array.max(axis=2)))
def test_custom_mean_higher_dim_method(self):
#Mean along specified axies
self.assertTrue(np.alltrue(self.np_array.mean(axis=2) == self.mpi_array.mean(axis=2)))
def test_custom_min_higher_dim_method(self):
#Min along specified axies
self.assertTrue(np.alltrue(self.np_array.min(axis=2) == self.mpi_array.min(axis=2)))
def test_custom_std_higher_dim_method(self):
#Std along specified axies
self.assertTrue(np.alltrue(self.np_array.std(axis=2) == self.mpi_array.std(axis=2)))
def test_custom_sum_higher_dim_method(self):
#Sum along specified axies
self.assertTrue(np.alltrue(self.np_array.sum(axis=2) == self.mpi_array.sum(axis=2)))
class MPIArray3DReplicatedTest(MPIArray3DDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Replicated distribution
parms['dist'] = 'r'
#Add 1 to avoid divide by zero errors/warnings
parms['data'] = np.arange(16).reshape(4,2,2) + 1
parms['local_data'] = parms['data']
parms['comm_dims'] = None
parms['comm_coord'] = None
parms['local_to_global'] = None
return parms
class MPIArray4DDefaultTest(MPIArrayDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Default distribution
parms['dist'] = 'b'
#Add 1 to avoid divide by zero errors/warnings
np_data = np.arange(32).reshape(4,2,2,2) + 1
parms['data'] = np_data
local_data_map = {0: np_data[:1],
1: np_data[1:2],
2: np_data[2:3],
3: np_data[3:]}
parms['local_data'] = local_data_map[parms['rank']].tolist()
parms['comm_dims'] = [parms['comm_size']]
parms['comm_coord'] = [parms['rank']]
local_to_global_map = {0 : {0 : (0, 1), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2)},
1 : {0 : (1, 2), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2)},
2 : {0 : (2, 3), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2)},
3 : {0 : (3, 4), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2)}}
parms['local_to_global'] = local_to_global_map[parms['rank']]
return parms
def test_custom_max_higher_dim_method(self):
#Max along specified axies
self.assertTrue(np.alltrue(self.np_array.max(axis=2) == self.mpi_array.max(axis=2)))
self.assertTrue(np.alltrue(self.np_array.max(axis=3) == self.mpi_array.max(axis=3)))
def test_custom_mean_higher_dim_method(self):
#Mean along specified axies
self.assertTrue(np.alltrue(self.np_array.mean(axis=2) == self.mpi_array.mean(axis=2)))
self.assertTrue(np.alltrue(self.np_array.mean(axis=3) == self.mpi_array.mean(axis=3)))
def test_custom_min_higher_dim_method(self):
#Min along specified axies
self.assertTrue(np.alltrue(self.np_array.min(axis=2) == self.mpi_array.min(axis=2)))
self.assertTrue(np.alltrue(self.np_array.min(axis=3) == self.mpi_array.min(axis=3)))
def test_custom_std_higher_dim_method(self):
if isinstance(self.mpi_array, Replicated):
#Std along specified axies
self.assertTrue(np.alltrue(self.np_array.std(axis=2) == self.mpi_array.std(axis=2)))
self.assertTrue(np.alltrue(self.np_array.std(axis=3) == self.mpi_array.std(axis=3)))
else:
#TODO: Need to revisit for higher dim
with self.assertRaises(NotSupportedError):
self.mpi_array.std(axis=2)
with self.assertRaises(NotSupportedError):
self.mpi_array.std(axis=3)
def test_custom_sum_higher_dim_method(self):
#Sum along specified axies
self.assertTrue(np.alltrue(self.np_array.sum(axis=2) == self.mpi_array.sum(axis=2)))
self.assertTrue(np.alltrue(self.np_array.sum(axis=3) == self.mpi_array.sum(axis=3)))
class MPIArray4DReplicatedTest(MPIArray4DDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Replicated distribution
parms['dist'] = 'r'
#Add 1 to avoid divide by zero errors/warnings
parms['data'] = np.arange(32).reshape(4,2,2,2) + 1
parms['local_data'] = parms['data']
parms['comm_dims'] = None
parms['comm_coord'] = None
parms['local_to_global'] = None
return parms
class MPIArray5DDefaultTest(MPIArrayDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Default distribution
parms['dist'] = 'b'
#Add 1 to avoid divide by zero errors/warnings
np_data = np.arange(64).reshape(4,2,2,2,2) + 1
parms['data'] = np_data
local_data_map = {0: np_data[:1],
1: np_data[1:2],
2: np_data[2:3],
3: np_data[3:]}
parms['local_data'] = local_data_map[parms['rank']].tolist()
parms['comm_dims'] = [parms['comm_size']]
parms['comm_coord'] = [parms['rank']]
local_to_global_map = {0 : {0 : (0, 1), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2), 4 : (0, 2)},
1 : {0 : (1, 2), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2), 4 : (0, 2)},
2 : {0 : (2, 3), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2), 4 : (0, 2)},
3 : {0 : (3, 4), 1 : (0, 2), 2 : (0, 2), 3 : (0, 2), 4 : (0, 2)}}
parms['local_to_global'] = local_to_global_map[parms['rank']]
return parms
def test_custom_max_higher_dim_method(self):
#Max along specified axies
self.assertTrue(np.alltrue(self.np_array.max(axis=2) == self.mpi_array.max(axis=2)))
self.assertTrue(np.alltrue(self.np_array.max(axis=3) == self.mpi_array.max(axis=3)))
self.assertTrue(np.alltrue(self.np_array.max(axis=4) == self.mpi_array.max(axis=4)))
def test_custom_mean_higher_dim_method(self):
#Mean along specified axies
self.assertTrue(np.alltrue(self.np_array.mean(axis=2) == self.mpi_array.mean(axis=2)))
self.assertTrue(np.alltrue(self.np_array.mean(axis=3) == self.mpi_array.mean(axis=3)))
self.assertTrue(np.alltrue(self.np_array.mean(axis=4) == self.mpi_array.mean(axis=4)))
def test_custom_min_higher_dim_method(self):
#Min along specified axies
self.assertTrue(np.alltrue(self.np_array.min(axis=2) == self.mpi_array.min(axis=2)))
self.assertTrue(np.alltrue(self.np_array.min(axis=3) == self.mpi_array.min(axis=3)))
self.assertTrue(np.alltrue(self.np_array.min(axis=4) == self.mpi_array.min(axis=4)))
def test_custom_std_higher_dim_method(self):
if isinstance(self.mpi_array, Replicated):
#Std along specified axies
self.assertTrue(np.alltrue(self.np_array.std(axis=2) == self.mpi_array.std(axis=2)))
self.assertTrue(np.alltrue(self.np_array.std(axis=3) == self.mpi_array.std(axis=3)))
self.assertTrue(np.alltrue(self.np_array.std(axis=4) == self.mpi_array.std(axis=4)))
else:
#TODO: Need to revisit for higher dim
with self.assertRaises(NotSupportedError):
self.mpi_array.std(axis=2)
with self.assertRaises(NotSupportedError):
self.mpi_array.std(axis=3)
with self.assertRaises(NotSupportedError):
self.mpi_array.std(axis=4)
def test_custom_sum_higher_dim_method(self):
#Sum along specified axies
self.assertTrue(np.alltrue(self.np_array.sum(axis=2) == self.mpi_array.sum(axis=2)))
self.assertTrue(np.alltrue(self.np_array.sum(axis=3) == self.mpi_array.sum(axis=3)))
self.assertTrue(np.alltrue(self.np_array.sum(axis=4) == self.mpi_array.sum(axis=4)))
class MPIArray5DReplicatedTest(MPIArray5DDefaultTest):
def create_setUp_parms(self):
parms = {}
parms['comm'] = MPI.COMM_WORLD
parms['rank'] = MPI.COMM_WORLD.Get_rank()
parms['comm_size'] = MPI.COMM_WORLD.Get_size()
# Replicated distribution
parms['dist'] = 'r'
#Add 1 to avoid divide by zero errors/warnings
parms['data'] = np.arange(64).reshape(4,2,2,2,2) + 1
parms['local_data'] = parms['data']
parms['comm_dims'] = None
parms['comm_coord'] = None
parms['local_to_global'] = None
return parms
if __name__ == '__main__':
unittest.main()
| [
"mpi4py.MPI.COMM_WORLD.Get_size",
"mpi4py.MPI.COMM_WORLD.Get_rank",
"numpy.arange"
] | [((373, 398), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (396, 398), False, 'from mpi4py import MPI\n'), ((428, 453), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (451, 453), False, 'from mpi4py import MPI\n'), ((2442, 2467), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (2465, 2467), False, 'from mpi4py import MPI\n'), ((2497, 2522), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (2520, 2522), False, 'from mpi4py import MPI\n'), ((3040, 3065), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (3063, 3065), False, 'from mpi4py import MPI\n'), ((3095, 3120), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (3118, 3120), False, 'from mpi4py import MPI\n'), ((5937, 5962), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (5960, 5962), False, 'from mpi4py import MPI\n'), ((5992, 6017), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (6015, 6017), False, 'from mpi4py import MPI\n'), ((6537, 6562), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (6560, 6562), False, 'from mpi4py import MPI\n'), ((6592, 6617), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (6615, 6617), False, 'from mpi4py import MPI\n'), ((10053, 10078), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (10076, 10078), False, 'from mpi4py import MPI\n'), ((10108, 10133), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (10131, 10133), False, 'from mpi4py import MPI\n'), ((586, 599), 'numpy.arange', 'np.arange', (['(16)'], {}), '(16)\n', (595, 599), True, 'import numpy as np\n'), ((2664, 2677), 'numpy.arange', 'np.arange', (['(16)'], {}), '(16)\n', (2673, 2677), True, 'import numpy as np\n'), ((3253, 3266), 'numpy.arange', 'np.arange', (['(32)'], {}), '(32)\n', (3262, 3266), True, 'import numpy as np\n'), ((6159, 6172), 'numpy.arange', 'np.arange', (['(32)'], {}), '(32)\n', (6168, 6172), True, 'import numpy as np\n'), ((6750, 6763), 'numpy.arange', 'np.arange', (['(64)'], {}), '(64)\n', (6759, 6763), True, 'import numpy as np\n'), ((10275, 10288), 'numpy.arange', 'np.arange', (['(64)'], {}), '(64)\n', (10284, 10288), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
import json
import os
from collections import defaultdict
from tqdm import tqdm
from transformers import BertConfig, BertTokenizer
import sys
sys.path.insert(0, "./")
class SentenceIter(object):
def __init__(self, dirname):
self.dirname = dirname
def __iter__(self):
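        # Stream the corpus one file at a time, yielding one stripped line per iteration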
for i, fname in enumerate(os.listdir(self.dirname)):
# if i > 5:
# continue
print(fname)
for line in open(os.path.join(self.dirname, fname), "r", encoding="utf-8"):
yield line.strip()
def get_vocab_freq(sentence_iter, tokenizer):
dict_vocab2freq = defaultdict(int)
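    # Tokenize every sentence and count how often each WordPiece token appears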
for i, sent in tqdm(enumerate(sentence_iter)):
# if i > 5000:
# continue
# print(sent)
if not sent:
continue
tokens = tokenizer.tokenize(sent)
for tok in tokens:
dict_vocab2freq[tok] += 1
return dict_vocab2freq
if __name__ == "__main__":
# TOKENIZER
tokenizer = BertTokenizer.from_pretrained(
"resources/bert/chinese-bert-wwm-ext/"
)
corpus_folder = "datasets/news_corpus"
sentence_iter = SentenceIter(corpus_folder)
dict_vocab2freq = get_vocab_freq(sentence_iter, tokenizer)
json.dump(
dict_vocab2freq,
open("src/bert_models/vocab_process/dict_vocab2freq_0819.json", "w", encoding="utf-8"),
ensure_ascii=False,
)
| [
"os.listdir",
"sys.path.insert",
"transformers.BertTokenizer.from_pretrained",
"os.path.join",
"collections.defaultdict"
] | [((168, 192), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./"""'], {}), "(0, './')\n", (183, 192), False, 'import sys\n'), ((645, 661), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (656, 661), False, 'from collections import defaultdict\n'), ((1022, 1091), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['"""resources/bert/chinese-bert-wwm-ext/"""'], {}), "('resources/bert/chinese-bert-wwm-ext/')\n", (1051, 1091), False, 'from transformers import BertConfig, BertTokenizer\n'), ((347, 371), 'os.listdir', 'os.listdir', (['self.dirname'], {}), '(self.dirname)\n', (357, 371), False, 'import os\n'), ((479, 512), 'os.path.join', 'os.path.join', (['self.dirname', 'fname'], {}), '(self.dirname, fname)\n', (491, 512), False, 'import os\n')] |
from cacofonisk.callerid import CallerId
from cacofonisk.channel import SimpleChannel
from tests.replaytest import ChannelEventsTestCase
class TestOriginate(ChannelEventsTestCase):
def test_ctd_account_account(self):
"""
Click-to-dial call between a phoneaccount and an internal number.
In this scenario:
1. 201 is dialed,
2. 201 picks up,
3. 202 is dialed
4. 202 picks up
5. 201 hangs up
Which is reported as:
1. 201 calls 202
2. 202 picks up
3. 201 hangs up
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-account.json')
expected_events = [
('on_b_dial', {
'caller': 'SIP/150010001-00000002',
'targets': ['SIP/150010002-00000003'],
}),
('on_up', {
'caller': 'SIP/150010001-00000002',
'target': 'SIP/150010002-00000003',
}),
('on_hangup', {
'caller': 'SIP/150010001-00000002',
'reason': 'completed',
}),
]
self.assertEqualChannels(expected_events, events)
def test_ctd_account_world(self):
"""
Click-to-dial call between a phoneaccount and an external number.
In this scenario:
1. 201 is dialed
2. 201 picks up
3. +31260010001 is dialed
4. +31260010001 picks up
5. +31260010001 hangs up
Which is reported as:
1. 201 dials +31260010001
2. +31260010001 picks up
3. +31260010001 hangs up
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-world.json')
calling_chan = SimpleChannel(
account_code='15001',
caller_id=CallerId(num='201'),
cid_calling_pres='0 (Presentation Allowed, Not Screened)',
connected_line=CallerId(name='Calling...', num='+31150010001'),
exten='+31260010001',
linkedid='c4061ca6474c-1531990515.302',
name='SIP/150010001-00000008',
state=6,
uniqueid='c4061ca6474c-1531990515.317',
)
target_chan = SimpleChannel(
account_code='15001',
caller_id=CallerId(num='+31260010001'),
cid_calling_pres='0 (Presentation Allowed, Not Screened)',
connected_line=CallerId(num='+31150010001'),
exten='s',
linkedid='c4061ca6474c-1531990515.302',
name='SIP/voipgrid-siproute-docker-00000009',
state=6,
uniqueid='c4061ca6474c-1531990517.363',
)
expected_events = [
('on_b_dial', {
'caller': calling_chan,
'targets': [target_chan.replace(state=5)],
}),
('on_up', {
'caller': calling_chan,
'target': target_chan,
}),
('on_hangup', {
'caller': calling_chan,
'reason': 'completed',
}),
]
self.assertEqual(expected_events, events)
def test_ctd_account_world_deny_a(self):
"""
Click-to-dial call between a phoneaccount and an external number.
In this scenario:
1. 201 is dialed
2. 201 refuses the call
Which is reported as:
1. Nothing, because 201 never called anyone.
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-world-deny-a.json')
self.assertEqual([], events)
def test_ctd_account_world_deny_b(self):
"""
Click-to-dial call between a phoneaccount and an external number.
In this scenario:
1. 201 is dialed
2. 201 picks up
3. +31260010001 is dialed
4. +31260010001 rejects the call
Which is reported as:
1. 201 dials +31260010001
2. +31260010001 hangs up
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-world-deny-b.json')
expected_events = [
('on_b_dial', {
'caller': 'SIP/150010001-00000011',
'targets': ['SIP/voipgrid-siproute-docker-00000012'],
}),
('on_hangup', {
'caller': 'SIP/150010001-00000011',
'reason': 'busy',
}),
]
self.assertEqualChannels(expected_events, events)
def test_ctd_attn_xfer_abbcac(self):
"""
Click-to-dial with an attended transfer initiated by B.
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-account-xfer-attn-abbcac.json')
expected_events = [
('on_b_dial', {
'caller': 'SIP/150010001-0000001d',
'targets': ['SIP/150010002-0000001e'],
}),
('on_up', {
'caller': 'SIP/150010001-0000001d',
'target': 'SIP/150010002-0000001e',
}),
('on_b_dial', {
'caller': 'SIP/150010002-0000001f',
'targets': ['SIP/150010003-00000020'],
}),
('on_up', {
'caller': 'SIP/150010002-0000001f',
'target': 'SIP/150010003-00000020',
}),
('on_attended_transfer', {
'caller': 'SIP/150010001-0000001d',
'target': 'SIP/150010003-00000020',
'transferer': 'SIP/150010002-0000001f',
}),
('on_hangup', {
'caller': 'SIP/150010001-0000001d',
'reason': 'completed',
}),
]
self.assertEqualChannels(expected_events, events)
def test_ctd_attn_xfer_abacbc(self):
"""
Click-to-dial with an attended transfer initiated by A.
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-world-xfer-attn-abacbc.json')
expected_events = [
('on_b_dial', {
'caller': 'SIP/150010001-00000025',
'targets': ['SIP/voipgrid-siproute-docker-00000026'],
}),
('on_up', {
'caller': 'SIP/150010001-00000025',
'target': 'SIP/voipgrid-siproute-docker-00000026',
}),
('on_b_dial', {
'caller': 'SIP/150010001-00000029',
'targets': ['SIP/150010003-0000002a'],
}),
('on_up', {
'caller': 'SIP/150010001-00000029',
'target': 'SIP/150010003-0000002a',
}),
('on_attended_transfer', {
'caller': 'SIP/voipgrid-siproute-docker-00000026',
'target': 'SIP/150010003-0000002a',
'transferer': 'SIP/150010001-00000029',
}),
('on_hangup', {
'caller': 'SIP/voipgrid-siproute-docker-00000026',
'reason': 'completed',
}),
]
self.assertEqualChannels(expected_events, events)
def test_ctd_account_world_no_ringing(self):
"""
Click-to-dial where the B side never reaches state 5 RINGING.
"""
events = self.run_and_get_events(
'fixtures/originate/ctd-account-world-no-ringing.json')
calling_chan = SimpleChannel(
account_code='15001',
caller_id=CallerId(num='2401'),
cid_calling_pres='0 (Presentation Allowed, Not Screened)',
connected_line=CallerId(name='Calling...', num='+31150010001'),
exten='+31260010001',
linkedid='ua5-ams-1552575068.23242646',
name='SIP/150010063-0015f5f1',
state=6,
uniqueid='ua5-ams-1552575068.23242663',
)
target_chan = SimpleChannel(
account_code='15001',
caller_id=CallerId(num='+31260010001'),
cid_calling_pres='0 (Presentation Allowed, Not Screened)',
connected_line=CallerId(num='+31150010001'),
exten='s',
linkedid='ua5-ams-1552575068.23242646',
name='SIP/voipgrid-siproute-ams-0015f5f3',
state=6,
uniqueid='ua5-ams-1552575069.23242717',
)
self.maxDiff = None
expected_events = [
('on_b_dial', {
'caller': calling_chan,
'targets': [target_chan], # no .replace(state=5) here
}),
('on_up', {
'caller': calling_chan,
'target': target_chan,
}),
('on_hangup', {
'caller': calling_chan,
'reason': 'completed',
}),
]
self.assertEqual(expected_events, events)
def test_cmn_world_world(self):
"""
Call-me-now call between two external numbers.
"""
events = self.run_and_get_events(
'fixtures/originate/cmn-world-world.json')
expected_events = [
('on_b_dial', {
'caller': 'SIP/voipgrid-siproute-docker-0000002b',
'targets': ['SIP/voipgrid-siproute-docker-0000002e'],
}),
('on_up', {
'caller': 'SIP/voipgrid-siproute-docker-0000002b',
'target': 'SIP/voipgrid-siproute-docker-0000002e',
}),
('on_hangup', {
'caller': 'SIP/voipgrid-siproute-docker-0000002b',
'reason': 'completed',
}),
]
self.assertEqualChannels(expected_events, events)
def test_cmn_world_account_unaccepted(self):
"""
Call-me-now call between two external numbers where A does not accept.
+31260010001 is dialed,
+31260010001 picks up and does not accept.
"""
events = self.run_and_get_events(
'fixtures/originate/cmn-world-world-unaccepted.json')
self.assertEqual([], events)
| [
"cacofonisk.callerid.CallerId"
] | [((1844, 1863), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""201"""'}), "(num='201')\n", (1852, 1863), False, 'from cacofonisk.callerid import CallerId\n'), ((1963, 2010), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'name': '"""Calling..."""', 'num': '"""+31150010001"""'}), "(name='Calling...', num='+31150010001')\n", (1971, 2010), False, 'from cacofonisk.callerid import CallerId\n'), ((2318, 2346), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""+31260010001"""'}), "(num='+31260010001')\n", (2326, 2346), False, 'from cacofonisk.callerid import CallerId\n'), ((2446, 2474), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""+31150010001"""'}), "(num='+31150010001')\n", (2454, 2474), False, 'from cacofonisk.callerid import CallerId\n'), ((7495, 7515), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""2401"""'}), "(num='2401')\n", (7503, 7515), False, 'from cacofonisk.callerid import CallerId\n'), ((7615, 7662), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'name': '"""Calling..."""', 'num': '"""+31150010001"""'}), "(name='Calling...', num='+31150010001')\n", (7623, 7662), False, 'from cacofonisk.callerid import CallerId\n'), ((7970, 7998), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""+31260010001"""'}), "(num='+31260010001')\n", (7978, 7998), False, 'from cacofonisk.callerid import CallerId\n'), ((8098, 8126), 'cacofonisk.callerid.CallerId', 'CallerId', ([], {'num': '"""+31150010001"""'}), "(num='+31150010001')\n", (8106, 8126), False, 'from cacofonisk.callerid import CallerId\n')] |
# Write a program that reads an arbitrary angle and shows on screen the sine, cosine and tangent of that angle.
import math
angulo = float(input('Digite um ângulo qualquer: '))
radiano = math.radians(angulo)
sen = math.sin(radiano)
cos = math.cos(radiano)
tan = math.tan(radiano)
print('-' * 30)
print('Com o ângulo de {}º, temos:'.format(angulo))
print('Seno: {:.2f}'.format(sen))
print('Cosseno: {:.2f}'.format(cos))
print('Tangente : {:.2f}'.format(tan))
print('-' * 30) | [
"math.cos",
"math.sin",
"math.tan",
"math.radians"
] | [((193, 213), 'math.radians', 'math.radians', (['angulo'], {}), '(angulo)\n', (205, 213), False, 'import math\n'), ((221, 238), 'math.sin', 'math.sin', (['radiano'], {}), '(radiano)\n', (229, 238), False, 'import math\n'), ((245, 262), 'math.cos', 'math.cos', (['radiano'], {}), '(radiano)\n', (253, 262), False, 'import math\n'), ((269, 286), 'math.tan', 'math.tan', (['radiano'], {}), '(radiano)\n', (277, 286), False, 'import math\n')] |
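# A quick worked check for the script above: if the user enters 30 (degrees),
# math.radians(30) is about 0.5236, so the printed values (two decimals) are
# Seno: 0.50, Cosseno: 0.87 and Tangente : 0.58.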
# Copyright (c) 2012 Ericsson Television Ltd
# Author <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from reviewbot.tools import Tool
from reviewbot.tools.process import execute
from reviewbot.utils import is_exe_in_path
class CPPCheckTool(Tool):
name = 'CPPCheck - Static Code Analysis'
version = '0.1'
description = ('Checks code for errors using Cppcheck - '
'A tool for static C/C++ code analysis')
options = [
{
'name': 'style_checks_enabled',
'field_type': 'django.forms.BooleanField',
'default': True,
'field_options': {
'label': 'Enable standard style checks',
'help_text': 'This will enable the standard style checks '
'- this also enables most warning, style and '
'performance checks.',
'required': False,
},
},
{
'name': 'all_checks_enabled',
'field_type': 'django.forms.BooleanField',
'default': False,
'field_options': {
'label': 'Enable ALL error checks',
'help_text': 'This will enable ALL the error checks '
                             '- likely to have many false positives.',
'required': False,
},
},
]
def check_dependencies(self):
return is_exe_in_path('cppcheck')
def handle_file(self, f):
if not (f.dest_file.lower().endswith('.cpp') or
f.dest_file.lower().endswith('.h') or
f.dest_file.lower().endswith('.c')):
# Ignore the file.
return False
path = f.get_patched_file_path()
if not path:
return False
enabled_checks = []
# Check the options we want to pass to cppcheck.
if self.settings['style_checks_enabled']:
enabled_checks.append('style')
if self.settings['all_checks_enabled']:
enabled_checks.append('all')
# Create string to pass to cppcheck
enable_settings = '%s' % ','.join(map(str, enabled_checks))
# Run the script and capture the output.
output = execute(
[
'cppcheck',
'--template=\"{file}::{line}::{severity}::{id}::{message}\"',
'--enable=%s' % enable_settings,
path
],
split_lines=True,
ignore_errors=True)
# Now for each line extract the fields and add a comment to the file.
for line in output:
            # Lines produced with the --template option above look like:
            # "filename.cpp::849::style::unusedFunction::The function 'bob' is never used"
            # "filename.cpp::638::style::unusedFunction::The function 'peter' is never used"
            # "filename.cpp::722::style::unusedFunction::The function 'test' is never used"
parsed = line.split('::')
# If we have a useful message
if len(parsed) == 5:
                # Sometimes we don't get a line number (just an empty string).
# Catch this case and set line number to 0.
if parsed[1]:
linenumber = int(parsed[1])
else:
linenumber = 0
# Now extract the other options.
category = parsed[2]
sub_category = parsed[3]
freetext = parsed[4][:-1] # strip the " from the end
# If the message is that its an error then override the
# default settings and raise an Issue otherwise just
# add a comment.
if category == 'error':
f.comment('%s.\n\nCategory: %s\nSub Category: %s' %
(freetext, category, sub_category),
linenumber, issue=True)
else:
f.comment('%s.\n\nCategory: %s\nSub Category: %s' %
(freetext, category, sub_category),
linenumber, issue=False)
return True
| [
"reviewbot.utils.is_exe_in_path",
"reviewbot.tools.process.execute"
] | [((2437, 2463), 'reviewbot.utils.is_exe_in_path', 'is_exe_in_path', (['"""cppcheck"""'], {}), "('cppcheck')\n", (2451, 2463), False, 'from reviewbot.utils import is_exe_in_path\n'), ((3252, 3424), 'reviewbot.tools.process.execute', 'execute', (['[\'cppcheck\', \'--template="{file}::{line}::{severity}::{id}::{message}"\', \n \'--enable=%s\' % enable_settings, path]'], {'split_lines': '(True)', 'ignore_errors': '(True)'}), '([\'cppcheck\',\n \'--template="{file}::{line}::{severity}::{id}::{message}"\', \n \'--enable=%s\' % enable_settings, path], split_lines=True, ignore_errors\n =True)\n', (3259, 3424), False, 'from reviewbot.tools.process import execute\n')] |
# EFILTER Forensic Query Language
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EFILTER query normalizer.
"""
__author__ = "<NAME> <<EMAIL>>"
from efilter import dispatch
from efilter import ast
from efilter import query as q
@dispatch.multimethod
def normalize(expr):
"""Optimizes the AST for better performance and simpler structure.
The returned query will be logically equivalent to what was provided but
transformations will be made to flatten and optimize the structure. This
works by recognizing certain patterns and replacing them with nicer ones,
eliminating pointless expressions, and so on.
# Collapsing nested variadic expressions:
Example:
        Intersection(x, Intersection(y, z)) => Intersection(x, y, z)
# Empty branch elimination:
Example:
Intersection(x) => x
"""
_ = expr
raise NotImplementedError()
@normalize.implementation(for_type=q.Query)
def normalize(query):
new_root = normalize(query.root)
return q.Query(query, root=new_root)
@normalize.implementation(for_type=ast.Expression)
def normalize(expr):
return expr
@normalize.implementation(for_type=ast.BinaryExpression)
def normalize(expr):
"""Normalize both sides, but don't eliminate the expression."""
lhs = normalize(expr.lhs)
rhs = normalize(expr.rhs)
return type(expr)(lhs, rhs, start=lhs.start, end=rhs.end)
@normalize.implementation(for_type=ast.Apply)
def normalize(expr):
"""No elimination, but normalize arguments."""
args = [normalize(arg) for arg in expr.args]
return type(expr)(expr.func, *args, start=expr.start, end=expr.end)
@normalize.implementation(for_type=ast.VariadicExpression)
def normalize(expr):
"""Pass through n-ary expressions, and eliminate empty branches.
Variadic and binary expressions recursively visit all their children.
If all children are eliminated then the parent expression is also
eliminated:
(& [removed] [removed]) => [removed]
If only one child is left, it is promoted to replace the parent node:
(& True) => True
"""
children = []
for child in expr.children:
branch = normalize(child)
if branch is None:
continue
if type(branch) is type(expr):
children.extend(branch.children)
else:
children.append(branch)
if len(children) == 0:
return None
if len(children) == 1:
return children[0]
return type(expr)(*children, start=children[0].start,
end=children[-1].end)
| [
"efilter.query.Query"
] | [((1559, 1588), 'efilter.query.Query', 'q.Query', (['query'], {'root': 'new_root'}), '(query, root=new_root)\n', (1566, 1588), True, 'from efilter import query as q\n')] |
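# A minimal sketch of what the normalizer above does, kept as comments because
# the concrete node constructors (ast.Var here) are an assumption about the
# wider efilter AST and may differ between versions:
#
#   nested = ast.Intersection(
#       ast.Var("x"),
#       ast.Intersection(ast.Var("y"), ast.Var("z")))
#   normalize(nested)
#   # per the docstring: a single Intersection with children x, y, z
#
#   normalize(ast.Intersection(ast.Var("x")))
#   # the lone child is promoted, so the result is just Var("x")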
import python_path
from ml_workflow.rule_reference import RuleReference
from ml_workflow.rule import Rule
from ml_workflow.rule_config_manager import RuleConfigManager
import pytest
def clean_rule_for(name):
if name in Rule.rule_by_name:
del Rule.rule_by_name[name]
if name in RuleReference.dict_by_name:
del RuleReference.dict_by_name[name]
RuleConfigManager.unset_for_reference_name(name)
@pytest.fixture(autouse=True)
def run_before_and_after_tests():
clean_rule_for('test_rule_reference')
yield
clean_rule_for('test_rule_reference')
def test_rule_reference():
@Rule(name='test_rule_reference', version='v2')
def f():
return 0
@Rule(name='test_rule_reference', version='v1')
def g():
return 1
@RuleReference(name='test_rule_reference')
def rph(): pass
assert(rph() == 0)
Rule.set_for_reference_name('test_rule_reference', g)
assert(rph() == 1)
def test_rule_reference_with_version():
# Checking version ordering is good, since as string "10.0" < "2.0"
@Rule(name='test_rule_reference', version='10.0')
def f():
return 0
@Rule(name='test_rule_reference', version='2.0')
def g():
return 1
assert(f.version > g.version)
@RuleReference(name='test_rule_reference')
def rph(): pass
assert(rph() == 0)
Rule.set_for_reference_name('test_rule_reference', g)
assert(rph() == 1)
def test_rule_reference_coherence_check_rule_first():
@RuleReference(name='test_rule_reference')
def rph(x, y): pass
with pytest.raises(Exception):
@Rule(name='test_rule_reference')
def rule_that_fail(x): pass
def test_rule_reference_coherence_check_reference_first():
@Rule(name='test_rule_reference')
def rule_that_fail(x): pass
with pytest.raises(Exception):
@RuleReference(name='test_rule_reference')
def rph(x, y): pass
| [
"ml_workflow.rule_reference.RuleReference",
"pytest.raises",
"pytest.fixture",
"ml_workflow.rule_config_manager.RuleConfigManager.unset_for_reference_name",
"ml_workflow.rule.Rule",
"ml_workflow.rule.Rule.set_for_reference_name"
] | [((428, 456), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (442, 456), False, 'import pytest\n'), ((372, 420), 'ml_workflow.rule_config_manager.RuleConfigManager.unset_for_reference_name', 'RuleConfigManager.unset_for_reference_name', (['name'], {}), '(name)\n', (414, 420), False, 'from ml_workflow.rule_config_manager import RuleConfigManager\n'), ((618, 664), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""', 'version': '"""v2"""'}), "(name='test_rule_reference', version='v2')\n", (622, 664), False, 'from ml_workflow.rule import Rule\n'), ((701, 747), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""', 'version': '"""v1"""'}), "(name='test_rule_reference', version='v1')\n", (705, 747), False, 'from ml_workflow.rule import Rule\n'), ((784, 825), 'ml_workflow.rule_reference.RuleReference', 'RuleReference', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (797, 825), False, 'from ml_workflow.rule_reference import RuleReference\n'), ((874, 927), 'ml_workflow.rule.Rule.set_for_reference_name', 'Rule.set_for_reference_name', (['"""test_rule_reference"""', 'g'], {}), "('test_rule_reference', g)\n", (901, 927), False, 'from ml_workflow.rule import Rule\n'), ((1069, 1117), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""', 'version': '"""10.0"""'}), "(name='test_rule_reference', version='10.0')\n", (1073, 1117), False, 'from ml_workflow.rule import Rule\n'), ((1154, 1201), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""', 'version': '"""2.0"""'}), "(name='test_rule_reference', version='2.0')\n", (1158, 1201), False, 'from ml_workflow.rule import Rule\n'), ((1273, 1314), 'ml_workflow.rule_reference.RuleReference', 'RuleReference', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (1286, 1314), False, 'from ml_workflow.rule_reference import RuleReference\n'), ((1363, 1416), 'ml_workflow.rule.Rule.set_for_reference_name', 'Rule.set_for_reference_name', (['"""test_rule_reference"""', 'g'], {}), "('test_rule_reference', g)\n", (1390, 1416), False, 'from ml_workflow.rule import Rule\n'), ((1500, 1541), 'ml_workflow.rule_reference.RuleReference', 'RuleReference', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (1513, 1541), False, 'from ml_workflow.rule_reference import RuleReference\n'), ((1745, 1777), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (1749, 1777), False, 'from ml_workflow.rule import Rule\n'), ((1576, 1600), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (1589, 1600), False, 'import pytest\n'), ((1611, 1643), 'ml_workflow.rule.Rule', 'Rule', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (1615, 1643), False, 'from ml_workflow.rule import Rule\n'), ((1820, 1844), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (1833, 1844), False, 'import pytest\n'), ((1855, 1896), 'ml_workflow.rule_reference.RuleReference', 'RuleReference', ([], {'name': '"""test_rule_reference"""'}), "(name='test_rule_reference')\n", (1868, 1896), False, 'from ml_workflow.rule_reference import RuleReference\n')] |
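# A condensed sketch of the dispatch pattern exercised by the tests above. It
# reuses the same imports and calls as that file; the rule name and functions
# here are made up for illustration.
from ml_workflow.rule import Rule
from ml_workflow.rule_reference import RuleReference

@Rule(name='greeting', version='v1')
def greet_v1():
    return 'hello'

@Rule(name='greeting', version='v2')
def greet_v2():
    return 'hi'

@RuleReference(name='greeting')
def greeting(): pass

assert greeting() == 'hi'                           # highest registered version wins
Rule.set_for_reference_name('greeting', greet_v1)   # explicit override, as in the tests
assert greeting() == 'hello'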
from evalne.evaluation.evaluator import LPEvaluator
from evalne.evaluation.split import EvalSplit
from evalne.evaluation.score import Scoresheet
from evalne.utils import preprocess as pp
# Load and preprocess the network
#G = pp.load_graph('evalne/tests/data/network.edgelist')
G = pp.load_graph('../Graph_Conv_Neural_Nets/generic_datasets/Zachary-Karate/Zachary-Karate.edgelist')
G, _ = pp.prep_graph(G)
# Create an evaluator and generate train/test edge split
traintest_split = EvalSplit() # Bhevencious: EvalSplit() contains methods used to READ/SET a variety of properties/variables. Use the DOT & PARENTHESES helpers to access parameters.
traintest_split.compute_splits(G, nw_name='Zachary-Karate.edgelist', train_frac=0.8)
nee = LPEvaluator(traintest_split)
# Create a Scoresheet to store the results
scoresheet = Scoresheet()
# Set the baselines
methods = ['adamic_adar_index', 'common_neighbours', 'jaccard_coefficient', 'katz', 'preferential_attachment', 'resource_allocation_index', 'random_prediction']
# Evaluate baselines
for method in methods:
result = nee.evaluate_baseline(method=method)
scoresheet.log_results(result)
try:
# Check if OpenNE is installed
import openne
# Set embedding methods from OpenNE
methods = ['node2vec', 'deepwalk', 'GraRep']
commands = [
'python -m openne --method node2vec --graph-format edgelist --p 1 --q 1',
'python -m openne --method deepWalk --graph-format edgelist --number-walks 40',
'python -m openne --method grarep --graph-format edgelist --epochs 10']
edge_emb = ['average', 'hadamard']
# Evaluate embedding methods
for i in range(len(methods)):
command = commands[i] + " --input {} --output {} --representation-size {}"
results = nee.evaluate_cmd(method_name=methods[i], method_type='ne', command=command, edge_embedding_methods=edge_emb, input_delim=' ', output_delim=' ')
scoresheet.log_results(results)
except ImportError:
print("The OpenNE library is not installed. Reporting results only for the baselines...")
pass
# Get output
scoresheet.print_tabular(metric='auroc')
scoresheet.write_all(filename='eval_log.txt', repeats='avg') # Bhevencious: score.py contains a range of methods & parameters for outputting results | [
"evalne.utils.preprocess.prep_graph",
"evalne.evaluation.evaluator.LPEvaluator",
"evalne.utils.preprocess.load_graph",
"evalne.evaluation.score.Scoresheet",
"evalne.evaluation.split.EvalSplit"
] | [((283, 391), 'evalne.utils.preprocess.load_graph', 'pp.load_graph', (['"""../Graph_Conv_Neural_Nets/generic_datasets/Zachary-Karate/Zachary-Karate.edgelist"""'], {}), "(\n '../Graph_Conv_Neural_Nets/generic_datasets/Zachary-Karate/Zachary-Karate.edgelist'\n )\n", (296, 391), True, 'from evalne.utils import preprocess as pp\n'), ((389, 405), 'evalne.utils.preprocess.prep_graph', 'pp.prep_graph', (['G'], {}), '(G)\n', (402, 405), True, 'from evalne.utils import preprocess as pp\n'), ((482, 493), 'evalne.evaluation.split.EvalSplit', 'EvalSplit', ([], {}), '()\n', (491, 493), False, 'from evalne.evaluation.split import EvalSplit\n'), ((738, 766), 'evalne.evaluation.evaluator.LPEvaluator', 'LPEvaluator', (['traintest_split'], {}), '(traintest_split)\n', (749, 766), False, 'from evalne.evaluation.evaluator import LPEvaluator\n'), ((824, 836), 'evalne.evaluation.score.Scoresheet', 'Scoresheet', ([], {}), '()\n', (834, 836), False, 'from evalne.evaluation.score import Scoresheet\n')] |
# This document is part of pelagos-data
# https://github.com/skytruth/pelagos-data
# =========================================================================== #
#
# The MIT License (MIT)
#
# Copyright (c) 2014 SkyTruth
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# =========================================================================== #
"""
Unittests for pelagos_processing.raw
"""
from __future__ import unicode_literals
import os
from os.path import isfile
import unittest
from pelagos_processing import raw
from pelagos_processing.tests import testdata
class DevNull(object):
@staticmethod
    def write(*args, **kwargs):
pass
class TestCatFiles(unittest.TestCase):
def setUp(self):
self.input_files = (testdata.cat1, testdata.cat2, testdata.cat3, testdata.cat4)
self.test_file = '.TestCatFiles_standard--a--.csv.ext'
if isfile(self.test_file):
os.remove(self.test_file)
def tearDown(self):
if isfile(self.test_file):
os.remove(self.test_file)
def test_standard(self):
schema = ['uid', 'val']
expected = ','.join(schema) + os.linesep
for ifile in self.input_files:
with open(ifile) as f:
for line in f:
expected += line
self.assertTrue(raw.cat_files(self.input_files, self.test_file, schema=schema, write_mode='w'))
with open(self.test_file) as f:
actual = f.read()
self.assertEqual(expected, actual)
def test_skipline(self):
skip = 1
schema = 'uid,val'
expected = schema + os.linesep
for ifile in self.input_files:
with open(ifile) as f:
for sl in range(skip):
f.next()
for line in f:
expected += line
self.assertTrue(raw.cat_files(self.input_files, self.test_file, schema=schema, write_mode='w', skip_lines=skip))
with open(self.test_file) as f:
actual = f.read()
self.assertEqual(expected, actual)
def test_exceptions(self):
self.assertRaises(ValueError, raw.cat_files, *[self.input_files, self.test_file], **{'skip_lines': -1})
self.assertRaises(ValueError, raw.cat_files, *[self.input_files, self.test_file], **{'skip_lines': None})
self.assertRaises(TypeError, raw.cat_files, *[self.input_files, self.test_file], **{'schema': 1.23})
self.assertRaises(IOError, raw.cat_files, *[['I-DO_NOT_|EXIST'], self.test_file])
| [
"os.path.isfile",
"pelagos_processing.raw.cat_files",
"os.remove"
] | [((1924, 1946), 'os.path.isfile', 'isfile', (['self.test_file'], {}), '(self.test_file)\n', (1930, 1946), False, 'from os.path import isfile\n'), ((2022, 2044), 'os.path.isfile', 'isfile', (['self.test_file'], {}), '(self.test_file)\n', (2028, 2044), False, 'from os.path import isfile\n'), ((1960, 1985), 'os.remove', 'os.remove', (['self.test_file'], {}), '(self.test_file)\n', (1969, 1985), False, 'import os\n'), ((2058, 2083), 'os.remove', 'os.remove', (['self.test_file'], {}), '(self.test_file)\n', (2067, 2083), False, 'import os\n'), ((2363, 2441), 'pelagos_processing.raw.cat_files', 'raw.cat_files', (['self.input_files', 'self.test_file'], {'schema': 'schema', 'write_mode': '"""w"""'}), "(self.input_files, self.test_file, schema=schema, write_mode='w')\n", (2376, 2441), False, 'from pelagos_processing import raw\n'), ((2905, 3005), 'pelagos_processing.raw.cat_files', 'raw.cat_files', (['self.input_files', 'self.test_file'], {'schema': 'schema', 'write_mode': '"""w"""', 'skip_lines': 'skip'}), "(self.input_files, self.test_file, schema=schema, write_mode=\n 'w', skip_lines=skip)\n", (2918, 3005), False, 'from pelagos_processing import raw\n')] |
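# A minimal usage sketch of the function under test above. The file names are
# hypothetical; the arguments mirror those exercised in test_standard and
# test_skipline.
from pelagos_processing import raw

raw.cat_files(
    ['part1.csv', 'part2.csv'],  # input files, concatenated in order
    'combined.csv',              # output file
    schema=['uid', 'val'],       # written first as a header row
    write_mode='w',
    skip_lines=1,                # lines to drop from the top of each input file
)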
import json
import pathlib
file_path = str(pathlib.Path(__file__).parent.resolve())
with open(file_path + '/maps.json',"r") as file:
json_obj = json.load(file)
tempMapLayout = {"map_layout":{}}
for map in json_obj["map_layout"]:
chunk = int(map[5:])
if (chunk >= 464):
chunk +=29
tempMapLayout["map_layout"][f"Index{chunk}"] = json_obj["map_layout"][map]
new_object = json.dumps(tempMapLayout)
with open(file_path + "/newmaps.json","w") as file:
file.write(new_object)
print(tempMapLayout)
| [
"json.load",
"json.dumps",
"pathlib.Path"
] | [((407, 432), 'json.dumps', 'json.dumps', (['tempMapLayout'], {}), '(tempMapLayout)\n', (417, 432), False, 'import json\n'), ((155, 170), 'json.load', 'json.load', (['file'], {}), '(file)\n', (164, 170), False, 'import json\n'), ((44, 66), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (56, 66), False, 'import pathlib\n')] |
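# A quick illustration of the renumbering above (keys are hypothetical):
#   "Index463" -> "Index463"   (chunks below 464 keep their index)
#   "Index464" -> "Index493"   (chunks at 464 and above are shifted by +29)
#   "Index500" -> "Index529"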
import os
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
import pandas as pd
from powersimdata.network.model import ModelImmutables
from powersimdata.scenario.check import _check_scenario_is_in_analyze_state
from postreise.analyze.demand import get_demand_time_series, get_net_demand_time_series
from postreise.analyze.generation.capacity import (
get_capacity_by_resources,
get_storage_capacity,
)
from postreise.analyze.generation.curtailment import get_curtailment_time_series
from postreise.analyze.generation.summarize import (
get_generation_time_series_by_resources,
get_storage_time_series,
)
from postreise.analyze.time import (
change_time_zone,
resample_time_series,
slice_time_series,
)
def plot_generation_time_series_stack(
scenario,
area,
resources,
area_type=None,
time_range=None,
time_zone="utc",
time_freq="H",
show_demand=True,
show_net_demand=True,
normalize=False,
t2c=None,
t2l=None,
t2hc=None,
title=None,
label_fontsize=20,
title_fontsize=22,
tick_fontsize=15,
legend_fontsize=18,
save=False,
filename=None,
filepath=None,
):
"""Generate time series generation stack plot in a certain area of a scenario.
:param powersimdata.scenario.scenario.Scenario scenario: scenario instance
:param str area: one of *loadzone*, *state*, *state abbreviation*,
*interconnect*, *'all'*
:param str/list resources: one or a list of resources. *'solar_curtailment'*,
*'wind_curtailment'*, *'wind_offshore_curtailment'* are valid entries together
with all available generator types in the area. The order of the resources
determines the stack order in the figure.
:param str area_type: one of *'loadzone'*, *'state'*, *'state_abbr'*,
*'interconnect'*
:param tuple time_range: [start_timestamp, end_timestamp] where each time stamp
is pandas.Timestamp/numpy.datetime64/datetime.datetime. If None, the entire
time range is used for the given scenario.
:param str time_zone: new time zone.
:param str time_freq: frequency. Either *'D'* (day), *'W'* (week), *'M'* (month).
:param bool show_demand: show demand line in the plot or not, default is True.
:param bool show_net_demand: show net demand line in the plot or not, default is
True.
:param bool normalize: normalize the generation based on capacity or not,
default is False.
:param dict t2c: user specified color of resource type to overwrite type2color
default dict. key: resource type, value: color code.
:param dict t2l: user specified label of resource type to overwrite type2label
default dict. key: resource type, value: label.
:param dict t2hc: user specified color of curtailable resource hatches to overwrite
type2hatchcolor default dict. key: resource type, valid keys are
*'wind_curtailment'*, *'solar_curtailment'*, *'wind_offshore_curtailment'*,
value: color code.
:param str title: user specified title of the figure, default is set to be area.
:param float label_fontsize: user specified label fontsize, default is 20.
:param float title_fontsize: user specified title fontsize, default is 22.
:param float tick_fontsize: user specified ticks of axes fontsize, default is 15.
:param float legend_fontsize: user specified legend fontsize, default is 18.
:param bool save: save the generated figure or not, default is False.
:param str filename: if save is True, user specified filename, use area if None.
:param str filepath: if save is True, user specified filepath, use current
directory if None.
"""
_check_scenario_is_in_analyze_state(scenario)
mi = ModelImmutables(scenario.info["grid_model"])
type2color = mi.plants["type2color"]
type2label = mi.plants["type2label"]
type2hatchcolor = mi.plants["type2hatchcolor"]
if t2c:
type2color.update(t2c)
if t2l:
type2label.update(t2l)
if t2hc:
type2hatchcolor.update(t2hc)
pg_stack = get_generation_time_series_by_resources(
scenario, area, resources, area_type=area_type
)
capacity = get_capacity_by_resources(scenario, area, resources, area_type=area_type)
demand = get_demand_time_series(scenario, area, area_type=area_type)
net_demand = get_net_demand_time_series(scenario, area, area_type=area_type)
capacity_ts = pd.Series(capacity.sum(), index=pg_stack.index)
curtailable_resources = {
"solar_curtailment",
"wind_curtailment",
"wind_offshore_curtailment",
}
if curtailable_resources & set(resources):
curtailment = get_curtailment_time_series(scenario, area, area_type=area_type)
for r in curtailable_resources:
if r in resources and r in curtailment.columns:
pg_stack[r] = curtailment[r]
pg_stack = change_time_zone(pg_stack, time_zone)
demand = change_time_zone(demand, time_zone)
net_demand = change_time_zone(net_demand, time_zone)
capacity_ts = change_time_zone(capacity_ts, time_zone)
if not time_range:
time_range = (
pd.Timestamp(scenario.info["start_date"], tz="utc"),
pd.Timestamp(scenario.info["end_date"], tz="utc"),
)
pg_stack = slice_time_series(pg_stack, time_range[0], time_range[1])
demand = slice_time_series(demand, time_range[0], time_range[1])
net_demand = slice_time_series(net_demand, time_range[0], time_range[1])
capacity_ts = slice_time_series(capacity_ts, time_range[0], time_range[1])
if time_freq != "H":
pg_stack = resample_time_series(pg_stack, time_freq)
demand = resample_time_series(demand, time_freq)
net_demand = resample_time_series(net_demand, time_freq)
capacity_ts = resample_time_series(capacity_ts, time_freq)
if "storage" in resources:
pg_storage = get_storage_time_series(scenario, area, area_type=area_type)
capacity_storage = get_storage_capacity(scenario, area, area_type=area_type)
capacity_storage_ts = pd.Series(capacity_storage, index=pg_storage.index)
pg_storage = change_time_zone(pg_storage, time_zone)
capacity_storage_ts = change_time_zone(capacity_storage_ts, time_zone)
pg_storage = slice_time_series(pg_storage, time_range[0], time_range[1])
capacity_storage_ts = slice_time_series(
capacity_storage_ts, time_range[0], time_range[1]
)
if time_freq != "H":
pg_storage = resample_time_series(pg_storage, time_freq)
capacity_storage_ts = resample_time_series(capacity_storage_ts, time_freq)
pg_stack["storage"] = pg_storage.clip(lower=0)
capacity_ts += capacity_storage_ts
fig, (ax, ax_storage) = plt.subplots(
2,
1,
figsize=(20, 15),
sharex="row",
gridspec_kw={"height_ratios": [3, 1], "hspace": 0.02},
)
plt.subplots_adjust(wspace=0)
if normalize:
pg_storage = pg_storage.divide(capacity_storage_ts, axis="index")
ax_storage.set_ylabel("Normalized Storage", fontsize=label_fontsize)
else:
ax_storage.set_ylabel("Energy Storage (MW)", fontsize=label_fontsize)
pg_storage.plot(color=type2color["storage"], lw=4, ax=ax_storage)
ax_storage.fill_between(
pg_storage.index,
0,
pg_storage.values,
color=type2color["storage"],
alpha=0.5,
)
# Erase year in xticklabels
xt_with_year = list(ax_storage.__dict__["date_axis_info"][0])
xt_with_year[-1] = b"%b"
ax_storage.__dict__["date_axis_info"][0] = tuple(xt_with_year)
ax_storage.tick_params(axis="both", which="both", labelsize=tick_fontsize)
ax_storage.set_xlabel("")
for a in fig.get_axes():
a.label_outer()
else:
fig = plt.figure(figsize=(20, 10))
ax = fig.gca()
if normalize:
pg_stack = pg_stack.divide(capacity_ts, axis="index")
demand = demand.divide(capacity_ts, axis="index")
net_demand = net_demand.divide(capacity_ts, axis="index")
ax.set_ylabel("Normalized Generation", fontsize=label_fontsize)
else:
pg_stack = pg_stack.divide(1e6, axis="index")
demand = demand.divide(1e6, axis="index")
net_demand = net_demand.divide(1e6, axis="index")
ax.set_ylabel("Daily Energy TWh", fontsize=label_fontsize)
available_resources = [r for r in resources if r in pg_stack.columns]
pg_stack[available_resources].clip(0, None).plot.area(
color=type2color, linewidth=0, alpha=0.7, ax=ax, sharex="row"
)
if show_demand:
demand.plot(color="red", lw=4, ax=ax)
if show_net_demand:
net_demand.plot(color="red", ls="--", lw=2, ax=ax)
if not title:
title = area
ax.set_title("%s" % title, fontsize=title_fontsize)
ax.grid(color="black", axis="y")
if "storage" not in resources:
# Erase year in xticklabels
xt_with_year = list(ax.__dict__["date_axis_info"][0])
xt_with_year[-1] = b"%b"
ax.__dict__["date_axis_info"][0] = tuple(xt_with_year)
ax.set_xlabel("")
ax.tick_params(which="both", labelsize=tick_fontsize)
ax.set_ylim(
[
min(0, 1.1 * net_demand.min()),
max(ax.get_ylim()[1], 1.1 * demand.max()),
]
)
handles, labels = ax.get_legend_handles_labels()
if show_demand:
labels[0] = "Demand"
if show_net_demand:
labels[1] = "Net Demand"
label_offset = show_demand + show_net_demand
labels = [type2label[l] if l in type2label else l for l in labels]
# Add hatches
for r in curtailable_resources:
if r in available_resources:
ind = available_resources.index(r)
ax.fill_between(
pg_stack[available_resources].index,
pg_stack[available_resources].iloc[:, : ind + 1].sum(axis=1),
pg_stack[available_resources].iloc[:, :ind].sum(axis=1),
color="none",
hatch="//",
edgecolor=type2hatchcolor[r],
linewidth=0.0,
)
handles[ind + label_offset] = mpatches.Patch(
facecolor=type2color[r],
hatch="//",
edgecolor=type2hatchcolor[r],
linewidth=0.0,
)
ax.legend(
handles[::-1],
labels[::-1],
frameon=2,
prop={"size": legend_fontsize},
loc="upper left",
bbox_to_anchor=(1, 1),
)
if save:
if not filename:
filename = area
if not filepath:
filepath = os.path.join(os.getcwd(), filename)
plt.savefig(f"{filepath}.pdf", bbox_inches="tight", pad_inches=0)
| [
"postreise.analyze.time.slice_time_series",
"postreise.analyze.demand.get_demand_time_series",
"postreise.analyze.demand.get_net_demand_time_series",
"powersimdata.network.model.ModelImmutables",
"postreise.analyze.generation.capacity.get_storage_capacity",
"powersimdata.scenario.check._check_scenario_is_in_analyze_state",
"matplotlib.pyplot.savefig",
"postreise.analyze.time.change_time_zone",
"postreise.analyze.generation.summarize.get_storage_time_series",
"matplotlib.patches.Patch",
"matplotlib.pyplot.subplots_adjust",
"pandas.Series",
"postreise.analyze.time.resample_time_series",
"postreise.analyze.generation.capacity.get_capacity_by_resources",
"postreise.analyze.generation.summarize.get_generation_time_series_by_resources",
"os.getcwd",
"matplotlib.pyplot.figure",
"postreise.analyze.generation.curtailment.get_curtailment_time_series",
"pandas.Timestamp",
"matplotlib.pyplot.subplots"
] | [((3731, 3776), 'powersimdata.scenario.check._check_scenario_is_in_analyze_state', '_check_scenario_is_in_analyze_state', (['scenario'], {}), '(scenario)\n', (3766, 3776), False, 'from powersimdata.scenario.check import _check_scenario_is_in_analyze_state\n'), ((3787, 3831), 'powersimdata.network.model.ModelImmutables', 'ModelImmutables', (["scenario.info['grid_model']"], {}), "(scenario.info['grid_model'])\n", (3802, 3831), False, 'from powersimdata.network.model import ModelImmutables\n'), ((4117, 4208), 'postreise.analyze.generation.summarize.get_generation_time_series_by_resources', 'get_generation_time_series_by_resources', (['scenario', 'area', 'resources'], {'area_type': 'area_type'}), '(scenario, area, resources,\n area_type=area_type)\n', (4156, 4208), False, 'from postreise.analyze.generation.summarize import get_generation_time_series_by_resources, get_storage_time_series\n'), ((4234, 4307), 'postreise.analyze.generation.capacity.get_capacity_by_resources', 'get_capacity_by_resources', (['scenario', 'area', 'resources'], {'area_type': 'area_type'}), '(scenario, area, resources, area_type=area_type)\n', (4259, 4307), False, 'from postreise.analyze.generation.capacity import get_capacity_by_resources, get_storage_capacity\n'), ((4321, 4380), 'postreise.analyze.demand.get_demand_time_series', 'get_demand_time_series', (['scenario', 'area'], {'area_type': 'area_type'}), '(scenario, area, area_type=area_type)\n', (4343, 4380), False, 'from postreise.analyze.demand import get_demand_time_series, get_net_demand_time_series\n'), ((4398, 4461), 'postreise.analyze.demand.get_net_demand_time_series', 'get_net_demand_time_series', (['scenario', 'area'], {'area_type': 'area_type'}), '(scenario, area, area_type=area_type)\n', (4424, 4461), False, 'from postreise.analyze.demand import get_demand_time_series, get_net_demand_time_series\n'), ((4954, 4991), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['pg_stack', 'time_zone'], {}), '(pg_stack, time_zone)\n', (4970, 4991), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5005, 5040), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['demand', 'time_zone'], {}), '(demand, time_zone)\n', (5021, 5040), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5058, 5097), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['net_demand', 'time_zone'], {}), '(net_demand, time_zone)\n', (5074, 5097), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5116, 5156), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['capacity_ts', 'time_zone'], {}), '(capacity_ts, time_zone)\n', (5132, 5156), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5356, 5413), 'postreise.analyze.time.slice_time_series', 'slice_time_series', (['pg_stack', 'time_range[0]', 'time_range[1]'], {}), '(pg_stack, time_range[0], time_range[1])\n', (5373, 5413), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5427, 5482), 'postreise.analyze.time.slice_time_series', 'slice_time_series', (['demand', 'time_range[0]', 'time_range[1]'], {}), '(demand, time_range[0], time_range[1])\n', (5444, 5482), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5500, 5559), 
'postreise.analyze.time.slice_time_series', 'slice_time_series', (['net_demand', 'time_range[0]', 'time_range[1]'], {}), '(net_demand, time_range[0], time_range[1])\n', (5517, 5559), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5578, 5638), 'postreise.analyze.time.slice_time_series', 'slice_time_series', (['capacity_ts', 'time_range[0]', 'time_range[1]'], {}), '(capacity_ts, time_range[0], time_range[1])\n', (5595, 5638), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((4728, 4792), 'postreise.analyze.generation.curtailment.get_curtailment_time_series', 'get_curtailment_time_series', (['scenario', 'area'], {'area_type': 'area_type'}), '(scenario, area, area_type=area_type)\n', (4755, 4792), False, 'from postreise.analyze.generation.curtailment import get_curtailment_time_series\n'), ((5683, 5724), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['pg_stack', 'time_freq'], {}), '(pg_stack, time_freq)\n', (5703, 5724), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5742, 5781), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['demand', 'time_freq'], {}), '(demand, time_freq)\n', (5762, 5781), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5803, 5846), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['net_demand', 'time_freq'], {}), '(net_demand, time_freq)\n', (5823, 5846), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5869, 5913), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['capacity_ts', 'time_freq'], {}), '(capacity_ts, time_freq)\n', (5889, 5913), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((5967, 6027), 'postreise.analyze.generation.summarize.get_storage_time_series', 'get_storage_time_series', (['scenario', 'area'], {'area_type': 'area_type'}), '(scenario, area, area_type=area_type)\n', (5990, 6027), False, 'from postreise.analyze.generation.summarize import get_generation_time_series_by_resources, get_storage_time_series\n'), ((6055, 6112), 'postreise.analyze.generation.capacity.get_storage_capacity', 'get_storage_capacity', (['scenario', 'area'], {'area_type': 'area_type'}), '(scenario, area, area_type=area_type)\n', (6075, 6112), False, 'from postreise.analyze.generation.capacity import get_capacity_by_resources, get_storage_capacity\n'), ((6143, 6194), 'pandas.Series', 'pd.Series', (['capacity_storage'], {'index': 'pg_storage.index'}), '(capacity_storage, index=pg_storage.index)\n', (6152, 6194), True, 'import pandas as pd\n'), ((6217, 6256), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['pg_storage', 'time_zone'], {}), '(pg_storage, time_zone)\n', (6233, 6256), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((6287, 6335), 'postreise.analyze.time.change_time_zone', 'change_time_zone', (['capacity_storage_ts', 'time_zone'], {}), '(capacity_storage_ts, time_zone)\n', (6303, 6335), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((6357, 6416), 'postreise.analyze.time.slice_time_series', 'slice_time_series', (['pg_storage', 'time_range[0]', 'time_range[1]'], {}), '(pg_storage, time_range[0], 
time_range[1])\n', (6374, 6416), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((6447, 6515), 'postreise.analyze.time.slice_time_series', 'slice_time_series', (['capacity_storage_ts', 'time_range[0]', 'time_range[1]'], {}), '(capacity_storage_ts, time_range[0], time_range[1])\n', (6464, 6515), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((6855, 6965), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {'figsize': '(20, 15)', 'sharex': '"""row"""', 'gridspec_kw': "{'height_ratios': [3, 1], 'hspace': 0.02}"}), "(2, 1, figsize=(20, 15), sharex='row', gridspec_kw={\n 'height_ratios': [3, 1], 'hspace': 0.02})\n", (6867, 6965), True, 'import matplotlib.pyplot as plt\n'), ((7040, 7069), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0)'}), '(wspace=0)\n', (7059, 7069), True, 'import matplotlib.pyplot as plt\n'), ((8019, 8047), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)'}), '(figsize=(20, 10))\n', (8029, 8047), True, 'import matplotlib.pyplot as plt\n'), ((10898, 10963), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{filepath}.pdf"""'], {'bbox_inches': '"""tight"""', 'pad_inches': '(0)'}), "(f'{filepath}.pdf', bbox_inches='tight', pad_inches=0)\n", (10909, 10963), True, 'import matplotlib.pyplot as plt\n'), ((5215, 5266), 'pandas.Timestamp', 'pd.Timestamp', (["scenario.info['start_date']"], {'tz': '"""utc"""'}), "(scenario.info['start_date'], tz='utc')\n", (5227, 5266), True, 'import pandas as pd\n'), ((5280, 5329), 'pandas.Timestamp', 'pd.Timestamp', (["scenario.info['end_date']"], {'tz': '"""utc"""'}), "(scenario.info['end_date'], tz='utc')\n", (5292, 5329), True, 'import pandas as pd\n'), ((6592, 6635), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['pg_storage', 'time_freq'], {}), '(pg_storage, time_freq)\n', (6612, 6635), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((6670, 6722), 'postreise.analyze.time.resample_time_series', 'resample_time_series', (['capacity_storage_ts', 'time_freq'], {}), '(capacity_storage_ts, time_freq)\n', (6690, 6722), False, 'from postreise.analyze.time import change_time_zone, resample_time_series, slice_time_series\n'), ((10380, 10481), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'facecolor': 'type2color[r]', 'hatch': '"""//"""', 'edgecolor': 'type2hatchcolor[r]', 'linewidth': '(0.0)'}), "(facecolor=type2color[r], hatch='//', edgecolor=\n type2hatchcolor[r], linewidth=0.0)\n", (10394, 10481), True, 'import matplotlib.patches as mpatches\n'), ((10867, 10878), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (10876, 10878), False, 'import os\n')] |
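# A hypothetical usage sketch of the function above. The scenario id, area and
# resource names are example values only; Scenario is the powersimdata class
# referenced in the docstring.
from powersimdata.scenario.scenario import Scenario

scenario = Scenario('403')  # must already be in the analyze state
plot_generation_time_series_stack(
    scenario,
    area='Texas',
    resources=['nuclear', 'coal', 'ng', 'hydro', 'wind', 'solar'],
    time_zone='US/Central',
    time_freq='D',
    save=True,
    filename='texas_generation_stack',
)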
from guifw.abstractparameters import *
from gcode import *
from PyQt5 import QtGui, QtWidgets
import datetime
import geometry
import traceback
class PathTool(ItemWithParameters):
def __init__(self, path=None, model=None, viewUpdater=None, tool=None, source=None, **kwargs):
ItemWithParameters.__init__(self, **kwargs)
if path is None and self.name.getValue()=="-":
            filename = QtWidgets.QFileDialog.getOpenFileName(None, 'Open file', '', "GCode files (*.ngc)")  # QFileDialog lives in QtWidgets under PyQt5
self.path = read_gcode(filename[0])
else:
self.path=path
self.viewUpdater=viewUpdater
self.outpaths=[self.path]
self.model=model
self.source = source
self.tool = tool
self.steppingAxis = 2
feedrate=1000
if self.tool is not None:
feedrate =self.tool.feedrate.getValue()
startdepth=0
enddepth=0
outputFile = "gcode/output.ngc"
if model !=None:
startdepth=model.maxv[2]
enddepth=model.minv[2]
if model.filename is not None:
outputFile = model.filename.split(".stl")[0] + ".ngc"
else:
#print self.path.path
try:
if self.path is not None and self.path.getPathLength()>0:
startdepth=max([p.position[2] for p in self.path.get_draw_path() if p.position is not None])
enddepth=min([p.position[2] for p in self.path.get_draw_path() if p.position is not None])
except Exception as e:
print("path error:", e)
traceback.print_exc()
self.startDepth=NumericalParameter(parent=self, name='start depth', value=startdepth, enforceRange=False, step=1)
self.stopDepth=NumericalParameter(parent=self, name='end depth ', value=enddepth, enforceRange=0, step=1)
self.maxDepthStep=NumericalParameter(parent=self, name='max. depth step', value=10.0, min=0.1, max=100, step=1)
self.rampdown=NumericalParameter(parent=self, name='rampdown per loop (0=off)', value=0.1, min=0.0, max=10, step=0.01)
self.traverseHeight=NumericalParameter(parent=self, name='traverse height', value=startdepth+5.0, enforceRange=False, step=1.0)
self.laser_mode = NumericalParameter(parent=self, name='laser mode (power 0-100)', value=0.0, min=0.0, max=100.0, enforceRange=True, step=1.0, callback = self.updateView)
self.depthStepping=ActionParameter(parent=self, name='Depth ramping', callback=self.applyDepthStep)
self.depthSteppingRelRamp = CheckboxParameter(parent=self, name='relative ramping')
self.tabs = NumericalParameter(parent=self, name='Tabs per contour', value=0, min=0, max=20, step=1)
self.tabwidth = NumericalParameter(parent=self, name='Tab width', value=1, min=0, max=20, step=0.1)
self.tabheight = NumericalParameter(parent=self, name='Tab height', value=0.5, min=0, max=20, step=0.1)
self.removeNonCutting=ActionParameter(parent=self, name='Remove non-cutting points', callback=self.removeNoncuttingPoints)
self.invertPath=ActionParameter(parent=self, name='invert path', callback=self.applyInvertPath)
self.clean=ActionParameter(parent=self, name='clean paths', callback=self.cleanColinear)
self.smooth=ActionParameter(parent=self, name='smooth path', callback=self.fitArcs)
self.precision = NumericalParameter(parent=self, name='precision', value=0.005, min=0.001, max=1, step=0.001)
self.trochoidalDiameter=NumericalParameter(parent=self, name='tr. diameter', value=3.0, min=0.0, max=100, step=0.1)
self.trochoidalStepover=NumericalParameter(parent=self, name='tr. stepover', value=1.0, min=0.1, max=5, step=0.1)
self.trochoidalOrder=NumericalParameter(parent=self, name='troch. order', value=0.0, min=0, max=100000, step=1)
self.trochoidalSkip=NumericalParameter(parent=self, name='skip', value=1.0, min=1, max=100000, step=1)
self.trochoidalOuterDist=NumericalParameter(parent=self, name='outer dist', value=1.0, min=0, max=100000, step=1)
self.trochoidalMilling = ActionParameter(parent=self, name='trochoidal', callback=self.calcTrochoidalMilling)
self.feedrate=NumericalParameter(parent=self, name='default feedrate', value=feedrate, min=1, max=5000, step=10, callback=self.updateView)
self.plunge_feedrate = NumericalParameter(parent=self, name='plunge feedrate', value=feedrate/2.0, min=1, max=5000,
step=10, callback=self.updateView)
self.filename=TextParameter(parent=self, name="output filename", value=outputFile)
self.saveButton=ActionParameter(parent=self, name='Save to file', callback=self.save)
self.appendButton=ActionParameter(parent=self, name='append from file', callback=self.appendFromFile)
self.estimatedTime=TextParameter(parent=self, name='est. time', editable=False)
self.estimatedDistance=TextParameter(parent=self, name='distance', editable=False)
self.parameters=[self.startDepth,
self.stopDepth,
self.maxDepthStep,
self.rampdown,
self.traverseHeight,
self.laser_mode,
[self.depthStepping, self.depthSteppingRelRamp],
[self.tabs, self.tabwidth, self.tabheight],
[self.removeNonCutting,
self.invertPath],
[self.clean, self.smooth, self.precision],
[self.trochoidalDiameter, self.trochoidalStepover],
[self.trochoidalOrder, self.trochoidalSkip],
self.trochoidalOuterDist ,
self.trochoidalMilling,
self.feedrate, self.plunge_feedrate,
self.filename,
self.saveButton,
self.appendButton,
self.estimatedTime,
self.estimatedDistance]
self.updateView()
def updatePath(self, path):
self.path = path
self.outpaths=[self.path]
self.updateView()
def applyInvertPath(self):
if len(self.outpaths)==0:
self.path.outpaths=GCode()
self.outpaths.combinePath(self.path.path)
inpath = self.outpaths
pathlet = []
invertedPath = []
preamble = []
for path in inpath:
for p in path.path:
if p.position is not None:
break
else:
print("pre:", p.to_output())
preamble.append(p)
invertedPath+=preamble
for path in reversed(inpath):
for p in reversed(path.get_draw_path(interpolate_arcs=True)):
                if p.position is not None: #only append positional points
                    # print("point:", p.to_output())  # debug; the original bare expression here was a no-op
                    invertedPath.append(p)
self.outpaths = [GCode(path=invertedPath)]
self.updateView()
def cleanColinear(self):
if len(self.outpaths)==0:
self.path.outpaths=GCode()
self.path.outpaths.combinePath(self.path.path)
inpath=self.outpaths
precision = self.precision.getValue()
smoothPath = []
pathlet = []
for path in inpath:
for p in path.path:
pathlet.append(p)
if len(pathlet)<=2:
continue
# check for colinearity
max_error, furthest_point = path_colinear_error([p.position for p in pathlet])
if max_error< precision:
# if colinear, keep going
print("colinear:", len(pathlet), max_error)
pass
else: #if not colinear, check if the problem is at start or end
if len(pathlet)==3: # line doesn't start colinearly - drop first point
print("drop point")
smoothPath.append(pathlet.pop(0))
else: # last point breaks colinearity - append segment up to second-last point
print("append shortened path", len(pathlet), max_error, furthest_point)
smoothPath.append(pathlet[0])
smoothPath.append(pathlet[-2])
pathlet = pathlet[-1:]
smoothPath+=pathlet # append last remaining points
self.outpaths=[GCode(path=smoothPath)]
self.updateView()
def fitArcs(self, dummy=False, min_point_count = 5, max_radius = 1000.0):
if len(self.outpaths)==0:
self.path.outpaths=GCode()
self.path.outpaths.combinePath(self.path.path)
inpath=self.outpaths
print("min point count", min_point_count)
precision = self.precision.getValue()
smoothPath = []
pathlet = []
center = None
radius = 0
direction = "02"
for path in inpath:
for p in path.path:
if p.position is None:
continue
if len(pathlet) < 3: #need at least 3 points to start circle
pathlet.append(p)
# compute center with the first 3 points
elif len(pathlet)==3:
#check if points are in horizontal plane
if pathlet[0].position[2] == pathlet[1].position[2] and pathlet[1].position[2]==pathlet[2].position[2]:
center, radius = findCircleCenter(pathlet[0].position, pathlet[1].position, pathlet[2].position)
else:
center = None
if center is not None:
radius = dist(center, pathlet[0].position)
# check if points are colinear or not in plane, and drop first point
if center is None or radius > max_radius:
print("colinear, drop point")
smoothPath.append(pathlet.pop(0))
center=None
pathlet.append(p)
print(len(pathlet))
else:
# check if following points are also on the same arc
new_center, new_radius = findCircleCenter(pathlet[0].position, pathlet[int(len(pathlet) / 2)].position, p.position)
midpoints = [mid_point(pathlet[i].position, pathlet[i+1].position) for i in range(0, len(pathlet)-1)]
midpoints.append(mid_point(pathlet[-1].position, p.position))
#if abs(dist(p.position, center) - radius) < precision and \
if new_center is not None and \
p.position[2] == pathlet[0].position[2] and\
max([dist(mp, new_center)-new_radius for mp in midpoints]) < precision and \
max([abs(dist(ap.position, new_center) - new_radius) for ap in pathlet]) < precision and\
scapro(diff(pathlet[0].position, center), diff(p.position,center))>0.5:
center = new_center
radius = new_radius
pathlet.append(p)
else:
if len(pathlet)>min_point_count:
# create arc
print("making arc", len(pathlet))
#center_side = scapro(diff(pathlet[int(len(pathlet)/2)].position, pathlet[0].position), diff(center, pathlet[0].position))
center_side = isLeft(pathlet[0].position, pathlet[int(len(pathlet)/2)].position, center)
if center_side < 0:
direction = "02"
print(direction, center_side)
else:
direction = "03"
print(direction, center_side)
arc = GArc(position = pathlet[-1].position,
ij = [center[0] - pathlet[0].position[0], center[1]-pathlet[0].position[1]],
arcdir = direction)
smoothPath.append(pathlet[0])
smoothPath.append(arc)
center = None
pathlet = [p]
else:
#print("not arc, flush", len(pathlet))
smoothPath+=pathlet
pathlet=[p]
center = None
smoothPath+=pathlet # append last remaining points
self.outpaths=[GCode(path=smoothPath)]
self.updateView()
def getCompletePath(self):
completePath = GCode(path=[])
completePath.default_feedrate=self.feedrate.getValue()
completePath.laser_mode = (self.laser_mode.getValue() > 0.1)
completePath.laser_power = self.laser_mode.getValue()*10
print("gCP lasermode", completePath.laser_mode, self.laser_mode.getValue())
for path in self.outpaths:
completePath.combinePath(path)
return completePath
def updateView(self, val=None):
#for line in traceback.format_stack():
# print(line.strip())
if self.viewUpdater!=None:
print("pt:", self.tool)
try:
self.viewUpdater(self.getCompletePath(), tool=self.tool)
except Exception as e:
print(e)
self.updateEstimate()
def updateEstimate(self, val=None):
if self.path is None:
return
self.path.default_feedrate = self.feedrate.getValue()
estimate = None
estimate = self.getCompletePath().estimate()
print(estimate)
self.estimatedTime.updateValue("%s (%s)"%(str(datetime.timedelta(seconds=int(estimate[1]*60))),
str(datetime.timedelta(seconds=int(estimate[5]*60)))))
self.estimatedDistance.updateValue("{:.1f} (c {:.0f})".format(estimate[0], estimate[3], estimate[4]))
def appendFromFile(self):
filename= QtGui.QFileDialog.getOpenFileName(None, 'Open file', '', "GCode files (*.ngc)")
new_path =read_gcode(filename)
self.path.appendPath(new_path)
self.outpaths = [self.path]
self.updateView()
def save(self):
completePath=self.getCompletePath()
completePath.default_feedrate=self.feedrate.getValue()
completePath.laser_mode = (self.laser_mode.getValue()>0.5)
completePath.write(self.filename.getValue())
self.updateEstimate()
def segmentPath(self, path):
buffered_points = [] # points that need to be finished after rampdown
# split into segments of closed loops, or separated by rapids
segments = []
for p in path:
# buffer points to detect closed loops (for ramp-down)
if p.position is not None:
if p.rapid: #flush at rapids
if len(buffered_points)>0:
segments.append(buffered_points)
buffered_points = []
buffered_points.append(p)
# detect closed loops,
if (len(buffered_points) > 2 and dist2D(buffered_points[0].position, p.position) < 0.00001):
segments.append(buffered_points)
buffered_points = []
if len(buffered_points)>0:
segments.append(buffered_points)
buffered_points = []
return segments
def applyTabbing(self, segment, tabs, tabwidth, tabheight):
seg_len = polygon_closed_length2D(segment)
if seg_len<=tabs*tabwidth:
return
for i in range(0, tabs):
length = i * seg_len / tabs
#print(length, seg_len)
i1, p = polygon_point_at_position(segment, length)
height = p[2]
segment.insert(i1, GPoint(position = [x for x in p]))
p[2] = height + tabheight
segment.insert(i1+1, GPoint(position=[x for x in p]))
i2, p = polygon_point_at_position(segment, length+tabwidth)
# elevate all intermediate points
for i in range(i1+1, i2):
segment[i].position[2]=height+tabheight
p[2] = height + tabheight
segment.insert(i2, GPoint(position=[x for x in p]))
p[2] = height
segment.insert(i2+1, GPoint(position=[x for x in p]))
def applyRampDown(self, segment, previousCutDepth, currentDepthLimit, rampdown, relative_ramping = False, axis = 2, axis_scaling = 1):
lastPoint=None
output = []
if relative_ramping:
seg_len = polygon_closed_length2D(segment)
else:
seg_len = 1.0 # use constant for absolute ramping
print("segment length:", seg_len, "order:", segment[0].order, segment[0].dist_from_model)
#check if this is a closed segment:
if dist2D(segment[0].position, segment[-1].position)<0.0001:
# ramp "backwards" to reach target depth at start of segment
ramp = []
sl = len(segment)
pos = sl - 1
currentDepth = min([p.position[axis]/axis_scaling for p in segment]) #get deepest point in segment
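            # Build the ramp by walking the closed loop backwards from its end point,
            # raising the depth by `rampdown` per unit of travelled distance (divided by
            # the loop length when relative ramping is used); replayed in reverse below,
            # this gives a gradual descent that reaches full depth at the segment start.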
while currentDepth < previousCutDepth:
p = segment[pos]
# length of closed polygon perimeter
#ignore rapids during ramp-down
if not p.rapid:
nd = max(p.position[axis]/axis_scaling, currentDepthLimit)
is_in_contact = True
dist = dist2D(segment[pos].position, segment[(pos+1)%sl].position)
currentDepth += dist * (rampdown/seg_len) # spiral up
if (nd<currentDepth):
nd = currentDepth
is_in_contact=False
newpoint = [x for x in p.position]
newpoint[axis] = nd * axis_scaling
ramp.append(GPoint(position=newpoint, rapid=p.rapid,
inside_model=p.inside_model, in_contact=is_in_contact, axis_mapping = p.axis_mapping, axis_scaling=p.axis_scaling))
pos = (pos-1+sl) % sl
p=ramp[-1]
newpoint = [x for x in p.position]
newpoint[axis] = self.traverseHeight.getValue() * axis_scaling
output.append(GPoint(position=newpoint, rapid=True,
inside_model=p.inside_model, in_contact=False, axis_mapping = p.axis_mapping, axis_scaling=p.axis_scaling))
for p in reversed(ramp):
output.append(p)
for p in segment[1:]:
output.append(p)
p=segment[-1]
newpoint = [x for x in p.position]
newpoint[axis] = self.traverseHeight.getValue() * axis_scaling
output.append(GPoint(position=newpoint, rapid=True,
inside_model=p.inside_model, in_contact=False, axis_mapping = p.axis_mapping, axis_scaling=p.axis_scaling))
else: # for open segments, apply forward ramping
lastPoint = None
for p in segment:
nd = max(p.position[2], currentDepthLimit)
is_in_contact = True
# check if rampdown is active, and we're below previously cut levels, then limit plunge rate accordingly
if not p.rapid and rampdown != 0 and nd < previousCutDepth and lastPoint != None:
dist = dist2D(p.position, lastPoint.position)
lastPointDepth = min(lastPoint.position[axis]/axis_scaling, previousCutDepth)
                    if (lastPointDepth - nd) > dist * rampdown: # plunging too deeply - need to reduce depth for this point
                        nd = lastPointDepth - dist * rampdown
is_in_contact = False
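                        # e.g. (hypothetical numbers) with rampdown = 0.5 and a 2 mm XY move,
                        # the depth may increase by at most 1 mm relative to the previous
                        # point; the remaining material is left for a later pass.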
# buffer this point to finish closed path at currentDepthLimit
newpoint = [x for x in p.position]
newpoint[axis] = nd * axis_scaling
output.append(GPoint(position=newpoint, rapid=p.rapid,
inside_model=p.inside_model, in_contact=is_in_contact, axis_mapping = p.axis_mapping, axis_scaling=p.axis_scaling))
lastPoint = output[-1]
return output
def applyStepping(self, segment, currentDepthLimit, finished, axis = 2, axis_scaling = 1):
output = []
for p in segment:
# is_in_contact=p.in_contact
is_in_contact = True
nd = p.position[axis] / axis_scaling
if nd < currentDepthLimit:
nd = currentDepthLimit
                is_in_contact = False
finished = False
newpoint = [x for x in p.position]
newpoint[axis] = axis_scaling * nd
output.append(GPoint(position=newpoint, rapid=p.rapid,
inside_model=p.inside_model, in_contact=is_in_contact, axis_mapping = p.axis_mapping, axis_scaling=p.axis_scaling))
return output, finished
def applyDepthStep(self):
print("apply depth stepping")
self.outpaths=[]
finished=False
depthStep=self.maxDepthStep.getValue()
currentDepthLimit=self.startDepth.getValue()-depthStep
endDepth=self.stopDepth.getValue()
relRamping = self.depthSteppingRelRamp.getValue()
if currentDepthLimit<endDepth:
currentDepthLimit=endDepth
previousCutDepth=self.startDepth.getValue()
rampdown=self.rampdown.getValue()
lastPoint=None
# split into segments of closed loops, or separated by rapids
segments = self.segmentPath(self.path.path)
axis = self.path.steppingAxis
axis_scaling = self.path.path[0].axis_scaling[self.path.steppingAxis]
print("axis:", axis, "scaling: ",axis_scaling)
while not finished:
finished=True
newpath=[]
prev_segment = None
for s in segments:
segment_output, finished = self.applyStepping(segment = s, currentDepthLimit = currentDepthLimit, finished=finished, axis = axis,
axis_scaling = axis_scaling)
if (rampdown!=0) and len(segment_output)>3:
if prev_segment is None or closest_point_on_open_polygon(s[0].position, prev_segment)[0] > self.tool.diameter.getValue()/2.0:
segment_output = self.applyRampDown(segment_output, previousCutDepth, currentDepthLimit, rampdown, relRamping, self.path.steppingAxis, axis_scaling)
if self.tabs.getValue()>0:
self.applyTabbing(segment_output, self.tabs.getValue(), self.tabwidth.getValue(), self.tabheight.getValue())
for p in segment_output:
newpath.append(p)
if prev_segment is None:
prev_segment = [p.position for p in s]
else:
prev_segment += [p.position for p in s]
if currentDepthLimit<=endDepth:
finished=True
previousCutDepth=currentDepthLimit
currentDepthLimit-=depthStep
if currentDepthLimit<endDepth:
currentDepthLimit=endDepth
self.outpaths.append(GCode(newpath))
self.updateView()
def removeNoncuttingPoints(self):
new_paths=[]
skipping=False
for path_index, path in enumerate(self.outpaths):
if path_index==0:
new_paths.append(path)
else:
newpath=[]
for p_index, p in enumerate(path):
# check if previous layer already got in contact with final surface
if self.path.outpaths[path_index-1][p_index].in_contact:
if not skipping:
# skip point at safe traverse depth
newpath.append(GPoint(position=(p.position[0], p.position[1], self.traverseHeight.getValue()), rapid=True, inside_model=p.inside_model, in_contact=False))
skipping=True
else:
if skipping:
newpath.append(GPoint(position=(p.position[0], p.position[1], self.traverseHeight.getValue()), rapid=True, inside_model=p.inside_model, in_contact=p.in_contact))
skipping=False
#append point to new output
newpath.append(GPoint(position=(p.position[0], p.position[1], p.position[2]), rapid=p.rapid, inside_model=p.inside_model, in_contact=p.in_contact))
new_paths.append(GCode(newpath))
self.outpaths=new_paths
self.updateView()
def calcTrochoidalMilling(self):
new_paths=[]
lastPoint = None
radius = self.trochoidalDiameter.getValue()/2.0
distPerRev = self.trochoidalStepover.getValue()
rampdown=self.rampdown.getValue()
steps_per_rev = 50
stock_poly = None
if self.source is not None:
stock_poly = self.source.getStockPolygon()
#for path_index, path in enumerate(self.path.path):
newpath=[]
angle = 0
for p_index, p in enumerate(self.path.path):
# when plunging, check if we already cut this part before
cutting = True
plunging = False
for cp in self.path.path[0:p_index]:
if cp.position is None or p.position is None:
                    continue
if lastPoint is not None and lastPoint.position[2]>p.position[2] \
and geometry.dist(p.position, cp.position) < min(i for i in [radius, cp.dist_from_model] if i is not None ):
cutting = False
if p.rapid or p.order>self.trochoidalOrder.getValue() or p.dist_from_model< self.trochoidalOuterDist.getValue() or not cutting :
newpath.append(GPoint(position = (p.position), rapid = p.rapid, inside_model=p.inside_model, in_contact=p.in_contact))
else:
if p.order%self.trochoidalSkip.getValue()==0: #skip paths
if lastPoint is not None:
if lastPoint.position[2] > p.position[2]:
plunging = True
else:
plunging = False
dist=sqrt((p.position[0]-lastPoint.position[0])**2 + (p.position[1]-lastPoint.position[1])**2 + (p.position[2]-lastPoint.position[2])**2)
distPerRev = self.trochoidalStepover.getValue()
if plunging:
dradius = radius
if p.dist_from_model is not None:
dradius = min(min(radius, p.dist_from_model), self.tool.diameter.getValue()/2.0)
if rampdown>0.0:
distPerRev = rampdown*(dradius*2.0*pi)
steps = int(float(steps_per_rev)*dist/distPerRev)+1
dradius = 0.0
for i in range(0, steps):
angle -= (dist/float(distPerRev) / float(steps)) * 2.0*PI
dradius = radius
bore_expansion = False
if p.dist_from_model is not None and lastPoint.dist_from_model is not None:
dradius = min(radius, lastPoint.dist_from_model*(1.0-(float(i)/steps)) + p.dist_from_model*(float(i)/steps))
if p.dist_from_model is not None and lastPoint.dist_from_model is None:
dradius = min(radius, p.dist_from_model)
# if plunging and radius is larger than tool diameter, bore at smaller radius and expand out
if plunging:
if dradius>self.tool.diameter.getValue():
dradius = self.tool.diameter.getValue()/2.0
bore_expansion = True
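                                # hypothetical example: with a 6 mm tool and 10 mm of clearance,
                                # the plunge helix is limited to a 3 mm radius here and the
                                # bore_expansion pass below spirals outwards to the full width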
x = lastPoint.position[0]*(1.0-(float(i)/steps)) + p.position[0]*(float(i)/steps) + dradius * sin(angle)
y = lastPoint.position[1]*(1.0-(float(i)/steps)) + p.position[1]*(float(i)/steps) + dradius * cos(angle)
z = lastPoint.position[2]*(1.0-(float(i)/steps)) + p.position[2]*(float(i)/steps)
cutting = True
if stock_poly is not None and not stock_poly.pointInside((x, y, z)):
cutting = False
for cp in self.path.path[0:p_index]:
if cp.dist_from_model is not None and geometry.dist((x, y, z), cp.position) < min(radius, cp.dist_from_model) - 0.5*self.trochoidalStepover.getValue():
cutting = False
if cutting:
feedrate=None
if plunging:
feedrate=self.plunge_feedrate.getValue()
newpath.append(GPoint(position=(x, y, z), rapid=p.rapid, inside_model=p.inside_model,in_contact=p.in_contact, feedrate = feedrate))
if bore_expansion:
distPerRev = self.trochoidalStepover.getValue()
dist = min(radius, p.dist_from_model) - dradius + distPerRev
steps = int(float(steps_per_rev) * (dist / distPerRev) )
for i in range(0, steps):
angle -= (dist / float(distPerRev) / float(steps)) * 2.0 * PI
dradius += dist/steps
if dradius>p.dist_from_model:
dradius=p.dist_from_model
x = p.position[0] + dradius * sin(angle)
y = p.position[1] + dradius * cos(angle)
z = p.position[2]
cutting = True
if stock_poly is not None and not stock_poly.pointInside((x, y, z)):
cutting = False
if cutting:
newpath.append(GPoint(position = (x, y, z), rapid = p.rapid, inside_model=p.inside_model, in_contact=p.in_contact))
lastPoint = p
#remove non-cutting points
# cleanpath=[]
# for p in newpath:
# cutting = True
# for cp in cleanpath:
# if geometry.dist(p.position, cp.position) < min(radius, cp.dist_from_model):
# cutting = False
# if cutting:
# cleanpath.append(p)
new_paths.append(GCode(newpath))
self.outpaths=new_paths
self.updateView()
| [
"PyQt5.QtGui.QFileDialog.getOpenFileName",
"traceback.print_exc",
"geometry.dist"
] | [((14803, 14882), 'PyQt5.QtGui.QFileDialog.getOpenFileName', 'QtGui.QFileDialog.getOpenFileName', (['None', '"""Open file"""', '""""""', '"""GCode files (*.ngc)"""'], {}), "(None, 'Open file', '', 'GCode files (*.ngc)')\n", (14836, 14882), False, 'from PyQt5 import QtGui\n'), ((403, 482), 'PyQt5.QtGui.QFileDialog.getOpenFileName', 'QtGui.QFileDialog.getOpenFileName', (['None', '"""Open file"""', '""""""', '"""GCode files (*.ngc)"""'], {}), "(None, 'Open file', '', 'GCode files (*.ngc)')\n", (436, 482), False, 'from PyQt5 import QtGui\n'), ((1607, 1628), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1626, 1628), False, 'import traceback\n'), ((26679, 26717), 'geometry.dist', 'geometry.dist', (['p.position', 'cp.position'], {}), '(p.position, cp.position)\n', (26692, 26717), False, 'import geometry\n'), ((29876, 29913), 'geometry.dist', 'geometry.dist', (['(x, y, z)', 'cp.position'], {}), '((x, y, z), cp.position)\n', (29889, 29913), False, 'import geometry\n')] |
from fastapi import status
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
from counter import crud
from tests.utils.utils import random_lower_string
def get_register_payload():
return {"username": random_lower_string(), "password": random_lower_string()}
def test_register_endpoint(client: TestClient, db: Session):
payload = get_register_payload()
r = client.post("/auth/register", json=payload)
new_user = r.json()
assert r.status_code == status.HTTP_200_OK
assert new_user
assert new_user['username'] == payload['username']
db_user = crud.user.get_by_username(db=db, username=payload['username'])
assert db_user
def test_register_twice_endpoint(client: TestClient):
payload = get_register_payload()
r = client.post("/auth/register", json=payload)
new_user = r.json()
assert r.status_code == status.HTTP_200_OK
assert new_user
assert new_user['username'] == payload['username']
r2 = client.post("/auth/register", json=payload)
assert r2.status_code == status.HTTP_400_BAD_REQUEST
| [
"tests.utils.utils.random_lower_string",
"counter.crud.user.get_by_username"
] | [((604, 666), 'counter.crud.user.get_by_username', 'crud.user.get_by_username', ([], {'db': 'db', 'username': "payload['username']"}), "(db=db, username=payload['username'])\n", (629, 666), False, 'from counter import crud\n'), ((234, 255), 'tests.utils.utils.random_lower_string', 'random_lower_string', ([], {}), '()\n', (253, 255), False, 'from tests.utils.utils import random_lower_string\n'), ((269, 290), 'tests.utils.utils.random_lower_string', 'random_lower_string', ([], {}), '()\n', (288, 290), False, 'from tests.utils.utils import random_lower_string\n')] |
import re
import sys
from UMAP_analysis.stats_utils.receiver_operating_characteristic import *
m = re.match("predictions/(?P<dataset>.*?)/(?P<prediction_method>.*?).csv", sys.argv[1])
dataset = m.groupdict()['dataset']
prediction_method = m.groupdict()['prediction_method']
moa = sys.argv[2]
truth = pd.read_csv(f"data/intermediate/{dataset}/labels.csv", index_col=0)
prediction = pd.read_csv(f"predictions/{dataset}/{prediction_method}.csv", index_col=0)
fig = make_roc_fig(moa)
fig.savefig("figures/roc_curves/{dataset}/{prediction_method}_"+moa+".svg")
plt.close(fig)
| [
"re.match"
] | [((100, 189), 're.match', 're.match', (['"""predictions/(?P<dataset>.*?)/(?P<prediction_method>.*?).csv"""', 'sys.argv[1]'], {}), "('predictions/(?P<dataset>.*?)/(?P<prediction_method>.*?).csv', sys\n .argv[1])\n", (108, 189), False, 'import re\n')] |
from pp.cell import cell
from pp.component import Component
from pp.components.rectangle import rectangle
from pp.layers import LAYER
@cell
def pads_shorted(width=100, n_pads=8, pad_spacing=150, layer=LAYER.M1):
c = Component(name="shorted_pads")
pad = rectangle(size=(width, width), layer=layer, centered=True)
for i in range(n_pads):
pad_ref = c.add_ref(pad)
pad_ref.movex(i * pad_spacing - n_pads / 2 * pad_spacing + pad_spacing / 2)
short = rectangle(size=(pad_spacing * (n_pads - 1), 10), layer=layer, centered=True)
c.add_ref(short)
return c
if __name__ == "__main__":
import pp
c = pads_shorted()
pp.show(c)
| [
"pp.component.Component",
"pp.components.rectangle.rectangle",
"pp.show"
] | [((222, 252), 'pp.component.Component', 'Component', ([], {'name': '"""shorted_pads"""'}), "(name='shorted_pads')\n", (231, 252), False, 'from pp.component import Component\n'), ((263, 321), 'pp.components.rectangle.rectangle', 'rectangle', ([], {'size': '(width, width)', 'layer': 'layer', 'centered': '(True)'}), '(size=(width, width), layer=layer, centered=True)\n', (272, 321), False, 'from pp.components.rectangle import rectangle\n'), ((480, 556), 'pp.components.rectangle.rectangle', 'rectangle', ([], {'size': '(pad_spacing * (n_pads - 1), 10)', 'layer': 'layer', 'centered': '(True)'}), '(size=(pad_spacing * (n_pads - 1), 10), layer=layer, centered=True)\n', (489, 556), False, 'from pp.components.rectangle import rectangle\n'), ((662, 672), 'pp.show', 'pp.show', (['c'], {}), '(c)\n', (669, 672), False, 'import pp\n')] |
import nonebot
import re
from aiocqhttp import Event
from omega_miya.plugins.Group_manage.group_permissions import *
bot = nonebot.get_bot()
last_msg = {}
last_repeat_msg = {}
repeat_count = {}
@bot.on_message('group')
async def repeater(event: Event):
global last_msg, last_repeat_msg, repeat_count
group_id = event.group_id
try:
last_msg[group_id]
except KeyError:
last_msg[group_id] = ''
try:
last_repeat_msg[group_id]
except KeyError:
last_repeat_msg[group_id] = ''
if not has_notice_permissions(group_id):
return
msg = str(event.message)
msg = fr'{msg}'
if re.match(r'^/', msg):
return
if msg != last_msg[group_id] or msg == last_repeat_msg[group_id]:
last_msg[group_id] = msg
repeat_count[group_id] = 0
return
else:
repeat_count[group_id] += 1
last_repeat_msg[group_id] = ''
if repeat_count[group_id] >= 2:
await bot.send_group_msg(group_id=group_id, message=msg)
repeat_count[group_id] = 0
last_msg[group_id] = ''
last_repeat_msg[group_id] = msg
| [
"nonebot.get_bot",
"re.match"
] | [((131, 148), 'nonebot.get_bot', 'nonebot.get_bot', ([], {}), '()\n', (146, 148), False, 'import nonebot\n'), ((682, 701), 're.match', 're.match', (['"""^/"""', 'msg'], {}), "('^/', msg)\n", (690, 701), False, 'import re\n')] |
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tempfile
from rnacentral_pipeline.databases.sequence_ontology import tree as so
import pytest
@pytest.fixture(scope="module")
def ontology():
return so.load_ontology(so.REMOTE_ONTOLOGY)
@pytest.mark.parametrize(
"so_term_id,expected",
[
("rRNA", [("SO:0000655", "ncRNA"), ("SO:0000252", "rRNA")]),
(
"cytosolic_18S_rRNA",
[
("SO:0000655", "ncRNA"),
("SO:0000252", "rRNA"),
("SO:0002343", "cytosolic_rRNA"),
("SO:0000650", "cytosolic_SSU_rRNA"),
("SO:0000407", "cytosolic_18S_rRNA"),
],
),
(
"lincRNA",
[
("SO:0000655", "ncRNA"),
("SO:0001877", "lncRNA"),
("SO:0001463", "lincRNA"),
],
),
(
"group_I_intron",
[
("SO:0000188", "intron"),
("SO:0000588", "autocatalytically_spliced_intron"),
("SO:0000587", "group_I_intron"),
],
),
(
"ribozyme",
[
("SO:0000673", "transcript"),
("SO:0000374", "ribozyme"),
],
),
(
"hammerhead_ribozyme",
[
("SO:0000673", "transcript"),
("SO:0000374", "ribozyme"),
("SO:0000380", "hammerhead_ribozyme"),
],
),
("ncRNA", [("SO:0000655", "ncRNA")]),
(
"rRNA_primary_transcript",
[
("SO:0000655", "ncRNA"),
("SO:0000252", "rRNA"),
("SO:0000655", "rRNA_primary_transcript"),
],
),
(
"antisense_lncRNA",
[
("SO:0000655", "ncRNA"),
("SO:0001877", "lncRNA"),
("SO:0001904", "antisense_lncRNA"),
],
),
(
"MicF_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0000644", "antisense_RNA"),
],
),
(
"class_I_RNA",
[
("SO:0000655", "ncRNA"),
],
),
(
"RNA_6S",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"RprA_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"DsrA_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"CsrB_RsmB_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"spot_42_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"OxyS_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"RRE_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"GcvB_RNA",
[
("SO:0000655", "ncRNA"),
("SO:0002247", "sncRNA"),
("SO:0000370", "small_regulatory_ncRNA"),
],
),
(
"pre_miRNA",
[
("SO:0000673", "transcript"),
("SO:0001244", "pre_miRNA"),
],
),
],
)
def test_can_compute_some_simple_paths(ontology, so_term_id, expected):
assert so.rna_type_tree(ontology, so_term_id) == expected
@pytest.mark.parametrize(
"old_term,expected",
[
(
"SO:0001171",
[
("SO:0000655", "ncRNA"),
("SO:0000252", "rRNA"),
("SO:0002128", "mt_rRNA"),
("SO:0002345", "mt_LSU_rRNA"),
],
),
(
"rRNA_21S",
[
("SO:0000655", "ncRNA"),
("SO:0000252", "rRNA"),
("SO:0002128", "mt_rRNA"),
("SO:0002345", "mt_LSU_rRNA"),
],
),
],
)
def test_can_compute_path_for_outdated_term(ontology, old_term, expected):
tree = so.rna_type_tree(ontology, old_term)
print(tree)
assert tree == expected
@pytest.mark.parametrize(
"old_id,expected_name",
[
("SO:0001171", "mt_LSU_rRNA"),
("SO:0002128", "mt_rRNA"),
],
)
def test_does_track_replacments(ontology, old_id, expected_name):
assert ontology.id_to_name[old_id] == expected_name
@pytest.mark.parametrize(
"so_id,name",
[
("SO:0000584", "tmRNA"),
("SO:0000602", "guide_RNA"),
("SO:0000390", "telomerase_RNA"),
("SO:0001877", "lncRNA"),
],
)
def test_can_create_expected_mapping(ontology, so_id, name):
with tempfile.NamedTemporaryFile() as tmp:
mapping = so.name_index(ontology, tmp.name)
assert mapping[so_id] == so_id
assert mapping[name] == so_id
@pytest.mark.parametrize(
"so_id,insdc",
[
("SO:0001035", ["piRNA"]),
("SO:0001244", ["pre_miRNA"]),
],
)
def test_has_correct_insdc_mapping(ontology, so_id, insdc):
node = ontology.node(so_id)
assert sorted(so.insdc_synonyms(node)) == sorted(insdc)
for name in insdc:
assert ontology.insdc_to_id[name] == so_id
| [
"rnacentral_pipeline.databases.sequence_ontology.tree.rna_type_tree",
"rnacentral_pipeline.databases.sequence_ontology.tree.load_ontology",
"rnacentral_pipeline.databases.sequence_ontology.tree.insdc_synonyms",
"pytest.mark.parametrize",
"rnacentral_pipeline.databases.sequence_ontology.tree.name_index",
"tempfile.NamedTemporaryFile",
"pytest.fixture"
] | [((719, 749), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (733, 749), False, 'import pytest\n'), ((817, 3018), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""so_term_id,expected"""', "[('rRNA', [('SO:0000655', 'ncRNA'), ('SO:0000252', 'rRNA')]), (\n 'cytosolic_18S_rRNA', [('SO:0000655', 'ncRNA'), ('SO:0000252', 'rRNA'),\n ('SO:0002343', 'cytosolic_rRNA'), ('SO:0000650', 'cytosolic_SSU_rRNA'),\n ('SO:0000407', 'cytosolic_18S_rRNA')]), ('lincRNA', [('SO:0000655',\n 'ncRNA'), ('SO:0001877', 'lncRNA'), ('SO:0001463', 'lincRNA')]), (\n 'group_I_intron', [('SO:0000188', 'intron'), ('SO:0000588',\n 'autocatalytically_spliced_intron'), ('SO:0000587', 'group_I_intron')]),\n ('ribozyme', [('SO:0000673', 'transcript'), ('SO:0000374', 'ribozyme')]\n ), ('hammerhead_ribozyme', [('SO:0000673', 'transcript'), ('SO:0000374',\n 'ribozyme'), ('SO:0000380', 'hammerhead_ribozyme')]), ('ncRNA', [(\n 'SO:0000655', 'ncRNA')]), ('rRNA_primary_transcript', [('SO:0000655',\n 'ncRNA'), ('SO:0000252', 'rRNA'), ('SO:0000655',\n 'rRNA_primary_transcript')]), ('antisense_lncRNA', [('SO:0000655',\n 'ncRNA'), ('SO:0001877', 'lncRNA'), ('SO:0001904', 'antisense_lncRNA')]\n ), ('MicF_RNA', [('SO:0000655', 'ncRNA'), ('SO:0000644',\n 'antisense_RNA')]), ('class_I_RNA', [('SO:0000655', 'ncRNA')]), (\n 'RNA_6S', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 'small_regulatory_ncRNA')]), ('RprA_RNA', [('SO:0000655',\n 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('DsrA_RNA', [('SO:0000655', 'ncRNA'), (\n 'SO:0002247', 'sncRNA'), ('SO:0000370', 'small_regulatory_ncRNA')]), (\n 'CsrB_RsmB_RNA', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 'small_regulatory_ncRNA')]), ('spot_42_RNA', [(\n 'SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('OxyS_RNA', [('SO:0000655', 'ncRNA'), (\n 'SO:0002247', 'sncRNA'), ('SO:0000370', 'small_regulatory_ncRNA')]), (\n 'RRE_RNA', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 'small_regulatory_ncRNA')]), ('GcvB_RNA', [('SO:0000655',\n 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('pre_miRNA', [('SO:0000673', 'transcript'\n ), ('SO:0001244', 'pre_miRNA')])]"], {}), "('so_term_id,expected', [('rRNA', [('SO:0000655',\n 'ncRNA'), ('SO:0000252', 'rRNA')]), ('cytosolic_18S_rRNA', [(\n 'SO:0000655', 'ncRNA'), ('SO:0000252', 'rRNA'), ('SO:0002343',\n 'cytosolic_rRNA'), ('SO:0000650', 'cytosolic_SSU_rRNA'), ('SO:0000407',\n 'cytosolic_18S_rRNA')]), ('lincRNA', [('SO:0000655', 'ncRNA'), (\n 'SO:0001877', 'lncRNA'), ('SO:0001463', 'lincRNA')]), ('group_I_intron',\n [('SO:0000188', 'intron'), ('SO:0000588',\n 'autocatalytically_spliced_intron'), ('SO:0000587', 'group_I_intron')]),\n ('ribozyme', [('SO:0000673', 'transcript'), ('SO:0000374', 'ribozyme')]\n ), ('hammerhead_ribozyme', [('SO:0000673', 'transcript'), ('SO:0000374',\n 'ribozyme'), ('SO:0000380', 'hammerhead_ribozyme')]), ('ncRNA', [(\n 'SO:0000655', 'ncRNA')]), ('rRNA_primary_transcript', [('SO:0000655',\n 'ncRNA'), ('SO:0000252', 'rRNA'), ('SO:0000655',\n 'rRNA_primary_transcript')]), ('antisense_lncRNA', [('SO:0000655',\n 'ncRNA'), ('SO:0001877', 'lncRNA'), ('SO:0001904', 'antisense_lncRNA')]\n ), ('MicF_RNA', [('SO:0000655', 'ncRNA'), ('SO:0000644',\n 'antisense_RNA')]), ('class_I_RNA', [('SO:0000655', 'ncRNA')]), (\n 'RNA_6S', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 
'small_regulatory_ncRNA')]), ('RprA_RNA', [('SO:0000655',\n 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('DsrA_RNA', [('SO:0000655', 'ncRNA'), (\n 'SO:0002247', 'sncRNA'), ('SO:0000370', 'small_regulatory_ncRNA')]), (\n 'CsrB_RsmB_RNA', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 'small_regulatory_ncRNA')]), ('spot_42_RNA', [(\n 'SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('OxyS_RNA', [('SO:0000655', 'ncRNA'), (\n 'SO:0002247', 'sncRNA'), ('SO:0000370', 'small_regulatory_ncRNA')]), (\n 'RRE_RNA', [('SO:0000655', 'ncRNA'), ('SO:0002247', 'sncRNA'), (\n 'SO:0000370', 'small_regulatory_ncRNA')]), ('GcvB_RNA', [('SO:0000655',\n 'ncRNA'), ('SO:0002247', 'sncRNA'), ('SO:0000370',\n 'small_regulatory_ncRNA')]), ('pre_miRNA', [('SO:0000673', 'transcript'\n ), ('SO:0001244', 'pre_miRNA')])])\n", (840, 3018), False, 'import pytest\n'), ((4911, 5222), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""old_term,expected"""', "[('SO:0001171', [('SO:0000655', 'ncRNA'), ('SO:0000252', 'rRNA'), (\n 'SO:0002128', 'mt_rRNA'), ('SO:0002345', 'mt_LSU_rRNA')]), ('rRNA_21S',\n [('SO:0000655', 'ncRNA'), ('SO:0000252', 'rRNA'), ('SO:0002128',\n 'mt_rRNA'), ('SO:0002345', 'mt_LSU_rRNA')])]"], {}), "('old_term,expected', [('SO:0001171', [('SO:0000655',\n 'ncRNA'), ('SO:0000252', 'rRNA'), ('SO:0002128', 'mt_rRNA'), (\n 'SO:0002345', 'mt_LSU_rRNA')]), ('rRNA_21S', [('SO:0000655', 'ncRNA'),\n ('SO:0000252', 'rRNA'), ('SO:0002128', 'mt_rRNA'), ('SO:0002345',\n 'mt_LSU_rRNA')])])\n", (4934, 5222), False, 'import pytest\n'), ((5638, 5749), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""old_id,expected_name"""', "[('SO:0001171', 'mt_LSU_rRNA'), ('SO:0002128', 'mt_rRNA')]"], {}), "('old_id,expected_name', [('SO:0001171',\n 'mt_LSU_rRNA'), ('SO:0002128', 'mt_rRNA')])\n", (5661, 5749), False, 'import pytest\n'), ((5905, 6068), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""so_id,name"""', "[('SO:0000584', 'tmRNA'), ('SO:0000602', 'guide_RNA'), ('SO:0000390',\n 'telomerase_RNA'), ('SO:0001877', 'lncRNA')]"], {}), "('so_id,name', [('SO:0000584', 'tmRNA'), (\n 'SO:0000602', 'guide_RNA'), ('SO:0000390', 'telomerase_RNA'), (\n 'SO:0001877', 'lncRNA')])\n", (5928, 6068), False, 'import pytest\n'), ((6349, 6452), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""so_id,insdc"""', "[('SO:0001035', ['piRNA']), ('SO:0001244', ['pre_miRNA'])]"], {}), "('so_id,insdc', [('SO:0001035', ['piRNA']), (\n 'SO:0001244', ['pre_miRNA'])])\n", (6372, 6452), False, 'import pytest\n'), ((777, 813), 'rnacentral_pipeline.databases.sequence_ontology.tree.load_ontology', 'so.load_ontology', (['so.REMOTE_ONTOLOGY'], {}), '(so.REMOTE_ONTOLOGY)\n', (793, 813), True, 'from rnacentral_pipeline.databases.sequence_ontology import tree as so\n'), ((5554, 5590), 'rnacentral_pipeline.databases.sequence_ontology.tree.rna_type_tree', 'so.rna_type_tree', (['ontology', 'old_term'], {}), '(ontology, old_term)\n', (5570, 5590), True, 'from rnacentral_pipeline.databases.sequence_ontology import tree as so\n'), ((4857, 4895), 'rnacentral_pipeline.databases.sequence_ontology.tree.rna_type_tree', 'so.rna_type_tree', (['ontology', 'so_term_id'], {}), '(ontology, so_term_id)\n', (4873, 4895), True, 'from rnacentral_pipeline.databases.sequence_ontology import tree as so\n'), ((6179, 6208), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (6206, 6208), False, 'import tempfile\n'), ((6235, 
6268), 'rnacentral_pipeline.databases.sequence_ontology.tree.name_index', 'so.name_index', (['ontology', 'tmp.name'], {}), '(ontology, tmp.name)\n', (6248, 6268), True, 'from rnacentral_pipeline.databases.sequence_ontology import tree as so\n'), ((6592, 6615), 'rnacentral_pipeline.databases.sequence_ontology.tree.insdc_synonyms', 'so.insdc_synonyms', (['node'], {}), '(node)\n', (6609, 6615), True, 'from rnacentral_pipeline.databases.sequence_ontology import tree as so\n')] |
import numpy as np
from lunarsky import SkyCoord
from astropy.coordinates import Angle
import pytest
def get_catalog():
# Generate a fake catalog for tests.
Nangs = 30
ras = np.linspace(0, 360, Nangs)
decs = np.linspace(-90, 90, Nangs)
ras, decs = map(np.ndarray.flatten, np.meshgrid(ras, decs))
stars = SkyCoord(ra=ras, dec=decs, unit='deg', frame='icrs')
return stars
def positions_close(fr0, fr1, tol):
# Check that astropy star positions are close.
# tol = Angle object or angle in rad
vecs0 = fr0.cartesian.xyz.value
vecs1 = fr1.cartesian.xyz.value
N = vecs0.shape[-1] # last axis is number of objects
dots = np.array([np.dot(vecs0[:, mi], vecs1[:, mi]) for mi in range(N)])
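    # For unit direction vectors the angular separation is arccos(a . b), so a dot
    # product of 1 means identical directions (assumes unit-sphere cartesian xyz).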
invalid = np.abs(dots) > 1.0
# Floating errors may push some dot products to be larger than 1.
# Check these are within floating precision of 1.
check_inv = np.isclose(np.abs(dots[invalid]), 1.0)
dev_angs = Angle(np.arccos(dots[~invalid]), 'rad')
return np.all(dev_angs < tol) and np.all(check_inv)
def assert_raises_message(exception_type, message, func, *args, **kwargs):
"""
Check that the correct error message is raised.
"""
nocatch = kwargs.pop('nocatch', False)
if nocatch:
func(*args, **kwargs)
with pytest.raises(exception_type) as err:
func(*args, **kwargs)
try:
assert message in str(err.value)
except AssertionError as excp:
print("{} not in {}".format(message, str(err.value)))
raise excp
| [
"numpy.abs",
"numpy.arccos",
"numpy.linspace",
"lunarsky.SkyCoord",
"pytest.raises",
"numpy.dot",
"numpy.meshgrid",
"numpy.all"
] | [((189, 215), 'numpy.linspace', 'np.linspace', (['(0)', '(360)', 'Nangs'], {}), '(0, 360, Nangs)\n', (200, 215), True, 'import numpy as np\n'), ((227, 254), 'numpy.linspace', 'np.linspace', (['(-90)', '(90)', 'Nangs'], {}), '(-90, 90, Nangs)\n', (238, 254), True, 'import numpy as np\n'), ((332, 384), 'lunarsky.SkyCoord', 'SkyCoord', ([], {'ra': 'ras', 'dec': 'decs', 'unit': '"""deg"""', 'frame': '"""icrs"""'}), "(ra=ras, dec=decs, unit='deg', frame='icrs')\n", (340, 384), False, 'from lunarsky import SkyCoord\n'), ((295, 317), 'numpy.meshgrid', 'np.meshgrid', (['ras', 'decs'], {}), '(ras, decs)\n', (306, 317), True, 'import numpy as np\n'), ((757, 769), 'numpy.abs', 'np.abs', (['dots'], {}), '(dots)\n', (763, 769), True, 'import numpy as np\n'), ((928, 949), 'numpy.abs', 'np.abs', (['dots[invalid]'], {}), '(dots[invalid])\n', (934, 949), True, 'import numpy as np\n'), ((977, 1002), 'numpy.arccos', 'np.arccos', (['dots[~invalid]'], {}), '(dots[~invalid])\n', (986, 1002), True, 'import numpy as np\n'), ((1022, 1044), 'numpy.all', 'np.all', (['(dev_angs < tol)'], {}), '(dev_angs < tol)\n', (1028, 1044), True, 'import numpy as np\n'), ((1049, 1066), 'numpy.all', 'np.all', (['check_inv'], {}), '(check_inv)\n', (1055, 1066), True, 'import numpy as np\n'), ((1311, 1340), 'pytest.raises', 'pytest.raises', (['exception_type'], {}), '(exception_type)\n', (1324, 1340), False, 'import pytest\n'), ((687, 721), 'numpy.dot', 'np.dot', (['vecs0[:, mi]', 'vecs1[:, mi]'], {}), '(vecs0[:, mi], vecs1[:, mi])\n', (693, 721), True, 'import numpy as np\n')] |
from pprint import pprint
import time
import numpy
import math
from normal_generator import normal_generator
def multivariate_standard_normal_generator(dimension):
"""
Generates data from multivariate standard Gaussian distribution.
"""
multivariate_vector = []
norm_gen = normal_generator()
while True:
for _ in range(dimension):
__, rand_norm = next(norm_gen)
multivariate_vector.append(rand_norm)
yield multivariate_vector
multivariate_vector = []
if __name__ == '__main__':
output_list = []
print("Generating 2-D 10000 random numbers")
start = time.time()
multi_gen = multivariate_standard_normal_generator(2)
for i in range(10000):
op1 = next(multi_gen)
output_list.append(op1)
end = time.time()
print("Time taken " + str(end - start))
print("Mean is: " + str(numpy.mean(output_list, axis=0)))
print("Standard Deviation: " + str(numpy.std(output_list, axis=0)))
| [
"numpy.mean",
"numpy.std",
"time.time",
"normal_generator.normal_generator"
] | [((295, 313), 'normal_generator.normal_generator', 'normal_generator', ([], {}), '()\n', (311, 313), False, 'from normal_generator import normal_generator\n'), ((636, 647), 'time.time', 'time.time', ([], {}), '()\n', (645, 647), False, 'import time\n'), ((803, 814), 'time.time', 'time.time', ([], {}), '()\n', (812, 814), False, 'import time\n'), ((887, 918), 'numpy.mean', 'numpy.mean', (['output_list'], {'axis': '(0)'}), '(output_list, axis=0)\n', (897, 918), False, 'import numpy\n'), ((960, 990), 'numpy.std', 'numpy.std', (['output_list'], {'axis': '(0)'}), '(output_list, axis=0)\n', (969, 990), False, 'import numpy\n')] |
from rest_framework import serializers
from django.contrib.auth.models import User
from mockatron_core.models import *
from .classes import *
class UserCreateSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'username', 'password')
        extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = User(
first_name=validated_data['first_name'],
last_name=validated_data['last_name'],
email=validated_data['email'],
username=validated_data['username']
)
user.set_password(validated_data['password'])
user.save()
return user
class UserReadOnlySerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'first_name', 'last_name', 'email', 'username')
class AgentSerializer(serializers.ModelSerializer):
class Meta:
model = Agent
fields = ('id', 'protocol', 'host', 'port', 'path', 'method', 'content_type', 'responder', 'created', 'operations', 'responses', 'filters')
class OperationSerializer(serializers.ModelSerializer):
class Meta:
model = Operation
fields = ('id', 'agent', 'name', 'input_message', 'output_message', 'responder', 'responses', 'filters')
class ResponseSerializer(serializers.ModelSerializer):
class Meta:
model = Response
fields = ('id', 'agent', 'operation', 'label', 'http_code', 'content', 'enable')
class FilterSerializer(serializers.ModelSerializer):
class Meta:
model = Filter
fields = ('id', 'agent', 'operation', 'label', 'priority', 'enable', 'request_conditions', 'response_conditions')
class RequestConditionSerializer(serializers.ModelSerializer):
class Meta:
model = RequestCondition
fields = ('id', 'filter', 'field_type', 'header_or_query_param', 'operator', 'value')
class ResponseConditionSerializer(serializers.ModelSerializer):
class Meta:
model = ResponseCondition
fields = ('id', 'filter', 'field_type', 'operator', 'value')
| [
"django.contrib.auth.models.User"
] | [((427, 589), 'django.contrib.auth.models.User', 'User', ([], {'first_name': "validated_data['first_name']", 'last_name': "validated_data['last_name']", 'email': "validated_data['email']", 'username': "validated_data['username']"}), "(first_name=validated_data['first_name'], last_name=validated_data[\n 'last_name'], email=validated_data['email'], username=validated_data[\n 'username'])\n", (431, 589), False, 'from django.contrib.auth.models import User\n')] |
import socket
import struct
import serial
import time
UDP_IP = "192.168.100.1"
UDP_PORT = 55056
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))
ser = serial.Serial('/dev/ttyACM0', 9600, timeout=0)
time.sleep(5)
print('ready')
while True:
data, addr = sock.recvfrom(1024)
x = struct.unpack('d', bytes(data[0:8]))[0]
y = struct.unpack('d', bytes(data[8:16]))[0]
#z = struct.unpack('d', bytes(data[16:24]))[0]
    # pyserial's write() expects bytes under Python 3, hence the b'...' literals
    if(x < 6):
        if(y < -3 ):
            ser.write(b'a')
            print('a')
        elif(y > 3):
            ser.write(b'b')
            print('b')
        else:
            ser.write(b'f')
            print('f')
    elif(x > 9):
        if(y < -3 ):
            ser.write(b'c')
            print('c')
        elif(y > 3):
            ser.write(b'd')
            print('d')
        else:
            ser.write(b'z')
            print('z')
    else:
        if(y < -3 ):
            ser.write(b'l')
            print('l')
        elif(y > 3):
            ser.write(b'r')
            print('r')
        else:
            ser.write(b'n')
            print('n')
time.sleep(0.016)
| [
"time.sleep",
"serial.Serial",
"socket.socket"
] | [((105, 153), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (118, 153), False, 'import socket\n'), ((190, 236), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM0"""', '(9600)'], {'timeout': '(0)'}), "('/dev/ttyACM0', 9600, timeout=0)\n", (203, 236), False, 'import serial\n'), ((237, 250), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (247, 250), False, 'import time\n'), ((1136, 1153), 'time.sleep', 'time.sleep', (['(0.016)'], {}), '(0.016)\n', (1146, 1153), False, 'import time\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This script takes an output from:
python3 nltk_cli/senna.py --np test.txt > test.np
And process the noun phrases to filter out phrases
(i) whose last word is not tagged as NN
(ii) that contain any token that is a stopword
(iii) that begin or end with a punctuation token
This is part of the Terminator software from
https://github.com/alvations/Terminator (Tan, 2015)
Usage:
python3 nltk_cli/clean_np.py test.np --output test.filtered.np
Reference:
Liling Tan. 2015. EXPERT Innovations in Terminology Extraction and
Ontology Induction. In Proceedings of the EXPERT Scientific
and Technological Workshop. Malaga, Spain.
"""
import io, sys
from os.path import expanduser
from string import punctuation
from nltk.util import ngrams
from nltk.corpus import stopwords
from nltk import word_tokenize
from nltk.tag import PerceptronTagger
tagger = PerceptronTagger()
pos_tag = tagger.tag
STOPWORDS = stopwords.words('english')
def simple_filter(list_of_ngrams):
return [ng for ng in list_of_ngrams if
ng.lower() not in STOPWORDS and
ng[0] not in punctuation and ng[-1] not in punctuation and
ng.split()[-1].lower() not in STOPWORDS and
ng.split()[0].lower() not in STOPWORDS and
not any(i for i in ng.split() if i.lower() in STOPWORDS) and
any(pos for word,pos in pos_tag(ng.lower().split())
if pos.startswith('NN')) and
')' not in ng and '(' not in ng and ',' not in ng and
'pinyin' not in ng and
ng.split()[0] not in ['more', 'less']]
outfile = ""
try:
if sys.argv[2] == '--output':
outfile = sys.argv[3]
fout = io.open(outfile, 'w', encoding='utf8')
except IndexError:
pass
with io.open(sys.argv[1], 'r', encoding='utf8') as fin:
for line in fin:
list_of_ngrams = line.split('\t')[0].split('|')
for ng in simple_filter(list_of_ngrams):
if outfile:
fout.write(ng + '\n')
else:
print(ng)
| [
"nltk.tag.PerceptronTagger",
"nltk.corpus.stopwords.words",
"io.open"
] | [((932, 950), 'nltk.tag.PerceptronTagger', 'PerceptronTagger', ([], {}), '()\n', (948, 950), False, 'from nltk.tag import PerceptronTagger\n'), ((984, 1010), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (999, 1010), False, 'from nltk.corpus import stopwords\n'), ((1804, 1846), 'io.open', 'io.open', (['sys.argv[1]', '"""r"""'], {'encoding': '"""utf8"""'}), "(sys.argv[1], 'r', encoding='utf8')\n", (1811, 1846), False, 'import io, sys\n'), ((1734, 1772), 'io.open', 'io.open', (['outfile', '"""w"""'], {'encoding': '"""utf8"""'}), "(outfile, 'w', encoding='utf8')\n", (1741, 1772), False, 'import io, sys\n')] |
# -*- coding: utf-8 -*-
# Copyright The IETF Trust 2012-2019, All Rights Reserved
from __future__ import absolute_import, print_function, unicode_literals
from django.db import models
from django.core.exceptions import ValidationError
from django.template import Context
from django.utils.encoding import python_2_unicode_compatible
from ietf.group.models import Group
from ietf.name.models import DBTemplateTypeName
from ietf.utils.models import ForeignKey
TEMPLATE_TYPES = (
('plain', 'Plain'),
('rst', 'reStructuredText'),
('django', 'Django'),
)
@python_2_unicode_compatible
class DBTemplate(models.Model):
path = models.CharField( max_length=255, unique=True, blank=False, null=False, )
title = models.CharField( max_length=255, blank=False, null=False, )
variables = models.TextField( blank=True, null=True, )
type = ForeignKey( DBTemplateTypeName, )
content = models.TextField( blank=False, null=False, )
group = ForeignKey( Group, blank=True, null=True, )
def __str__(self):
return self.title
def clean(self):
from ietf.dbtemplate.template import PlainTemplate, RSTTemplate, DjangoTemplate
try:
if self.type.slug == 'rst':
RSTTemplate(self.content).render(Context({}))
elif self.type.slug == 'django':
DjangoTemplate(self.content).render(Context({}))
elif self.type.slug == 'plain':
PlainTemplate(self.content).render(Context({}))
else:
raise ValidationError("Unexpected DBTemplate.type.slug: %s" % self.type.slug)
except Exception as e:
raise ValidationError(e)
| [
"ietf.utils.models.ForeignKey",
"django.db.models.TextField",
"ietf.dbtemplate.template.RSTTemplate",
"ietf.dbtemplate.template.PlainTemplate",
"django.core.exceptions.ValidationError",
"ietf.dbtemplate.template.DjangoTemplate",
"django.template.Context",
"django.db.models.CharField"
] | [((644, 714), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)', 'blank': '(False)', 'null': '(False)'}), '(max_length=255, unique=True, blank=False, null=False)\n', (660, 714), False, 'from django.db import models\n'), ((730, 787), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(False)', 'null': '(False)'}), '(max_length=255, blank=False, null=False)\n', (746, 787), False, 'from django.db import models\n'), ((807, 846), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (823, 846), False, 'from django.db import models\n'), ((861, 891), 'ietf.utils.models.ForeignKey', 'ForeignKey', (['DBTemplateTypeName'], {}), '(DBTemplateTypeName)\n', (871, 891), False, 'from ietf.utils.models import ForeignKey\n'), ((909, 950), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(False)', 'null': '(False)'}), '(blank=False, null=False)\n', (925, 950), False, 'from django.db import models\n'), ((966, 1006), 'ietf.utils.models.ForeignKey', 'ForeignKey', (['Group'], {'blank': '(True)', 'null': '(True)'}), '(Group, blank=True, null=True)\n', (976, 1006), False, 'from ietf.utils.models import ForeignKey\n'), ((1666, 1684), 'django.core.exceptions.ValidationError', 'ValidationError', (['e'], {}), '(e)\n', (1681, 1684), False, 'from django.core.exceptions import ValidationError\n'), ((1274, 1285), 'django.template.Context', 'Context', (['{}'], {}), '({})\n', (1281, 1285), False, 'from django.template import Context\n'), ((1241, 1266), 'ietf.dbtemplate.template.RSTTemplate', 'RSTTemplate', (['self.content'], {}), '(self.content)\n', (1252, 1266), False, 'from ietf.dbtemplate.template import PlainTemplate, RSTTemplate, DjangoTemplate\n'), ((1384, 1395), 'django.template.Context', 'Context', (['{}'], {}), '({})\n', (1391, 1395), False, 'from django.template import Context\n'), ((1545, 1616), 'django.core.exceptions.ValidationError', 'ValidationError', (["('Unexpected DBTemplate.type.slug: %s' % self.type.slug)"], {}), "('Unexpected DBTemplate.type.slug: %s' % self.type.slug)\n", (1560, 1616), False, 'from django.core.exceptions import ValidationError\n'), ((1348, 1376), 'ietf.dbtemplate.template.DjangoTemplate', 'DjangoTemplate', (['self.content'], {}), '(self.content)\n', (1362, 1376), False, 'from ietf.dbtemplate.template import PlainTemplate, RSTTemplate, DjangoTemplate\n'), ((1492, 1503), 'django.template.Context', 'Context', (['{}'], {}), '({})\n', (1499, 1503), False, 'from django.template import Context\n'), ((1457, 1484), 'ietf.dbtemplate.template.PlainTemplate', 'PlainTemplate', (['self.content'], {}), '(self.content)\n', (1470, 1484), False, 'from ietf.dbtemplate.template import PlainTemplate, RSTTemplate, DjangoTemplate\n')] |
import codecs
import csv
from dna.database import query_database
from dna.load_text_processing import clean_text, simplify_text
triple_query = 'prefix : <urn:ontoinsights:dna:> prefix geo: <urn:ontoinsights:geonames:> ' \
'SELECT ?subj FROM <urn:graph> WHERE { spo BIND("urn:xyz" as ?subj) }'
event_query = 'prefix : <urn:ontoinsights:dna:> prefix geo: <urn:ontoinsights:geonames:> ' \
'SELECT ?event FROM <urn:graph> WHERE { ' \
'?event a evt_type ; :text "txt" ; :sentence_offset sent_off ; OPTIONAL ' \
'rdfs:label "lbl" . FILTERS }'
def get_simplified_text(metadata_dict: dict) -> str:
# Get narrative text
with open(f'resources/{metadata_dict["Source"]}', 'r') as orig:
# Get text as would be extracted by pdftotext
orig_text = orig.read()
# Perform processing steps to clean the text and "simplify" it
if metadata_dict['Clean'] == 'Y': # Cleaning is for PDF docs
text = clean_text(orig_text, metadata_dict)
else:
text = orig_text
return simplify_text(text, metadata_dict)
def query_for_triples(graph: str, file_name: str, test_db: str) -> bool:
# Open the file (whose name is constructed using the 'file_name' input parameter) and
# query the named graph (constructed using the 'graph' input parameter)
# in the database (defined by the 'test_db' parameter);
# Returns True if all indicated triples are found in the db, or False otherwise
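    # Example row in the CSV (hypothetical values):
    #   pass,:Sentence_1_Event,:has_agent,xxxPerson
    # an 'xxx' marker in the subject or object is matched with a CONTAINS filter
    # rather than an exact IRI (see update_spo below)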
with codecs.open(file_name, encoding='utf-8-sig') as csv_file:
triples = csv.reader(csv_file, delimiter=',', quotechar='"')
missing_triple = False
for row in triples:
expected, s, p, o = row
if expected == 'fail':
continue
if s and o:
if s == 'subject': # Skip column headings
continue
spo_str = update_spo(s, p, o)
query_str = triple_query.replace('graph', graph).replace('spo', spo_str).replace('xxx', '')
results = query_database('select', query_str, test_db)
if not results:
missing_triple = True
print(f'Missing: {s}, {p}, {o}')
if missing_triple:
return False
else:
return True
def query_for_events(graph: str, file_name: str, test_db: str) -> bool:
# Open the file (whose name is constructed using the 'file_name' input parameter) and
# query the named graph (again constructed using the 'graph' input parameter)
# in the database (defined by the 'test_db' parameter);
# Returns True if all indicated triples are found in the db, or False otherwise
with codecs.open(file_name, encoding='utf-8-sig') as csv_file:
triples = csv.reader(csv_file, delimiter=',', quotechar='"')
missing_event = False
for row in triples:
testing, expected, txt, lbl, sent_off, evt_type, evt_time, evt_begin, evt_end, evt_earliest, \
evt_latest, evt_agent, evt_active, evt_affected, evt_provider, evt_recipient, evt_holder, \
evt_location, evt_origin, evt_destination, evt_instrument, evt_resource, \
evt_topic, evt_component, evt_member, evt_sentiment = row
if testing == 'testing' or expected == 'fail': # Skip column headings
continue
if txt:
query_str = event_query.replace('graph', graph).replace('evt_type', evt_type).\
replace('txt', txt).replace('lbl', lbl).replace('sent_off', sent_off)
if evt_time:
query_str = query_str.replace('OPTIONAL', f":has_time {evt_time} ; OPTIONAL")
if evt_begin:
query_str = query_str.replace('OPTIONAL', f":has_beginning {evt_begin} ; OPTIONAL")
if evt_end:
query_str = query_str.replace('OPTIONAL', f":has_end {evt_end} ; OPTIONAL")
if evt_earliest:
query_str = query_str.replace('OPTIONAL', f":has_earliest_beginning {evt_earliest} ; OPTIONAL")
if evt_latest:
query_str = query_str.replace('OPTIONAL', f":has_latest_end {evt_latest} ; OPTIONAL")
if evt_agent:
query_str = update_query_clause(query_str, ':has_agent', evt_agent)
if evt_active:
query_str = update_query_clause(query_str, ':has_active_agent', evt_active)
if evt_affected:
query_str = update_query_clause(query_str, ':has_affected_agent', evt_affected)
if evt_provider:
query_str = update_query_clause(query_str, ':has_provider', evt_provider)
if evt_recipient:
query_str = update_query_clause(query_str, ':has_recipient', evt_recipient)
if evt_holder:
query_str = update_query_clause(query_str, ':has_holder', evt_holder)
if evt_location:
query_str = update_query_clause(query_str, ':has_location', evt_location)
if evt_origin:
query_str = update_query_clause(query_str, ':has_origin', evt_origin)
if evt_destination:
query_str = update_query_clause(query_str, ':has_destination', evt_destination)
if evt_instrument:
query_str = update_query_clause(query_str, ':has_instrument', evt_instrument)
if evt_resource:
query_str = update_query_clause(query_str, ':has_resource', evt_resource)
if evt_topic:
query_str = update_query_clause(query_str, ':has_topic', evt_topic)
if evt_component:
query_str = update_query_clause(query_str, ':has_component', evt_component)
if evt_member:
query_str = update_query_clause(query_str, ':has_member', evt_member)
if evt_sentiment:
query_str = query_str.replace('OPTIONAL', f':sentiment ?sentiment ; OPTIONAL')
query_str = query_str.replace('graph', graph).replace('OPTIONAL ', '').\
replace('FILTERS', '').replace('xxx', '')
results = query_database('select', query_str, test_db)
if not results:
missing_event = True
print(f'Missing for query: {query_str}')
if missing_event:
return False
else:
return True
def update_query_clause(query: str, pred: str, obj: str) -> str:
"""
"""
if 'xxx' in obj:
return query.replace('OPTIONAL', f'{pred} ?{pred[1:]}_obj ; OPTIONAL').\
replace('FILTERS', f'FILTER(CONTAINS(str(?{pred[1:]}_obj), "{obj}")) . FILTERS')
else:
return query.replace('OPTIONAL', f'{pred} {obj} ; OPTIONAL')
def update_spo(subj: str, pred: str, obj: str) -> str:
"""
"""
query_str = f'{subj} {pred} ?o'
if 'xxx' in subj:
if 'Affiliation' in subj:
# Format of the instance IRI is ':<id-agent>_xxx<id-affiliated-with>Affiliation'
subjs = subj.split('xxx')
query_str = f'?s {pred} ?o . FILTER(CONTAINS(str(?s), "{subjs[0]}")) . ' \
f'FILTER(CONTAINS(str(?s), "{subjs[1]}")) .'
else:
query_str = f'?s {pred} ?o . FILTER(CONTAINS(str(?s), "{subj}")) .'
if 'xxx' in obj:
return f'{query_str} FILTER(CONTAINS(str(?o), "{obj}")) .'
# Place quotation marks around strings
if not obj.isnumeric():
if not obj.startswith(':') and not obj.startswith('geo:'):
obj = f'"{obj}"'
return query_str.replace('?o', obj)
| [
"dna.load_text_processing.simplify_text",
"csv.reader",
"dna.database.query_database",
"codecs.open",
"dna.load_text_processing.clean_text"
] | [((1063, 1097), 'dna.load_text_processing.simplify_text', 'simplify_text', (['text', 'metadata_dict'], {}), '(text, metadata_dict)\n', (1076, 1097), False, 'from dna.load_text_processing import clean_text, simplify_text\n'), ((980, 1016), 'dna.load_text_processing.clean_text', 'clean_text', (['orig_text', 'metadata_dict'], {}), '(orig_text, metadata_dict)\n', (990, 1016), False, 'from dna.load_text_processing import clean_text, simplify_text\n'), ((1492, 1536), 'codecs.open', 'codecs.open', (['file_name'], {'encoding': '"""utf-8-sig"""'}), "(file_name, encoding='utf-8-sig')\n", (1503, 1536), False, 'import codecs\n'), ((1568, 1618), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(csv_file, delimiter=\',\', quotechar=\'"\')\n', (1578, 1618), False, 'import csv\n'), ((2729, 2773), 'codecs.open', 'codecs.open', (['file_name'], {'encoding': '"""utf-8-sig"""'}), "(file_name, encoding='utf-8-sig')\n", (2740, 2773), False, 'import codecs\n'), ((2805, 2855), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(csv_file, delimiter=\',\', quotechar=\'"\')\n', (2815, 2855), False, 'import csv\n'), ((2067, 2111), 'dna.database.query_database', 'query_database', (['"""select"""', 'query_str', 'test_db'], {}), "('select', query_str, test_db)\n", (2081, 2111), False, 'from dna.database import query_database\n'), ((6359, 6403), 'dna.database.query_database', 'query_database', (['"""select"""', 'query_str', 'test_db'], {}), "('select', query_str, test_db)\n", (6373, 6403), False, 'from dna.database import query_database\n')] |
"""Example 01
Section: Rectangular 230 x 450
Compression steel: Nil, Tension steel: 1 layer
Output: xu and report of the section.
"""
from rcdesign.is456.concrete import Concrete
from rcdesign.is456.stressblock import LSMStressBlock
from rcdesign.is456.rebar import (
RebarHYSD,
RebarLayer,
RebarGroup,
Stirrups,
ShearRebarGroup,
)
from rcdesign.is456.section import RectBeamSection
sb = LSMStressBlock("LSM Flexure")
m20 = Concrete("M20", 20)
fe415 = RebarHYSD("Fe 415", 415)
t1 = RebarLayer(fe415, [20, 16, 20], -35)
steel = RebarGroup([t1])
sh_st = ShearRebarGroup([Stirrups(fe415, 2, 8, 150)])
sec = RectBeamSection(230, 450, sb, m20, steel, sh_st, 25)
xu = sec.xu(0.0035)
print(f"xu = {xu:.2f}")
print(sec.report(xu, 0.0035))
m25 = Concrete("M25", 25)
fe500 = RebarHYSD("Fe 500", 500)
l1 = RebarLayer(fe500, [16, 16, 16, 16, 16, 16, 10], -58)
main_steel = RebarGroup([l1])
shear_steel = ShearRebarGroup([Stirrups(fe415, 2, 6, 300)])
sec2 = RectBeamSection(1000, 450, sb, m25, main_steel, shear_steel, 50)
xu2 = sec2.xu(0.0035)
print(sec2.report(xu2, 0.0035))
| [
"rcdesign.is456.rebar.Stirrups",
"rcdesign.is456.rebar.RebarHYSD",
"rcdesign.is456.concrete.Concrete",
"rcdesign.is456.stressblock.LSMStressBlock",
"rcdesign.is456.section.RectBeamSection",
"rcdesign.is456.rebar.RebarLayer",
"rcdesign.is456.rebar.RebarGroup"
] | [((411, 440), 'rcdesign.is456.stressblock.LSMStressBlock', 'LSMStressBlock', (['"""LSM Flexure"""'], {}), "('LSM Flexure')\n", (425, 440), False, 'from rcdesign.is456.stressblock import LSMStressBlock\n'), ((447, 466), 'rcdesign.is456.concrete.Concrete', 'Concrete', (['"""M20"""', '(20)'], {}), "('M20', 20)\n", (455, 466), False, 'from rcdesign.is456.concrete import Concrete\n'), ((475, 499), 'rcdesign.is456.rebar.RebarHYSD', 'RebarHYSD', (['"""Fe 415"""', '(415)'], {}), "('Fe 415', 415)\n", (484, 499), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((506, 542), 'rcdesign.is456.rebar.RebarLayer', 'RebarLayer', (['fe415', '[20, 16, 20]', '(-35)'], {}), '(fe415, [20, 16, 20], -35)\n', (516, 542), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((551, 567), 'rcdesign.is456.rebar.RebarGroup', 'RebarGroup', (['[t1]'], {}), '([t1])\n', (561, 567), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((629, 681), 'rcdesign.is456.section.RectBeamSection', 'RectBeamSection', (['(230)', '(450)', 'sb', 'm20', 'steel', 'sh_st', '(25)'], {}), '(230, 450, sb, m20, steel, sh_st, 25)\n', (644, 681), False, 'from rcdesign.is456.section import RectBeamSection\n'), ((763, 782), 'rcdesign.is456.concrete.Concrete', 'Concrete', (['"""M25"""', '(25)'], {}), "('M25', 25)\n", (771, 782), False, 'from rcdesign.is456.concrete import Concrete\n'), ((791, 815), 'rcdesign.is456.rebar.RebarHYSD', 'RebarHYSD', (['"""Fe 500"""', '(500)'], {}), "('Fe 500', 500)\n", (800, 815), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((821, 873), 'rcdesign.is456.rebar.RebarLayer', 'RebarLayer', (['fe500', '[16, 16, 16, 16, 16, 16, 10]', '(-58)'], {}), '(fe500, [16, 16, 16, 16, 16, 16, 10], -58)\n', (831, 873), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((887, 903), 'rcdesign.is456.rebar.RebarGroup', 'RebarGroup', (['[l1]'], {}), '([l1])\n', (897, 903), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((971, 1035), 'rcdesign.is456.section.RectBeamSection', 'RectBeamSection', (['(1000)', '(450)', 'sb', 'm25', 'main_steel', 'shear_steel', '(50)'], {}), '(1000, 450, sb, m25, main_steel, shear_steel, 50)\n', (986, 1035), False, 'from rcdesign.is456.section import RectBeamSection\n'), ((593, 619), 'rcdesign.is456.rebar.Stirrups', 'Stirrups', (['fe415', '(2)', '(8)', '(150)'], {}), '(fe415, 2, 8, 150)\n', (601, 619), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n'), ((935, 961), 'rcdesign.is456.rebar.Stirrups', 'Stirrups', (['fe415', '(2)', '(6)', '(300)'], {}), '(fe415, 2, 6, 300)\n', (943, 961), False, 'from rcdesign.is456.rebar import RebarHYSD, RebarLayer, RebarGroup, Stirrups, ShearRebarGroup\n')] |
from __future__ import print_function
import genutil
import MV2
def model_land_only(model, model_timeseries, lf, debug=False):
# -------------------------------------------------
    # Mask out grid cells over the ocean
# - - - - - - - - - - - - - - - - - - - - - - - - -
if debug:
print('debug: plot for beforeMask start')
import vcs
x = vcs.init()
x.plot(model_timeseries)
x.png('_'.join(['test', model, 'beforeMask.png']))
print('debug: plot for beforeMask done')
    # Check the land fraction variable to see if it meets the criteria
    # (0 for ocean, 100 for land, no missing values)
lat_c = lf.getAxis(0)
lon_c = lf.getAxis(1)
lf_id = lf.id
lf = MV2.array(lf.filled(0.))
lf.setAxis(0, lat_c)
lf.setAxis(1, lon_c)
lf.id = lf_id
if float(MV2.max(lf)) == 1.:
lf = MV2.multiply(lf, 100.)
# Matching dimension
if debug:
print('debug: match dimension in model_land_only')
model_timeseries, lf_timeConst = genutil.grower(model_timeseries, lf)
# Conserve axes
time_c = model_timeseries.getAxis(0)
lat_c2 = model_timeseries.getAxis(1)
lon_c2 = model_timeseries.getAxis(2)
opt1 = False
if opt1: # Masking out partial ocean grids as well
# Mask out ocean even fractional (leave only pure ocean grid)
model_timeseries_masked = MV2.masked_where(
lf_timeConst < 100, model_timeseries)
else: # Mask out only full ocean grid & use weighting for partial ocean grid
model_timeseries_masked = MV2.masked_where(
lf_timeConst == 0, model_timeseries) # mask out pure ocean grids
if model == 'EC-EARTH':
            # Keep only grids that are at least 90% land for models that count
            # rivers as part of the land-sea fraction. So far only 'EC-EARTH' does.
model_timeseries_masked = MV2.masked_where(
lf_timeConst < 90, model_timeseries)
lf2 = MV2.divide(lf, 100.)
model_timeseries, lf2_timeConst = genutil.grower(
model_timeseries, lf2) # Matching dimension
model_timeseries_masked = MV2.multiply(
            model_timeseries_masked, lf2_timeConst)  # use the land fraction as a weighting
# Make sure to have consistent axes
model_timeseries_masked.setAxis(0, time_c)
model_timeseries_masked.setAxis(1, lat_c2)
model_timeseries_masked.setAxis(2, lon_c2)
if debug:
x.clear()
x.plot(model_timeseries_masked)
x.png('_'.join(['test', model, 'afterMask.png']))
x.close()
print('debug: plot for afterMask done')
    return model_timeseries_masked
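# A minimal, hypothetical usage sketch (not part of the original module): the
# file names, variable ids, and model name below are illustrative assumptions.
if __name__ == '__main__':
    import cdms2
    f_ts = cdms2.open('ts_model.nc')      # placeholder path
    f_lf = cdms2.open('sftlf_model.nc')   # placeholder path
    masked = model_land_only('EC-EARTH',
                              f_ts('ts'),      # (time, lat, lon) model field
                              f_lf('sftlf'))   # land fraction on the same grid
    print(masked.shape)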
| [
"MV2.divide",
"MV2.max",
"vcs.init",
"MV2.multiply",
"genutil.grower",
"MV2.masked_where"
] | [((1015, 1051), 'genutil.grower', 'genutil.grower', (['model_timeseries', 'lf'], {}), '(model_timeseries, lf)\n', (1029, 1051), False, 'import genutil\n'), ((368, 378), 'vcs.init', 'vcs.init', ([], {}), '()\n', (376, 378), False, 'import vcs\n'), ((856, 879), 'MV2.multiply', 'MV2.multiply', (['lf', '(100.0)'], {}), '(lf, 100.0)\n', (868, 879), False, 'import MV2\n'), ((1375, 1429), 'MV2.masked_where', 'MV2.masked_where', (['(lf_timeConst < 100)', 'model_timeseries'], {}), '(lf_timeConst < 100, model_timeseries)\n', (1391, 1429), False, 'import MV2\n'), ((1559, 1612), 'MV2.masked_where', 'MV2.masked_where', (['(lf_timeConst == 0)', 'model_timeseries'], {}), '(lf_timeConst == 0, model_timeseries)\n', (1575, 1612), False, 'import MV2\n'), ((1959, 1980), 'MV2.divide', 'MV2.divide', (['lf', '(100.0)'], {}), '(lf, 100.0)\n', (1969, 1980), False, 'import MV2\n'), ((2022, 2059), 'genutil.grower', 'genutil.grower', (['model_timeseries', 'lf2'], {}), '(model_timeseries, lf2)\n', (2036, 2059), False, 'import genutil\n'), ((2129, 2181), 'MV2.multiply', 'MV2.multiply', (['model_timeseries_masked', 'lf2_timeConst'], {}), '(model_timeseries_masked, lf2_timeConst)\n', (2141, 2181), False, 'import MV2\n'), ((823, 834), 'MV2.max', 'MV2.max', (['lf'], {}), '(lf)\n', (830, 834), False, 'import MV2\n'), ((1874, 1927), 'MV2.masked_where', 'MV2.masked_where', (['(lf_timeConst < 90)', 'model_timeseries'], {}), '(lf_timeConst < 90, model_timeseries)\n', (1890, 1927), False, 'import MV2\n')] |
import os
import requests
from io import BytesIO
from PIL import Image
class Utils:
def __init__(self, cell_size: int, board_size_x: int, board_size_y: int) -> None:
self._image_host = os.environ.get("CLOUDCUBE_URL") + "/public"
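        # Assumes CLOUDCUBE_URL is set in the environment; every image is
        # fetched from the /public folder of that host.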
self._cell_size = cell_size
self._border_size_x = board_size_x
self._border_size_y = board_size_y
def get_image(self, path: str, need_resize: bool = False):
board_path = self._image_host + path
response = requests.get(board_path)
image = Image.open(BytesIO(response.content)).convert("RGBA")
if need_resize:
image = image.resize((self._cell_size-1, self._cell_size-1))
return image
def set_position(self, board, figure, position: str):
x = self.__get_x_from_letter(position[0].lower())
y = self.__get_y_from_numb(int(position[1]))
board.paste(figure, (x, board.size[1] - y), figure)
@staticmethod
def get_file_from_image(image):
bio = BytesIO()
bio.name = "board.png"
image.save(bio, 'PNG')
bio.seek(0)
return bio
def __get_y_from_numb(self, number: int) -> int:
return self._border_size_y + self._cell_size * number
    def __get_x_from_letter(self, letter: str) -> int:
        # Map files a-h to columns 0-7; any other letter falls back to column 0.
        columns = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': 5, 'g': 6, 'h': 7}
        coefficient = columns.get(letter, 0)
        return self._border_size_x + self._cell_size * coefficient
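# Hypothetical usage sketch (assumption, not from the original module): the image
# paths below are placeholders and CLOUDCUBE_URL must point at a real host.
if __name__ == "__main__":
    utils = Utils(cell_size=64, board_size_x=32, board_size_y=32)
    board = utils.get_image("/boards/classic.png")                           # full board, no resize
    knight = utils.get_image("/figures/white_knight.png", need_resize=True)  # scaled to one cell
    utils.set_position(board, knight, "e4")                                  # paste the piece on e4
    photo = Utils.get_file_from_image(board)                                 # BytesIO ready to send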
| [
"io.BytesIO",
"os.environ.get",
"requests.get"
] | [((494, 518), 'requests.get', 'requests.get', (['board_path'], {}), '(board_path)\n', (506, 518), False, 'import requests\n'), ((1006, 1015), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1013, 1015), False, 'from io import BytesIO\n'), ((200, 231), 'os.environ.get', 'os.environ.get', (['"""CLOUDCUBE_URL"""'], {}), "('CLOUDCUBE_URL')\n", (214, 231), False, 'import os\n'), ((546, 571), 'io.BytesIO', 'BytesIO', (['response.content'], {}), '(response.content)\n', (553, 571), False, 'from io import BytesIO\n')] |
import random
import time
from FPSDetect import *
from ctypes import *
# Load the related utility functions
from utils.FPSUtils import *
dll = cdll.LoadLibrary(r'lib/Dll.dll')  # load the C-wrapped "易键鼠" key/mouse simulation DLL
def shoot_screen():
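    # Continuously grab 640x640 screenshots of the game region and save them,
    # e.g. for building a training dataset for the detector.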
while True:
        img = pyautogui.screenshot(region=[LEFT, TOP, 640, 640])  # region is the capture area, as (left, top, w, h)
        # Path where the in-game screenshots are stored
        images_path = 'E:/data/CSGO/images/'
        img.save(
            images_path + str(int(time.time())) + ''.join(
                random.sample('zyxwvutsrqponmlkjihgfedcba', 2)) + '.jpg')  # random file name
time.sleep(0.5)
if __name__ == "__main__":
# ssp = Process(target=shoot_screen, name="ssp", args=())
# ssp.start()
    # mPid = PID(0, 0, 1.0, 0)  # PID controller parameters: (setpoint, p, i, d) (known issues)
while True:
try:
            img = ScreenShout()  # grab the screen detection region
            detections = detect(img)  # run YOLO detection on it
            btc, btp = FindBestCenter(detections)  # pick the best shooting center among the targets
            if btc is not None:  # there is a target in the screen region
                dll.MoveTo2(int(LEFT + btc[0]), int(TOP + btc[1]))  # move the mouse via the 易键鼠 DLL (replace with your own)
# pyautogui.moveTo(int(LEFT + btc[0]), int(TOP + btc[1]))
        except Exception as exc:
            print('ERROR!', exc)
| [
"random.sample",
"time.time",
"time.sleep"
] | [((551, 566), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (561, 566), False, 'import time\n'), ((474, 520), 'random.sample', 'random.sample', (['"""zyxwvutsrqponmlkjihgfedcba"""', '(2)'], {}), "('zyxwvutsrqponmlkjihgfedcba', 2)\n", (487, 520), False, 'import random\n'), ((433, 444), 'time.time', 'time.time', ([], {}), '()\n', (442, 444), False, 'import time\n')] |
#!/usr/bin/env python
# coding: utf-8
"""Script to calculate the Amp-hours that were supplied in each
bike battery charging session.
"""
days_to_include = float(input('Enter # of Days to Include: '))
default_delta = 300.0 # default reading spacing in seconds
max_delta = 1000.0 # if spacing greater than this between charging readings, must be a new cycle
ending_amps = 0.1 # if amps are below this level charging is complete
import pandas as pd
import numpy as np
from bmondata import Server
from datetime import datetime, timedelta
server = Server('https://bmon.analysisnorth.com/')
start_ts = str(datetime.now()-timedelta(days=days_to_include))
df = server.sensor_readings('260034000c47343432313031_amps', start_ts=start_ts)
df.columns = ['amps']
df['ts'] = pd.to_datetime(df.index)
# create a DataFrame that only includes charging periods
df2 = df.query('amps > @ending_amps').copy()
df2['delta'] = df2.ts.diff().dt.total_seconds() # time between readings, in seconds
# Fill the NA that was created by the diff() method.
df2.delta.fillna(default_delta, inplace=True)
# The start of a cycle is identified by a large time gap since the prior
# reading that was above ending_amps.
# Clever trick with cumsum(): label all readings in a cycle with the
# same id.
df2['cycle'] = np.where(df2.delta > max_delta, 1, 0).cumsum()
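# Worked example of the trick (illustrative numbers only): with max_delta=1000,
# deltas of [300, 300, 1500, 300] flag as [0, 0, 1, 0], and cumsum() turns that
# into cycle ids [0, 0, 1, 1] -- each long gap starts a new charging cycle.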
# Now make sure the first reading in each cycle uses the default time interval,
# as that reading has a very large time difference.
df2['delta'] = np.where(df2.delta > max_delta, default_delta, df2.delta)
df2['amp_hr'] = df2.amps * df2.delta / 3600.0
df_results = df2.groupby('cycle').agg({'amp_hr': 'sum', 'ts': 'first'})
print(df_results.reset_index()[['ts', 'amp_hr']])
| [
"numpy.where",
"datetime.datetime.now",
"datetime.timedelta",
"pandas.to_datetime",
"bmondata.Server"
] | [((555, 596), 'bmondata.Server', 'Server', (['"""https://bmon.analysisnorth.com/"""'], {}), "('https://bmon.analysisnorth.com/')\n", (561, 596), False, 'from bmondata import Server\n'), ((774, 798), 'pandas.to_datetime', 'pd.to_datetime', (['df.index'], {}), '(df.index)\n', (788, 798), True, 'import pandas as pd\n'), ((1469, 1526), 'numpy.where', 'np.where', (['(df2.delta > max_delta)', 'default_delta', 'df2.delta'], {}), '(df2.delta > max_delta, default_delta, df2.delta)\n', (1477, 1526), True, 'import numpy as np\n'), ((613, 627), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (625, 627), False, 'from datetime import datetime, timedelta\n'), ((628, 659), 'datetime.timedelta', 'timedelta', ([], {'days': 'days_to_include'}), '(days=days_to_include)\n', (637, 659), False, 'from datetime import datetime, timedelta\n'), ((1282, 1319), 'numpy.where', 'np.where', (['(df2.delta > max_delta)', '(1)', '(0)'], {}), '(df2.delta > max_delta, 1, 0)\n', (1290, 1319), True, 'import numpy as np\n')] |