blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b35d9aaf965f7d40c495282839accb0fc9ab6483 | ffceedc0f1e52f5a8534b729ab756a97ef70ff58 | /test/dynamo/test_export.py | 7a365d13de89cdf0443cef9e55f4e5f6eadb5d68 | [
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
]
| permissive | loongson-zn/pytorch | d10b7af29e2946b1f87bf4cb516828fafb919b2c | 7cef7195f616f75bf25a48cf5692f704d35ac4b2 | refs/heads/main | 2023-07-03T17:53:09.765084 | 2023-06-25T06:26:40 | 2023-06-25T06:26:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107,772 | py | # Owner(s): ["module: dynamo"]
"""
PYTEST_DONT_REWRITE (prevents pytest from rewriting assertions, which interferes
with test_export_persist_assert)
"""
import functools
import inspect
import math
import operator
import unittest
from enum import Enum
from typing import Dict, List, Sequence
from unittest.mock import patch
import torch
import torch._dynamo
import torch._dynamo.test_case
import torch._dynamo.testing
from functorch.experimental.control_flow import cond
from torch._dynamo import config
from torch._export import dynamic_dim
from torch._export.constraints import constrain_as_size, constrain_as_value
from torch.fx.experimental.proxy_tensor import make_fx
from torch.fx.experimental.symbolic_shapes import ConstraintViolationError
from torch.testing import FileCheck
from torch.testing._internal import common_utils
from torch.testing._internal.common_utils import skipIfRocm
class ExportTests(torch._dynamo.test_case.TestCase):
# TODO(voz): Refactor to a shared test function.
# The tests in this file are a little redundant,
# They all take a func, run it with eager, then export it, then compare
def test_export(self):
def pre_attention_state_ops(input, mems, state):
lc_key = state[0]
lc_val = state[1]
bar = []
for i in range(0, 4):
bar2 = []
for j in range(0, 3):
bar2.append(
lc_key + lc_val + torch.tensor([0.1, 0.25, 0.4, 0.5, 0.1])
)
bar.append(bar2)
return bar
def func():
mems = torch.tensor([[[1.8364, 0.2724, -1.4917, -0.4367, 0.8640]]])
state = [
torch.tensor([[[1.0517, 0.3848, -0.6472, 0.0823, 0.9116]]]),
torch.tensor([[[1.0517, 0.3848, -0.6472, 0.0823, 0.9116]]]),
]
i = torch.tensor(
[
[0.0313, -0.1487, -0.3846, -0.5321],
[-1.7073, 1.3331, -0.0890, -1.4935],
[-0.8314, -0.1862, -0.5935, 1.5232],
]
)
return pre_attention_state_ops(i, mems, state)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func()
torch._dynamo.reset()
exported = torch._dynamo.export(func)
out_graph = exported[0]
dynamo_result = out_graph()
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_mismatched_out(self):
def func(x):
y = x + 1
return ([x, x], (y, y))
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(torch.tensor([[[1.3737, 0.1]]]))
torch._dynamo.reset()
exported = torch._dynamo.export(func, torch.tensor([[[1.3737, 0.1]]]))
out_graph = exported[0]
dynamo_result = out_graph(torch.tensor([[[1.3737, 0.1]]]))
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_shape_control_flow_1(self):
def func(x):
if x.shape[0] > 10:
return x.cos()
return x.sin()
opt_func = torch._dynamo.optimize("eager")(func)
real_result = opt_func(torch.ones(6, 4))
torch._dynamo.reset()
exported = torch._dynamo.export(func, torch.ones(6, 4))
out_graph, out_guards = exported
dynamo_result = out_graph(torch.ones(6, 4))
from torch._guards import GuardSource
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
hit = False
for guard in out_guards:
if guard.source == GuardSource.SHAPE_ENV:
hit = True
self.assertTrue("L['x'].size()[0] <= 10" in guard.code_list)
self.assertTrue(hit)
def test_export_control_flow_with_getattr(self):
class Animal(Enum):
COW = "moo"
class MyModule(torch.nn.Module):
def __init__(self, a):
super().__init__()
self.a = a
def forward(self, x):
if self.a == Animal.COW.value:
return x * x
else:
raise ValueError("bad")
module = MyModule("moo")
input = (torch.ones(4, 3),)
resA = module(*input)
graph, _ = torch._dynamo.export(module, *input)
resB = graph(*input)
self.assertTrue(torch._dynamo.utils.same(resA, resB))
def test_export_graph_bypass(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
]
def func(x):
first = x[2]
second = x[2]
return first * second
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_list_unpack(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
]
def func(x):
first = x[2]
second = x[2]
return x[0], first * second, x[1], x[2]
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_with_shallow_list_copy_wo_side_effects(self):
def f(x):
y = x.copy()
return y[0] + y[1]
inp = [torch.tensor([1.3, 3.77, 0.1]), torch.tensor([8.7, 6.23, 9.9])]
gm, _ = torch._dynamo.export(f, inp, aten_graph=True, tracing_mode="symbolic")
self.assertTrue(torch._dynamo.utils.same(gm(inp), f(inp)))
def test_export_with_shallow_list_copy_with_side_effects(self):
def f(x):
y = x.copy()
x[0] = x[1]
y.append(torch.tensor([[100]]))
return x[0] + x[1], y[0] + y[1], y[2]
inp = [torch.tensor([1.3, 3.77, 0.1]), torch.tensor([8.7, 6.23, 9.9])]
gm, _ = torch._dynamo.export(f, inp, aten_graph=True, tracing_mode="symbolic")
res = gm(inp)
ref = f(inp)
self.assertTrue(torch._dynamo.utils.same(res, ref))
self.assertEqual(res[0], res[1])
def test_export_mismatched_out_2(self):
def func(x):
y = x + 1
return ([x, x], (y, y))
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(torch.tensor([[[1.3737, 0.1]]]))
torch._dynamo.reset()
exported = torch._dynamo.export(func, torch.tensor([[[1.3737, 0.1]]]))
out_graph = exported[0]
dynamo_result = out_graph(torch.tensor([[[1.3737, 0.1]]]))
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_graph_with_list(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
torch.tensor([0.4, 0.4]),
]
def func(x):
first = x[2]
second = x[2]
return first * second, x
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_graph_with_complex_reorder(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
torch.tensor([0.4, 0.4]),
]
def func(x):
first = x[0]
second = x[1]
third = x[2]
return third, first, second, first * second, first * third
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes(self):
inp = torch.tensor([0.1, 0.1])
def func(x):
y = x + 1
return y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_2(self):
inp = torch.tensor([0.1, 0.1])
def func(x):
y = x + 1
return y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.4, 0.4])
inps = [inp, inp2]
def func(x, z):
y = x + 1
return y, y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass_with_non_tensor_arg(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return y, y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass_reorder_with_non_tensor_arg(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return z, y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_dupes_and_bypass_with_non_tensor_output(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return y[0].item(), y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_zeroes_in_and_out_different_shape_on_test(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
return [[a], [b, c], [a + b], [[c + c]]]
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_zeroes_in_new_shape_scalar_out(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
return a[0].item() + b[0].item() + c[0].item()
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_zeroes_in_new_shape_scalar_out_permute(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
return b[0].item() + c[0].item() + a[0].item() + a[0].item()
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_zeroes_in_new_shape_scalar_out_permute_dupe_and_bypass(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
return a, b[0].item() + c[0].item() + a[0].item() + a[0].item(), a
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_func_return(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
x = a + b + c
def func2(y):
return x * y
return func2(x)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dict_return(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
x = a + b + c
return {"a": x}
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_with_aten_graph(self):
def pre_attention_state_ops(input, mems, state):
lc_key = state[0]
lc_val = state[1]
bar = []
for i in range(0, 4):
bar2 = []
for j in range(0, 3):
bar2.append(
lc_key + lc_val + torch.tensor([0.1, 0.25, 0.4, 0.5, 0.1])
)
bar.append(bar2)
return bar
def func():
mems = torch.tensor([[[1.8364, 0.2724, -1.4917, -0.4367, 0.8640]]])
state = [
torch.tensor([[[1.0517, 0.3848, -0.6472, 0.0823, 0.9116]]]),
torch.tensor([[[1.0517, 0.3848, -0.6472, 0.0823, 0.9116]]]),
]
i = torch.tensor(
[
[0.0313, -0.1487, -0.3846, -0.5321],
[-1.7073, 1.3331, -0.0890, -1.4935],
[-0.8314, -0.1862, -0.5935, 1.5232],
]
)
return pre_attention_state_ops(i, mems, state)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func()
torch._dynamo.reset()
exported = torch._dynamo.export(func, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph()
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_mismatched_out_with_aten_graph(self):
def func(x):
y = x + 1
return ([x, x], (y, y))
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(torch.tensor([[[1.3737, 0.1]]]))
torch._dynamo.reset()
exported = torch._dynamo.export(
func, torch.tensor([[[1.3737, 0.1]]]), aten_graph=True
)
out_graph = exported[0]
dynamo_result = out_graph(torch.tensor([[[1.3737, 0.1]]]))
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_graph_bypass_with_aten_graph(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
]
def func(x):
first = x[2]
second = x[2]
return first * second
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_list_unpack_with_aten_graph(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
]
def func(x):
first = x[2]
second = x[2]
return x[0], first * second, x[1], x[2]
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_mismatched_out_2_with_aten_graph(self):
def func(x):
y = x + 1
return ([x, x], (y, y))
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(torch.tensor([[[1.3737, 0.1]]]))
torch._dynamo.reset()
exported = torch._dynamo.export(
func, torch.tensor([[[1.3737, 0.1]]]), aten_graph=True
)
out_graph = exported[0]
dynamo_result = out_graph(torch.tensor([[[1.3737, 0.1]]]))
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_graph_with_list_with_aten_graph(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
torch.tensor([0.4, 0.4]),
]
def func(x):
first = x[2]
second = x[2]
return first * second, x
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_graph_with_complex_reorder_with_aten_graph(self):
inp = [
torch.tensor([0.1, 0.1]),
torch.tensor([0.2, 0.2]),
torch.tensor([0.3, 0.3]),
torch.tensor([0.4, 0.4]),
]
def func(x):
first = x[0]
second = x[1]
third = x[2]
return third, first, second, first * second, first * third
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
def func(x):
y = x + 1
return y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_2_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
def func(x):
y = x + 1
return y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(inp)
torch._dynamo.reset()
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.4, 0.4])
inps = [inp, inp2]
def func(x, z):
y = x + 1
return y, y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass_with_non_tensor_arg_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return y, y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dupes_and_bypass_reorder_with_non_tensor_arg_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return z, y, y
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_dupes_and_bypass_with_non_tensor_output_with_aten_graph(self):
inp = torch.tensor([0.1, 0.1])
inp2 = torch.tensor([0.1, 0.1])
inp3 = 4
inps = [inp, inp2, inp3]
def func(x, z, k):
y = x + k
return y[0].item(), y, z
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_zeroes_in_and_out_different_shape_on_test_with_aten_graph(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
return [[a], [b, c], [a + b], [[c + c]]]
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_func_return_with_aten_graph(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
x = a + b + c
def func2(y):
return x * y
return func2(x)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_dict_return_with_aten_graph(self):
inp = torch.zeros(10)
inp2 = torch.zeros(10)
inp3 = torch.zeros(10)
inps = [inp, inp2, inp3]
inps_rand = [torch.randn(10), torch.randn(10), torch.randn(10)]
def func(a, b, c):
x = a + b + c
return {"a": x}
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps_rand)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps_rand)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_with_stack_trace(self):
inp = torch.randn(4, 4)
class MyBlock(torch.nn.Module):
def forward(self, x):
x = torch.nn.functional.linear(x, torch.randn(4, 4))
return torch.cos(x).relu() + 1
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.block = MyBlock()
def forward(self, x):
out = self.block(x)
return out
exported = torch._dynamo.export(MyModule(), inp, aten_graph=False)
out_graph = exported[0]
for node in out_graph.graph.nodes:
if node.op not in {"placeholder", "output"}:
self.assertTrue(node.stack_trace is not None)
self.assertTrue(node.meta["nn_module_stack"] is not None)
self.assertTrue(node.meta["source_fn"] is not None)
torch._dynamo.reset()
exported = torch._dynamo.export(MyModule(), inp, aten_graph=True)
out_graph = exported[0]
for node in out_graph.graph.nodes:
if node.op == "call_function":
self.assertTrue(node.stack_trace is not None)
self.assertTrue(node.meta["nn_module_stack"] is not None)
self.assertTrue(node.meta["source_fn"] is not None)
self.assertTrue(node.meta["val"] is not None)
self.assertTrue(node.meta["original_aten"] is not None)
def test_export_preserves_nn_module_stack_for_get_attr(self):
inp = torch.randn(4, 4)
class MyBlock(torch.nn.Module):
def __init__(self):
super().__init__()
self.weight = torch.nn.Parameter(torch.ones(1, 1))
self.register_buffer("buffer", torch.ones(1, 1))
def forward(self, x):
x = torch.nn.functional.linear(x, torch.randn(4, 4))
return torch.cos(x).relu() + self.weight + self.buffer
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.block = MyBlock()
def forward(self, x):
out = self.block(x)
return out
m = MyModule()
exported = torch._dynamo.export(m, inp, aten_graph=False)
out_graph = exported[0]
attr_access_count = 0
for node in out_graph.graph.nodes:
if node.op == "get_attr":
attr_access_count += 1
self.assertTrue(node.meta["nn_module_stack"] is not None)
self.assertEqual(attr_access_count, 2)
torch._dynamo.reset()
exported = torch._dynamo.export(m, inp, aten_graph=True)
out_graph = exported[0]
attr_access_count = 0
for node in out_graph.graph.nodes:
if node.op == "get_attr":
attr_access_count += 1
self.assertTrue(node.meta["nn_module_stack"] is not None)
self.assertEqual(attr_access_count, 2)
def test_export_compare_optimize_with_make_fx(self):
inp = torch.tensor([0.1, 0.1])
linear = torch.nn.Linear(2, 2)
def func(x):
x = x + 1
y = x.t()
y = y.relu()
y = linear(y)
return y
exported = torch._dynamo.export(func, inp, aten_graph=True)
out_graph = exported[0]
export_result = out_graph(inp)
torch._dynamo.reset()
def compiler(gm, sample_inputs):
def fw(*args):
aten_gm = make_fx(gm)(*args)
return aten_gm(*args)
return fw
opt_func = torch._dynamo.optimize(compiler, nopython=True, dynamic=True)(func)
make_fx_result_through_backend = opt_func(inp)
fx_g = make_fx(func)(inp)
make_fx_result_through_direct = fx_g(inp)
self.assertTrue(
torch._dynamo.utils.same(make_fx_result_through_backend, export_result)
)
self.assertTrue(
torch._dynamo.utils.same(make_fx_result_through_direct, export_result)
)
def test_export_with_constant_method_on_module(self):
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(4, 2))
self.linear = torch.nn.Linear(2, 2)
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return torch.nonzero(x)
def forward(self, x):
y = torch.sin(x)
x = self.linear(x)
y = self.helper_fn(x)
return y
module = MyModule()
real_result = module(torch.tensor([[1.0, 0], [0, 0]]))
module = MyModule()
graph, _ = torch._dynamo.export(module, torch.tensor([[0.0, 0], [0, 0]]))
result = graph(torch.tensor([[1.0, 0.0], [0, 0]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(torch.tensor([[1, 0], [0.25, 0.25]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_method_on_module_invoke_twice(self):
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(4, 2))
self.linear = torch.nn.Linear(2, 2)
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return torch.nonzero(x)
def forward(self, x):
y = torch.sin(x)
x = self.linear(x)
y = self.helper_fn(x) + self.helper_fn(x)
return y
module = MyModule()
real_result = module(torch.tensor([[1.0, 0], [0, 0]]))
module = MyModule()
graph, _ = torch._dynamo.export(module, torch.tensor([[0.0, 0], [0, 0]]))
result = graph(torch.tensor([[1.0, 0.0], [0, 0]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(torch.tensor([[1, 0], [0.25, 0.25]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_free_function(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
return torch.nonzero(x)
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(4, 2))
self.linear = torch.nn.Linear(2, 2)
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return torch.nonzero(x)
def forward(self, x):
y = torch.sin(x)
x = self.linear(x)
y = helper_fn(x) + self.helper_fn(x)
return y
module = MyModule()
real_result = module(torch.tensor([[1.0, 0], [0, 0]]))
module = MyModule()
graph, _ = torch._dynamo.export(module, torch.tensor([[0.0, 0], [0, 0]]))
result = graph(torch.tensor([[1.0, 0.0], [0, 0]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(torch.tensor([[1, 0], [0.25, 0.25]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_free_function_and_class_method(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
return torch.nonzero(x)
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(4, 2))
self.linear = torch.nn.Linear(2, 2)
def forward(self, x):
y = torch.sin(x)
x = self.linear(x)
y = helper_fn(x)
return y
module = MyModule()
real_result = module(torch.tensor([[1.0, 0], [0, 0]]))
module = MyModule()
graph, _ = torch._dynamo.export(module, torch.tensor([[0.0, 0], [0, 0]]))
result = graph(torch.tensor([[1.0, 0.0], [0, 0]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(torch.tensor([[1, 0], [0.25, 0.25]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_free_function_and_class_method_multiarg(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
return torch.nonzero(x)
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(4, 2))
self.linear = torch.nn.Linear(2, 2)
def forward(self, x, z):
y = torch.sin(x)
x = self.linear(x)
y = helper_fn(x) + helper_fn(z)
return y
module = MyModule()
real_result = module(
torch.tensor([[1.0, 0], [0, 0]]), torch.tensor([[1.0, 0], [0, 0]])
)
module = MyModule()
graph, _ = torch._dynamo.export(
module, torch.tensor([[0.0, 0], [0, 0]]), torch.tensor([[1.0, 0], [0, 0]])
)
result = graph(
torch.tensor([[1.0, 0.0], [0, 0]]), torch.tensor([[1.0, 0.0], [0, 0]])
)
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(
torch.tensor([[1, 0], [0.25, 0.25]]), torch.tensor([[1, 0], [0.25, 0.25]])
)
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_free_function_and_class_method_multiarg_diff(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
return torch.nonzero(x)
class MyModule(torch.nn.Module):
def forward(self, x, z):
y = helper_fn(x) + helper_fn(z)
return y
module = MyModule()
real_result = module(
torch.tensor([[1.0, 0], [0, 0]]), torch.tensor([[1.0, 0], [0, 0]])
)
module = MyModule()
graph, _ = torch._dynamo.export(
module, torch.tensor([[0.0, 0], [0, 0]]), torch.tensor([[0.0, 0], [0.5, 0]])
)
result = graph(
torch.tensor([[1.0, 0.0], [0, 0]]), torch.tensor([[0.0, 1.0], [0, 0]])
)
self.assertTrue(torch._dynamo.utils.same(result, real_result))
result = graph(
torch.tensor([[1, 0], [0.25, 0.25]]),
torch.tensor([[0.33, 0.33], [0.25, 0.25]]),
)
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_tuple_nonzero(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return (torch.nonzero(x), torch.nonzero(x))
def forward(self, x):
y = torch.tensor([0.5])
elements = self.helper_fn(x)
all_y = []
for element in elements:
for item in element:
all_y.append(y * item)
return all_y
module = MyModule()
real_result = module(torch.tensor([1.0, 1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([1.0, 1.0]))
# Tensor input can be almost anything here, and the result will capture what we
# made constant at compile time.
result = graph(torch.tensor([[[1.0, 0], [0, 0]], [[1.0, 0], [0, 0]]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_list_nonzero(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return [torch.nonzero(x), torch.nonzero(x)]
def forward(self, x):
y = torch.tensor([0.5])
elements = self.helper_fn(x)
all_y = []
for element in elements:
for item in element:
all_y.append(y * item)
return all_y
module = MyModule()
real_result = module(torch.tensor([1.0, 1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([1.0, 1.0]))
# Tensor input can be almost anything here, and the result will capture what we
# made constant at compile time.
result = graph(torch.tensor([[[1.0, 0], [0, 0]], [[1.0, 0], [0, 0]]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_list_nonzero_free_function(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
return [torch.nonzero(x), torch.nonzero(x)]
class MyModule(torch.nn.Module):
def forward(self, x):
y = torch.tensor([0.5])
elements = helper_fn(x)
all_y = []
for element in elements:
for item in element:
all_y.append(y * item)
return all_y
module = MyModule()
real_result = module(torch.tensor([1.0, 1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([1.0, 1.0]))
# Tensor input can be almost anything here, and the result will capture what we
# made constant at compile time.
result = graph(torch.tensor([[[1.0, 0], [0, 0]], [[1.0, 0], [0, 0]]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_dict_values(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return {"x": x, "x^2": x * x}
def forward(self, x):
y = torch.tensor([0.5])
elements = self.helper_fn(x)
y = y * elements["x"]
y = y * elements["x^2"]
return y
module = MyModule()
real_result = module(torch.tensor([2.0, 2.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([2.0, 2.0]))
# Tensor input can be almost anything here, and the result will capture what we
# made constant at compile time.
result = graph(torch.tensor([[[1.0, 0], [0, 0]], [[1.0, 0], [0, 0]]]))
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_none_control_flow(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
if x.item() < 0:
return None
else:
return x
def forward(self, x):
y = torch.tensor([0.5])
x = self.helper_fn(x)
if x is None:
return y
return y * x
module = MyModule()
real_result = module(torch.tensor([-1]))
# X is negative, so .item() < 0, which means we return y
self.assertEqual(real_result, torch.tensor([0.5]))
graph, guards = torch._dynamo.export(module, torch.tensor([-1]))
result = graph(torch.tensor([2]))
# X is positive, but we compiled helper_fn to return None, so it will still return y
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_not_none_control_flow(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
if x.item() < 0:
return None
else:
return x
def forward(self, x):
y = torch.tensor([0.5])
x = self.helper_fn(x)
if x is None:
return y
return y * x
module = MyModule()
real_result = module(torch.tensor([2]))
# X is positive, so .item() > 0, which means we return y * x
self.assertEqual(real_result, torch.tensor([1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([2]))
result = graph(torch.tensor([-0.5]))
# X is negative, but we compiled helper_fn to return x, so it will still return y * x
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_none_control_flow_free_func(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
if x.item() < 0:
return None
else:
return x
class MyModule(torch.nn.Module):
def forward(self, x):
y = torch.tensor([0.5])
x = helper_fn(x)
if x is None:
return y
return y * x
module = MyModule()
real_result = module(torch.tensor([-1]))
# X is negative, so .item() < 0, which means we return y
self.assertEqual(real_result, torch.tensor([0.5]))
graph, guards = torch._dynamo.export(module, torch.tensor([-1]))
result = graph(torch.tensor([2]))
# X is positive, but we compiled helper_fn to return None, so it will still return y
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_not_none_control_flow_pos(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
if x.item() < 0:
return None
else:
return x
def forward(self, x):
y = torch.tensor([0.5])
x = self.helper_fn(x)
if x is None:
return y
return y * x
module = MyModule()
real_result = module(torch.tensor([2]))
# X is positive, so .item() > 0, which means we return y * x
self.assertEqual(real_result, torch.tensor([1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([2]))
result = graph(torch.tensor([-0.5]))
# X is negative, but we compiled helper_fn to return x, so it will still return y * x
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_not_none_control_flow_free_func(self):
@torch._dynamo.assume_constant_result
def helper_fn(x):
if x.item() < 0:
return None
else:
return x
class MyModule(torch.nn.Module):
def forward(self, x):
y = torch.tensor([0.5])
x = helper_fn(x)
if x is None:
return y
return y * x
module = MyModule()
real_result = module(torch.tensor([2]))
# X is positive, so .item() > 0, which means we return y * x
self.assertEqual(real_result, torch.tensor([1.0]))
graph, guards = torch._dynamo.export(module, torch.tensor([2]))
result = graph(torch.tensor([-0.5]))
# X is negative, but we compiled helper_fn to return x, so it will still return y * x
self.assertTrue(torch._dynamo.utils.same(result, real_result))
def test_export_with_constant_not_return_const(self):
class MyModule(torch.nn.Module):
@torch._dynamo.assume_constant_result
def helper_fn(self, x):
return self.val
def forward(self, x):
y = torch.tensor([0.5])
x = self.helper_fn(x)
if x == "A":
return y
return -1
module = MyModule()
module.val = "A"
resA = module(torch.tensor([2]))
graph, guards = torch._dynamo.export(module, torch.tensor([2]))
module.val = "B"
resB = graph(torch.tensor([2]))
self.assertTrue(torch._dynamo.utils.same(resA, resB))
def test_export_decomp(self):
def f(x):
return x.t() + x.t()
def nop(x):
return x.cos()
graph, _ = torch._dynamo.export(
f,
(torch.randn(5)),
aten_graph=True,
decomposition_table={torch.ops.aten.t.default: nop},
)
self.assertEqual(
len([n for n in graph.graph.nodes if n.target == torch.ops.aten.t.default]),
0,
)
graph, _ = torch._dynamo.export(
f, (torch.randn(5)), aten_graph=True, decomposition_table=None
)
self.assertEqual(
len([n for n in graph.graph.nodes if n.target == torch.ops.aten.t.default]),
2,
)
def test_export_decomp_asserts_bad_args(self):
def f(x):
return x.t() + x.t()
def nop(x):
return x.cos()
with self.assertRaises(AssertionError):
graph, _ = torch._dynamo.export(
f,
(torch.randn(5)),
aten_graph=False,
decomposition_table={torch.ops.aten.t.default: nop},
)
@config.patch(capture_scalar_outputs=True)
def test_export_with_module_layer(self):
from functorch.experimental.control_flow import cond
class Module(torch.nn.Module):
def __init__(self):
super().__init__()
self.linear = torch.nn.Linear(3, 3)
def forward(self, pred, x):
def true_fn(val):
return self.linear(val) * torch.tensor(2)
def false_fn(val):
return self.linear(val) * torch.tensor(-1)
return cond(pred, true_fn, false_fn, [x])
mod = Module()
x = torch.randn([3, 3])
pred = torch.tensor(x[0][0].item() < 0)
real_result = mod.forward(pred, x)
torch._dynamo.reset()
exported = torch._dynamo.export(mod.forward, pred, x)
out_graph = exported[0]
dynamo_result = out_graph(pred, x)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
# New X, just to show we did not specialize
x = x * -1
pred = torch.tensor(x[0][0].item() < 0)
real_result_2 = mod.forward(pred, x)
dynamo_result_2 = out_graph(pred, x)
self.assertTrue(torch._dynamo.utils.same(real_result_2, dynamo_result_2))
@config.patch(capture_scalar_outputs=True)
def test_export_with_cond_branches_calling_methods(self):
from functorch.experimental.control_flow import cond
class Module(torch.nn.Module):
# ok
def __init__(self):
super().__init__()
self.linear = torch.nn.Linear(3, 3)
def t(self, val):
return val + 1
def f(self, val):
return val - 1
def true_fn(self, val):
return self.linear(val) + self.t(val)
def false_fn(self, val):
return self.linear(val) - self.f(val)
def forward(self, pred, x):
return cond(pred, self.true_fn, self.false_fn, [x])
mod = Module()
x = torch.randn([3, 3])
pred = torch.tensor(x[0][0].item() < 0)
real_result = mod.forward(pred, x)
out_graph, _ = torch._dynamo.export(mod.forward, pred, x)
dynamo_result = out_graph(pred, x)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
@config.patch(capture_scalar_outputs=True)
def test_export_with_cond_closure(self):
from functorch.experimental.control_flow import cond
class Foo(torch.nn.Module):
def __init__(self):
super().__init__()
def forward(self, pred, x):
def true_fn(x):
return x * 2
def false_fn(x):
return x - 2
return cond(pred, true_fn, false_fn, [x])
class Bar(torch.nn.Module):
def __init__(self):
super().__init__()
def forward(self, pred, x):
def true_fn(x):
return x * 2
def false_fn(x):
return x - 2
return cond(pred, true_fn, false_fn, [x + 1])
class FooBar(torch.nn.Module):
def __init__(self):
super().__init__()
self.linear = torch.nn.Linear(3, 3)
def forward(self, pred, x):
y = x + x
def true_fn(x, y):
return self.linear(x) * (x + y)
def false_fn(x, y):
return x * (y - x)
return cond(pred, true_fn, false_fn, [x, y])
for Module in [Foo, Bar, FooBar]:
mod = Module()
x = torch.randn([3, 3])
pred = torch.tensor(x[0][0].item() < 0)
real_result = mod.forward(pred, x)
out_graph, _ = torch._dynamo.export(mod.forward, pred, x)
dynamo_result = out_graph(pred, x)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_with_cond_with_closed_function(self):
def hello(x):
return x + 1
def hi(x):
return x + 2
def foo(pred, x):
def true_fn(x):
return hello(x)
def false_fn(x):
return hi(x)
return cond(pred, true_fn, false_fn, [x])
x = torch.randn(5)
pred = x[0] > 0
real_result = foo(pred, x)
out_graph, _ = torch._dynamo.export(foo, pred, x)
dynamo_result = out_graph(pred, x)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_with_cond_dynamic_shape_pred(self):
from functorch.experimental.control_flow import cond
class Module(torch.nn.Module):
def forward(self, x):
def true_fn(x):
return x + x
def false_fn(x):
return x[:2]
return cond(x.shape[0] <= 2, true_fn, false_fn, [x])
mod = Module()
x = torch.randn(2, 2)
out_graph, _ = torch._dynamo.export(mod, x)
test_x = torch.randn(3, 2)
self.assertEqual(out_graph(test_x), mod(test_x))
def test_export_with_map_cond(self):
from functorch.experimental.control_flow import cond, map
class Module(torch.nn.Module):
def inner(self, x, pred):
def true_fn(x):
return x + x
def false_fn(x):
return x * x
return cond(pred, true_fn, false_fn, [x])
def forward(self, pred, xs):
def body(x, pred):
return self.inner(x, pred)
return map(body, xs, pred)
mod = Module()
x = torch.randn(3, 2, 1)
pred_x = torch.tensor(True)
y = torch.randn(4, 3, 2)
pred_y = torch.tensor(False)
real_result = mod(pred_y, y)
out_graph, _ = torch._dynamo.export(mod, pred_x, x)
self.assertEqual(real_result, out_graph(pred_y, y))
def test_export_with_map_zero_sized_tensor(self):
from functorch.experimental.control_flow import map
class Module(torch.nn.Module):
def forward(self, xs):
def body(x):
return x + 1
return map(body, xs)
mod = Module()
xs = torch.randn(0, 2)
with self.assertRaisesRegex(
torch._dynamo.exc.Unsupported,
"zero-sized tensor",
):
out_graph, _ = torch._dynamo.export(mod, xs)
def test_export_meta_val(self):
def f(x, y, z):
return x * y + z
gm, _ = torch._dynamo.export(
f,
torch.ones(3, 2),
torch.zeros(3, 2),
torch.ones(3, 2),
aten_graph=True,
)
for node in gm.graph.nodes:
if node.op == "placeholder":
self.assertIn("val", node.meta)
def test_input_container_type(self):
def f(x: torch.Tensor, y: List[torch.Tensor]) -> Dict[str, torch.Tensor]:
return {"a": x.sum() + sum(y).sum()}
inp = (torch.randn(6, 5), [torch.randn(6, 5), torch.randn(6, 5)])
gm, _ = torch._dynamo.export(f, *inp, aten_graph=True)
self.assertEqual(gm(*inp), f(*inp))
@config.patch(assume_static_by_default=False)
def test_export_symbolic_shape(self):
def f(x: torch.Tensor) -> torch.Tensor:
return torch.empty(x.shape[0] * 2)
inp = (torch.randn(6, 5),)
gm, _ = torch._dynamo.export(f, *inp, aten_graph=True)
has_sym_size = False
for node in gm.graph.nodes:
if node.target is torch.ops.aten.sym_size:
has_sym_size = True
self.assertTrue(has_sym_size)
@config.patch(assume_static_by_default=False)
def test_dynamic_slicing(self):
def f(x):
return x[: x.shape[0] - 2, x.shape[1] - 1 :: 2]
gm_aten_mode, _ = torch._dynamo.export(f, torch.randn(4, 5), aten_graph=True)
inp = torch.randn(6, 7)
self.assertEqual(gm_aten_mode(inp).shape, f(inp).shape)
count = 0
# aten graph should flatten getitem calls to actual
# slice kernel call.
for node in gm_aten_mode.graph.nodes:
if (
node.op == "call_function"
and node.target == torch.ops.aten.slice.Tensor
):
count += 1
self.assertEqual(count, 2)
gm_torch_mode, _ = torch._dynamo.export(f, torch.randn(4, 5), aten_graph=False)
# In torch mode, the graph should contain 3 getitem methods
# one for x.shape[0]-2 and one for x.shape[1]-1 and one for slice
# this is because Tensor class has its' own getitem method
# which gets translated to aten.Slice later.
count = 0
for node in gm_torch_mode.graph.nodes:
if node.op == "call_function" and node.target == operator.getitem:
count += 1
self.assertEqual(count, 3)
self.assertEqual(gm_torch_mode(inp).shape, f(inp).shape)
def test_dynamic_slicing_invalid(self):
def g(x, y):
return x[y : x.shape[0]]
with self.assertRaisesRegex(
torch._dynamo.exc.Unsupported,
"Dynamic slicing on data-dependent value is not supported",
):
torch._dynamo.export(
g,
torch.randn(4, 5),
torch.tensor(2),
aten_graph=True,
)
@skipIfRocm
@config.patch(capture_scalar_outputs=True)
def test_dynamic_slicing_simple(self):
def f(x):
return x[slice(None, None, None)]
gm, _ = torch._dynamo.export(f, torch.randn(4, 5), aten_graph=True)
inp = torch.randn(6, 7)
self.assertEqual(gm(inp), f(inp))
def test_pre_dispatch_simple(self):
def f(x):
y = torch.ones_like(x)
return torch.matmul(x, y)
gm, _ = torch._dynamo.export(
f,
torch.randn(5, 5),
aten_graph=True,
pre_dispatch=True,
tracing_mode="fake",
)
inp = torch.randn(6, 6)
self.assertEqual(gm(inp), f(inp))
self.assertExpectedInline(
gm.code.strip(),
"""\
def forward(self, x):
arg0, = fx_pytree.tree_flatten_spec(([x], {}), self._in_spec)
ones_like_default = torch.ops.aten.ones_like.default(arg0, pin_memory = False)
matmul_default = torch.ops.aten.matmul.default(arg0, ones_like_default); arg0 = ones_like_default = None
return pytree.tree_unflatten([matmul_default], self._out_spec)""",
)
@skipIfRocm
@patch.object(torch._dynamo.config, "capture_scalar_outputs", True)
def test_export_cond_in_aten_symbolic(self):
class ConditionOp(torch.nn.Module):
def true_fn(self, x, y):
return x * y
def false_fn(self, x, y):
return x + y
def forward(self, pred, x, y):
return cond(pred, self.true_fn, self.false_fn, [x, y])
model = ConditionOp()
inp = (
torch.tensor(False),
torch.randn(4, 4),
torch.randn(4, 4),
)
gm, _ = torch._dynamo.export(model, *inp, aten_graph=True)
gm.print_readable()
self.assertEqual(gm(*inp), model(*inp))
def test_export_with_kwargs(self):
def fn_with_kwargs(pos0, tuple0, *myargs, mykw0=None, **mykwargs):
out = pos0
for arg in tuple0:
out *= arg
for arg in myargs:
out *= arg
out *= mykw0
out *= mykwargs["input0"] * mykwargs["input1"]
return out
mykwargs = {"input0": torch.randn(4), "input1": torch.randn(4)}
tuple0 = (torch.randn(4), torch.randn(4))
mykw0 = torch.randn(4)
pos0 = torch.randn(4)
myargs = [torch.randn(4), torch.randn(4)]
expected_argument_names = [
"pos0",
"tuple0",
"myargs_0",
"myargs_1",
"mykw0",
"input0",
"input1",
]
self._test_export_preserving_original_signature(
fn_with_kwargs,
expected_argument_names,
pos0,
tuple0,
*myargs,
mykw0=mykw0,
**mykwargs,
)
def test_export_with_kwargs_and_empty_args(self):
def fn_with_kwargs(mykw0=None, **mykwargs):
out = mykw0
out *= mykwargs["input0"] * mykwargs["input1"]
return out
mykwargs = {"input0": torch.randn(4), "input1": torch.randn(4)}
mykw0 = torch.randn(4)
expected_argument_names = ["mykw0"] + list(mykwargs.keys())
self._test_export_preserving_original_signature(
fn_with_kwargs, expected_argument_names, mykw0, **mykwargs
)
def test_export_with_args_and_empty_kwargs(self):
def fn_with_kwargs(pos0, tuple0, *myargs):
out = pos0
for arg in tuple0:
out *= arg
for arg in myargs:
out *= arg
return out
tuple0 = (torch.randn(4), torch.randn(4))
pos0 = torch.randn(4)
myargs = [torch.randn(4), torch.randn(4)]
expected_argument_names = ["pos0", "tuple0", "myargs_0", "myargs_1"]
self._test_export_preserving_original_signature(
fn_with_kwargs, expected_argument_names, pos0, tuple0, *myargs
)
@common_utils.parametrize(
"default_value",
[
common_utils.subtest(None, name="None"),
common_utils.subtest(42.0, name="float"),
common_utils.subtest(
# FIXME: AssertionError: Dynamo input and output is a strict subset of traced input/output
torch.randn(4),
name="tensor",
decorators=[unittest.expectedFailure],
),
common_utils.subtest(
# FIXME: AssertionError: Dynamo input and output is a strict subset of traced input/output
(torch.randn(4),),
name="tuple",
decorators=[unittest.expectedFailure],
),
],
)
def test_export_with_args_with_default(self, default_value):
def fn(pos0, pos1_default=default_value):
out = pos0
if pos1_default is None:
pos1_default = torch.randn(4)
if isinstance(pos1_default, tuple):
pos1_default = pos1_default[0]
out *= pos1_default
return out
pos0 = torch.randn(4)
expected_argument_names = ["pos0"]
self._test_export_preserving_original_signature(
fn, expected_argument_names, pos0
)
@common_utils.parametrize(
"default_value",
[
common_utils.subtest(None, name="None"),
common_utils.subtest(42.0, name="float"),
common_utils.subtest(
# FIXME: AssertionError: Dynamo input and output is a strict subset of traced input/output
torch.randn(4),
name="tensor",
decorators=[unittest.expectedFailure],
),
common_utils.subtest(
# FIXME: AssertionError: Dynamo input and output is a strict subset of traced input/output
(torch.randn(4),),
name="tuple",
decorators=[unittest.expectedFailure],
),
],
)
def test_export_with_kwargs_with_default(self, default_value):
def fn(pos0, *, kw0, kw1_default=default_value, **kwargs):
out = pos0
out += kw0
if kw1_default is None:
kw1_default = torch.randn(4)
elif isinstance(kw1_default, tuple):
kw1_default = kw1_default[0]
out += kw1_default
out += kwargs["kw2"]
return out
pos0 = torch.randn(4)
kw0 = torch.randn(4)
kw2 = torch.randn(4)
args = (pos0,)
kwargs = {"kw0": kw0, "kw2": kw2}
expected_argument_names = ["pos0", "kw0", "kw2"]
self._test_export_preserving_original_signature(
fn, expected_argument_names, *args, **kwargs
)
def test_export_with_wrapped_fn(self):
# To ensure dynamo.export is robust to wrapped functions
# when it cannot use `inspect` to retrieve original signature
# info.
def _fn(pos0, pos1=1.0, *args, kw0, kw1=2.0, **kwargs):
out = pos0
out += pos1
out += kw0
out += kw1
for arg in args:
out += arg
for kwarg in kwargs.values():
out += kwarg
return out
def wrapped_fn(*args, **kwargs):
return _fn(*args, **kwargs)
pos0 = torch.randn(4)
kw0 = torch.randn(4)
args = (pos0, torch.randn(4), torch.randn(4))
kwargs = {"kw0": kw0, "kw2": torch.randn(4)}
expected_argument_names = [f"args_{i}" for i in range(len(args))] + list(
kwargs.keys()
)
self._test_export_preserving_original_signature(
wrapped_fn, expected_argument_names, *args, **kwargs
)
def test_export_with_functools_wrapped_method(self):
def test_decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
return x
@test_decorator
def method_to_test(self, pos0, pos1=1.0, *args, kw0, kw1=2.0, **kwargs):
out = pos0
out += pos1
out += kw0
out += kw1
for arg in args:
out += arg
for kwarg in kwargs.values():
out += kwarg
return out
pos0 = torch.randn(4)
pos1 = torch.randn(4)
unnamed_pos = torch.randn(4)
kw0 = torch.randn(4)
args = (pos0, pos1, unnamed_pos)
kwargs = {"kw0": kw0, "kw2": torch.randn(4), "unnamed_kw": torch.randn(4)}
expected_argument_names = [
"pos0",
"pos1",
"args_0", # 3rd unnamed positional argument
] + list(kwargs.keys())
m = MyModule()
self._test_export_preserving_original_signature(
m.method_to_test, expected_argument_names, *args, **kwargs
)
def test_export_with_functools_wrapped_fn(self):
def test_decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
@test_decorator
def _fn(pos0, pos1=1.0, *args, kw0, kw1=2.0, **kwargs):
out = pos0
out += pos1
out += kw0
out += kw1
for arg in args:
out += arg
for kwarg in kwargs.values():
out += kwarg
return out
def wrapped_fn(*args, **kwargs):
return _fn(*args, **kwargs)
pos0 = torch.randn(4)
kw0 = torch.randn(4)
args = (pos0, torch.randn(4), torch.randn(4))
kwargs = {"kw0": kw0, "kw2": torch.randn(4)}
expected_argument_names = [f"args_{i}" for i in range(len(args))] + list(
kwargs.keys()
)
self._test_export_preserving_original_signature(
wrapped_fn, expected_argument_names, *args, **kwargs
)
def _test_export_preserving_original_signature(
self, fn, expected_argument_names: Sequence[str], *args, **kwargs
):
torch._dynamo.reset()
exported = torch._dynamo.export(
fn,
*args,
**kwargs,
aten_graph=False,
)
out_graph = exported[0]
dynamo_result = out_graph(*args, **kwargs)
real_result = fn(*args, **kwargs)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
# Check that the exported graph preserves same argument names.
self.assertEqual(
inspect.getfullargspec(out_graph.forward).args[1:], expected_argument_names
)
def test_export_meta(self):
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.p = torch.nn.Parameter(torch.ones(2, 3))
def forward(self, x):
return self.p + x
with torch.device("meta"):
m = MyModule()
inp = torch.ones(2, 3, device="meta")
exported = torch._dynamo.export(m, inp)
out_graph = exported[0]
dynamo_result = out_graph(inp)
self.assertEqual(dynamo_result, m(inp))
def test_export_raise_guard_full_constraint(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(x):
if x.shape[0] == 3:
return x.sin()
return x.cos()
torch._dynamo.export(my_dyn_fn, y)
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, y, constraints=[dynamic_dim(y, 0)])
def test_export_module_specify_constraints_signature(self):
y = torch.randn([3, 3, 3])
class Mod(torch.nn.Module):
def forward(self, x):
if x.shape[0] == 3:
return x.sin()
return x.cos()
mod = Mod()
torch._dynamo.export(mod, y)
with self.assertRaisesRegex(
ConstraintViolationError, "def specify_constraints\\(x\\):"
):
torch._dynamo.export(mod, y, constraints=[dynamic_dim(y, 0)])
def test_export_raise_guard_partial_constraint(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(x):
if x.shape[0] > 3:
return x.sin()
return x.cos()
torch._dynamo.export(my_dyn_fn, y)
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, y, constraints=[dynamic_dim(y, 0)])
def test_export_raise_on_relationship(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(a, b, c):
if a.shape[0] == b.shape[1] == c.shape[2]:
return a.sin()
return a.cos()
torch._dynamo.export(my_dyn_fn, y, y, y)
constraints = [dynamic_dim(y, 0)]
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, y, y, y, constraints=constraints)
constraints += [
dynamic_dim(y, 1) == dynamic_dim(y, 0),
dynamic_dim(y, 2) == dynamic_dim(y, 0),
]
torch._dynamo.export(my_dyn_fn, y, y, y, constraints=constraints)
def test_export_no_raise(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(a, b, c):
if a.shape[1] == 3:
return a.cos()
return a * b * c
torch._dynamo.export(my_dyn_fn, y, y, y)
torch._dynamo.export(my_dyn_fn, y, y, y, constraints=[dynamic_dim(y, 0)])
@skipIfRocm
def test_export_multi_dynamic_dim_unsafe_relationship(self):
x = torch.randn([3, 3, 3])
y = torch.randn([2, 2, 2])
z = torch.randn([3, 3, 3])
def my_dyn_fn(a, b, c):
if a.shape[0] == c.shape[0]:
return a.cos()
return a * c, b
torch._dynamo.export(my_dyn_fn, x, y, z)
constraints = [dynamic_dim(x, 0), dynamic_dim(y, 0), dynamic_dim(z, 0)]
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, x, y, z, constraints=constraints)
constraints.append(dynamic_dim(z, 0) == dynamic_dim(x, 0))
torch._dynamo.export(my_dyn_fn, x, y, z, constraints=constraints)
@config.patch(
capture_dynamic_output_shape_ops=True,
specialize_int=True,
capture_scalar_outputs=True,
)
def test_export_preserve_constraints_as_metadata_scalar(self):
def f(x, y):
b = x.item()
constrain_as_size(b, min=2, max=5)
return torch.empty((b, y.shape[0]))
x = torch.tensor([3])
y = torch.randn([8, 8, 6])
example_inputs = [x, y]
constraints = [dynamic_dim(y, 0) >= 6, dynamic_dim(y, 0) <= 10]
gm, _ = torch._dynamo.export(
f,
*example_inputs,
constraints=constraints,
aten_graph=True,
tracing_mode="symbolic",
)
self.assertEqual(
gm.meta["input_shape_constraints"],
[c.serializable_spec for c in constraints],
)
@torch._dynamo.config.patch(
capture_dynamic_output_shape_ops=True,
specialize_int=True,
capture_scalar_outputs=True,
)
def test_export_preserve_constraints_as_metadata_tensor(self):
def f(x):
b = x.nonzero()
constrain_as_value(b.shape[0], min=2, max=5)
return b
y = torch.tensor([8, 8, 6])
constraints = []
gm, _ = torch._dynamo.export(
f,
y,
constraints=constraints,
aten_graph=True,
tracing_mode="symbolic",
)
@config.patch(
capture_dynamic_output_shape_ops=True,
specialize_int=True,
capture_scalar_outputs=True,
)
def test_exported_graph_serialization(self):
import io
def f(x, y):
b = x.item()
constrain_as_size(b, min=2, max=5)
return torch.empty((b, y.shape[0]))
x = torch.tensor([3])
y = torch.randn([8, 8, 6])
example_inputs = [x, y]
constraints = [dynamic_dim(y, 0) >= 6, dynamic_dim(y, 0) <= 10]
gm, _ = torch._dynamo.export(
f,
*example_inputs,
constraints=constraints,
aten_graph=True,
tracing_mode="symbolic",
)
# Ensure the exported graph module with metadata is serializable,
# metadata won't be saved in the serialized module
buffer = io.BytesIO()
torch.save(gm, buffer)
def test_export_with_inline_constraints(self):
def f(x):
a = x.item()
constrain_as_size(a, 4, 7)
return torch.empty((a, 4))
with self.assertRaisesRegex(
torch._dynamo.exc.UserError, r"Invalid value 20 for range \[4:7\]"
) as cm:
torch._export.export(f, (torch.tensor([20]),))
ep = torch._export.export(f, (torch.tensor([5]),))
self.assertEqual(ep(torch.tensor([6])).shape, (6, 4))
FileCheck().check_count(
"torch.ops.aten.sym_constrain_range.default", 1, exactly=True
).run(ep.graph_module.code)
with self.assertRaisesRegex(
RuntimeError,
r"_local_scalar_dense_default is outside of inline constraint \[4, 7\]",
) as cm:
ep(torch.tensor([30]))
def test_export_with_inline_constraints_complex(self):
def f(x):
a = x.item()
constrain_as_size(a, 4, 7)
empty = torch.empty((a, 4))
return torch.cat((empty.transpose(0, 1), torch.zeros(6, a)), 0)
ep = torch._export.export(f, (torch.tensor([6]),))
self.assertEqual(ep(torch.tensor([5])).shape, (10, 5))
FileCheck().check_count(
"torch.ops.aten.sym_constrain_range.default", 1, exactly=True
).run(ep.graph_module.code)
def test_export_dynamic_dim_not_1(self):
x = torch.randn([1, 1, 1])
def my_dyn_fn(a):
if a.shape[0] != 1:
return a.cos()
return a * a
torch._dynamo.export(my_dyn_fn, x)
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, x, constraints=[dynamic_dim(x, 0)])
def test_symbool(self):
def f(x):
a = torch.scalar_tensor(x.shape[0] > 4)
return x.sin().sum() + a.sum()
gm, _ = torch._dynamo.export(f, torch.ones(6, 4), aten_graph=True)
self.assertEqual(gm(torch.ones(3, 4)), f(torch.ones(3, 4)))
def test_export_multi_dynamic_dim_constraint(self):
x = torch.randn([3, 3, 3])
y = torch.randn([2, 2, 2])
z = torch.randn([3, 3, 3])
def my_dyn_fn(a, b, c):
if a.shape[0] == c.shape[0]:
return a.cos()
return a * c, b
torch._dynamo.export(my_dyn_fn, x, y, z)
constraints = [dynamic_dim(x, 0), dynamic_dim(x, 1), dynamic_dim(x, 2)]
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(my_dyn_fn, x, y, z, constraints=constraints)
constraints.append(dynamic_dim(z, 0) == dynamic_dim(x, 0))
torch._dynamo.export(my_dyn_fn, x, y, z, constraints=constraints)
def test_export_dynamic_dim_raise_on_compound_range_constraint(self):
x = torch.ones(6, 4, 4)
with self.assertRaisesRegex(TypeError, "Cannot determine truth value"):
4 < dynamic_dim(x, 0) <= 6 # noqa: B015
def test_export_dynamic_dim_range_constraint(self):
x = torch.ones(6, 4, 4)
constraints = [
4 < dynamic_dim(x, 0),
dynamic_dim(x, 0) <= 6,
]
def foo(x):
if x.shape[0] > 3: # ok
return x.sin()
return x.cos()
torch._dynamo.export(
foo,
x,
constraints=constraints,
aten_graph=True,
)
def bar(x):
if x.shape[0] > 5: # error
return x.sin()
return x.cos()
with self.assertRaises(ConstraintViolationError):
torch._dynamo.export(
bar,
x,
constraints=constraints,
aten_graph=True,
)
def test_list_contains(self):
def func(x):
assert x.size(-1) in [4, 5, 6], "bad"
return x + x
inps = (torch.randn(1, 5),)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_list_not_contains(self):
def func(x):
assert x.size(0) not in [4, 5, 6], "bad1"
assert "monkey" not in ["cow", "pig"], "bad2"
return x + x
inps = (torch.randn(1, 5),)
opt_func = torch._dynamo.optimize("eager", nopython=True, dynamic=True)(func)
real_result = opt_func(*inps)
torch._dynamo.reset()
exported = torch._dynamo.export(func, *inps, aten_graph=True)
out_graph = exported[0]
dynamo_result = out_graph(*inps)
self.assertTrue(torch._dynamo.utils.same(real_result, dynamo_result))
def test_export_identity(self):
inp = torch.tensor([0.1, 0.1])
def func(x):
return x
torch._dynamo.reset()
exported, _ = torch._dynamo.export(func, inp)
dynamo_result = exported(inp)
self.assertTrue(torch._dynamo.utils.same(inp, dynamo_result))
def test_export_specialized_int(self):
class Foo(torch.nn.Module):
def __init__(
self,
input_dim,
):
super().__init__()
self.torch_module = torch.nn.LayerNorm(
input_dim, eps=1e-5, elementwise_affine=True
)
self.int_val = 100
def forward(self, input):
return input.cos() * self.int_val * self.torch_module.eps
mod = Foo(128)
inp = torch.randn(3, 128)
# In export, int & float in forward should always be specialized
gm, _ = torch._dynamo.export(mod, inp, aten_graph=True)
count = 0
for node in gm.graph.nodes:
if node.op == "placeholder":
count += 1
self.assertEqual(count, 1)
def test_export_with_nonzero_static(self):
class BasicModule(torch.nn.Module):
def __init__(self, static_size):
super().__init__()
self.static_size = static_size
def forward(self, x):
return torch.nonzero_static(x, size=self.static_size)
input_tensors = torch.tensor([6, 8]), torch.zeros(2, 3)
static_sizes = 3, 4
for input_tensor, static_size in zip(input_tensors, static_sizes):
m = BasicModule(static_size)
gm, _ = torch._dynamo.export(m, input_tensor, aten_graph=True)
res = gm(input_tensor)
self.assertEqual(res.size(0), static_size)
self.assertTrue(
torch._dynamo.utils.same(
res, torch.nonzero_static(input_tensor, size=static_size)
)
)
def test_export_pass_arg_by_name(self):
class BasicModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.my_lin = torch.nn.Linear(3, 4, bias=True)
def forward(self, x):
return self.my_lin(x)
mod, input_tensor = BasicModule(), torch.randn(2, 3)
gm, guard = torch._dynamo.export(mod, input_tensor, aten_graph=True)
ref = mod(x=input_tensor)
res = gm(x=input_tensor)
self.assertTrue(torch._dynamo.utils.same(ref, res))
def test_export_pass_arg_by_name_star_args(self):
class BasicModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.my_lin = torch.nn.Linear(3, 4, bias=True)
def forward(self, *args):
return self.my_lin(args[0]) * self.my_lin(args[1])
mod, input_tensor, input_tensor2 = (
BasicModule(),
torch.randn(2, 3),
torch.randn(2, 3),
)
gm, guard = torch._dynamo.export(
mod, input_tensor, input_tensor2, aten_graph=True
)
ref = mod(input_tensor, input_tensor2)
res = gm(input_tensor, input_tensor2)
self.assertTrue(torch._dynamo.utils.same(ref, res))
def test_export_mark_dynamic_conflict_dynamic_dim(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(x):
if x.shape[0] > 3:
return x.sin()
return x.cos()
torch._dynamo.mark_dynamic(y, 0)
with self.assertRaisesRegex(
RuntimeError,
"Constraints violated",
):
torch._dynamo.export(my_dyn_fn, y, constraints=[dynamic_dim(y, 0)])
def test_export_dynamic_dim_cleanup(self):
y = torch.randn([3, 3, 3])
def my_dyn_fn(x):
return x.cos()
constraints = [dynamic_dim(y, 0)]
torch._dynamo.export(my_dyn_fn, y, constraints=constraints)
@config.patch(capture_dynamic_output_shape_ops=True)
def test_export_dynamic_control_flow_error(self):
def f(x):
if x.nonzero() > 3:
return x.cos()
return x.sin()
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Dynamic control flow is not supported at the moment",
):
gm, _ = torch._dynamo.export(f, torch.randn(5, 6), aten_graph=True)
@config.patch(assume_static_by_default=False)
def test_export_persist_assert(self):
def f(x):
assert x.shape[0] > 4, "Shape must be more than 4"
return x.cos() + x.sin()
gm, guard = torch._dynamo.export(
f, torch.randn(5, 4, 6), aten_graph=True, tracing_mode="symbolic"
)
def has_aten_op(gm, op):
for node in gm.graph.nodes:
if node.target == op:
return True
return False
self.assertTrue(has_aten_op(gm, torch.ops.aten._assert_async.msg))
gm.graph.eliminate_dead_code()
gm.recompile()
self.assertTrue(has_aten_op(gm, torch.ops.aten._assert_async.msg))
with self.assertRaisesRegex(RuntimeError, "Shape must be more than 4"):
gm(torch.randn(3, 4, 5))
def test_access_class_method_from_user_class(self):
class A:
@classmethod
def func(cls):
return torch.Tensor([4, 5])
def f(x):
a = A()
return x.sum() + type(a).func().sum()
with self.assertRaisesRegex(torch._dynamo.exc.UserError, "Can't call type()"):
gm, _ = torch._dynamo.export(f, torch.ones(6, 4), aten_graph=True)
def f_correct(x):
a = A()
return x.sum() + a.__class__.func().sum()
gm, _ = torch._dynamo.export(f_correct, torch.ones(6, 4), aten_graph=True)
self.assertEqual(f_correct(torch.ones(6, 4)), gm(torch.ones(6, 4)))
def test_not_functionalize(self):
class Foo(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer1", torch.ones(6, 2))
def forward(self, x):
x.add_(2)
return x.sum() + self.buffer1.sum()
example_inputs = (torch.ones(1, 2, 3),)
gm, _ = torch._dynamo.export(
Foo(),
*example_inputs,
aten_graph=True,
tracing_mode="symbolic",
)
count = 0
for node in gm.graph.nodes:
if node.target == torch.ops.aten.add_.Tensor:
count += 1
self.assertEqual(count, 1)
test_inp = (torch.ones(1, 2, 3),)
test_inp_v2 = (torch.ones(1, 2, 3),)
self.assertEqual(gm(*test_inp), Foo()(*test_inp_v2))
def test_round_dynamic_shapes(self):
def f(x):
return x[: round(x.shape[0] / 2)]
def f_correct(x):
return x[: math.floor(x.shape[0] / 2)]
with self.assertRaisesRegex(torch._dynamo.exc.UserError, "Calling round()"):
gm, _ = torch._dynamo.export(f, torch.ones(6, 4), aten_graph=True)
gm, _ = torch._dynamo.export(f_correct, torch.ones(6, 4), aten_graph=True)
self.assertEqual(f_correct(torch.ones(6, 4)), gm(torch.ones(6, 4)))
def test_cond_supported_pred_types(self):
def true_fn(x):
return x.cos()
def false_fn(x):
return x.sin()
def f_pred_traced_as_symnode_var(x):
return cond(x.shape[0] > 2, true_fn, false_fn, [x])
def f_pred_traced_as_tensor_var(x):
return cond(x.all(), true_fn, false_fn, [x])
def f_pred_complex_expression_traced_as_symnode_var(x):
return cond(
x.dim() > 1 and x.shape[1] > 5 and x.shape[1] <= 10,
true_fn,
false_fn,
[x],
)
example_inputs = (torch.rand(5, 8),)
for f in [
f_pred_traced_as_symnode_var,
f_pred_traced_as_tensor_var,
f_pred_complex_expression_traced_as_symnode_var,
]:
gm, _ = torch._dynamo.export(f, *example_inputs, aten_graph=True)
self.assertEqual(gm(*example_inputs), f(*example_inputs))
def test_mixed_real_and_fake_inputs(self):
class _TestPattern(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv = torch.nn.Conv2d(1, 1, 1)
self.bn = torch.nn.BatchNorm2d(1)
def forward(self, input):
running_std = torch.sqrt(self.bn.running_var + self.bn.eps)
scale_factor = self.bn.weight / running_std
weight_shape = [1] * len(self.conv.weight.shape)
weight_shape[0] = -1
bias_shape = [1] * len(self.conv.weight.shape)
bias_shape[1] = -1
scaled_weight = self.conv.weight * scale_factor.reshape(weight_shape)
zero_bias = torch.zeros_like(self.conv.bias, dtype=input.dtype)
conv = self.conv._conv_forward(input, scaled_weight, zero_bias)
conv_orig = conv / scale_factor.reshape(bias_shape)
conv_orig = conv_orig + self.conv.bias.reshape(bias_shape)
conv = self.bn(conv_orig)
return conv
example_inputs = (torch.randn(1, 1, 3, 3),)
torch._dynamo.export(
_TestPattern(),
*example_inputs,
aten_graph=True,
)
@config.patch(
capture_dynamic_output_shape_ops=True,
capture_scalar_outputs=True,
assume_static_by_default=False,
)
def test_sym_contains(self):
def f(x, y):
return x.size(0) in y
gm, _ = torch._dynamo.export(f, torch.ones(2), torch.ones(3), aten_graph=True)
true_inp = (torch.Tensor([6, 4, 5]), torch.ones(6, 4).add_(5))
false_inp = (torch.Tensor([6, 4, 5]), torch.ones(6, 4).add_(2))
self.assertEqual(gm(*true_inp), f(*true_inp))
self.assertEqual(gm(*false_inp), f(*false_inp))
def test_cond_raise_user_error_on_missing_args(self):
def true_fn(x):
return x.cos()
def false_fn(x):
return x.sin()
def f(x):
return cond(x.shape[0] > 10, true_fn, false_fn)
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected 4 arguments",
):
torch._dynamo.export(f, *example_inputs, aten_graph=True)
def test_cond_raise_user_error_on_unsupported_pred(self):
def f_unsupported_pred(x):
pred = torch.nn.Module()
return cond(pred, lambda x: x.sin(), lambda x: x.cos(), [x])
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected pred to be bool/int or a tensor",
):
torch._dynamo.export(
f_unsupported_pred,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_non_list_operands(self):
def f_non_list_operands(x):
return cond(torch.tensor(True), lambda x: x.sin(), lambda x: x.cos(), x)
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected a list but got",
):
torch._dynamo.export(
f_non_list_operands,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_non_tensor_operands(self):
def f_non_tensor_operands(x):
a: float = 3.14
return cond(
torch.tensor(1234), lambda x, a: x.sin(), lambda x, a: x.cos(), [x, a]
)
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected a list of tensors",
):
torch._dynamo.export(
f_non_tensor_operands,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_branch_args_mismatch(self):
def true_fn(x, y):
return x.sin()
def false_fn(x):
return x.cos()
def f_branch_args_mismatch(x, y):
return cond(torch.tensor([[[[100]]]]), true_fn, false_fn, [x, y])
example_inputs = (torch.rand(5), torch.rand(2))
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"too many positional arguments",
):
torch._dynamo.export(
f_branch_args_mismatch,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_branch_return_non_tensor(self):
def f_branch_return_non_tensor(x):
return cond(x.shape[0] <= 5, lambda x: 3.14, lambda x: 3.14, [x])
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"HigherOrderOperator can't return non-tensor scalar output",
):
torch._dynamo.export(
f_branch_return_non_tensor,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_branch_return_multiple_tenors(self):
def f_branch_return_multiple_tensors(x, y):
return cond(y, lambda x: (x, x), lambda x: (x, x), [x])
example_inputs = (torch.randn(4), torch.randn(2))
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected branch out type to be a single tensor",
):
torch._dynamo.export(
f_branch_return_multiple_tensors,
*example_inputs,
aten_graph=True,
)
def test_multiple_outputs_op_with_evaluator(self):
class TopKModel(torch.nn.Module):
def forward(self, x):
values, _ = torch.topk(x, 3)
return torch.sum(values)
x = torch.arange(1.0, 6.0, requires_grad=True)
torch._dynamo.export(TopKModel(), x)
def test_cond_raise_user_error_on_mismatch_return_length(self):
def true_fn(x):
return x
def false_fn(x):
return (x, x)
def f_mismatch_return_length(x):
return cond(torch.tensor(100), true_fn, false_fn, [x])
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected branch out type to be a single tensor",
):
torch._dynamo.export(
f_mismatch_return_length,
*example_inputs,
aten_graph=True,
)
def test_cond_raise_user_error_on_mismatch_return_tensor_meta(self):
def true_fn(x):
return torch.tensor([[3], [2]])
def false_fn(x):
return torch.tensor([3.14])
def f_return_tensor_mismatch(x):
return cond(x.shape[0] < 3, true_fn, false_fn, [x])
example_inputs = (torch.rand(5),)
with self.assertRaisesRegex(
torch._dynamo.exc.UserError,
"Expected each tensor to have same metadata but got",
):
torch._dynamo.export(
f_return_tensor_mismatch,
*example_inputs,
aten_graph=True,
)
def test_byte_tensor_does_not_crash(self):
# See https://github.com/pytorch/pytorch/issues/100455
def func(text):
tensor = torch.ByteTensor(list(bytes(text, "utf8")))
return tensor + tensor
text = "".join(chr(a % 90 + 40) for a in range(111))
opt_func = torch._dynamo.optimize("eager", dynamic=True)(func)
for i in [99, 100]:
input = text[:i]
opt_func(input)
def test_export_defaults_ok(self):
class DynamicSliceExportMod(torch.nn.Module):
def forward(self, x):
results = []
for i in range(4):
results.append(x[: x.size(0) - i, i : x.size(2), i:3])
return tuple(results)
gm, _ = torch._dynamo.export(
DynamicSliceExportMod(),
torch.randn(5, 5, 5),
aten_graph=True,
)
self.assertExpectedInline(
gm.code.strip(),
"""\
def forward(self, x):
arg0, = fx_pytree.tree_flatten_spec(([x], {}), self._in_spec)
slice_tensor = torch.ops.aten.slice.Tensor(arg0, 2, 0, 3)
sym_size = torch.ops.aten.sym_size(arg0, 0)
sub = sym_size - 1
slice_tensor_1 = torch.ops.aten.slice.Tensor(arg0, 0, 0, sub); sub = None
sym_size_1 = torch.ops.aten.sym_size(arg0, 2)
slice_tensor_2 = torch.ops.aten.slice.Tensor(slice_tensor_1, 1, 1, sym_size_1); slice_tensor_1 = None
slice_tensor_3 = torch.ops.aten.slice.Tensor(slice_tensor_2, 2, 1, 3); slice_tensor_2 = None
sub_1 = sym_size - 2
slice_tensor_4 = torch.ops.aten.slice.Tensor(arg0, 0, 0, sub_1); sub_1 = None
slice_tensor_5 = torch.ops.aten.slice.Tensor(slice_tensor_4, 1, 2, sym_size_1); slice_tensor_4 = None
slice_tensor_6 = torch.ops.aten.slice.Tensor(slice_tensor_5, 2, 2, 3); slice_tensor_5 = None
sub_2 = sym_size - 3; sym_size = None
slice_tensor_7 = torch.ops.aten.slice.Tensor(arg0, 0, 0, sub_2); arg0 = sub_2 = None
slice_tensor_8 = torch.ops.aten.slice.Tensor(slice_tensor_7, 1, 3, sym_size_1); slice_tensor_7 = sym_size_1 = None
slice_tensor_9 = torch.ops.aten.slice.Tensor(slice_tensor_8, 2, 3, 3); slice_tensor_8 = None
return pytree.tree_unflatten([slice_tensor, slice_tensor_3, slice_tensor_6, slice_tensor_9], self._out_spec)""",
)
def test_capture_symbolic_tracing(self) -> None:
from torch._dynamo.output_graph import config
from torch._subclasses import fake_tensor
from torch.fx.experimental.symbolic_shapes import ShapeEnv
class Model(torch.nn.Module):
def __init__(self) -> None:
super().__init__()
self.linear = torch.nn.Linear(2, 2)
self.linear2 = torch.nn.Linear(2, 2)
def forward(self, x):
out = self.linear(x)
out = self.linear2(out)
return out
# User-instantiated FakeTensorMode
fake_mode = fake_tensor.FakeTensorMode(
allow_non_fake_inputs=False,
allow_fallback_kernels=True,
shape_env=ShapeEnv(
allow_scalar_outputs=config.capture_scalar_outputs,
allow_dynamic_output_shape_ops=config.capture_dynamic_output_shape_ops,
frame_id=0,
),
)
# Fakefy input+model before exporting it
with fake_mode:
x = torch.rand(5, 2, 2)
model = Model()
# Export the model with fake inputs and parameters
for aten_graph in [True, False]:
graph_module, _ = torch._dynamo.export(
model, x, aten_graph=aten_graph, fake_mode=fake_mode
)
self.assertTrue(
isinstance(graph_module, torch.fx.GraphModule),
msg="test_capture_symbolic_tracing_aten_graph_" + str(aten_graph),
)
def test_cond_op_param_buffer_lifted(self):
class A(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer1", torch.zeros(6, 4))
def forward(self):
return self.buffer1.sum()
class B(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer2", torch.ones(6, 4))
def forward(self):
return self.buffer2.sum()
class M(torch.nn.Module):
def __init__(self):
super().__init__()
self.a = A()
self.b = B()
def forward(self, x):
def true_fn(x):
return x.cos() + self.a()
def false_fn(x):
return x.sin() + self.b()
return (cond(x.shape[0] > 4, true_fn, false_fn, [x]),)
gm, _ = torch._dynamo.export(M(), torch.ones(6, 4), aten_graph=False)
self.assertEqual(gm(torch.ones(6, 4)), M()(torch.ones(6, 4)))
self.assertEqual(gm(torch.ones(3, 4)), M()(torch.ones(3, 4)))
def test_nested_cond_op_param_buffer_lifted(self):
class A(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer1", torch.zeros(6, 4))
def forward(self):
return self.buffer1.sum()
class B(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer2", torch.ones(6, 4))
def forward(self):
return self.buffer2.sum()
class M(torch.nn.Module):
def __init__(self):
super().__init__()
self.a = A()
self.b = B()
def forward(self, x):
def true_true_fn(x):
return x.cos() + self.a()
def true_false_fn(x):
return x.cos() + self.a() + 1
def true_fn(x):
return cond(x.shape[0] > 5, true_true_fn, true_false_fn, [x])
def false_fn(x):
return x.sin() + self.b()
return (cond(x.shape[0] > 4, true_fn, false_fn, [x]),)
gm, _ = torch._dynamo.export(M(), torch.ones(6, 4), aten_graph=False)
self.assertEqual(gm(torch.ones(6, 4)), M()(torch.ones(6, 4)))
self.assertEqual(gm(torch.ones(5, 4)), M()(torch.ones(5, 4)))
self.assertEqual(gm(torch.ones(3, 4)), M()(torch.ones(3, 4)))
def test_map_cond_param_buffer_lifted(self):
from functorch.experimental.control_flow import cond, map
class A(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer1", torch.zeros(6, 4))
def forward(self):
return self.buffer1.sum()
class B(torch.nn.Module):
def __init__(self):
super().__init__()
self.register_buffer("buffer2", torch.ones(6, 4))
def forward(self):
return self.buffer2.sum()
class Module(torch.nn.Module):
def __init__(self):
super().__init__()
self.a = A()
self.b = B()
def inner(self, x, pred):
def true_fn(x):
return x + x + self.a()
def false_fn(x):
return x * x + self.b()
return cond(pred, true_fn, false_fn, [x])
def forward(self, pred, xs):
def body(x, pred):
return self.inner(x, pred) + self.b()
return map(body, xs, pred)
mod = Module()
x = torch.randn(3, 2, 1)
pred_x = torch.tensor(True)
y = torch.randn(4, 3, 2)
pred_y = torch.tensor(False)
real_result = mod(pred_y, y)
out_graph, _ = torch._dynamo.export(mod, pred_x, x)
self.assertEqual(real_result, out_graph(pred_y, y))
def test_cond_free_variables_overlapping(self):
from functorch.experimental.control_flow import cond
class Module(torch.nn.Module):
def __init__(self):
super().__init__()
def forward(self, pred, x):
a = torch.ones(6, 4)
b = torch.ones(6, 4)
c = torch.ones(6, 4)
d = torch.ones(6, 4)
def true_fn(x):
return x + x + a.cos() + b.cos() + d.cos()
def false_fn(x):
return x * x + a.sin() + b.sin() + c.sin()
return cond(pred, true_fn, false_fn, [x])
mod = Module()
x = torch.ones(6, 4)
pred_x = torch.tensor(True)
out_graph, _ = torch._dynamo.export(mod, pred_x, x)
self.assertExpectedInline(
out_graph.code.strip(),
"""\
def forward(self, pred, x):
arg0, arg1, = fx_pytree.tree_flatten_spec(([pred, x], {}), self._in_spec)
ones = torch.ones(6, 4)
ones_1 = torch.ones(6, 4)
ones_2 = torch.ones(6, 4)
ones_3 = torch.ones(6, 4)
cond_true_0 = self.cond_true_0
cond_false_0 = self.cond_false_0
cond = torch.ops.cond(arg0, cond_true_0, cond_false_0, [arg1, ones, ones_1, ones_3, ones, ones_1, ones_2]); arg0 = cond_true_0 = cond_false_0 = arg1 = ones = ones_1 = ones_3 = ones_2 = None
return pytree.tree_unflatten([cond], self._out_spec)""", # noqa: B950,E122
)
self.assertExpectedInline(
out_graph.cond_true_0.code.strip(),
"""\
def forward(self, l_x_, ones, ones_1, ones_3, ones_2_false_branch, ones_1_false_branch, ones_false_branch):
add = l_x_ + l_x_; l_x_ = None
cos = ones.cos(); ones = None
add_1 = add + cos; add = cos = None
cos_1 = ones_1.cos(); ones_1 = None
add_2 = add_1 + cos_1; add_1 = cos_1 = None
cos_2 = ones_3.cos(); ones_3 = None
add_3 = add_2 + cos_2; add_2 = cos_2 = None
return add_3""",
)
self.assertExpectedInline(
out_graph.cond_false_0.code.strip(),
"""\
def forward(self, l_x_, ones_3_true_branch, ones_1_true_branch, ones_true_branch, ones, ones_1, ones_2):
mul = l_x_ * l_x_; l_x_ = None
sin = ones.sin(); ones = None
add = mul + sin; mul = sin = None
sin_1 = ones_1.sin(); ones_1 = None
add_1 = add + sin_1; add = sin_1 = None
sin_2 = ones_2.sin(); ones_2 = None
add_2 = add_1 + sin_2; add_1 = sin_2 = None
return add_2""",
)
common_utils.instantiate_parametrized_tests(ExportTests)
if __name__ == "__main__":
from torch._dynamo.test_case import run_tests
run_tests()
| [
"[email protected]"
]
| |
e76ea8046de2f60095cd2ccd24fede57d5ea31e1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2096/60763/281373.py | e2192515db85dfcca18346ca2edfb6e6b957b896 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | x = int(input())
sqrt = 0
while sqrt*sqrt <= x:
if sqrt*sqrt == x or (sqrt+1)*(sqrt+1) > x:
break
sqrt+=1
print(sqrt) | [
"[email protected]"
]
| |
9d451285ca268ec2cbf7312dc42ca1bbdcc9b8ab | 0f94b864fc86ce46c94fe136b391cbcf710709f3 | /lz.py | e2672af33951d281c94b0751dd4428e951a18bcc | []
| no_license | luzai/sample-cls | 740ca1452b0f283ce1735272cae4657716e6a4c9 | 8c399e24feb6eb71514dcf18b5008f546064be46 | refs/heads/master | 2021-08-23T05:43:40.268628 | 2017-12-03T17:55:52 | 2017-12-03T17:55:52 | 112,625,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,837 | py | try:
import cPickle as pickle
except:
import pickle
import six, os, sys, csv, time, \
random, os.path as osp, \
subprocess, json, \
numpy as np, pandas as pd, \
glob, re, networkx as nx, \
h5py, yaml, copy, multiprocessing as mp, \
pandas as pd, yaml, collections, \
logging, colorlog, yaml, cvbase as cvb, shutil, \
easydict
import subprocess
# tensorflow as tf, keras, torch , redis
# import torch
# from torch import nn
# from torch.autograd import Variable
# import torch.nn.functional as F
import matplotlib
# matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from IPython import embed
from IPython.display import display, HTML, SVG
# root_path = osp.normpath(
# osp.join(osp.abspath(osp.dirname(__file__)))
# )
root_path = '/home/wangxinglu/prj/few-shot/'
def set_stream_logger(log_level=logging.INFO):
sh = colorlog.StreamHandler()
sh.setLevel(log_level)
sh.setFormatter(
colorlog.ColoredFormatter(
'%(asctime)s %(filename)s [line:%(lineno)d] %(log_color)s%(levelname)s%(reset)s %(message)s'))
logging.root.addHandler(sh)
def set_file_logger(work_dir=None, log_level=logging.DEBUG):
work_dir = work_dir or os.getcwd()
fh = logging.FileHandler(os.path.join(work_dir, 'log.txt'))
fh.setLevel(log_level)
fh.setFormatter(
logging.Formatter('%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s'))
logging.root.addHandler(fh)
# def set_logger():
logging.root.setLevel(logging.DEBUG)
set_stream_logger()
set_file_logger()
def gc_collect():
import gc
gc.collect()
def sel_np(A):
dtype = str(A.dtype)
shape = A.shape
A = A.ravel().tolist()
sav = {'shape': shape, 'dtype': dtype,
'A': A
}
return json.dumps(sav)
def desel_np(s):
import json
sav = json.loads(s)
A = sav['A']
A = np.array(A, dtype=sav['dtype']).reshape(sav['shape'])
return A
def append_file(line, file=None):
file = file or 'append.txt'
with open(file, 'a') as f:
f.writelines(line + '\n')
def cpu_priority(level=19):
import psutil
p = psutil.Process(os.getpid())
p.nice(level)
def get_gpu_memory_map():
"""Get the current gpu usage.
Returns
-------
usage: dict
Keys are device ids as integers.
Values are memory usage as integers in MB.
"""
result = subprocess.check_output(
[
'nvidia-smi', '--query-gpu=memory.used',
'--format=csv,nounits,noheader'
], encoding='utf-8')
# Convert lines into a dictionary
gpu_memory = [int(x) for x in result.strip().split('\n')]
gpu_memory_map = dict(zip(range(len(gpu_memory)), gpu_memory))
return gpu_memory_map
def init_dev(n=(0,)):
logging.info('use gpu {}'.format(n))
from os.path import expanduser
home = expanduser("~")
if isinstance(n, int):
n = (n,)
devs = ''
for n_ in n:
devs += str(n_) + ','
os.environ["CUDA_VISIBLE_DEVICES"] = devs
os.environ['PATH'] = home + '/cuda-8.0/bin:' + os.environ['PATH']
# os.environ['PATH'] = home + '/anaconda2/bin:' + os.environ['PATH']
os.environ['PATH'] = home + '/usr/local/cuda-8.0/bin:' + os.environ['PATH']
os.environ['LD_LIBRARY_PATH'] = home + '/cuda-8.0/lib64'
os.environ['LD_LIBRARY_PATH'] = '/usr/local/cuda-8.0/lib64'
# os.environ['PYTHONWARNINGS'] = "ignore"
def set_env(key, value):
value = os.path.abspath(value)
os.environ[key] = value + ':' + os.environ[key]
def allow_growth_tf():
import tensorflow as tf
_sess_config = tf.ConfigProto(allow_soft_placement=True)
_sess_config.gpu_options.allow_growth = True
return _sess_config
def allow_growth_keras():
import tensorflow as tf
tf_graph = tf.get_default_graph()
_sess_config = tf.ConfigProto(allow_soft_placement=True)
_sess_config.gpu_options.allow_growth = True
sess = tf.Session(config=_sess_config, graph=tf_graph)
import keras.backend as K
K.set_session(sess)
return sess
def get_dev(n=1, ok=(0, 1, 2, 3, 4, 5, 6, 7), mem=(0.5, 0.9), sleep=60):
import GPUtil, time
logging.info('Auto select gpu')
GPUtil.showUtilization()
def _limit(devs, ok):
return [int(dev) for dev in devs if dev in ok]
devs = GPUtil.getAvailable(order='memory', maxLoad=1, maxMemory=mem[0], limit=n) #
devs = _limit(devs, ok)
if len(devs) >= 1:
logging.info('available {}'.format(devs))
# GPUtil.showUtilization()
return int(devs[0]) if n == 1 else devs
while len(devs) == 0:
devs = GPUtil.getAvailable(order='random', maxLoad=1, maxMemory=mem[1], limit=n)
devs = _limit(devs, ok)
if len(devs) >= 1:
logging.info('available {}'.format(devs))
GPUtil.showUtilization()
return devs[0] if n == 1 else devs
logging.info('no device avelaible')
GPUtil.showUtilization()
time.sleep(sleep)
# def grid_iter(tmp):
# res = cartesian(tmp.values())
# np.random.shuffle(res)
# for res_ in res:
# yield dict(zip(tmp.keys(), res_))
def shuffle_iter(iter):
iter = list(iter)
np.random.shuffle(iter)
for iter_ in iter:
yield iter_
def optional_arg_decorator(fn):
def wrapped_decorator(*args):
if len(args) == 1 and callable(args[0]):
return fn(args[0])
else:
def real_decorator(decoratee):
return fn(decoratee, *args)
return real_decorator
return wrapped_decorator
def randomword(length):
import random, string
return ''.join(random.choice(string.lowercase) for i in range(length))
def static_vars(**kwargs):
def decorate(func):
for k in kwargs:
setattr(func, k, kwargs[k])
return func
return decorate
def cosort(tensor, y, return_y=False):
comb = zip(tensor, y)
comb_sorted = sorted(comb, key=lambda x: x[1])
if not return_y:
return np.array([comb_[0] for comb_ in comb_sorted])
else:
return np.array([comb_[0] for comb_ in comb_sorted]), np.array([comb_[1] for comb_ in
comb_sorted])
class Timer(object):
"""A simple timer."""
def __init__(self):
self.total_time = 0.
self.calls = 0
self.start_time = 0.
self.diff = 0.
self.average_time = 0.
def tic(self):
# using time.time instead of time.clock because time time.clock
# does not normalize for multithreading
self.start_time = time.time()
def toc(self, average=True):
self.diff = time.time() - self.start_time
self.start_time = time.time()
# logger.info('time pass {}'.format(self.diff))
return self.diff
timer = Timer()
@optional_arg_decorator
def timeit(fn, info=''):
def wrapped_fn(*arg, **kwargs):
timer = Timer()
timer.tic()
res = fn(*arg, **kwargs)
diff = timer.toc()
logging.info((info + 'takes time {}').format(diff))
return res
return wrapped_fn
class Database(object):
def __init__(self, *args, **kwargs):
self.fid = h5py.File(*args, **kwargs)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.fid.close()
def __getitem__(self, keys):
if isinstance(keys, (tuple, list)):
return [self._get_single_item(k) for k in keys]
return self._get_single_item(keys)
def _get_single_item(self, key):
return np.asarray(self.fid[key])
def __setitem__(self, key, value):
if key in self.fid:
if self.fid[key].shape == value.shape and \
self.fid[key].dtype == value.dtype:
print('shape type same, old is updated')
self.fid[key][...] = value
else:
del self.fid[key]
print('old shape {} new shape {} updated'.format(self.fid[key].shape, value.shape))
self.fid.create_dataset(key, data=value)
else:
self.fid.create_dataset(key, data=value)
def __delitem__(self, key):
del self.fid[key]
def __len__(self):
return len(self.fid)
def __iter__(self):
return iter(self.fid)
def flush(self):
self.fid.flush()
def close(self):
self.fid.close()
def keys(self):
return self.fid.keys()
def mypickle(data, file_path):
mkdir_p(osp.dirname(file_path), delete=False)
print('pickle into', file_path)
with open(file_path, 'wb') as f:
pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
def unpickle(file_path):
with open(file_path, 'rb') as f:
data = pickle.load(f)
return data
def write_df(df, path):
df.to_hdf(path, 'df', mode='w')
def read_df(path):
return pd.read_hdf(path, 'df')
def mkdir_p(path, delete=False):
if path == '': return
if delete:
rm(path)
if not osp.exists(path):
print('mkdir -p ' + path)
subprocess.call(('mkdir -p ' + path).split())
def shell(cmd, block=True):
import os
my_env = os.environ.copy()
home = os.path.expanduser('~')
# logging.info('cmd is ' + cmd)
if block:
# subprocess.call(cmd.split())
task = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=my_env)
msg = task.communicate()
if msg[0] != b'' and msg[0] != '':
logging.info('stdout {}'.format(msg[0]))
if msg[1] != b'' and msg[1] != '':
logging.error('stderr {}'.format(msg[1]))
return msg
else:
print('Non-block!')
task = subprocess.Popen(cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=my_env)
return task
def check_path(path):
path = osp.dirname(path)
if not osp.exists(path):
mkdir_p(path)
def ln(path, to_path):
if osp.exists(to_path):
print('error! exist ' + to_path)
path = osp.abspath(path)
cmd = "ln -s " + path + " " + to_path
print(cmd)
proc = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
return proc
def tar(path, to_path=None):
if not osp.exists(path):
return
if not osp.exists(to_path):
mkdir_p(to_path)
if os.path.exists(to_path) and not len(os.listdir(to_path)) == 0:
rm(path)
return
if to_path is not None:
cmd = "tar xf " + path + " -C " + to_path
print(cmd)
else:
cmd = "tar xf " + path
shell(cmd, block=True)
if os.path.exists(path):
rm(path)
def rmdir(path):
cmd = "rmdir " + path
shell(cmd)
def rm(path, block=True, hard=True):
path = osp.abspath(path)
if not hard:
dst = glob.glob('{}.bak*'.format(path))
parsr = re.compile('{}.bak(\d+)'.format(path))
used = [0, ]
for d in dst:
m = re.match(parsr, d)
used.append(int(m.groups()[0]))
dst_path = '{}.bak{}'.format(path, max(used) + 1)
cmd = 'mv {} {} '.format(path, dst_path)
print(cmd)
shell(cmd, block=block)
else:
if osp.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
return
def show_img(path):
from IPython.display import Image
fig = Image(filename=(path))
return fig
def show_pdf(path):
from IPython.display import IFrame
path = osp.relpath(path)
return IFrame(path, width=600, height=300)
def print_graph_info():
import tensorflow as tf
graph = tf.get_default_graph()
graph.get_tensor_by_name("Placeholder:0")
layers = [op.name for op in graph.get_operations() if op.type == "Placeholder"]
print([graph.get_tensor_by_name(layer + ":0") for layer in layers])
print([op.type for op in graph.get_operations()])
print([n.name for n in tf.get_default_graph().as_graph_def().node])
print([v.name for v in tf.global_variables()])
print(graph.get_operations()[20])
def chdir_to_root(fn):
def wrapped_fn(*args, **kwargs):
restore_path = os.getcwd()
os.chdir(root_path)
res = fn(*args, **kwargs)
os.chdir(restore_path)
return res
return wrapped_fn
def scp(src, dest, dry_run=False):
cmd = ('scp -r ' + src + ' ' + dest)
print(cmd)
if dry_run: return
return shell(cmd, block=False)
def read_list(file, delimi=" "):
if osp.exists(file):
lines = np.genfromtxt(file, dtype='str', delimiter=delimi)
return lines
else:
return []
def cp(from_path, to):
subprocess.call(('cp -r ' + from_path + ' ' + to).split())
def mv(from_path, to):
if not osp.exists(to):
mkdir_p(to)
if not isinstance(from_path, list):
subprocess.call(('mv ' + from_path + ' ' + to).split())
else:
for from_ in from_path:
subprocess.call(('mv ' + from_ + ' ' + to).split())
def dict_concat(d_l):
d1 = d_l[0].copy()
for d in d_l[1:]:
d1.update(d)
return d1
def clean_name(name):
import re
name = re.findall('([a-zA-Z0-9/-]+)(?::\d+)?', name)[0]
name = re.findall('([a-zA-Z0-9/-]+)(?:_\d+)?', name)[0]
return name
class Struct(object):
def __init__(self, entries):
self.__dict__.update(entries)
def __getitem__(self, item):
return self.__dict__[item]
def dict2obj(d):
return Struct(d)
def dict2str(others):
name = ''
for key, val in others.iteritems():
name += '_' + str(key)
if isinstance(val, dict):
name += '_' + dict2str(val)
elif isinstance(val, list):
for val_ in val:
name += '-' + str(val_)
else:
name += '_' + str(val)
return name
def list2str(li, delimier=''):
name = ''
for name_ in li:
name += (str(name_) + delimier)
return name
def write_list(file, l, sort=True, delimiter=' ', fmt='%.18e'):
l = np.array(l)
if sort:
l = np.sort(l, axis=0)
np.savetxt(file, l, delimiter=delimiter, fmt=fmt)
def rsync(from_, to):
cmd = ('rsync -avzP ' + from_ + ' ' + to)
print(cmd)
return shell(cmd, block=False)
def i_vis_graph(graph_def, max_const_size=32):
"""Visualize TensorFlow graph."""
import tensorflow as tf
from IPython.display import display, HTML, SVG
import os
def strip_consts(graph_def, max_const_size=32):
"""Strip large constant values from graph_def."""
import tensorflow as tf
strip_def = tf.GraphDef()
for n0 in graph_def.node:
n = strip_def.node.add()
n.MergeFrom(n0)
if n.op == 'Const':
tensor = n.attr['value'].tensor
size = len(tensor.tensor_content)
if size > max_const_size:
tensor.tensor_content = tf.compat.as_bytes("<stripped %d bytes>" % size)
return strip_def
if hasattr(graph_def, 'as_graph_def'):
graph_def = graph_def.as_graph_def()
strip_def = strip_consts(graph_def, max_const_size=max_const_size)
code = """
<script>
function load() {{
document.getElementById("{id}").pbtxt = {data};
}}
</script>
<link rel="import" href="https://tensorboard.appspot.com/tf-graph-basic.build.html" onload=load()>
<div style="height:600px">
<tf-graph-basic id="{id}"></tf-graph-basic>
</div>
""".format(data=repr(str(strip_def)), id='graph' + str(np.random.rand()))
iframe = """
<iframe seamless style="width:800px;height:620px;border:0" srcdoc="{}"></iframe>
""".format(code.replace('"', '"'))
display(HTML(iframe))
if __name__ == '__main__':
pass
| [
"[email protected]"
]
| |
47976502d48e9a7befc8f25a4a40b5c79b62478c | efc690a4c42b1511deb0fe80bf146872c45aed69 | /conf_site/reviews/views/results.py | 8e9c337d3fcfa45024658285ca45002dd3f015b0 | [
"MIT"
]
| permissive | jasongrout/conf_site | 34aa1197727fbbbdf8811338764a7451445f1803 | 6b3beb21de8d847cba65dcb6da84464b40739d48 | refs/heads/master | 2021-03-31T06:35:59.696561 | 2020-03-17T20:39:57 | 2020-03-17T20:39:57 | 248,086,087 | 0 | 0 | MIT | 2020-03-17T22:32:02 | 2020-03-17T22:32:01 | null | UTF-8 | Python | false | false | 6,671 | py | # -*- coding: utf-8 -*-
# Views relating to accepting/rejecting a reviewed proposal.
from django.contrib import messages
from django.contrib.auth.mixins import UserPassesTestMixin
from django.http import HttpResponseRedirect
from django.template.defaultfilters import pluralize
from django.urls import reverse
from django.views.generic import View
from conf_site.proposals.models import Proposal
from conf_site.reviews.models import ProposalNotification, ProposalResult
from conf_site.reviews.views import ProposalListView
from symposion.schedule.models import Presentation
class SuperuserOnlyView(UserPassesTestMixin, View):
"""A view which only allows access to superusers."""
def test_func(self):
if self.request.user.is_superuser:
return True
elif not self.request.user.is_anonymous:
# Non-anonymous, non-superuser users should see an error page.
self.raise_exception = True
return False
class ProposalChangeResultPostView(SuperuserOnlyView):
"""A view to allow superusers to change a proposal's voting result."""
http_method_names = ["get"]
def get(self, *args, **kwargs):
"""Update an individual ProposalResult object."""
proposal = Proposal.objects.get(pk=kwargs["pk"])
result = ProposalResult.objects.get_or_create(proposal=proposal)[0]
result.status = kwargs["status"]
result.save()
return HttpResponseRedirect(
reverse("review_proposal_detail", args=[proposal.id])
)
class ProposalResultListView(SuperuserOnlyView, ProposalListView):
def get(self, request, *args, **kwargs):
self.status = kwargs["status"]
if self.status == ProposalResult.RESULT_UNDECIDED:
# Create ProposalResults for proposals that do not have them.
proposals_without_result = Proposal.objects.filter(
review_result=None
)
for proposal in proposals_without_result:
ProposalResult.objects.create(
proposal=proposal, status=ProposalResult.RESULT_UNDECIDED
)
return super(ProposalResultListView, self).get(
request, *args, **kwargs
)
def get_queryset(self):
return Proposal.objects.order_by("pk").filter(
review_result__status=self.status
)
def get_context_data(self, **kwargs):
context = super(ProposalResultListView, self).get_context_data(
**kwargs
)
temp_result = ProposalResult(status=self.status)
context["proposal_category"] = temp_result.get_status_display()
return context
class ProposalMultieditPostView(SuperuserOnlyView):
"""A view to let superusers modify multiple proposals' results."""
http_method_names = ["post"]
def post(self, *args, **kwargs):
proposal_pks = self.request.POST.getlist("proposal_pk")
proposals = Proposal.objects.filter(pk__in=proposal_pks)
new_status = self.request.POST.get("mark_status")
if new_status:
# <queryset>.update() will not work here because
# the status field lives in the related model
# ProposalResult.
for proposal in proposals:
try:
proposal.review_result.status = new_status
proposal.review_result.save()
except ProposalResult.DoesNotExist:
proposal.review_result = ProposalResult.objects.create(
proposal=proposal, status=new_status
)
return HttpResponseRedirect(
reverse("review_proposal_result_list", args=[new_status])
)
elif self.request.POST.get("send_notification"):
# Save ProposalNotification to database, as a type
# of rudimentary logging.
notification = ProposalNotification.objects.create(
from_address=self.request.POST.get("from_address"),
subject=self.request.POST.get("subject"),
body=self.request.POST.get("body"),
)
notification.proposals.set(proposals)
unemailed_speakers = notification.send_email()
for speaker in unemailed_speakers:
messages.warning(
self.request,
"Speaker {} does not have an email address "
"and has not been notified.".format(speaker.name)
)
return HttpResponseRedirect(reverse("review_proposal_list"))
elif self.request.POST.get("create_presentations"):
num_presentations_created = 0
for proposal in proposals:
# We don't need to add all of the proposal's metadata
# to the presentation. Most fields will automatically be
# added when we save the proposal.
# See https://github.com/pydata/conf_site/pull/176.
# Note that the title needs to be added here so that
# a presentation's slugs are created properly.
presentation, created = Presentation.objects.get_or_create(
proposal_base=proposal.proposalbase_ptr,
section=proposal.section,
speaker=proposal.speaker,
title=proposal.title,
)
# If the presentation already existed, we do not need
# to attach it to the proposal.
if created:
proposal.presentation = presentation
proposal.save()
num_presentations_created += 1
# Create a message if any new presentations were created.
if num_presentations_created:
messages.success(
self.request,
"{} presentation{} created.".format(
num_presentations_created,
pluralize(num_presentations_created),
),
)
else:
messages.warning(
self.request,
"All selected proposals already had presentations.",
)
# Since the "create presentations" action can only
# be initated from the "Accepted Proposals"
# category listing, we return the user there.
return HttpResponseRedirect(
reverse(
"review_proposal_result_list",
args=[ProposalResult.RESULT_ACCEPTED],
)
)
| [
"[email protected]"
]
| |
a6163925f7b8ca2e2f2092068ada1812096825e1 | 536ec8e275d0e4ac826ed492a818802f17eb29da | /ABC/160/d.py | 02dddae132428e1112cce372965697f32401ed8e | []
| no_license | tamanyan/coding-problems | 3d74ee708a943348ee06f1a25c45ee3a35cfd9ee | 415e8230c8386163e1abf5eea217a1e5be8a15bc | refs/heads/master | 2020-07-03T21:36:23.566534 | 2020-06-10T16:33:55 | 2020-06-10T16:33:55 | 202,057,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,544 | py | from heapq import heappush, heappop, heapify
from collections import deque, defaultdict, Counter
import itertools
from itertools import permutations, combinations, accumulate
import sys
import bisect
import string
import math
import time
def I(): return int(input())
def S(): return input()
def MI(): return map(int, input().split())
def MS(): return map(str, input().split())
def LI(): return [int(i) for i in input().split()]
def LI_(): return [int(i)-1 for i in input().split()]
def StoI(): return [ord(i)-97 for i in input()]
def ItoS(nn): return chr(nn+97)
def input(): return sys.stdin.readline().rstrip()
def show(*inp, end='\n'):
if show_flg:
print(*inp, end=end)
def print_matrix(mat):
for i in range(len(mat)):
print(*mat[i])
yn = {False: 'No', True: 'Yes'}
YN = {False: 'NO', True: 'YES'}
MOD = 10**9+7
inf = float('inf')
IINF = 10**19
l_alp = string.ascii_lowercase
u_alp = string.ascii_uppercase
ts = time.time()
sys.setrecursionlimit(10**6)
nums = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
show_flg = False
# show_flg = True
def bfs(graph, initial):
n = len(graph) - 1
dist = [-1] * (n + 1)
q = deque([initial])
visited = [False] * (n + 1)
visited[initial] = True
dist[initial] = 0
while len(q) != 0:
edge = q.popleft()
nxt = graph[edge]
for i, e in enumerate(nxt):
if visited[e] is False:
q.append(e)
dist[e] = dist[edge] + 1
visited[e] = True
return dist
def main():
N, M = MI()
ret = bfs(graph, 1)
print(ret)
def main():
N, X, Y = MI()
graph = [[] for i in range(N+1)]
for i in range(1, N):
graph[i].append(i+1)
graph[i+1].append(i)
graph[X].append(Y)
graph[Y].append(X)
d = defaultdict(int)
for i in range(1, N+1):
ret = bfs(graph, i)
for j in range(1, len(ret)):
if i != j:
d[ret[j]] += 1
# print(ret)
# print(d)
for i in range(1, N):
if i in d:
print(d[i]//2)
else:
print(0)
# d = defaultdict(int)
# for i in range(1, N+1):
# for j in range(i+1, N+1):
# a = abs(j - i)
# b = abs(X - i) + 1 + abs(Y - j)
# c = abs(Y - i) + 1 + abs(X - j)
# d[min(a, b, c)] += 1
# for i in range(1, N):
# if i in d:
# print(d[i])
# else:
# print(0)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
ad20541f220ca3f25625474ef3e4b31b78b5f186 | c94a89b95140bd1e348318ecb2d4867e676ba18c | /Kids_ProjectPro/urls.py | f49350e44751bce58a5c970419d7491ddc5ed535 | []
| no_license | Bek-End/Kids_ProjectPro | c499a0319726f2c819527ce3b12f00e48edc708f | e1a9b6a4825bccbe2b620ddecc0b7cfac519d4ca | refs/heads/master | 2023-02-06T09:09:49.302103 | 2020-12-30T18:42:30 | 2020-12-30T18:42:30 | 325,620,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,114 | py | """Kids_ProjectPro URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# from django.views.static import serve
# from django.conf.urls import url
urlpatterns = [
path('verify/', include('verification.urls')),
path('account/',include('accounts.urls')),
path('admin/', admin.site.urls),
# url(r'^media/(?P<path>.*)$', serve,{'document_root': settings.MEDIA_ROOT}),
# url(r'^static/(?P<path>.*)$', serve,{'document_root': settings.STATIC_ROOT}),
]
| [
"="
]
| = |
14c4f7a74b37bcc37269c434315d42e1b81f1aa2 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /ssm_write_f/parameter-version_label.py | 3db287273a2a4cd137dce02c234f61d14f00aed6 | []
| no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
"""
"""
write_parameter("ssm", "label-parameter-version") | [
"[email protected]"
]
| |
3155809eb9213fe200a99270af4ffb00653d1968 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_001/ch1_2019_03_08_00_48_52_215191.py | 703dfbc3041cf04c1ed845a6b560498ac223dc71 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | def calcula_valor_devido(C,t,i):
y=C*(1+i)**t
return y | [
"[email protected]"
]
| |
79b1bcd49c77b250f57382414962ded3e5d58e28 | dd8faa90ee03ff52c571994ff797e4e4db38726d | /lib/python2.7/site-packages/google_compute_engine/instance_setup/instance_setup.py | 1677fc765c6b90328a859867099fa926d89b8411 | []
| no_license | bopopescu/elasticluster_forked | 16453b6d322ed26156a9759a8a3507ba9023883b | ba5433bc4a6e7b876161030f06b76ff1837e0e71 | refs/heads/master | 2022-11-21T20:49:13.533208 | 2017-10-25T22:39:00 | 2017-10-25T22:39:00 | 282,557,817 | 0 | 0 | null | 2020-07-26T02:02:21 | 2020-07-26T02:02:20 | null | UTF-8 | Python | false | false | 7,502 | py | #!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run initialization code the first time the instance boots."""
import logging.handlers
import optparse
import os
import re
import shutil
import subprocess
import tempfile
from google_compute_engine import file_utils
from google_compute_engine import logger
from google_compute_engine import metadata_watcher
from google_compute_engine.boto import boto_config
from google_compute_engine.instance_setup import instance_config
class InstanceSetup(object):
"""Initialize the instance the first time it boots."""
def __init__(self, debug=False):
"""Constructor.
Args:
debug: bool, True if debug output should write to the console.
"""
self.debug = debug
facility = logging.handlers.SysLogHandler.LOG_DAEMON
self.logger = logger.Logger(
name='instance-setup', debug=self.debug, facility=facility)
self.watcher = metadata_watcher.MetadataWatcher(logger=self.logger)
self.metadata_dict = None
self.instance_config = instance_config.InstanceConfig(logger=self.logger)
if self.instance_config.GetOptionBool('InstanceSetup', 'network_enabled'):
self.metadata_dict = self.watcher.GetMetadata()
instance_config_metadata = self._GetInstanceConfig()
self.instance_config = instance_config.InstanceConfig(
logger=self.logger, instance_config_metadata=instance_config_metadata)
if self.instance_config.GetOptionBool('InstanceSetup', 'set_host_keys'):
self._SetSshHostKeys()
if self.instance_config.GetOptionBool('InstanceSetup', 'set_boto_config'):
self._SetupBotoConfig()
if self.instance_config.GetOptionBool(
'InstanceSetup', 'optimize_local_ssd'):
self._RunScript('optimize_local_ssd')
if self.instance_config.GetOptionBool('InstanceSetup', 'set_multiqueue'):
self._RunScript('set_multiqueue')
try:
self.instance_config.WriteConfig()
except (IOError, OSError) as e:
self.logger.warning(str(e))
def _GetInstanceConfig(self):
"""Get the instance configuration specified in metadata.
Returns:
string, the instance configuration data.
"""
try:
instance_data = self.metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = self.metadata_dict['project']['attributes']
except KeyError:
project_data = {}
self.logger.warning('Project attributes were not found.')
return (instance_data.get('google-instance-configs') or
project_data.get('google-instance-configs'))
def _RunScript(self, script):
"""Run a script and log the streamed script output.
Args:
script: string, the file location of an executable script.
"""
process = subprocess.Popen(
script, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
while True:
for line in iter(process.stdout.readline, b''):
self.logger.info(line.decode('utf-8').rstrip('\n'))
if process.poll() is not None:
break
def _GetInstanceId(self):
"""Get the instance ID for this VM.
Returns:
string, the instance ID for the VM.
"""
try:
return str(self.metadata_dict['instance']['id'])
except KeyError:
self.logger.warning('Instance ID was not found in metadata.')
return None
def _GenerateSshKey(self, key_type, key_dest):
"""Generate a new SSH key.
Args:
key_type: string, the type of the SSH key.
key_dest: string, a file location to store the SSH key.
"""
# Create a temporary file to save the created RSA keys.
with tempfile.NamedTemporaryFile(prefix=key_type, delete=True) as temp:
temp_key = temp.name
command = ['ssh-keygen', '-t', key_type, '-f', temp_key, '-N', '', '-q']
try:
self.logger.info('Generating SSH key %s.', key_dest)
subprocess.check_call(command)
except subprocess.CalledProcessError:
self.logger.warning('Could not create SSH key %s.', key_dest)
return
shutil.move(temp_key, key_dest)
shutil.move('%s.pub' % temp_key, '%s.pub' % key_dest)
file_utils.SetPermissions(key_dest, mode=0o600)
file_utils.SetPermissions('%s.pub' % key_dest, mode=0o644)
def _StartSshd(self):
"""Initialize the SSH daemon."""
# Exit as early as possible.
# Instance setup systemd scripts block sshd from starting.
if os.path.exists('/bin/systemctl'):
return
elif (os.path.exists('/etc/init.d/ssh') or
os.path.exists('/etc/init/ssh.conf')):
subprocess.call(['service', 'ssh', 'start'])
subprocess.call(['service', 'ssh', 'reload'])
elif (os.path.exists('/etc/init.d/sshd') or
os.path.exists('/etc/init/sshd.conf')):
subprocess.call(['service', 'sshd', 'start'])
subprocess.call(['service', 'sshd', 'reload'])
def _SetSshHostKeys(self):
"""Regenerates SSH host keys when the VM is restarted with a new IP address.
Booting a VM from an image with a known SSH key allows a number of attacks.
This function will regenerating the host key whenever the IP address
changes. This applies the first time the instance is booted, and each time
the disk is used to boot a new instance.
"""
section = 'Instance'
instance_id = self._GetInstanceId()
if instance_id != self.instance_config.GetOptionString(
section, 'instance_id'):
self.logger.info('Generating SSH host keys for instance %s.', instance_id)
file_regex = re.compile(r'ssh_host_(?P<type>[a-z0-9]*)_key\Z')
key_dir = '/etc/ssh'
key_files = [f for f in os.listdir(key_dir) if file_regex.match(f)]
for key_file in key_files:
key_type = file_regex.match(key_file).group('type')
key_dest = os.path.join(key_dir, key_file)
self._GenerateSshKey(key_type, key_dest)
self._StartSshd()
self.instance_config.SetOption(section, 'instance_id', str(instance_id))
def _GetNumericProjectId(self):
"""Get the numeric project ID.
Returns:
string, the numeric project ID.
"""
try:
return str(self.metadata_dict['project']['numericProjectId'])
except KeyError:
self.logger.warning('Numeric project ID was not found in metadata.')
return None
def _SetupBotoConfig(self):
"""Set the boto config so GSUtil works with provisioned service accounts."""
project_id = self._GetNumericProjectId()
try:
boto_config.BotoConfig(project_id, debug=self.debug)
except (IOError, OSError) as e:
self.logger.warning(str(e))
def main():
parser = optparse.OptionParser()
parser.add_option(
'-d', '--debug', action='store_true', dest='debug',
help='print debug output to the console.')
(options, _) = parser.parse_args()
InstanceSetup(debug=bool(options.debug))
if __name__ == '__main__':
main()
| [
"vipin@kryptonite"
]
| vipin@kryptonite |
0768dc07b55dd1f9ec3c1e126265bb64af79e753 | c528d85b2db4e106f0a08ed0c0bb8f9a06c75c27 | /produccion/migrations/0016_produccionrealizada_cantidad_reproceso.py | 8e696116ef666a3b2e0438d0b937accc4769a505 | []
| no_license | alrvivas/CampoApp2 | 446e9eeb5f2b03963616b433340e99e3d481e532 | 8c574173f68887a16fb6c0e728a10ef12dbae4ce | refs/heads/master | 2016-09-10T19:51:50.586769 | 2015-05-19T20:59:49 | 2015-05-19T20:59:49 | 35,906,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 474 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('produccion', '0015_auto_20150205_1009'),
]
operations = [
migrations.AddField(
model_name='produccionrealizada',
name='cantidad_reproceso',
field=models.IntegerField(null=True, blank=True),
preserve_default=True,
),
]
| [
"[email protected]"
]
| |
e6ac376fc3ce85a4b5ed3d68aa7c036078669092 | 15cb0ddd678abe1e1f7a905fab0305079bfc4007 | /source/vsm/vsm/openstack/common/periodic_task.py | ba0c8723c4258638ee958becbe7b98ea8b935d88 | [
"Apache-2.0",
"MIT"
]
| permissive | ramkrsna/virtual-storage-manager | 3563baf9763a0925af77cc13245e0896c20a2ced | 78125bfb4dd4d78ff96bc3274c8919003769c545 | refs/heads/master | 2023-02-18T08:52:56.769486 | 2016-07-01T06:46:53 | 2016-07-01T06:46:53 | 63,155,952 | 0 | 0 | NOASSERTION | 2023-02-07T06:07:38 | 2016-07-12T12:27:16 | Python | UTF-8 | Python | false | false | 8,337 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 Intel Corporation, All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the"License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import random
from oslo.config import cfg
from vsm import flags
from vsm.openstack.common.gettextutils import _
from vsm.openstack.common import log as logging
from vsm.openstack.common import timeutils
from vsm import db
from vsm import utils
FLAGS = flags.FLAGS
periodic_opts = [
cfg.BoolOpt('run_external_periodic_tasks',
default=True,
help=('Some periodic tasks can be run in a separate process. '
'Should we run them here?')),
]
CONF = cfg.CONF
CONF.register_opts(periodic_opts)
LOG = logging.getLogger(__name__)
DEFAULT_INTERVAL = 60.0
class InvalidPeriodicTaskArg(Exception):
message = _("Unexpected argument for periodic task creation: %(arg)s.")
def periodic_task(*args, **kwargs):
"""Decorator to indicate that a method is a periodic task.
This decorator can be used in two ways:
1. Without arguments '@periodic_task', this will be run on every cycle
of the periodic scheduler.
2. With arguments:
@periodic_task(spacing=N [, run_immediately=[True|False]])
this will be run on approximately every N seconds. If this number is
negative the periodic task will be disabled. If the run_immediately
argument is provided and has a value of 'True', the first run of the
task will be shortly after task scheduler starts. If
run_immediately is omitted or set to 'False', the first time the
task runs will be approximately N seconds after the task scheduler
starts.
"""
def decorator(f):
# Test for old style invocation
if 'ticks_between_runs' in kwargs:
raise InvalidPeriodicTaskArg(arg='ticks_between_runs')
# Control if run at all
f._periodic_task = True
f._periodic_external_ok = kwargs.pop('external_process_ok', False)
if f._periodic_external_ok and not CONF.run_external_periodic_tasks:
f._periodic_enabled = False
else:
f._periodic_enabled = kwargs.pop('enabled', True)
# Control frequency
f._periodic_spacing = kwargs.pop('spacing', 0)
f._periodic_immediate = kwargs.pop('run_immediately', False)
f._service_topic = kwargs.pop('service_topic', None)
if f._periodic_immediate:
f._periodic_last_run = None
else:
f._periodic_last_run = timeutils.utcnow()
return f
# NOTE(sirp): The `if` is necessary to allow the decorator to be used with
# and without parens.
#
# In the 'with-parens' case (with kwargs present), this function needs to
# return a decorator function since the interpreter will invoke it like:
#
# periodic_task(*args, **kwargs)(f)
#
# In the 'without-parens' case, the original function will be passed
# in as the first argument, like:
#
# periodic_task(f)
if kwargs:
return decorator
else:
return decorator(args[0])
class _PeriodicTasksMeta(type):
def __init__(cls, names, bases, dict_):
"""Metaclass that allows us to collect decorated periodic tasks."""
super(_PeriodicTasksMeta, cls).__init__(names, bases, dict_)
# NOTE(sirp): if the attribute is not present then we must be the base
# class, so, go ahead an initialize it. If the attribute is present,
# then we're a subclass so make a copy of it so we don't step on our
# parent's toes.
try:
cls._periodic_tasks = cls._periodic_tasks[:]
except AttributeError:
cls._periodic_tasks = []
try:
cls._periodic_last_run = cls._periodic_last_run.copy()
except AttributeError:
cls._periodic_last_run = {}
try:
cls._periodic_spacing = cls._periodic_spacing.copy()
except AttributeError:
cls._periodic_spacing = {}
try:
cls._service_topic = cls._service_topic.copy()
except AttributeError:
cls._service_topic = {}
for value in cls.__dict__.values():
if getattr(value, '_periodic_task', False):
task = value
name = task.__name__
if task._periodic_spacing < 0:
continue
if not task._periodic_enabled:
continue
# A periodic spacing of zero indicates that this task should
# be run every pass
if task._periodic_spacing == 0:
task._periodic_spacing = None
cls._periodic_tasks.append((name, task))
cls._periodic_spacing[name] = task._periodic_spacing
cls._periodic_last_run[name] = task._periodic_last_run
cls._service_topic[name] = task._service_topic
class PeriodicTasks(object):
__metaclass__ = _PeriodicTasksMeta
def run_periodic_tasks(self, context, raise_on_error=False):
"""Tasks to be run at a periodic interval."""
idle_for = DEFAULT_INTERVAL
for task_name, task in self._periodic_tasks:
full_task_name = '.'.join([self.__class__.__name__, task_name])
now = timeutils.utcnow()
spacing = self._periodic_spacing[task_name]
last_run = self._periodic_last_run[task_name]
service_topic = self._service_topic[task_name]
if service_topic is not None:
if not self._running_on_this_host(context, service_topic):
continue
# If a periodic task is _nearly_ due, then we'll run it early
if spacing is not None and last_run is not None:
due = last_run + datetime.timedelta(seconds=spacing)
if not timeutils.is_soon(due, 0.2):
idle_for = min(idle_for, timeutils.delta_seconds(now, due))
continue
if spacing is not None:
idle_for = min(idle_for, spacing)
self._periodic_last_run[task_name] = timeutils.utcnow()
try:
task(self, context)
except Exception as e:
LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
locals())
if raise_on_error:
raise
time.sleep(0)
return idle_for
def _running_on_this_host(self, context, service_topic):
running_on_this_host = False
host_list = []
sers = db.service_get_all_by_topic(context, service_topic)
for ser in sers:
init_node = db.init_node_get_by_host(context, ser['host'])
if utils.service_is_up(ser) and init_node and init_node['status'] == "Active":
host_list.append(ser['host'])
host_list.sort()
if len(host_list) == 0:
return False
select_host = random.randint(0, len(host_list)-1)
if host_list[select_host] == FLAGS.host:
running_on_this_host = True
return running_on_this_host
| [
"[email protected]"
]
| |
020f38bcdcaa8a734beb4dd1a45e1e2e66bcd782 | fd25231975acd147e04dc3ed3627c92cb1a4f86c | /FlaskAPI/vir_env/lib/python3.7/site-packages/sklearn/linear_model/theil_sen.py | 3fbea57d5f9babe483c0672e470413c5cb049f49 | []
| no_license | sumitkutty/Flight-Price-Prediction | 832a2802a3367e655b46d3b44f073d917abd2320 | d974a8b75fbcbfa42f11703602af3e45a3f08b3c | refs/heads/master | 2022-12-25T07:13:06.375888 | 2020-10-08T18:46:44 | 2020-10-08T18:46:44 | 302,366,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:4180feba49700d901330d36bcb7c90b1782af3a052c08cac446725a84da24bc5
size 580
| [
"[email protected]"
]
| |
84a1ee5259d9018c15b1c540bf5849f763787883 | d9f8673ec068e4cb31e08513b9f7eab274d66243 | /tf_quant_finance/experimental/rqmc/sobol.py | 91a8ba386f09334d93a297726cf4b8d8043554d1 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | rambam613/tf-quant-finance | e42bd9bfb9337f41080a29ff444b8a21c00e3114 | c96898e13ce2592115720a19791fc1330697ff37 | refs/heads/master | 2023-06-24T12:08:04.629791 | 2021-07-26T12:57:51 | 2021-07-26T12:58:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,735 | py | # Lint as: python3
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for Sobol sequence generation."""
import tensorflow.compat.v2 as tf
from tf_quant_finance import types
from tf_quant_finance.experimental.rqmc import digital_net
from tf_quant_finance.experimental.rqmc import utils
from tf_quant_finance.math.random_ops import sobol
__all__ = [
'sample_sobol',
'sobol_generating_matrices',
]
(_PRIMITIVE_POLYNOMIAL_COEFFICIENTS,
_INITIAL_DIRECTION_NUMBERS) = sobol.load_data()
def sample_sobol(dim: types.IntTensor,
num_results: types.IntTensor,
sequence_indices: types.IntTensor = None,
digital_shift: types.IntTensor = None,
scrambling_matrices: types.IntTensor = None,
apply_tent_transform: bool = False,
validate_args: bool = False,
dtype: tf.DType = None,
name: str = None) -> types.RealTensor:
r"""Samples points from the Sobol sequence.
Args:
dim: Positive scalar `Tensor` of integers with rank 0. The event size of the
sampled points.
num_results: Positive scalar `Tensor` of integers with rank 0. The number of
points to sample.
sequence_indices: Optional positive scalar `Tensor` of integers with rank 1.
The elements of the sequence to return specified by their position in the
sequence.
Default value: `None` which corresponds to the `[0, num_results)` range.
digital_shift: Optional digital shift to be applied to all the points via a
bitwise xor.
Default value: `None`.
scrambling_matrices: Positive scalar `Tensor` with the same `shape` and
`dtype` as `generating_matrices`. Used to randomize `generating_matrices`.
Default value: `None`.
apply_tent_transform: Python `bool` indicating whether to apply a tent
transform to the sampled points.
Default value: `False`.
validate_args: Python `bool` indicating whether to validate arguments.
Default value: `False`.
dtype: Optional `dtype`. The `dtype` of the output `Tensor` (either
`float32` or `float64`).
Default value: `None` which maps to `float32`.
name: Python `str` name prefixed to ops created by this function.
Default value: `None` which maps to `sample_sobol`.
Returns:
A `Tensor` of samples from the Sobol sequence with `shape`
`(num_samples, dim)` where `num_samples = min(num_results,
size(sequence_indices))` and `dim = tf.shape(generating_matrices)[0]`.
"""
with tf.name_scope(name or 'sample_sobol'):
dtype = dtype or tf.float32
num_digits = tf.cast(
tf.math.ceil(utils.log2(tf.cast(num_results, dtype=tf.float32))),
tf.int32)
# shape: (dim, log_num_results)
generating_matrices = sobol_generating_matrices(
dim,
num_results,
num_digits,
validate_args=validate_args,
dtype=tf.int32)
if scrambling_matrices is not None:
# shape: (dim, log_num_results)
generating_matrices = digital_net.scramble_generating_matrices(
generating_matrices,
scrambling_matrices,
num_digits,
validate_args=validate_args)
# shape: (num_results, dim)
return digital_net.sample_digital_net(
generating_matrices,
num_results,
num_digits,
sequence_indices=sequence_indices,
digital_shift=digital_shift,
apply_tent_transform=apply_tent_transform,
validate_args=validate_args,
dtype=dtype)
def sobol_generating_matrices(dim: types.IntTensor,
num_results: types.IntTensor,
num_digits: types.IntTensor,
validate_args: bool = False,
dtype: tf.DType = None,
name: str = None) -> types.IntTensor:
r"""Returns Sobol generating matrices.
Args:
dim: Positive scalar `Tensor` of integers with rank 0. The event size of
points which can be sampled from the resulting generating matrices.
num_results: Positive scalar `Tensor` of integers with rank 0. The maximum
number of points which can be sampled from the resulting generating
matrices.
num_digits: Positive scalar `Tensor` of integers with rank 0. The base-2
precision of points which can be sampled from the resulting generating
matrices.
validate_args: Python `bool` indicating whether to validate arguments.
Default value: `False`.
dtype: Optional `dtype`. The `dtype` of the output `Tensor` (either a signed
or unsigned integer `dtype`).
Default value: `None` which maps to `int32`.
name: Python `str` name prefixed to ops created by this function.
Default value: `None` which maps to `sobol_generating_matrices`.
Returns:
A scalar `Tensor` with shape `(dim, log_num_results)` where
`log_num_results = ceil(log2(num_results))`.
"""
with tf.name_scope(name or 'sobol_generating_matrices'):
dtype = dtype or tf.int32
dim = tf.convert_to_tensor(dim, dtype=dtype, name='dim')
num_results = tf.convert_to_tensor(
num_results, dtype=dtype, name='num_results')
num_digits = tf.convert_to_tensor(
num_digits, dtype=dtype, name='num_digits')
log_num_results = tf.cast(
tf.math.ceil(utils.log2(tf.cast(num_results, dtype=tf.float32))),
dtype=dtype)
control_deps = []
if validate_args:
control_deps.append(
tf.debugging.assert_positive(dim, message='dim must be positive'))
control_deps.append(
tf.debugging.assert_positive(
num_results, message='num_results must be positive'))
control_deps.append(
tf.debugging.assert_positive(
num_digits, message='num_digits must be positive'))
control_deps.append(
tf.debugging.assert_less(
log_num_results,
tf.constant(32, dtype=dtype),
message='log2(num_results) must be less than 32'))
with tf.control_dependencies(control_deps):
# shape: (1, log_num_results)
identity = _identity_matrix(log_num_results, num_digits, dtype=dtype)
# shape: (dim - 1, log_num_results)
matrices = _sobol_generating_matrices(
dim - 1, log_num_results, num_digits, dtype=dtype)
# shape: (dim, log_num_results)
return tf.concat((identity, matrices), axis=0)
def _identity_matrix(num_columns: types.IntTensor,
num_digits: types.IntTensor,
dtype: tf.DType = None) -> types.IntTensor:
r"""Returns the identity matrix.
Args:
num_columns: Positive scalar `Tensor` with rank 0 representing the number of
columns of the returned matrix.
num_digits: Positive scalar `Tensor` with rank 0 representing the base-2
precision of the samples.
dtype: Optional `dtype`. The `dtype` of the output `Tensor` (either a signed
or unsigned integer `dtype`).
Default value: `None` which maps to `int32`.
Returns:
A scalar `Tensor` with shape `(1, num_columns)`.
"""
dtype = dtype or tf.int32
shifts = tf.range(num_digits - 1, num_digits - 1 - num_columns, delta=-1)
# shape: (1, num_columns)
return tf.bitwise.left_shift(
tf.ones(shape=(1, num_columns), dtype=dtype), tf.cast(shifts, dtype))
def _sobol_generating_matrices(dim: types.IntTensor,
log_num_results: types.IntTensor,
num_digits: types.IntTensor,
dtype=None) -> types.IntTensor:
r"""Returns all Sobol generating matrices.
Args:
dim: Positive scalar `Tensor` with rank 0 representing the event size of
points which can be sampled from the resulting generating matrix.
log_num_results: Positive scalar `Tensor` with rank 0 representing the
base-2 logarithm of the maximum number of points which can be sampled from
the resulting generating matrix.
num_digits: Positive scalar `Tensor` with rank 0 representing the base-2
precision of points which can be sampled from the resulting generating
matrix.
dtype: Optional `dtype`. The `dtype` of the output `Tensor` (either a signed
or unsigned integer `dtype`).
Default value: `None` which maps to `int32`.
Returns:
A scalar `Tensor` with shape `(dim, ceil(log2(num_results)))`.
"""
global _INITIAL_DIRECTION_NUMBERS
global _PRIMITIVE_POLYNOMIAL_COEFFICIENTS
dtype = dtype or tf.int32
indices = tf.cast(tf.range(0, log_num_results), dtype)
dimensions = tf.range(0, dim)
# shape: (?, ?)
directions = tf.convert_to_tensor(
_INITIAL_DIRECTION_NUMBERS, dtype=dtype, name='direction_numbers')
# shape: (log_num_results, ?)
directions = directions[:log_num_results]
# shape: (log_num_results, dim)
directions = tf.gather(directions, dimensions, axis=1)
# shape: (dim, log_num_results)
directions = tf.cast(tf.transpose(directions), dtype)
# shape: (?,)
polynomial = tf.convert_to_tensor(
_PRIMITIVE_POLYNOMIAL_COEFFICIENTS,
dtype=dtype,
name='polynomial_coefficients')
# shape: (1, dim)
polynomial = tf.cast(
tf.gather(polynomial, tf.expand_dims(dimensions, axis=1)), dtype)
# shape: (1, dim)
degree = tf.cast(
tf.math.floor(utils.log2(tf.cast(polynomial, dtype=tf.float32))),
dtype=dtype)
def loop_predicate_fn(matrix_values, column):
del matrix_values
return column < log_num_results - 1
def loop_body_fn(matrices, column):
# Loop invariant: At the end of the iteration, all values from column 0 to
# `column` have been calculated.
# shape: (dim, log_num_results)
column_values = tf.gather(matrices, [column], axis=1)
# Columns whose index is smaller than the degree of the primitive polynomial
# are obtained from direction numbers and thus should not be updated.
# During a given iteration, only the next n columns (where n is the degree
# of the primitive polynomial) should be updated.
# shape: (dim, log_num_results)
should_be_updated = tf.logical_and(
tf.less_equal(tf.math.maximum(degree, column + 1), indices),
tf.less_equal(indices, column + degree))
# shape: (dim, log_num_results)
updated_matrices = tf.bitwise.bitwise_xor(
tf.where(
tf.equal(indices, column + degree),
tf.bitwise.right_shift(column_values, degree), matrices),
utils.filter_tensor(column_values, polynomial,
column + degree - indices))
# shape: (dim, log_num_results)
returned_matrices = tf.where(should_be_updated, updated_matrices, matrices)
return (returned_matrices, column + 1)
# shape: (dim, log_num_results)
initial_matrices = tf.bitwise.left_shift(
directions,
tf.cast(tf.expand_dims(num_digits - 1 - indices, axis=0), dtype))
matrices, _ = tf.while_loop(
loop_predicate_fn,
loop_body_fn,
loop_vars=(initial_matrices, tf.constant(0, dtype)),
maximum_iterations=tf.cast(log_num_results, tf.int32) - 1)
# shape: (dim, log_num_results)
return matrices
| [
"[email protected]"
]
| |
9c5bb4dfa66e539726e24979938958a0f0da287a | 3ae201c706c4defbd6f1f4f4c578b702b3e100d9 | /dbsys-hw2/test/Experiment.py | 4e63834f6b1fabb44dc1bae99168a7dc8d675a16 | []
| no_license | jasonlingo/Database_Systems | 1ef6071300073c997958f1a9f087e96d9a6c44d5 | 7297b7e38c66d1d66b4f8d24639cbe66ab402616 | refs/heads/master | 2021-06-09T20:44:04.812666 | 2016-12-11T22:49:32 | 2016-12-11T22:49:32 | 50,884,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,850 | py | import Database
from Catalog.Schema import DBSchema
import time
def getResult(db, query):
return [query.schema().unpack(tup) for page in db.processQuery(query) for tup in page[1]]
if __name__=="__main__":
db = Database.Database(dataDir='./data')
"""
select p.p_name, s.s_name
from part p, supplier s, partsupp ps
where p.p_partkey = ps.ps_partkey
and ps.ps_suppkey = s.s_suppkey
and ps.ps_availqty = 1
union all
select p.p_name, s.s_name
from part p, supplier s, partsupp ps
where p.p_partkey = ps.ps_partkey
and ps.ps_suppkey = s.s_suppkey
and ps.ps_supplycost < 5;
"""
""" Schema
supplier[(S_SUPPKEY,int),(S_NAME,char(25)),(S_ADDRESS,char(40)),(S_NATIONKEY,int),(S_PHONE,char(15)),(S_ACCTBAL,double),(S_COMMENT,char(101))]
part[(P_PARTKEY,int),(P_NAME,char(55)),(P_MFGR,char(25)),(P_BRAND,char(10)),(P_TYPE,char(25)),(P_SIZE,int),(P_CONTAINER,char(10)),(P_RETAILPRICE,double),(P_COMMENT,char(23))]
partsupp[(PS_PARTKEY,int),(PS_SUPPKEY,int),(PS_AVAILQTY,int),(PS_SUPPLYCOST,double),(PS_COMMENT,char(199))]
"""
# part = db.query().fromTable('part')
# supplier = db.query().fromTable('supplier')
# partsupp = db.query().fromTable('partsupp')
#
# supplierSchema = db.relationSchema("supplier")
# partsuppSchema = db.relationSchema('partsupp')
#
# lhsKeySchema1 = DBSchema('pJoinKey', [('P_PARTKEY', 'int')])
# rhsKeySchema1 = DBSchema('psJoinKey1', [('PS_PARTKEY', 'int')])
#
# lhsKeySchema2 = DBSchema('psJoinKey2', [('PS_SUPPKEY', 'int')])
# rhsKeySchema2 = DBSchema('sJoinKey', [('S_SUPPKEY', 'int')])
# print(db.relationSchema('supplier').toString())
# print(db.relationSchema('part').toString())
# print(db.relationSchema('partsupp').toString())
# =======================================
# Question 1
# =======================================
# ========== block-nested join ==========
# query1 = db.query().fromTable('part')\
# .join(db.query().fromTable('partsupp'),
# rhsSchema=db.relationSchema('partsupp'),
# method='block-nested-loops',
# expr='P_PARTKEY == PS_PARTKEY'
# )\
# .join(db.query().fromTable('supplier'),
# rhsSchema=db.relationSchema('supplier'),
# method='block-nested-loops',
# expr='PS_SUPPKEY == S_SUPPKEY'
# )\
# .where('PS_AVAILQTY == 1')\
# .union(\
# db.query().fromTable('part')\
# .join(db.query().fromTable('partsupp'),
# rhsSchema=db.relationSchema('partsupp'),
# method='block-nested-loops',
# expr='P_PARTKEY == PS_PARTKEY'
# )\
# .join(db.query().fromTable('supplier'),
# rhsSchema=db.relationSchema('supplier'),
# method='block-nested-loops',
# expr='PS_SUPPKEY == S_SUPPKEY'
# )\
# .where('PS_SUPPLYCOST < 5'))\
# .select({'P_NAME': ('P_NAME', 'char(55)'), 'S_NAME': ('S_NAME', 'char(25)')}).finalize()
# ========== hash join ==========
# query1 = part.join(partsupp,
# rhsSchema=partsuppSchema,
# method='hash',
# lhsHashFn='hash(P_PARTKEY) % 4', lhsKeySchema=lhsKeySchema1,
# rhsHashFn='hash(PS_PARTKEY) % 4', rhsKeySchema=rhsKeySchema1,
# )\
# .join(supplier,
# rhsSchema=supplierSchema,
# method='hash',
# lhsHashFn='hash(PS_SUPPKEY) % 4', lhsKeySchema=lhsKeySchema2,
# rhsHashFn='hash(S_SUPPKEY) % 4', rhsKeySchema=rhsKeySchema2,
# )\
# .where('PS_AVAILQTY == 1')\
# .select({'p_name': ('P_NAME', 'char(55)'), 's_name': ('S_NAME', 'char(25)')})\
# .union(
# part.join(partsupp,
# rhsSchema=partsuppSchema,
# method='hash',
# lhsHashFn='hash(P_PARTKEY) % 4', lhsKeySchema=lhsKeySchema1,
# rhsHashFn='hash(PS_PARTKEY) % 4', rhsKeySchema=rhsKeySchema1,
# )\
# .join(supplier,
# rhsSchema=supplierSchema,
# method='hash',
# lhsHashFn='hash(PS_SUPPKEY) % 4', lhsKeySchema=lhsKeySchema2,
# rhsHashFn='hash(S_SUPPKEY) % 4', rhsKeySchema=rhsKeySchema2,
# )\
# .where('PS_SUPPLYCOST < 5')\
# .select({'p_name': ('P_NAME', 'char(55)'), 's_name': ('S_NAME', 'char(25)')}))\
# .finalize()
# partsuppSchema = db.relationSchema('partsupp')
# supplierSchema = db.relationSchema('supplier')
#
# keySchemaPart = DBSchema('partKey', [('P_PARTKEY', 'int')])
# keySchemaPartsupp1 = DBSchema('partsuppKey1', [('PS_PARTKEY', 'int')])
# keySchemaPartsupp2 = DBSchema('partsuppKey2', [('PS_SUPPKEY', 'int')])
# keySchemaSupplier = DBSchema('supplierKey', [('S_SUPPKEY', 'int')])
#
# part = db.query().fromTable('part')
# supplier = db.query().fromTable('supplier')
# partsupp = db.query().fromTable('partsupp')
# join1 = part.join(
# partsupp,
# rhsSchema=partsuppSchema,
# method='hash',
# lhsHashFn='hash(P_PARTKEY) % 4', lhsKeySchema=keySchemaPart,
# rhsHashFn='hash(PS_PARTKEY) % 4', rhsKeySchema=keySchemaPartsupp1
# ).join(
# supplier,
# rhsSchema = supplierSchema,
# method = 'hash',
# lhsHashFn='hash(PS_SUPPKEY) % 4', lhsKeySchema=keySchemaPartsupp2,
# rhsHashFn = 'hash(S_SUPPKEY) % 4', rhsKeySchema=keySchemaSupplier
# ).where('PS_AVAILQTY == 1').select({'p_name': ('P_NAME', 'char(55)'), 's_name': ('S_NAME', 'char(25)')})
#
# join2 = part.join(
# partsupp,
# rhsSchema = partsuppSchema,
# method = 'hash',
# lhsHashFn='hash(P_PARTKEY) % 4', lhsKeySchema=keySchemaPart,
# rhsHashFn = 'hash(PS_PARTKEY) % 4', rhsKeySchema=keySchemaPartsupp1
# ).join(
# supplier,
# rhsSchema = supplierSchema,
# method = 'hash',
# lhsHashFn='hash(PS_SUPPKEY) % 4', lhsKeySchema=keySchemaPartsupp2,
# rhsHashFn = 'hash(S_SUPPKEY) % 4', rhsKeySchema=keySchemaSupplier
# ).where('PS_SUPPLYCOST < 5').select({'p_name': ('P_NAME', 'char(55)'), 's_name': ('S_NAME', 'char(25)')})
#
# query1 = join1.union(join2).finalize()
# =======================================
# Question 3
# =======================================
""" original SQL query
with temp as (
select n.n_name as nation, p.p_name as part, sum(l.l_quantity) as num
from customer c, nation n, orders o, lineitem l, part p
where c.c_nationkey = n.n_nationkey
and c.c_custkey = o.o_custkey
and o.o_orderkey = l.l_orderkey
and l.l_partkey = p.p_partkey
group by n.n_name, p.p_name
)
select nation, max(num)
from temp
group by nation;
"""
# tables
customer = db.relationSchema('customer')
nation = db.relationSchema('nation')
orders = db.relationSchema('orders')
lineitem = db.relationSchema('lineitem')
part = db.relationSchema('part')
print(customer.toString())
print(nation.toString())
print(orders.toString())
print(lineitem.toString())
print(part.toString())
# # hash join
# # customer join nation
# cus_nationKey = DBSchema('cus_nationKey', [('C_NATIONKEY', 'int')])
# nat_nationKey = DBSchema('nat_nationKey', [('N_NATIONKEY', 'int')])
#
# # customer join orders
# cus_custKey = DBSchema('cus_custKey', [('C_CUSTKEY', 'int')])
# ord_custKey = DBSchema('ord_custKey', [('O_CUSTKEY', 'int')])
#
# # orders join lineitem
# ord_orderKey = DBSchema('ord_orderKey', [('O_ORDERKEY', 'int')])
# line_orderKey = DBSchema('line_orderKey', [('L_ORDERKEY', 'int')])
#
# # lineitem join part
# line_partKey = DBSchema('line_partKey', [('L_PARTKEY'), 'int'])
# part_partKey = DBSchema('part_partKey', [('P_PARTKEY'), 'int'])
#
# joinTables = db.query().fromTable(customer).join(
# nation,
# rhsSchema=db.relationSchema('nation'),
# method='hash',
# lhsHashFn='hash(C_NATIONKEY) % 4', lhsKeySchema=cus_nationKey,
# rhsHashFn='hash(N_NATIONKEY) % 4', rhsKeySchema=nat_nationKey
# ).join(
# orders,
# rhsSchema=db.relationSchema('orders'),
# method='hash',
# lhsHashFn='hash(C_CUSTKEY) % 4', lhsKeySchema=cus_custKey,
# rhsHashFn='hash(O_CUSTKEY) % 4', rhsKeySchema=ord_custKey
# ).join(
# lineitem,
# rhsSchema=db.relationSchema('lineitem'),
# method='hash',
# lhsHashFn='hash(O_ORDERKEY) % 4', lhsKeySchema=ord_orderKey,
# rhsHashFn='hash(L_ORDERKEY) % 4', rhsKeySchema=line_orderKey
# ).join(
# part,
# rhsSchema=db.relationSchema('part'),
# method='hash',
# lhsHashFn='hash(L_PARTKEY) % 4', lhsKeySchema=line_partKey,
# rhsHashFn='hash(P_PARTKEY) % 4', rhsKeySchema=part_partKey
# )
#
#
# # first group by
# groupSchema = DBSchema('nation_part_name', [('N_NAME', 'char(55)'), ('P_NAME', 'char(55)')])
# aggSumSchema = DBSchema('nation_part_sum', [('num', 'int')])
#
# groupBy = joinTables.groupBy(
# groupSchema=groupSchema,
# aggSchema=aggSumSchema,
# groupExpr=(lambda e: (e.N_NAME, e.P_NAME)),
# aggExprs=[(0, lambda acc, e: acc + e.L_QUANTITY, lambda x: x)],
# groupHashFn=(lambda gbVal: hash(gbVal[0]) % 10)
# )
#
# # second group by
# groupSchema2 = DBSchema('nationMax', [('N_NAME', 'char(55)')])
# aggMaxSchema = DBSchema('aggMax', [('max', 'int')])
#
# query2 = groupBy.groupBy(
# groupSchema=groupSchema2,
# aggSchema=aggMaxSchema,
# groupExpr=(lambda e: e.N_NAME),
# aggExprs=[(0, lambda acc, e: max(acc, e.num), lambda x: x)],
# groupHashFn=(lambda gbVal: hash(gbVal[0]) % 10)
# )
# execute query
# start = time.time()
# result = getResult(db, query2)
# end = time.time()
# print("data:", len(result))
# print("Time: ", end - start)
| [
"[email protected]"
]
| |
3f7f38885107eef43aceac023faab24e60716559 | 9d6e747ed7204555199ece2033decff978295a09 | /Programmers/연습문제/나누어 떨어지는 숫자 배열.py | fb1afb8f9e293de68da3844ce7cbf9d97d70dcdf | []
| no_license | leejaeyeong/Algorithm | 5b47ed9aa241990945cbf2451afe7f084984ced5 | 72072d1e0c28e72075fc00db9239a4bd444b68b6 | refs/heads/master | 2021-08-08T10:57:07.345943 | 2021-07-11T15:01:59 | 2021-07-11T15:01:59 | 238,156,464 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | def solution(arr, divisor):
answer = []
for i in range(len(arr)) :
if arr[i] % divisor == 0 :
answer.append(arr[i])
answer.append(-1) if len(answer) == 0 else answer.sort()
return answer | [
"[email protected]"
]
| |
4a5bef3cc293b6ab8ce44bb01c17f1a769623747 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_170/ch118_2020_04_01_01_59_46_302026.py | 7174e745c9ee56ed8d0b829dcf7a56162f0013bf | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | import math
def reflexao_total_interna(n1, n2, a2):
a2 = a2 * (math.pi/180)
a1 = math.asin((math.sin(a2))*n2/n1)
if math.sin(a1) > 1:
return True
else:
return False | [
"[email protected]"
]
| |
db489fba758e2bb4794dc2d8786107c23b06f986 | af34b2b44fe1a797a11f27e508d0d2e3c8764027 | /face/admin.py | f54344d632c1c0208da3331d47993c48fb0fad15 | []
| no_license | CodingSta/askface-with-aws | 12afa8f7884847bd979a890f96510642018ffe63 | 39ea4ddfbd5360629e7b3a77014adf2d3fadcf6d | refs/heads/master | 2020-05-27T15:35:48.108675 | 2018-08-14T11:46:43 | 2018-08-14T11:46:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | from django.contrib import admin
from django.db.models import Count
from .models import Collection, Person, Face
from .forms import FaceForm
@admin.register(Collection)
class CollectionAdmin(admin.ModelAdmin):
list_display = ['pk', 'name', 'slug']
class FaceInline(admin.TabularInline):
model = Face
form = FaceForm
@admin.register(Person)
class PersonAdmin(admin.ModelAdmin):
list_display = ['pk', 'collection', 'name', 'face_count']
list_display_links = ['name']
inlines = [FaceInline]
actions = ['indexing']
def get_queryset(self, request):
return Person.objects.all().annotate(Count('face'))
def face_count(self, person):
return person.face__count
def indexing(self, request, queryset):
for face in Face.objects.filter(person__in=queryset):
face.indexing()
self.message_user(request, 'indexing 완료')
@admin.register(Face)
class FaceAdmin(admin.ModelAdmin):
list_display = ['person', 'photo', 'meta']
| [
"[email protected]"
]
| |
24a44d518db3c183a6059c55278073b59cd615df | deff2922412bd0376ef38a34a895990d106b7b66 | /goodsunit/serializers.py | eedf1db3efe76eb7e5f964b7a1f96578f613af56 | [
"Apache-2.0"
]
| permissive | dpd-pub/GreaterWMS | 56ba10750e8724bc7b39de077e4b4bc43340a94a | 9eabb1b9b0f5376dcccd89ed86dd76995955a8ec | refs/heads/master | 2023-02-27T18:05:29.384931 | 2021-02-10T02:19:33 | 2021-02-10T02:19:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,698 | py | from rest_framework import serializers
from .models import ListModel
from userprofile.models import Users
import re
from rest_framework.exceptions import APIException
def data_validate(data):
script_obj = re.findall(r'script', str(data), re.IGNORECASE)
select_obj = re.findall(r'select', str(data), re.IGNORECASE)
if script_obj:
raise APIException({'detail': 'Bad Data can‘not be store'})
elif select_obj:
raise APIException({'detail': 'Bad Data can‘not be store'})
else:
return data
def openid_validate(data):
if Users.objects.filter(openid=data).exists():
return data
else:
raise APIException({'detail': 'User does not exists'})
def appid_validate(data):
if Users.objects.filter(appid=data).exists():
return data
else:
raise APIException({'detail': 'User does not exists'})
class GoodsunitGetSerializer(serializers.ModelSerializer):
goods_unit = serializers.CharField(read_only=True, required=False)
creater = serializers.CharField(read_only=True, required=False)
create_time = serializers.DateTimeField(read_only=True, format='%Y-%m-%d %H:%M:%S')
update_time = serializers.DateTimeField(read_only=True, format='%Y-%m-%d %H:%M:%S')
class Meta:
model = ListModel
exclude = ['openid', 'is_delete', ]
read_only_fields = ['id', ]
class GoodsunitPostSerializer(serializers.ModelSerializer):
openid = serializers.CharField(read_only=False, required=False, validators=[openid_validate])
goods_unit = serializers.CharField(read_only=False, required=True, validators=[data_validate])
creater = serializers.CharField(read_only=False, required=True, validators=[data_validate])
class Meta:
model = ListModel
exclude = ['is_delete', ]
read_only_fields = ['id', 'create_time', 'update_time', ]
class GoodsunitUpdateSerializer(serializers.ModelSerializer):
goods_unit = serializers.CharField(read_only=False, required=True, validators=[data_validate])
creater = serializers.CharField(read_only=False, required=True, validators=[data_validate])
class Meta:
model = ListModel
exclude = ['openid', 'is_delete', ]
read_only_fields = ['id', 'create_time', 'update_time', ]
class GoodsunitPartialUpdateSerializer(serializers.ModelSerializer):
goods_unit = serializers.CharField(read_only=False, required=False, validators=[data_validate])
creater = serializers.CharField(read_only=False, required=False, validators=[data_validate])
class Meta:
model = ListModel
exclude = ['openid', 'is_delete', ]
read_only_fields = ['id', 'create_time', 'update_time', ]
| [
"[email protected]"
]
| |
8815ed33785797bc95470cbc7b0d27380f4511b6 | 1082cee55e32fa76859666aa011428bf979182ea | /pose/configs/top_down/vit/coco/vit_large_patch16_384_coco_256x192.py | f371af7b4ab66da7ec5182b8843fce3e2361f2d2 | [
"MIT",
"Apache-2.0"
]
| permissive | cvsch/HRFormer | f7a96d8620f87986cf10c74fe4f47d5b7106d732 | 9e6ce958ba502354dff748846d6d98f682f5f9d1 | refs/heads/main | 2023-08-20T21:29:51.448485 | 2021-10-19T01:20:02 | 2021-10-19T01:20:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,684 | py | log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=5, create_symlink=False)
evaluation = dict(interval=10, metric='mAP', key_indicator='AP')
optimizer = dict(
type='AdamW',
lr=5e-4,
betas=(0.9, 0.999),
weight_decay=0.01,
)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[170, 200])
total_epochs = 210
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
])
channel_cfg = dict(
num_output_channels=17,
dataset_joints=17,
dataset_channel=[
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
],
inference_channel=[
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
])
# model settings
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
type='TopDown',
pretrained='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p16_384-b3be5167.pth',
backbone=dict(
type='VisionTransformer',
model_name='vit_large_patch16_384',
img_size= [256, 192],
patch_size=16,
in_chans=3,
embed_dim=1024,
depth=24,
num_heads=16,
num_classes=19,
drop_rate=0,
drop_path_rate=0.1,
norm_cfg=norm_cfg,
pos_embed_interp=True,
align_corners=False,
),
keypoint_head=dict(
type='TopDownSimpleHead',
in_channels=1024,
out_channels=channel_cfg['num_output_channels'],
num_deconv_layers=2,
num_deconv_filters=(256, 256),
num_deconv_kernels=(4, 4),
norm_cfg=norm_cfg,
loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
train_cfg=dict(),
test_cfg=dict(
flip_test=True,
post_process='default',
shift_heatmap=True,
modulate_kernel=11))
data_root = "/path/to/dataset/coco" # Set the data path here
data_cfg = dict(
image_size=[192, 256],
heatmap_size=[48, 64],
num_output_channels=channel_cfg['num_output_channels'],
num_joints=channel_cfg['dataset_joints'],
dataset_channel=channel_cfg['dataset_channel'],
inference_channel=channel_cfg['inference_channel'],
soft_nms=False,
nms_thr=1.0,
oks_thr=0.9,
vis_thr=0.2,
use_gt_bbox=False,
det_bbox_thr=0.0,
bbox_file=f'{data_root}/dataset/coco/person_detection_results/'
'COCO_val2017_detections_AP_H_56_person.json',
)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='TopDownRandomFlip', flip_prob=0.5),
dict(
type='TopDownHalfBodyTransform',
num_joints_half_body=8,
prob_half_body=0.3),
dict(
type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
dict(type='TopDownAffine'),
dict(type='ToTensor'),
dict(
type='NormalizeTensor',
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]),
dict(type='TopDownGenerateTarget', sigma=2),
dict(
type='Collect',
keys=['img', 'target', 'target_weight'],
meta_keys=[
'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
'rotation', 'bbox_score', 'flip_pairs'
]),
]
val_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='TopDownAffine'),
dict(type='ToTensor'),
dict(
type='NormalizeTensor',
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]),
dict(
type='Collect',
keys=['img'],
meta_keys=[
'image_file', 'center', 'scale', 'rotation', 'bbox_score',
'flip_pairs'
]),
]
test_pipeline = val_pipeline
data = dict(
samples_per_gpu=32,
workers_per_gpu=2,
val_dataloader=dict(samples_per_gpu=256),
test_dataloader=dict(samples_per_gpu=256),
train=dict(
type='TopDownCocoDataset',
ann_file=f'{data_root}/annotations/person_keypoints_train2017.json',
img_prefix=f'{data_root}/train2017/',
data_cfg=data_cfg,
pipeline=train_pipeline),
val=dict(
type='TopDownCocoDataset',
ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
img_prefix=f'{data_root}/val2017/',
data_cfg=data_cfg,
pipeline=val_pipeline),
test=dict(
type='TopDownCocoDataset',
ann_file=f'{data_root}/annotations/person_keypoints_val2017.json',
img_prefix=f'{data_root}/val2017/',
data_cfg=data_cfg,
pipeline=val_pipeline),
)
| [
"[email protected]"
]
| |
d8964fa3e67459f90ecc2926d498618a15de084e | 952dc66c61966f099756cdb6c2d13b40352f63cc | /zerver/migrations/0436_realmauthenticationmethods.py | 0716113e5428c03ea3a99236b30a568e8c822435 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
]
| permissive | zulip/zulip | 5ae6aad35fd9f72996c0a2a9cdd674400966ebf6 | 965a25d91b6ee2db54038f5df855215fa25146b0 | refs/heads/main | 2023-08-28T23:43:00.971110 | 2023-08-28T16:47:09 | 2023-08-28T19:33:02 | 43,160,685 | 20,239 | 8,996 | Apache-2.0 | 2023-09-14T20:57:47 | 2015-09-25T16:37:25 | Python | UTF-8 | Python | false | false | 1,780 | py | # Generated by Django 4.2 on 2023-04-13 23:45
import django.db.models.deletion
from django.db import migrations, models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fill_RealmAuthenticationMethod_data(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Realm = apps.get_model("zerver", "Realm")
RealmAuthenticationMethod = apps.get_model("zerver", "RealmAuthenticationMethod")
rows_to_create = []
for realm in Realm.objects.order_by("id"):
for key, value in realm.authentication_methods.iteritems():
if value:
rows_to_create.append(RealmAuthenticationMethod(name=key, realm_id=realm.id))
RealmAuthenticationMethod.objects.bulk_create(rows_to_create, batch_size=10000)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("zerver", "0435_scheduledmessage_rendered_content"),
]
operations = [
migrations.CreateModel(
name="RealmAuthenticationMethod",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.CharField(max_length=80)),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
),
),
],
options={
"unique_together": {("realm", "name")},
},
),
migrations.RunPython(fill_RealmAuthenticationMethod_data),
]
| [
"[email protected]"
]
| |
2fb88aab69ecd33a8ec7b8ed31e521803c847414 | fd2de23a704ec408f47c9f2263b604cbd204c0a3 | /MacrosAndScripts/myPlotStyle.py | f4c76ee6871fbaffc287d874bd84f58c5c4415d7 | []
| no_license | gparida/monoHiggs_postAnalyzer | 1a71c3eaa1cb11ce40923eb831077709987bd866 | 00fb3e37c5fa6bdd75e7426c3a7bf49534c3eec4 | refs/heads/master | 2023-01-19T15:19:09.487955 | 2020-12-03T10:15:11 | 2020-12-03T10:15:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,060 | py | import ROOT
def add_lumi(year, channel_):
lowX=0.40
lowY=0.825
lumi = ROOT.TPaveText(lowX, lowY+0.06, lowX+0.50, lowY+0.16, "NDC")
lumi.SetBorderSize( 0 )
lumi.SetFillStyle( 0 )
lumi.SetTextAlign( 32 )#12
lumi.SetTextColor( 1 )
lumi.SetTextSize(0.05)
lumi.SetTextFont ( 42 )
lumiProcessed="41.52"
if year=="2018":
lumiProcessed="59.7"
if channel_=="combined":
lumi.AddText("4 channels combined "+year+" , "+lumiProcessed+" fb^{-1} (13 TeV)")
if channel_=="mutau":
lumi.AddText("#mu#tau_{h} "+year+" , "+lumiProcessed+" fb^{-1} (13 TeV)")
if channel_=="etau":
lumi.AddText("e#tau_{h} "+year+" , "+lumiProcessed+" fb^{-1} (13 TeV)")
if channel_=="tautau":
lumi.AddText("#tau_{h}#tau_{h} "+year+" , "+lumiProcessed+" fb^{-1} (13 TeV)")
if channel_=="emu":
lumi.AddText("e#mu "+year+" , "+lumiProcessed+" fb^{-1} (13 TeV)")
return lumi
def add_CMS():
lowX=0.65 #0.21
lowY=0.68
lumi = ROOT.TPaveText(lowX, lowY+0.06, lowX+0.15, lowY+0.16, "NDC")
lumi.SetTextFont(61)
lumi.SetTextSize(0.08)
lumi.SetBorderSize( 0 )
lumi.SetFillStyle( 0 )
lumi.SetTextAlign( 31 )#12
lumi.SetTextColor( 1 )
lumi.AddText("CMS")
return lumi
def add_Preliminary():
lowX=0.65 # 0.21
lowY=0.63
lumi = ROOT.TPaveText(lowX, lowY+0.06, lowX+0.15, lowY+0.16, "NDC")
lumi.SetTextFont(61)
lumi.SetTextFont ( 40 )
lumi.SetTextSize(0.06)
lumi.SetBorderSize( 0 )
lumi.SetFillStyle( 0 )
lumi.SetTextAlign( 31 )#12
lumi.SetTextColor( 1 )
lumi.AddText("Preliminary")
return lumi
def make_legend():
output = ROOT.TLegend(0.85, 0.45, 1.0, 0.75, "", "brNDC")
#output = ROOT.TLegend(0.2, 0.1, 0.47, 0.65, "", "brNDC")
output.SetLineWidth(1)
output.SetLineStyle(1)
output.SetFillStyle(1001) #0
output.SetFillColor(0)
output.SetBorderSize(1)
output.SetTextFont(42)
return output
# declare colors
color_ztt="#ffcc66"
color_zll="#4496c8"
color_tt="#9999cc"
color_ggh="#12cadd"
color_vv="#990099"
color_wjets="#cc6666"
color_jetfake="#f1cde1"
errorStyle=3002
c=ROOT.TCanvas("canvas","",0,0,1300,1200)
pad1 = ROOT.TPad("pad1","pad1",0,0.25,1,1)
pad1.SetFillColor(0)
pad1.SetBorderMode(0)
pad1.SetBorderSize(1)
pad1.SetTickx(1)
pad1.SetTicky(1)
pad1.SetGridx()
pad1.SetLeftMargin(0.15) #0.15
pad1.SetRightMargin(0.15) #0.1
pad1.SetTopMargin(0.122)
pad1.SetBottomMargin(0.025)
pad1.SetFrameFillStyle(0)
pad1.SetFrameLineStyle(0)
pad1.SetFrameLineWidth(1)
pad1.SetFrameBorderMode(0)
pad1.SetFrameBorderSize(1)
categ = ROOT.TPaveText(0.21, 0.5+0.013, 0.43, 0.70+0.155, "NDC")
categ.SetBorderSize( 0 )
categ.SetFillStyle( 0 )
categ.SetTextAlign( 12 )
categ.SetTextSize ( 0.06 )
categ.SetTextColor( 1 )
categ.SetTextFont ( 42 )
pad2 = ROOT.TPad("pad2","pad2",0,0,1,0.25);
pad2.SetTopMargin(0.02);
pad2.SetBottomMargin(0.35);
pad2.SetLeftMargin(0.15);
pad2.SetRightMargin(0.15);
pad2.SetTickx(1)
pad2.SetTicky(1)
pad2.SetFrameLineWidth(1)
#pad2.SetGridx()
pad2.SetGridy()
#pad2.SetLogy()
| [
"[email protected]"
]
| |
35cab47b7a040620f054aad6aea3d3b78de71bf5 | 5182897b2f107f4fd919af59c6762d66c9be5f1d | /.history/src/Simulador_20200711154404.py | 0360ee4255d80ad8dc7ecb929e3108991c9b8d10 | [
"MIT"
]
| permissive | eduardodut/Trabalho_final_estatistica_cd | 422b7e702f96291f522bcc68d2e961d80d328c14 | fbedbbea6bdd7a79e1d62030cde0fab4e93fc338 | refs/heads/master | 2022-11-23T03:14:05.493054 | 2020-07-16T23:49:26 | 2020-07-16T23:49:26 | 277,867,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,281 | py | import pandas as pd
import numpy as np
from Matriz_esferica import Matriz_esferica
from Individuo import Individuo, Fabrica_individuo
import random
from itertools import permutations
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from scipy.sparse import csr_matrix, lil_matrix
class Simulador():
SADIO = 0
INFECTADO_TIPO_1 = 1 #assintomáticos e o infectado inicial
INFECTADO_TIPO_2 = 2 #sintomático
CURADO = 3
MORTO = 4
def __init__(
self,
tamanho_matriz, #numero de linhas e colunas da matriz esférica
percentual_inicial_tipo1, #percentual inicial da população que será infectada tipo 1
percentual_inicial_tipo2, #percentual inicial da população que será infectada tipo 2
chance_infeccao, #chance que um infectado tipo 2 tem de infectar um indivíduo saudável
chance_infeccao_tipo2, #chance de um indivíduo infectado se tornar contagioso
chance_morte, #chance de um indivíduo tipo 2 morrer ao fim de uma atualização
atualizacoes_cura): #número de atualizações necessárias para a cura de um indivíduo tipo 1 ou 2
self.num_atualizacoes = 0
self.lista_infectados_tipo_2 = []
self.lista_infectados_tipo_1 = []
self.num_curados = 0
self.num_mortos = 0
self.chance_infeccao = chance_infeccao
self.chance_infeccao_tipo2 = chance_infeccao_tipo2
self.chance_morte = chance_morte
self.atualizacoes_cura = atualizacoes_cura
self.populacao_inicial = int(tamanho_matriz**2)
self.num_inicial_tipo2 = int(self.populacao_inicial * percentual_inicial_tipo2)
self.num_inicial_tipo1 = 1 + int(self.populacao_inicial * percentual_inicial_tipo1)
self.num_inicial_sadios = self.populacao_inicial - (self.num_inicial_tipo2 + self.num_inicial_tipo1)
self.matriz_status = lil_matrix((tamanho_matriz, tamanho_matriz),dtype= np.uint8)
self.matriz_atualizacoes_cura = lil_matrix((tamanho_matriz, tamanho_matriz),dtype= np.uint8)
#self.matriz_status = self.df_individuos.to_numpy()
self.popular(tamanho_matriz)
self.lista_matrizes_status = []
#objeto que é responsável por validar a movimentação no grid n x n
self.matriz_esferica = Matriz_esferica(tamanho_matriz)
dict = {
'num_sadios':self.num_inicial_sadios,
'num_infect_t1':self.num_inicial_tipo1,
'num_infect_t2':self.num_inicial_tipo2,
'num_curados':0,
'num_mortos':0}
#dataframe que guardará os resultados de cada atualização
self.dataframe = pd.DataFrame(dict,index = [0])
self.salvar_posicionamento()
def criar_individuo(self, status, posicao):
print("Status inicial: ", status)
self.matriz_status[posicao[0], posicao[1]] = status
if status == self.INFECTADO_TIPO_1 or status == self.INFECTADO_TIPO_2:
self.matriz_atualizacoes_cura[posicao[0], posicao[1]] = self.atualizacoes_cura
def salvar_posicionamento(self):
self.lista_matrizes_status.append(self.matriz_status)
def verificar_infeccao(self, lista_infectantes):
lista_novos_infectados_tipo1 = []
lista_novos_infectados_tipo2 = []
#itera sobre sobre a lista de individuos que infectam e cada um realiza a tividade de infectar
for indice_infectante in lista_infectantes:
#busca os vizinhos do infectante atual
lista_vizinhos = self.matriz_esferica.get_vizinhos(indice_infectante)
#Para cada vizinho, se ele for sadio, é gerado um número aleatório para verificar se foi infectado
for indice_vizinho in lista_vizinhos:
#verificação de SADIO
if self.verifica_status(indice_vizinho) == self.SADIO:
#verificação do novo status
novo_status = self.infectar(chance_infeccao, chance_infeccao_tipo2)
#se for um infectado tipo 1
if novo_status == Individuo.INFECTADO_TIPO_1:
#adiciona na lista de novos tipo 1
lista_novos_infectados_tipo1.append(indice_vizinho)
self.criar_individuo(Individuo.INFECTADO_TIPO_1,indice_vizinho)
if novo_status == Individuo.INFECTADO_TIPO_2:
#adiciona na lista de novos tipo 1
lista_novos_infectados_tipo2.append(indice_vizinho)
self.criar_individuo(Individuo.INFECTADO_TIPO_2,indice_vizinho)
return lista_novos_infectados_tipo1, lista_novos_infectados_tipo2
def checagem_morte_individual(self, chance_morte, indice):
rng_morte = random.random()
if rng_morte <= chance_morte:
self.matriz_status[indice[0], indice[1]] = self.MORTO
return self.MORTO
else:
return self.checar_cura_individual(indice)
def checar_cura_individual(self, indice):
print("passei na cura")
num_atualizacoes_restantes = self.matriz_atualizacoes_cura[indice[0], indice[1]]
print(num_atualizacoes_restantes - 1)
self.matriz_atualizacoes_cura[indice[0], indice[1]] = num_atualizacoes_restantes - 1
if self.matriz_atualizacoes_cura[indice[0], indice[1]] == 0:
self.matriz_status[indice[0], indice[1]] = self.CURADO
return self.CURADO
else:
return self.matriz_status[indice[0], indice[1]]
def checagem_morte_cura_lista(self, lista_infectantes_tipo2):
lista_curados = []
lista_mortos = []
for indice_infectante in lista_infectantes_tipo2:
novo_status = self.checagem_morte_individual(self.chance_morte, indice_infectante)
if novo_status == Individuo.MORTO:
lista_mortos.append(indice_infectante)
if novo_status == Individuo.CURADO:
lista_curados.append(indice_infectante)
return lista_mortos, lista_curados
def checagem_cura_lista(self, lista_infectantes):
lista_curados = []
for indice_infectante in lista_infectantes:
novo_status = self.checar_cura_individual(indice_infectante)
if novo_status == Individuo.CURADO:
lista_curados.append(indice_infectante)
return lista_curados
def iterar(self):
#Verifica os novos infectados por infectantes do tipo 1 e 2
lista_novos_infectados_tipo1, lista_novos_infectados_tipo2 = self.verificar_infeccao(self.lista_infectados_tipo_1+self.lista_infectados_tipo_2)
#Verifica morte/cura dos infectados tipo 2
lista_mortos, lista_curados_t2 = self.checagem_morte_cura_lista(self.lista_infectados_tipo_2)
#Verifica cura dos infectados tipo 1
lista_curados_t1 = self.checagem_cura_lista(self.lista_infectados_tipo_1)
#remove os mortos e curados das listas de infectantes tipo 1 e 2
nova_lista_infectados_t2 = []
for indice in self.lista_infectados_tipo_2:
if indice not in lista_mortos and indice not in lista_curados_t2:
nova_lista_infectados_t2.append(indice)
self.lista_infectados_tipo_2 = nova_lista_infectados_t2
nova_lista_infectados_t1 = []
for indice in self.lista_infectados_tipo_1:
if indice not in lista_curados_t1:
nova_lista_infectados_t1.append(indice)
self.lista_infectados_tipo_1 = nova_lista_infectados_t1
#atualiza o número de mortos
self.num_mortos = self.num_mortos + len(lista_mortos)
#atualiza o número de curados
self.num_curados = self.num_curados + len(lista_curados_t1) + len(lista_curados_t2)
#movimentar infectantes:
for indice in self.lista_infectados_tipo_1:
self.mover_infectante(indice)
for indice in self.lista_infectados_tipo_2:
self.mover_infectante(indice)
#adicionar os novos infectados tipo 1 e 2 para as respectivas listas
self.lista_infectados_tipo_2 = self.lista_infectados_tipo_2 + lista_novos_infectados_tipo2
self.lista_infectados_tipo_1 = self.lista_infectados_tipo_1 + lista_novos_infectados_tipo1
dict = {
'num_sadios':self.populacao_inicial - self.num_mortos - self.num_curados - len(self.lista_infectados_tipo_1) - len(self.lista_infectados_tipo_2) ,
'num_infect_t1':len(self.lista_infectados_tipo_1),
'num_infect_t2':len(self.lista_infectados_tipo_2),
'num_curados':self.num_curados,
'num_mortos':self.num_mortos}
self.dataframe = self.dataframe.append(dict, ignore_index=True)
# print("num t1: ", len(self.lista_infectados_tipo_1))
# print("num t2: ", len(self.lista_infectados_tipo_2))
# print("num curados: ", self.num_curados)
# print("num mortos: ", self.num_mortos)
# print("---------")
# #salva a nova matriz de status
self.salvar_posicionamento()
#adiciona 1 ao número de atualizações realizadas na matriz
self.num_atualizacoes +=1
def infectar(self, chance_infeccao, chance_infeccao_tipo2):
saida = Individuo.SADIO
#número aleatório para chance de infectar o vizinho
rng_infeccao = random.random()
if rng_infeccao <= chance_infeccao:
#número aleatório para chance de infecção tipo 1 ou 2
rng_infeccao_tipo2 = random.random()
if rng_infeccao_tipo2 <= chance_infeccao_tipo2:
saida = Individuo.INFECTADO_TIPO_2
else:
saida = Individuo.INFECTADO_TIPO_1
return saida
def popular(self, tamanho_matriz):
#lista de possíveis combinações de índices da matriz de dados
permutacoes = permutations(list(range(tamanho_matriz)),2)
#conversão para lista de tuplas(x,y)
lista_indices = list(permutacoes)
#embaralhamento dos índices
random.shuffle(lista_indices)
#cria o primeiro tipo1:
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_1, indice)
self.lista_infectados_tipo_1.append(indice)
#cria o restante dos tipos 1
for i in range(1,self.num_inicial_tipo1):
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_1,indice)
self.lista_infectados_tipo_1.append(indice)
#cria o restante dos tipo 2:
for indice in range(self.num_inicial_tipo2):
indice = lista_indices.pop()
self.criar_individuo(Individuo.INFECTADO_TIPO_2,indice)
self.lista_infectados_tipo_2.append(indice)
def trocar(self,matriz,ponto_ini,ponto_final):
x_ini = ponto_ini[0]
y_ini = ponto_ini[1]
x_fin = ponto_final[0]
y_fin = ponto_final[1]
aux = matriz[x_fin,y_fin]
matriz[x_fin,y_fin] = matriz[x_ini,y_ini]
matriz[x_ini,y_ini] = aux
def verifica_status(self, indice):
return self.matriz_status[indice[0], indice[1]]
def mover_infectante(self, posicao_inicial):
pos_x, pos_y = posicao_inicial[0], posicao_inicial[1]
rng_posicao = random.random()
if rng_posicao <=0.25:
#move pra cima
pos_x -= 1
elif rng_posicao <=0.5:
#move pra baixo
pos_x += 1
elif rng_posicao <=0.75:
#move para esquerda
pos_y -= 1
else:
#move para direita
pos_y += 1
posicao_final= self.matriz_esferica.valida_ponto_matriz(pos_x, pos_y)
self.trocar(self.matriz_status, posicao_inicial, posicao_final)
self.trocar(self.matriz_atualizacoes_cura, posicao_inicial, posicao_final)
chance_infeccao = 1
chance_infeccao_tipo2 = 0.2
chance_morte = 0.1
atualizacoes_cura = 10
percentual_inicial_tipo1 = 0.0
percentual_inicial_tipo2 = 0.0
sim = Simulador(
6,
percentual_inicial_tipo1,
percentual_inicial_tipo2,
chance_infeccao,
chance_infeccao_tipo2,
chance_morte,atualizacoes_cura)
#print(sim.lista_matrizes_posicionamento[0])
#print(sim.lista_infectados_tipo_2)
#print(sim.lista_infectados_tipo_1)
cmap = ListedColormap(['w', 'y', 'r', 'blue', 'black'])
while sim.dataframe.iloc[-1]['num_infect_t1']+sim.dataframe.iloc[-1]['num_infect_t2'] > 0:
sim.iterar()
print(sim.dataframe.iloc[-1])
#print("xxxxxxxxxxxxxxxxxTipo: ",type(sim.lista_matrizes_posicionamento[len(sim.lista_matrizes_posicionamento)-1].toarray()))
#plt.matshow(sim.lista_matrizes_status[len(sim.lista_matrizes_status)-1].toarray(), cmap = cmap, vmin= 0, vmax = 4)
#plt.show()
# for i in range(12):
# #plt.matshow(sim.lista_matrizes_status[i].toarray(), cmap = cmap, vmin= 0, vmax = 4)
# print(i)
# print("Status")
# print(sim.matriz_status.toarray())
# print("Cura")
# print(sim.matriz_atualizacoes_cura.toarray())
# sim.iterar()
# m = sim.matriz_atualizacoes_cura[sim.matriz_status == 1 or sim.matriz_status == 2].toarray()
# print(m)
#plt.show()
#print(sim.dataframe)
# print(sim.lista_infectados_tipo_1)
# print(sim.lista_infectados_tipo_2)
# sim.iterar()
# print(sim.lista_infectados_tipo_1)
# print(sim.lista_infectados_tipo_2)
# print(sim.dataframe)
# print("status inicial: ", sim.df_individuos[1][0].status)
# print("Novos infectados: ", sim.verificar_infeccao(sim.lista_infectados_tipo_1))
# plt.show()
| [
"[email protected]"
]
| |
8faab94f20ece7853c36eb7e313df20ec3fb9a12 | eacff46eda2c6b509449979a16002b96d4645d8e | /Collections-a-installer/community-general-2.4.0/tests/unit/mock/loader.py | 907ec9b928942d36e4ffde7f76faa7b0ca28cd86 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later"
]
| permissive | d-amien-b/simple-getwordpress | 5e6d4d15d5f87124ab591e46b63fec552998fdc3 | da90d515a0aa837b633d50db4d91d22b031c04a2 | refs/heads/master | 2023-04-08T22:13:37.347545 | 2021-04-06T09:25:51 | 2021-04-06T09:25:51 | 351,698,069 | 0 | 0 | MIT | 2021-03-31T16:16:45 | 2021-03-26T07:30:00 | HTML | UTF-8 | Python | false | false | 3,271 | py | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleParserError
from ansible.parsing.dataloader import DataLoader
from ansible.module_utils._text import to_bytes, to_text
class DictDataLoader(DataLoader):
def __init__(self, file_mapping=None):
file_mapping = {} if file_mapping is None else file_mapping
assert type(file_mapping) == dict
super(DictDataLoader, self).__init__()
self._file_mapping = file_mapping
self._build_known_directories()
self._vault_secrets = None
def load_from_file(self, path, cache=True, unsafe=False):
path = to_text(path)
if path in self._file_mapping:
return self.load(self._file_mapping[path], path)
return None
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
# unicode/text it's created with to utf-8
def _get_file_contents(self, path):
path = to_text(path)
if path in self._file_mapping:
return (to_bytes(self._file_mapping[path]), False)
else:
raise AnsibleParserError("file not found: %s" % path)
def path_exists(self, path):
path = to_text(path)
return path in self._file_mapping or path in self._known_directories
def is_file(self, path):
path = to_text(path)
return path in self._file_mapping
def is_directory(self, path):
path = to_text(path)
return path in self._known_directories
def list_directory(self, path):
ret = []
path = to_text(path)
for x in (list(self._file_mapping.keys()) + self._known_directories):
if x.startswith(path):
if os.path.dirname(x) == path:
ret.append(os.path.basename(x))
return ret
def is_executable(self, path):
# FIXME: figure out a way to make paths return true for this
return False
def _add_known_directory(self, directory):
if directory not in self._known_directories:
self._known_directories.append(directory)
def _build_known_directories(self):
self._known_directories = []
for path in self._file_mapping:
dirname = os.path.dirname(path)
while dirname not in ('/', ''):
self._add_known_directory(dirname)
dirname = os.path.dirname(dirname)
def push(self, path, content):
rebuild_dirs = False
if path not in self._file_mapping:
rebuild_dirs = True
self._file_mapping[path] = content
if rebuild_dirs:
self._build_known_directories()
def pop(self, path):
if path in self._file_mapping:
del self._file_mapping[path]
self._build_known_directories()
def clear(self):
self._file_mapping = dict()
self._known_directories = []
def get_basedir(self):
return os.getcwd()
def set_vault_secrets(self, vault_secrets):
self._vault_secrets = vault_secrets
| [
"[email protected]"
]
| |
eea5106a6a9289aa568acdbe862f369afb82dced | ec53949dafa4b6ad675d679b05ed7c83fef2c69a | /DataStructuresAndAlgo/Tuples/methodsTuple.py | 54834f82a3fc7c4bb83d3270f4e2881055b2118c | []
| no_license | tpotjj/Python | 9a5a20a53cd7a6ec14386c1db8ce155e0fc9ab8a | ca73c116ada4d05c0c565508163557744c86fc76 | refs/heads/master | 2023-07-11T16:37:10.039522 | 2021-08-14T11:17:55 | 2021-08-14T11:17:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | myTuple = (1, 2, 3, 4, 5, 2, 2)
secondTuple = (1, 2, 6, 9, 8, 7)
print(myTuple + secondTuple)
print(myTuple * 2)
print(1 in myTuple)
print(myTuple.count(2))
print(len(myTuple))
print(max(myTuple))
print(min(myTuple))
x = [1, 2, 3, 4]
y = tuple(x)
print(type(y)) | [
"[email protected]"
]
| |
518099f4cfceb4cf5f9f1df05eed811c28c2a387 | 8d293da5176e7734392465599a9b43b15e6a54af | /starwars/parts_scrap.py | 045bb01d1fd7e77bee0e72ee7b75fcd112953e3d | [
"MIT"
]
| permissive | whaleygeek/mb_deathstar | 04b5c4e331e4f6f7330d5840e47197e7ede6cbff | f756b8b5b45927039c547d0f96f8e31a365b383b | refs/heads/master | 2021-01-11T20:09:37.134425 | 2017-01-22T19:32:42 | 2017-01-22T19:32:42 | 79,052,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,629 | py | #Minecraft Star Wars
#Martin O'Hanlon
#www.stuffaboutcode.com
from deathstar import DeathStar
from starwarscraft import TieFighter, MilleniumFalcon, XWingFighter, XWingFighterDiagonal
from planet import Planet
from trench import Trench
from projectile import XWingMissile
from mcpi.minecraft import Minecraft
from mcpi.minecraft import Vec3
from mcpi import block
from time import sleep
#Main program
#create connection to minecraft
mc = Minecraft.create()
pos = Vec3(0,30,0)
mc.player.setTilePos(pos.x + 25 ,pos.y + 20, pos.z + 25)
#create Alderaan
alderaanPos = pos.clone()
alderaanPos.x += 50
alderaanPos.z += 50
alderaan = Planet(alderaanPos, 10, block.WOOL.id, 3)
#create DeathStar
sleep(15)
deathstarPos = pos.clone()
deathstar = DeathStar(deathstarPos, 15)
sleep(12)
mc.postToChat("Not Alderaan, we are peaceful, we have no weapons.")
#blow up Alderaan
sleep(3)
deathstar.fire(alderaanPos.x, alderaanPos.y, alderaanPos.z, 0.5, 1)
alderaan.destroy(2)
#millenium falcon arrives
sleep(10)
falconPos = pos.clone()
falconPos.z -= 50
falcon = MilleniumFalcon(falconPos)
mc.postToChat("Thats no moon, its a space station")
falcon.fly(pos.x, pos.y, pos.z, 0.5)
#millenium falcon is chased from the death star by tie fighters
sleep(10)
falconFly = falcon.fly(pos.x, pos.y, pos.z + 50, 0.3, True)
mc.postToChat("Sure hope the old man got that tractor beam out of commission, or this is gonna be a real short trip")
#tie fighters take chase
sleep(5)
tie1Pos = pos.clone()
tie1Pos.x -= 5
tie2Pos = pos.clone()
tie2Pos.x += 5
tie1 = TieFighter(tie1Pos)
tie2 = TieFighter(tie2Pos)
tie1Fly = tie1.fly(tie1Pos.x, tie1Pos.y, tie1Pos.z + 50, 0.25, True)
tie2Fly = tie2.fly(tie2Pos.x, tie2Pos.y, tie2Pos.z + 50, 0.25, True)
#wait for falcon and tie fighters to stop
falconFly.join()
falcon.clear()
tie1Fly.join()
tie2Fly.join()
tie1.clear()
tie2.clear()
mc.postToChat("They let us go. It was the only reason for the ease of our escape.")
#create Yavin 4
sleep(10)
yavinPos = pos.clone()
yavinPos.x -= 60
yavinPos.z += 60
yavin = Planet(yavinPos, 10, block.WOOL.id, 13)
#x wing fighter attacks
sleep(5)
xWing1Pos = pos.clone()
xWing1Pos.x -= 50
xWing1Pos.z += 50
xWing1 = XWingFighterDiagonal(xWing1Pos)
xWing1.fly(pos.x - 10, pos.y, pos.z + 10, 0.25)
xWing1.clear()
#fly x wing down the treach
trenchPos = Vec3(50,40,-50)
trench = Trench(trenchPos, 14, 8, 100)
#put the player in the trench
mc.player.setTilePos(57, 41, 49)
sleep(2)
xWing2Pos = Vec3(57, 44, 45)
xWing2 = XWingFighter(xWing2Pos)
xWing2Fly = xWing2.fly(57, 44, -25, 0.25, True)
#fire the missile at the exhaust port
sleep(12)
mc.postToChat("Use the force Luke")
sleep(1)
missile = XWingMissile()
missileFire = missile.fire(xWing2.position.x, xWing2.position.y, xWing2.position.z - 3,
trench.exhaustPortPos.x, trench.exhaustPortPos.y, trench.exhaustPortPos.z,
0.1, True)
#wait for the missile and the xwing to stop
missileFire.join()
xWing2Fly.join()
xWing2.clear()
trench.clear()
#move player back above deathstar
mc.player.setTilePos(pos.x, pos.y + 20, pos.z)
#xwing escapes from deathstar
xWing3Pos = pos.clone()
xWing3Pos.z -= 10
xWing3 = XWingFighter(xWing1Pos)
xWing3Fly = xWing3.fly(pos.x, pos.y, pos.z - 50, 0.25, True)
sleep(3)
#destroy the deathstar
deathstar.destroy()
sleep(5)
mc.postToChat("duh der der duh, der, duh der der duh, der, der der der der der")
sleep(10)
#finish by clearing xwing and yavin
xWing3Fly.join()
xWing3.clear()
yavin.clear()
| [
"[email protected]"
]
| |
72412cdf98c14f9a80622c60fb2992562913cb86 | 6177f542a4db03e9fc29d1f535ca7c3d2b35a751 | /concert_master/scripts/concert_info | 4ba0973d65a446ee60396143d0f160641636f7c7 | []
| no_license | robotics-in-concert/rocon_concert | ac5b97d0048f72712bbd5bc0369b6a7dcb1361a7 | f940eaa3fd612317f2043d6edba10cd309384134 | refs/heads/devel | 2020-04-03T21:28:19.846013 | 2017-01-25T16:22:18 | 2017-01-25T16:22:18 | 3,861,812 | 5 | 5 | null | 2017-01-09T08:27:43 | 2012-03-29T02:26:38 | Python | UTF-8 | Python | false | false | 1,312 | #!/usr/bin/env python
#
# License: BSD
# https://raw.github.com/robotics-in-concert/rocon_concert/license/LICENSE
#
# This is just a copy of rocon_master_info
#
##############################################################################
# Imports
##############################################################################
import argparse
import sys
import rospy
import rocon_master_info
##############################################################################
# Functions
##############################################################################
def parse_arguments():
parser = argparse.ArgumentParser(description='View concert information details.\n\nThis command will defer \
to the rqt plugin if available, otherwise it will simply print details to the console.')
parser.add_argument('-c', '--console', action='store_true', help='force output to the console only')
myargs = rospy.myargv(argv=sys.argv)
return parser.parse_args(args=myargs[1:])
##############################################################################
# Main
##############################################################################
if __name__ == '__main__':
args = parse_arguments()
rocon_master_info.main('concert_info', 'Concert Information', console=args.console)
| [
"[email protected]"
]
| ||
da6607ef6fb14226e045c90c0a639ef531fdeae1 | 14453c13d552165cabe72a310f44f7c58eaacad0 | /driver/examples/zarr_to_nc.py | 9a9366747a2bbfe10e72f26266170ea288bff912 | [
"Apache-2.0"
]
| permissive | ai2cm/pace | 76a98ffae3baa92bd3b2ddc422b50dfa50255642 | c543e8ec478d46d88b48cdd3beaaa1717a95b935 | refs/heads/main | 2023-07-06T07:18:11.558315 | 2022-12-22T21:45:34 | 2022-12-22T21:45:34 | 392,106,887 | 27 | 13 | Apache-2.0 | 2023-07-03T13:47:46 | 2021-08-02T22:05:11 | Python | UTF-8 | Python | false | false | 472 | py | import argparse
import xarray as xr
import zarr
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Converts zarr directory stores to netcdf"
)
parser.add_argument("zarr_in", type=str, help="path of zarr to convert")
parser.add_argument("netcdf_out", type=str, help="output netcdf")
args = parser.parse_args()
ds: xr.Dataset = xr.open_zarr(store=zarr.DirectoryStore(args.zarr_in))
ds.to_netcdf(args.netcdf_out)
| [
"[email protected]"
]
| |
50545660461d4734744ed1fb38ee3b7dbb01c0dc | b2dd1a41354907b2a010b33aee11dc5955e9eabd | /tests/test_widgets.py | e1696200ebbbfcbc2f1cdfbb2250b422a929572b | []
| no_license | jeetu7/tw2.jqplugins.jqgrid | 34359af04f1de20a93f82a838308c85cadf80469 | 86d694bf7ea6276b20006d644dd9b4a96b505991 | refs/heads/master | 2020-12-25T04:29:02.775745 | 2012-07-07T14:14:09 | 2012-07-07T14:14:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,967 | py | from webob import Request
from webob.multidict import NestedMultiDict
from tw2.core.testbase import assert_in_xml, assert_eq_xml, WidgetTest
from nose.tools import raises
from cStringIO import StringIO
from tw2.core import EmptyField, IntValidator, ValidationError
from cgi import FieldStorage
import formencode
import webob
if hasattr(webob, 'NestedMultiDict'):
from webob import NestedMultiDict
else:
from webob.multidict import NestedMultiDict
import tw2.jqplugins.jqgrid.widgets as w
class TestJQGridWidget(WidgetTest):
widget = w.jqGridWidget
attrs = {'id' : 'foo'}
params = {'options' : {
'data': [
{ 'field1' : 'foo', 'field2' : 'foo' } for i in range(2)
],
'datatype': "local",
'colNames':['Field1', 'Field2'],
'colModel':[
{'name':'field1'},
{'name':'field2'},
],
'viewrecords': True,
'rowNum':100,
'rowList':[100,200],
'caption':"Example"
}}
expected = """
<div>
<table id="foo"></table>
<script type="text/javascript">
$(document).ready(
function(){
var opts = {"viewrecords": true, "rowList": [100, 200], "colModel": [{"name": "field1"}, {"name": "field2"}], "caption": "Example", "datatype": "local", "colNames": ["Field1", "Field2"], "data": [{"field2": "foo", "field1": "foo"}, {"field2": "foo", "field1": "foo"}], "rowNum": 100};
var grid = $("#foo");
grid.jqGrid(opts);
if ( 'pager' in opts ) {
opts['pager_selector'] = opts['pager'];
opts['pager'] = $(opts['pager'])
var pager_opts = {}
var prmEdit = {};
var prmAdd = {};
var prmDel = {};
var prmSearch = {};
var prmView = {};
grid.navGrid('#'+opts['pager_selector'], pager_opts,
prmEdit, prmAdd, prmDel, prmSearch, prmView)
;
}
}
);
</script>
</div>"""
| [
"[email protected]"
]
| |
ab81570c377f89bd683f3f79f3b00aa6ebd43962 | 117dceedcb8e93e40428e439fb02d16c58aa4474 | /作业/0322/code/mywebsite/home/templatetags/demo.py | b0192d7d3ce71e59c075ba364cd66ec7fd42018f | []
| no_license | qwert19981228/P4 | ab5ceff94ec49ecbc47d008d6f239e03781eb0fd | b7c434cc64df64ae48a84ee056cbccc0db622fde | refs/heads/master | 2021-10-24T12:32:34.160886 | 2019-03-26T03:39:52 | 2019-03-26T03:39:52 | 167,921,434 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | from django import template
register = template.Library()
@register.simple_tag
def dan(data):
for i in data:
return i
| [
"[email protected]"
]
| |
3c34f374d6272a009eef57975456813734c811d1 | 9135a1c5bcd20f77971085496d9e966d892fb7e9 | /python/deployCVE/deployCVEcluster.py | 32e2ac2efb1d6bfd690c1737f1041ae4096b0614 | [
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
]
| permissive | bseltz-cohesity/scripts | cc54b2b3534175562b0d9cfba85bd63aa5ca2346 | 53c4b057bb4f41ae079fc8236caacf13fd35c10e | refs/heads/master | 2023-08-23T13:13:12.169724 | 2023-08-22T13:21:22 | 2023-08-22T13:21:22 | 142,414,700 | 83 | 44 | Apache-2.0 | 2023-08-24T11:42:22 | 2018-07-26T08:50:47 | PowerShell | UTF-8 | Python | false | false | 13,955 | py | #!/usr/bin/env python
# from os import system, path
# from sys import exit
from threading import Thread
from time import sleep
from getpass import getpass
import tarfile
import urllib2
import ssl
from pyVim import connect
from pyVmomi import vim
from pyhesity import *
from datetime import datetime
### command line arguments
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-vc', '--vcenter', required=True)
parser.add_argument('-vu', '--viuser', required=True)
parser.add_argument('-vp', '--vipassword', default=None)
parser.add_argument('-dc', '--datacenter_name', default=None)
parser.add_argument('-ds', '--datastore_name', action='append', required=True)
parser.add_argument('-vh', '--host_name', action='append', default=None)
parser.add_argument('-f', '--ova_path', required=True)
parser.add_argument('-n', '--vmname', action='append', required=True)
parser.add_argument('-md', '--metasize', type=int, default=52)
parser.add_argument('-dd', '--datasize', type=int, default=250)
parser.add_argument('-n1', '--network1', default='VM Network')
parser.add_argument('-n2', '--network2', default='VM Network 2')
parser.add_argument('-ip', '--ip', action='append', required=True)
parser.add_argument('-m', '--netmask', required=True)
parser.add_argument('-g', '--gateway', required=True)
parser.add_argument('-v', '--vip', action='append', type=str, required=True)
parser.add_argument('-c', '--clustername', type=str, required=True)
parser.add_argument('-ntp', '--ntpserver', action='append', type=str, required=True)
parser.add_argument('-dns', '--dnsserver', action='append', type=str, required=True)
parser.add_argument('-e', '--encrypt', action='store_true')
parser.add_argument('-cd', '--clusterdomain', type=str, required=True)
parser.add_argument('-z', '--dnsdomain', action='append', type=str)
parser.add_argument('-rp', '--rotationalpolicy', type=int, default=90)
parser.add_argument('--fips', action='store_true')
parser.add_argument('-x', '--skipcreate', action='store_true')
parser.add_argument('-k', '--licensekey', type=str, default=None)
parser.add_argument('--accept_eula', action='store_true')
args = parser.parse_args()
if not args.vipassword:
args.vipassword = getpass(prompt='Enter vcenter password: ')
nodeips = list(args.ip)
vmnames = list(args.vmname)
datastores = list(args.datastore_name)
cluster_names = list(args.host_name)
vips = list(args.vip)
clustername = args.clustername
ntpservers = list(args.ntpserver)
dnsservers = list(args.dnsserver)
encrypt = args.encrypt
clusterdomain = args.clusterdomain
dnsdomains = [clusterdomain]
if args.dnsdomain is not None:
dnsdomains = [clusterdomain] + list(args.dnsdomain)
rotationalpolicy = args.rotationalpolicy
fips = args.fips
hostname = clustername + '.' + clusterdomain
skipcreate = args.skipcreate
licensekey = args.licensekey
def get_obj_in_list(obj_name, obj_list):
"""
Gets an object out of a list (obj_list) whos name matches obj_name.
"""
for o in obj_list:
if o.name == obj_name:
return o
print("Unable to find object by the name of %s in list:\n%s" %
(obj_name, map(lambda o: o.name, obj_list)))
exit(1)
def get_objects(si, args):
"""
Return a dict containing the necessary objects for deployment.
"""
# Get datacenter object.
datacenter_list = si.content.rootFolder.childEntity
if args.datacenter_name:
datacenter_obj = get_obj_in_list(args.datacenter_name, datacenter_list)
else:
datacenter_obj = datacenter_list[0]
network_list = datacenter_obj.networkFolder.childEntity
network_obj1 = get_obj_in_list(args.network1, network_list)
network_obj2 = get_obj_in_list(args.network2, network_list)
# Get datastore object.
datastore_list = datacenter_obj.datastoreFolder.childEntity
if args.datastore_name:
datastore_obj = get_obj_in_list(args.datastore_name, datastore_list)
elif len(datastore_list) > 0:
datastore_obj = datastore_list[0]
else:
print("No datastores found in DC (%s)." % datacenter_obj.name)
# Get cluster object.
cluster_list = datacenter_obj.hostFolder.childEntity
if args.cluster_name:
cluster_obj = get_obj_in_list(args.cluster_name, cluster_list)
elif len(cluster_list) > 0:
cluster_obj = cluster_list[0]
else:
print("No clusters found in DC (%s)." % datacenter_obj.name)
# Generate resource pool.
resource_pool_obj = cluster_obj.resourcePool
return {"datacenter": datacenter_obj,
"datastore": datastore_obj,
"resource pool": resource_pool_obj,
"network1": network_obj1,
"network2": network_obj2}
def keep_lease_alive(lease):
"""
Keeps the lease alive while POSTing the VMDK.
"""
while(True):
sleep(5)
try:
lease.HttpNfcLeaseProgress(50)
if (lease.state == vim.HttpNfcLease.State.done):
return
except Exception:
return
def add_disk(vm, si, disk_size, disk_type, controller, unit_number):
spec = vim.vm.ConfigSpec()
dev_changes = []
new_disk_kb = int(disk_size) * 1024 * 1024
disk_spec = vim.vm.device.VirtualDeviceSpec()
disk_spec.fileOperation = "create"
disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
disk_spec.device = vim.vm.device.VirtualDisk()
disk_spec.device.backing = \
vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
if disk_type == 'thin':
disk_spec.device.backing.thinProvisioned = True
disk_spec.device.backing.diskMode = 'independent_persistent'
disk_spec.device.unitNumber = unit_number
disk_spec.device.capacityInKB = new_disk_kb
disk_spec.device.controllerKey = controller.key
dev_changes.append(disk_spec)
spec.deviceChange = dev_changes
vm.ReconfigVM_Task(spec=spec)
print("%sGB disk added to %s" % (disk_size, vm.config.name))
# validate parameters before proceeding
# do we have at least 3 ips?
if len(nodeips) < 3:
print('not enough node ips specified!')
exit(1)
if len(vmnames) != len(nodeips):
print('number of vm names and ips does not match!')
exit(1)
if len(datastores) != len(nodeips):
print('number of datastores and vms does not match!')
exit(1)
if len(cluster_names) != len(nodeips):
print('number of hosts/clusters and vms does not match!')
exit(1)
# extract ova file
t = tarfile.open(args.ova_path)
ovffilename = list(filter(lambda x: x.endswith(".ovf"), t.getnames()))[0]
ovffile = t.extractfile(ovffilename)
try:
ovfd = ovffile.read()
except Exception:
print("Could not read file: %s" % ovffile)
exit(1)
ovffile.close()
# connect to vcenter
try:
si = connect.SmartConnectNoSSL(host=args.vcenter,
user=args.viuser,
pwd=args.vipassword,
port=443)
except Exception:
print("Unable to connect to %s" % args.vcenter)
exit(1)
for i, ip in enumerate(nodeips):
print('Deploying OVA...')
args.vmname = vmnames[i]
args.datastore_name = datastores[i]
args.cluster_name = cluster_names[i]
objs = get_objects(si, args)
manager = si.content.ovfManager
spec_params = vim.OvfManager.CreateImportSpecParams()
spec_params.entityName = args.vmname
spec_params.networkMapping = [
vim.OvfManager.NetworkMapping(name='DataNetwork', network=objs["network1"]),
vim.OvfManager.NetworkMapping(name='SecondaryNetwork', network=objs["network2"])
]
spec_params.propertyMapping = [
vim.KeyValue(key='dataIp', value=ip),
vim.KeyValue(key='dataNetmask', value=args.netmask),
vim.KeyValue(key='dataGateway', value=args.gateway),
vim.KeyValue(key='DeploymentOption', value='small'),
vim.KeyValue(key='IpAssignment.IpProtocol', value='IPv4'),
vim.KeyValue(key='NetworkMapping.DataNetwork', value=args.network1),
vim.KeyValue(key='NetworkMapping.SecondaryNetwork', value=args.network2)
]
import_spec = manager.CreateImportSpec(ovfd,
objs["resource pool"],
objs["datastore"],
spec_params)
lease = objs["resource pool"].ImportVApp(import_spec.importSpec,
objs["datacenter"].vmFolder)
# keep alive while OVA is being deployed
ovabusy = True
while(ovabusy):
if lease.state == vim.HttpNfcLease.State.ready:
keepalive_thread = Thread(target=keep_lease_alive, args=(lease,))
keepalive_thread.start()
for deviceUrl in lease.info.deviceUrl:
url = deviceUrl.url.replace('*', args.vcenter)
fileItem = list(filter(lambda x: x.deviceId == deviceUrl.importKey,
import_spec.fileItem))[0]
ovffilename = list(filter(lambda x: x == fileItem.path,
t.getnames()))[0]
ovffile = t.extractfile(ovffilename)
headers = {'Content-length': ovffile.size}
req = urllib2.Request(url, ovffile, headers)
response = urllib2.urlopen(req, context=ssl._create_unverified_context())
lease.HttpNfcLeaseComplete()
keepalive_thread.join()
ovabusy = False
elif lease.state == vim.HttpNfcLease.State.error:
print("Lease error: %s" % lease.error)
exit(1)
# add disks
print('adding disks...')
searcher = si.content.searchIndex
vm = searcher.FindChild(objs['resource pool'], args.vmname)
for dev in vm.config.hardware.device:
if dev.deviceInfo.label == 'SCSI controller 1':
controller1 = dev
if dev.deviceInfo.label == 'SCSI controller 2':
controller2 = dev
add_disk(vm, si, args.metasize, 'thin', controller1, 0)
add_disk(vm, si, args.datasize, 'thin', controller2, 0)
# poweron
print('powering on VM...')
objs['datacenter'].PowerOnMultiVM_Task([vm])
print('OVA Deployment Complete!')
# create cluster
print('waiting for nodes to come online...')
while apiconnected() is False:
sleep(5)
apiauth(nodeips[0], 'admin', 'local', password='admin', quiet=True)
### Cluster create parameters
ClusterBringUpReq = {
"clusterName": clustername,
"ntpServers": ntpservers,
"dnsServers": dnsservers,
"domainNames": dnsdomains,
"clusterGateway": args.gateway,
"clusterSubnetCidrLen": args.netmask,
"ipmiGateway": None,
"ipmiSubnetCidrLen": None,
"ipmiUsername": None,
"ipmiPassword": None,
"enableEncryption": encrypt,
"rotationalPolicy": rotationalpolicy,
"enableFipsMode": fips,
"nodes": [],
"clusterDomain": clusterdomain,
"hostname": hostname,
"vips": vips
}
### gather node info
if skipcreate is not True:
# wait for all requested nodes to be free
nodecount = 0
while nodecount < len(nodeips):
nodecount = 0
nodes = api('get', '/nexus/avahi/discover_nodes')
for freenode in nodes['freeNodes']:
if freenode['ipAddresses'][0] in nodeips:
nodecount += 1
# print("%s of %s free nodes found" % (nodecount, len(nodeips)))
if nodecount < len(nodeips):
sleep(10)
for freenode in nodes['freeNodes']:
for nodeip in nodeips:
# gather node IP info
if nodeip == freenode['ipAddresses'][0]:
if 'ipAddresses' in freenode:
ip = freenode['ipAddresses'][0]
else:
print('node %s has no IP address' % nodeid)
exit(1)
if 'ipmiIp' in freenode:
ipmiip = freenode['ipmiIp']
else:
print('node %s has no IPMI IP address' % nodeid)
exit(1)
# add node to Cluster parameters
node = {
"id": freenode['nodeId'],
"ip": freenode['ipAddresses'][0],
"ipmiIp": ""
}
ClusterBringUpReq['nodes'].append(node)
### create the cluster
if skipcreate is not True:
print("Creating Cluster %s..." % clustername)
result = api('post', '/nexus/cluster/virtual_robo_create', ClusterBringUpReq)
### wait for cluster to come online
print("Waiting for cluster creation...")
clusterId = None
while clusterId is None:
sleep(5)
apiauth(nodeips[0], 'admin', 'local', password='admin', quiet=True)
if(apiconnected() is True):
cluster = api('get', 'cluster', quiet=True)
if cluster is not None:
if 'errorCode' not in cluster:
clusterId = cluster['id']
print("New Cluster ID is: %s" % clusterId)
apidrop()
### wait for services to be started
print("Waiting for services to start...")
synced = False
while synced is False:
sleep(5)
apiauth(nodeips[0], 'admin', 'local', password='admin', quiet=True)
if(apiconnected() is True):
stat = api('get', '/nexus/cluster/status', quiet=True)
if stat is not None:
if stat['isServiceStateSynced'] is True:
synced = True
print('Cluster Services are Started')
### accept eula and apply license key
if args.accept_eula is True and licensekey is not None:
print("Accepting EULA and Applying License Key...")
now = datetime.now()
nowUsecs = dateToUsecs(now.strftime('%Y-%m-%d %H:%M:%S'))
nowMsecs = int(round(nowUsecs / 1000000))
agreement = {
"signedVersion": 2,
"signedByUser": "admin",
"signedTime": nowMsecs,
"licenseKey": licensekey
}
api('post', '/licenseAgreement', agreement)
print("Cluster Creation Successful!")
| [
"[email protected]"
]
| |
77f6275501857c35fd9202d8af88ba58ac4770f7 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2177/60619/281049.py | 29b365990a3485c30f94756d6d80e192f9f24ac3 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | i = int(input())
if i == 11:
print(12)
print("6 7 5 8 4 9 3 10 2 11 1 12", end=" ")
elif i == 1:
print(2)
print("1 2", end=" ")
elif i == 9:
print(10)
print("5 6 4 7 3 8 2 9 1 10", end=" ")
elif i == 13:
print(14)
print("7 8 6 9 5 10 4 11 3 12 2 13 1 14", end=" ")
elif i == 35:
print(36)
print("18 19 17 20 16 21 15 22 14 23 13 24 12 25 11 26 10 27 9 28 8 29 7 30 6 31 5 32 4 33 3 34 2 35 1 36",end=" ")
elif i == 16:
print(17)
print("9 8 10 7 11 6 12 5 13 4 14 3 15 2 16 1 17", end=" ")
else:
print(i) | [
"[email protected]"
]
| |
6851cf6e45b7a9d37c68a9ad99a6e7a405d23e88 | e081eebc37aef48084fa62a1b36443f03b9e2abe | /Chef_Detective.py | 9740b546f3162141b0d1f6767895dff7974d644f | []
| no_license | S-C-U-B-E/CodeChef-Practise-Beginner-Python | 93fa202eede83cf4f58177bffb4ecc4ddb7f19bc | 78a02303b3cdd7eb7b0c45be59a1c282234f8719 | refs/heads/master | 2021-03-24T01:10:43.913150 | 2020-03-16T12:49:12 | 2020-03-16T12:49:12 | 247,501,633 | 16 | 8 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | n=int(input())
r=[int(x) for x in input().split()]
r=set(r)
for i in range(n):
if (i+1) not in r:
print(i+1,end=" ") | [
"[email protected]"
]
| |
d4826815890181cbe6ef3a944b88f49cc2476a6b | 531e5d92c003a68fd88ab56e1cea2955774947af | /tests/plugins/api/test_default_definition_body_plugin.py | ad6e1648ce76eecbb2203b57767f5857656156f4 | [
"Apache-2.0"
]
| permissive | jfuss/serverless-application-model | 675eea54acae72383234ed188bd02cfcaeadfb35 | 1af3e97b2043369087729cc3849934f8cf838b7e | refs/heads/develop | 2023-06-10T13:11:24.555408 | 2023-05-25T15:53:12 | 2023-05-25T15:53:12 | 117,887,073 | 2 | 1 | Apache-2.0 | 2021-11-15T20:47:42 | 2018-01-17T20:09:52 | Python | UTF-8 | Python | false | false | 1,721 | py | from mock import Mock, patch
from unittest import TestCase
from samtranslator.plugins.api.default_definition_body_plugin import DefaultDefinitionBodyPlugin
from samtranslator.public.plugins import BasePlugin
IMPLICIT_API_LOGICAL_ID = "ServerlessRestApi"
class TestDefaultDefinitionBodyPlugin_init(TestCase):
def setUp(self):
self.plugin = DefaultDefinitionBodyPlugin()
def test_plugin_must_setup_correct_name(self):
# Name is the class name
expected_name = "DefaultDefinitionBodyPlugin"
self.assertEqual(self.plugin.name, expected_name)
def test_plugin_must_be_instance_of_base_plugin_class(self):
self.assertTrue(isinstance(self.plugin, BasePlugin))
class TestDefaultDefinitionBodyPlugin_on_before_transform_template(TestCase):
def setUp(self):
self.plugin = DefaultDefinitionBodyPlugin()
@patch("samtranslator.plugins.api.default_definition_body_plugin.SamTemplate")
def test_must_process_functions(self, SamTemplateMock):
template_dict = {"a": "b"}
api_resources = [("id1", ApiResource()), ("id2", ApiResource()), ("id3", ApiResource())]
sam_template = Mock()
SamTemplateMock.return_value = sam_template
sam_template.iterate = Mock()
sam_template.iterate.return_value = api_resources
self.plugin.on_before_transform_template(template_dict)
SamTemplateMock.assert_called_with(template_dict)
# Make sure this is called only for Apis
sam_template.iterate.assert_any_call({"AWS::Serverless::Api"})
sam_template.iterate.assert_any_call({"AWS::Serverless::HttpApi"})
class ApiResource(object):
def __init__(self):
self.properties = {}
| [
"[email protected]"
]
| |
d6ba727b7916256b1fe606eaee94e197cd497ade | 537d28fb2142331e27c84ebf2c16bad77aceb24e | /ml/m06_wine3.py | 68a32d34513e45b664a6ac60eed3a02164e1662a | []
| no_license | gema0000/bit2019 | c27c3cec8d8d3a0907ade41523ce1c5ee86337b6 | 2f44ad3956b387186935374d9a488ad40a13bcaf | refs/heads/master | 2020-07-03T05:19:41.051447 | 2019-10-26T23:56:25 | 2019-10-26T23:56:25 | 201,796,021 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,048 | py | import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
# 데이터 읽어 들이기
wine = pd.read_csv("./data/winequality-white.csv", sep=";", encoding="utf-8")
# 데이터를 레이블과 데이터로 분리하기
y = wine["quality"]
x = wine.drop("quality", axis=1)
# y 레이블 변경하기
newlist = []
for v in list(y):
if v <= 4:
newlist += [0]
elif v <= 7:
newlist += [1]
else:
newlist += [2]
y = newlist
x_train, x_test, y_train, y_test = train_test_split(
x, y, test_size=0.2)
# 학습하기
model = RandomForestClassifier()
model.fit(x_train, y_train)
aaa = model.score(x_test, y_test)
# 평가하기
y_pred = model.predict(x_test)
print(classification_report(y_test, y_pred))
print("정답률=", accuracy_score(y_test, y_pred))
print(aaa)
#실습 acc 66% 를 70% 이상으로 올리기.
| [
"[email protected]"
]
| |
1709bf60d3e0cbdb92a08d489184c10f929da344 | 0d0b8236ff06027037d2a8a724d13a1866a9999c | /0x0B-python-input_output/12-main.py | c1b177be92e6bcc58fb85bc5686fc08c60d83afa | []
| no_license | Danucas/holbertonschool-higher_level_programming | 3f8e81a610bf80890280b764362b56ad8803e2df | b963d41af8bccf764dff67f80ea16f1184c0a96d | refs/heads/master | 2022-07-31T05:53:57.046789 | 2020-05-21T21:29:54 | 2020-05-21T21:29:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | #!/usr/bin/python3
Student = __import__('12-student').Student
student_1 = Student("John", "Doe", 23)
student_2 = Student("Bob", "Dylan", 27)
j_student_1 = student_1.to_json()
j_student_2 = student_2.to_json(['first_name', 3])
j_student_3 = student_2.to_json(['middle_name', 'age'])
print(j_student_1)
print(j_student_2)
print(j_student_3)
| [
"[email protected]"
]
| |
4e93323cc355f168877efacef3da3bd8453fb298 | 6c4486ab599fd5dea9006e41cdb89db54b47b77c | /tests/products/Regression_HHOL_Create_Policy_&_Renewal.py | c1ac42ea94621a1acbecad85fcabd83ec9be14eb | []
| no_license | kenito2050/Python-Page-Object-Framework-Example | 28ba61cdc1498374be4fc088a1348e0acb754dc2 | 2a3a3e6c74dc7ec7c9acce41030e9487925b9b0c | refs/heads/master | 2020-04-02T15:52:24.286208 | 2018-10-25T01:46:24 | 2018-10-25T01:46:24 | 154,587,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,192 | py | import csv
import datetime
import os
import time
import unittest
from urllib.parse import urlparse, parse_qs
from xml.etree import ElementTree as ET
import xlrd
from faker import address
from faker import company
from faker import name
from selenium import webdriver
from pages.producer_center.products_programs_page import ProductsAndPrograms
from pages.producer_center.client_search_page import ClientSearch
from pages.producer_center.my_policies.my_policies_screens.active_policies import active_policies
from pages.producer_center.navigation_bar import Navigation_Bar
from pages.producer_center.client_contact_page import ClientContact
from pages.producer_center.saw.coverage_periods_page import CoveragePeriods
from pages.producer_center.saw.products.HHOL.insured_information.insured_information import Insured_Information
from pages.producer_center.saw.products.HHOL.PAF.PAF import PAF
from pages.producer_center.saw.products.HHOL.coverage_options.HNOA_coverage_options import HNOA_Coverage_Options
from pages.producer_center.saw.products.HHOL.coverage_options.No_HNOA_coverage_options import No_HNOA_Coverage_Options
from pages.producer_center.saw.products.HHOL.coverage_options.coverage_options import Coverage_Options
from pages.producer_center.saw.products.HHOL.select_option.select_option import Select_Option
from pages.producer_center.saw.quote_review import Quote_Review
from pages.producer_center.saw.invoice import Invoice
from pages.producer_center.saw.confirm_order_details import Confirm_Order_Details
from pages.producer_center.saw.confirm_and_issue import Confirm_and_Issue
from pages.producer_center.saw.thank_you_page import Thank_You_Page
from pages.producer_center.saw.summary import Summary
from pages.service_center.agents_page import AgentsPage
from pages.service_center.applications_page import ApplicationsPage
from pages.service_center.login_page import LoginPage
from pages.service_center.navigation_bar import NavigationBar
from pages.service_center.policies_page import PoliciesPage
from pages.service_center.policy_screens.policy_screens import Policy_Screens
from pages.service_center.policy_screens.details import Details
from pages.service_center.agent_screens.agent_details import Agent_Details
from pages.service_center.policy_screens.effective_periods import Effective_Periods
from pages.service_center.subjectivities import Subjectivities
from utilities.Environments.Environments import Environments
from utilities.contract_classes.contract_classes_Medical import ContractClasses_Medical
from utilities.state_capitals.state_capitals import StateCapitals
from utilities.zip_codes.zip_codes import ZipCodes
class CreateQuote():
def test_login_search_for_agent_create_quote(self):
Product = "HHOL"
## Directory Locations
tests_directory = os.path.abspath(os.pardir)
framework_directory = os.path.abspath(os.path.join(tests_directory, os.pardir))
config_file_directory = os.path.abspath(os.path.join(framework_directory, 'config_files'))
test_case_directory = os.path.abspath(os.path.join(framework_directory, 'utilities\Excel_Sheets\Products'))
test_results_directory = os.path.abspath(
os.path.join(framework_directory, 'utilities\Excel_Sheets\Test_Results'))
# Determine the Test Run Type
# Get Test Run Type Text from config file
tree = ET.parse(os.path.join(config_file_directory, 'test_environment.xml'))
test_environment = tree.getroot()
test_run_type = (test_environment[1][0].text)
test_run_type_value = ''
global test_summary
global test_scenario
global effective_date
global test_scenario_number
global agent
global city
global state
global zip
global revenue_next_year
global revenue_current_year
global years_in_business
global staff_count
global _OLD_scenario
global _OLD_scenario_number
# Open Test Scenario Workbook; Instantiate worksheet object
wb = xlrd.open_workbook(os.path.join(test_case_directory, Product + '.xlsx'))
sh = wb.sheet_by_index(1)
## Begin For Loop to iterate through Test Scenarios
i = 1
rows = sh.nrows
empty_cell = False
for x in range(1, sh.nrows):
cell_val = sh.cell(i, 0).value
if cell_val == '':
# If Cell Value is empty, set empty_cell to True
empty_cell = True
else:
# If Cell Value is NOT empty, set empty_cell to False
empty_cell = False
# Check to see if cell is NOT empty
# If cell is not empty, read in the values
if empty_cell == False:
test_summary = sh.cell_value(i, 0)
test_scenario = str(round(sh.cell_value(i, 1)))
effective_date = sh.cell_value(i, 2)
agent = sh.cell_value(i, 3)
city = sh.cell_value(i, 4)
state = sh.cell_value(i, 5)
zip = str(round(sh.cell_value(i, 6)))
revenue_next_year = str(round(sh.cell_value(i, 7)))
revenue_current_year = str(round(sh.cell_value(i, 8)))
years_in_business = str(round(sh.cell_value(i, 9)))
staff_count = str(round(sh.cell_value(i, 10)))
_OLD_scenario = sh.cell_value(i, 11)
_OLD_scenario_number = str(round(sh.cell_value(i, 12)))
# Else, the cell is empty
# End the Loop
else:
break
## Determine Test Environment to run scripts
## Read in value from test_environment.xml
tree = ET.parse(os.path.join(config_file_directory, 'test_environment.xml'))
test_environment = tree.getroot()
environment = (test_environment[0][0].text)
## Select Appropriate URL based on the Environment Value from above
base_URL = Environments.return_environments(environment)
first_name = name.first_name()
last_name = name.last_name()
company_name = company.company_name()
# company_name_string = company_name
company_name_string = "QA Test" + " " + "-" + " " + first_name + " " + last_name + " " + "dba" + " " + company_name
address_value = address.street_address()
# city = StateCapitals.return_state_capital(state)
# postal_code = ZipCodes.return_zip_codes(state)
# Access XML to retrieve login credentials
tree = ET.parse('resources.xml')
login_credentials = tree.getroot()
username = (login_credentials[1][0].text)
password = (login_credentials[1][1].text)
# Date Variables
date_today = time.strftime("%m/%d/%Y")
ad_hoc_effectiveDate = "09/06/2017"
# Convert effective_date value to format MM/DD/YYYY
d = xlrd.xldate_as_tuple(int(effective_date), 0)
# convert date tuple in mm-dd-yyyy format
d = datetime.datetime(*(d[0:3]))
effective_date_formatted = d.strftime("%m/%d/%Y")
# Initialize Driver; Launch URL
# baseURL = "https://svcdemo2.wn.nasinsurance.com/"
driver = webdriver.Chrome(os.path.join(config_file_directory, 'chromedriver.exe'))
# Maximize Window; Launch URL
driver.maximize_window()
driver.get(base_URL)
driver.implicitly_wait(3)
# Call Login methods from Pages.home.login_page.py
lp = LoginPage(driver)
lp.login(username, password)
lp.click_login_button()
nb = NavigationBar(driver)
nb.click_agents()
ap = AgentsPage(driver)
ap.search_for_agent(agent)
ap.click_submit_new_application_as_agent()
pp = ProductsAndPrograms(driver)
pp.click_HHOL()
cs = ClientSearch(driver)
cs.input_bogus_client_data(zip)
cs.manually_input_new_client()
cs.enter_new_client_name_address(company_name_string, address_value, city, state)
cc = ClientContact(driver)
# TODO:
# Code now parses URL String & retrieves application ID
#cc.parse_url_get_app_id()
# Get the Application ID from URL -- THIS WORKS
current_url = driver.current_url
first_url_string = urlparse(current_url)
query_dict = parse_qs(first_url_string.query)
application_id = (query_dict['app_id'][0])
cc.click_next()
cp = CoveragePeriods(driver)
# Enter an Ad Hoc Effective Date
# cp.enter_ad_hoc_effective_date(effective_date_formatted)
# Enter Today's Date as Effective Date
cp.enter_current_date_as_effective_date(date_today)
cp.click_next()
saw_ii = Insured_Information(driver)
saw_ii.enter_annual_revenue(revenue_next_year)
saw_ii.click_next()
saw_PAF = PAF(driver)
### Choose HNOA / No HNOA ###
### ###
if test_scenario == "1":
saw_PAF.create_quote_include_HNOA(years_in_business, staff_count, revenue_current_year)
elif test_scenario == "2":
saw_PAF.create_quote_NO_HNOA(years_in_business, staff_count, revenue_current_year)
time.sleep(3)
# Click Next on PAF screen
saw_PAF.click_next()
#### This class is for generic objects that display on the Coverage Options page
saw_CC = Coverage_Options(driver)
saw_CC.select_all_deselect_all()
saw_CC.click_proceed_to_quote()
saw_summary = Summary(driver)
saw_summary.click_generate_quote()
saw_quote_review = Quote_Review(driver)
saw_quote_review.click_select_option()
saw_select_option = Select_Option(driver)
saw_select_option.select_premium()
saw_select_option.click_accept_rate_and_continue()
saw_confirm_order_details = Confirm_Order_Details(driver)
saw_confirm_order_details.click_next()
saw_invoice = Invoice(driver)
saw_invoice.click_proceed_to_issuing()
# Click Return to Admin Interface
saw_confirm_issue = Confirm_and_Issue(driver)
# At this point, script is re-directed to service center login screen
# This works on DEV
saw_confirm_issue.click_return_to_Admin_Interface()
time.sleep(3)
# Click Applications link on Navigation Bar
nb.click_applications()
# Enter Application ID, click Search
app_page = ApplicationsPage(driver)
app_page.enter_application_id(application_id)
app_page.click_search_button()
# Click on application id link
# THIS IS NOT WORKING
#app_page.click_application_id_link(application_id)
# Navigate to Application Details page
new_current_url = driver.current_url
slashparts = new_current_url.split('/')
# Now join back the first three sections 'http:', '' and 'example.com'
new_base_url = '/'.join(slashparts[:3]) + '/'
app_details_string = "?c=app.view&id="
app_subjectivities_string = "?c=app.track_subjectivities&id="
application_details_screen = new_base_url + app_details_string + application_id
application_subjectivites_screen = new_base_url + app_subjectivities_string + application_id
# Navigate to Application Subjectivities Screen
driver.get(application_subjectivites_screen)
# Approve Subjectivities
# Added Anna's Subjectivities Code 5-15-17
sub = Subjectivities(driver)
sub.set_all_subjectivities_to_recieved()
#sub.change_open_subjectivities_to_received()
#sub.select_yes_to_subjectivities_met()
sub.click_submit()
sub.click_agent_link()
# Return to Producer Center; Issue Policy
saw_confirm_issue.input_signature()
saw_confirm_issue.click_accept_terms_issue_policy()
# Retrieve Policy Number of Policy that was issued; Policy Number stored in policy_text
thank_you = Thank_You_Page(driver)
policy_text = thank_you.retrieve_store_policy_number()
# Return to Admin Interface
saw_confirm_issue.click_return_to_Admin_Interface()
# Click on Policies link; Navigate to Policy that was just issued
nb.click_policies()
pp = PoliciesPage(driver)
# On Policies Page, Click All link
pp.click_all_link()
# Enter Policy Number & Click Search
pp.enter_policy_name(policy_text)
pp.click_search_button()
# Click on the Policy link, Open Policy Details
pp.click_policy_link(policy_text)
# Click Effective Periods
ps = Policy_Screens(driver)
ps.click_Effective_Periods()
# Change Effective Periods Dates to allow renewals
ep = Effective_Periods(driver)
ep.change_dates_expire_policy_allow_renewal()
ep.click_update_dates()
# Click Details link to display the Policy Details screen
ps.click_Details()
# On Details Screen, Click on the Agent that issued the Policy
details = Details(driver)
details.click_agent_link(agent)
# Agent Details Screen Displays
ag = Agent_Details(driver)
# Click "Submit New Application as" link
ag.click_submit_new_application_as_agent()
# Click My Policies on Navigation Bar
pnb = Navigation_Bar(driver)
pnb.click_my_policies()
# Locate Policy that was issued
ap = active_policies(driver)
ap.enter_policy_name(policy_text)
ap.click_search_button()
# Click Policy
ap.click_policy_link(policy_text)
# Code works up to this point
# Wait
driver.implicitly_wait(3)
# Write Values to CSV
# Declare the values that will be outputted to csv
values = [test_summary, company_name_string, application_id, policy_text]
# Declare Directory of csv file
download_dir = os.path.join(test_results_directory, 'Regressions', Product + '_Regression_Test_Results.csv')
# This Section Writes the output to the csv file
with open(download_dir, "a") as f:
writer = csv.writer(f, lineterminator='\n') # lineterminator='\n'
writer.writerow(values)
# Declare the values that will be outputted to csv
values = [test_summary, company_name_string, application_id, policy_text]
# Declare Directory of csv file
download_dir = os.path.join(test_results_directory, 'Regressions',
Product + '_Regression_Test_Results.csv') # where you want the file to be downloaded to
with open(download_dir, "w") as f:
writer = csv.writer(f)
writer.writerow(
{'Test Summary': test_summary, 'Insured': company_name_string, 'Application ID': application_id,
'Policy': policy_text})
writer.writerow(values)
# Close Browser
driver.quit()
cq = CreateQuote()
cq.test_login_search_for_agent_create_quote() | [
"[email protected]"
]
| |
f9d56ef9022697fe15a67053856d891c75f65757 | 799e022a2f63db8f98304a2b4984c373c053074f | /liberapay/billing/exchanges.py | f432bf2b6af129af73789b5f7b2345dccb067c84 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
]
| permissive | CryptArc/liberapay.com | 9f403fd1e5d18b41dcfcc16f6d466d834689ec8e | ef6d74332f937035537a47edf212897fd684611d | refs/heads/master | 2021-01-17T20:34:16.330629 | 2016-05-18T15:27:01 | 2016-05-18T15:27:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,567 | py | """Functions for moving money between Liberapay and the outside world.
"""
from __future__ import division, print_function, unicode_literals
from decimal import Decimal, ROUND_UP
from aspen import Response
from aspen.utils import typecheck
from mangopaysdk.entities.payin import PayIn
from mangopaysdk.entities.payout import PayOut
from mangopaysdk.entities.transfer import Transfer
from mangopaysdk.entities.wallet import Wallet
from mangopaysdk.types.exceptions.responseexception import ResponseException
from mangopaysdk.types.money import Money
from liberapay.billing import (
mangoapi,
PayInExecutionDetailsDirect,
PayInPaymentDetailsCard, PayInPaymentDetailsBankWire,
PayOutPaymentDetailsBankWire,
)
from liberapay.constants import (
D_CENT, D_ZERO,
PAYIN_CARD_MIN, FEE_PAYIN_CARD,
FEE_PAYIN_BANK_WIRE,
FEE_PAYOUT, FEE_PAYOUT_OUTSIDE_SEPA, FEE_PAYOUT_WARN, QUARANTINE, SEPA_ZONE,
FEE_VAT,
)
from liberapay.exceptions import (
LazyResponse, NegativeBalance, NotEnoughWithdrawableMoney, PaydayIsRunning,
FeeExceedsAmount, TransactionFeeTooHigh,
)
from liberapay.models import check_db
from liberapay.models.participant import Participant
from liberapay.models.exchange_route import ExchangeRoute
QUARANTINE = '%s days' % QUARANTINE.days
def upcharge(amount, fees, min_amount):
"""Given an amount, return a higher amount and the difference.
"""
typecheck(amount, Decimal)
if amount < min_amount:
amount = min_amount
# a = c - vf * c - ff => c = (a + ff) / (1 - vf)
# a = amount ; c = charge amount ; ff = fixed fee ; vf = variable fee
charge_amount = (amount + fees.fix) / (1 - fees.var)
charge_amount = charge_amount.quantize(D_CENT, rounding=ROUND_UP)
fee = charge_amount - amount
# + VAT
vat = (fee * FEE_VAT).quantize(D_CENT, rounding=ROUND_UP)
charge_amount += vat
fee += vat
return charge_amount, fee, vat
upcharge_bank_wire = lambda amount: upcharge(amount, FEE_PAYIN_BANK_WIRE, D_ZERO)
upcharge_card = lambda amount: upcharge(amount, FEE_PAYIN_CARD, PAYIN_CARD_MIN)
def skim_amount(amount, fees):
"""Given a nominal amount, compute the fees, taxes, and the actual amount.
"""
fee = amount * fees.var + fees.fix
vat = fee * FEE_VAT
fee += vat
fee = fee.quantize(D_CENT, rounding=ROUND_UP)
vat = vat.quantize(D_CENT, rounding=ROUND_UP)
return amount - fee, fee, vat
def skim_credit(amount, ba):
"""Given a payout amount, return a lower amount, the fee, and taxes.
The returned amount can be negative, look out for that.
"""
typecheck(amount, Decimal)
if ba.Type == 'IBAN':
country = ba.Details.IBAN[:2].upper()
elif ba.Type in ('US', 'GB', 'CA'):
country = ba.Type
else:
assert ba.Type == 'OTHER', ba.Type
country = ba.Details.Country.upper()
if country in SEPA_ZONE:
fee = FEE_PAYOUT
else:
fee = FEE_PAYOUT_OUTSIDE_SEPA
return skim_amount(amount, fee)
def repr_error(o):
r = o.ResultCode
if r == '000000':
return
msg = getattr(o, 'ResultMessage', None)
if msg:
r += ': ' + msg
return r
def repr_exception(e):
if isinstance(e, ResponseException):
return '%s %s' % (e.Code, e.Message)
else:
return repr(e)
def create_wallet(db, participant):
w = Wallet()
w.Owners.append(participant.mangopay_user_id)
w.Description = str(participant.id)
w.Currency = 'EUR'
w = mangoapi.wallets.Create(w)
db.run("""
UPDATE participants
SET mangopay_wallet_id = %s
WHERE id = %s
""", (w.Id, participant.id))
participant.set_attributes(mangopay_wallet_id=w.Id)
return w.Id
def test_hook():
return
def payout(db, participant, amount, ignore_high_fee=False):
assert amount > 0
payday = db.one("SELECT * FROM paydays WHERE ts_start > ts_end")
if payday:
raise PaydayIsRunning
route = ExchangeRoute.from_network(participant, 'mango-ba')
assert route
ba = mangoapi.users.GetBankAccount(participant.mangopay_user_id, route.address)
# Do final calculations
credit_amount, fee, vat = skim_credit(amount, ba)
if credit_amount <= 0 and fee > 0:
raise FeeExceedsAmount
fee_percent = fee / amount
if fee_percent > FEE_PAYOUT_WARN and not ignore_high_fee:
raise TransactionFeeTooHigh(fee_percent, fee, amount)
# Try to dance with MangoPay
e_id = record_exchange(db, route, -credit_amount, fee, vat, participant, 'pre')
payout = PayOut()
payout.AuthorId = participant.mangopay_user_id
payout.DebitedFunds = Money(int(amount * 100), 'EUR')
payout.DebitedWalletId = participant.mangopay_wallet_id
payout.Fees = Money(int(fee * 100), 'EUR')
payout.MeanOfPaymentDetails = PayOutPaymentDetailsBankWire(
BankAccountId=route.address,
BankWireRef=str(e_id),
)
payout.Tag = str(e_id)
try:
test_hook()
payout = mangoapi.payOuts.Create(payout)
return record_exchange_result(db, e_id, payout.Status.lower(), repr_error(payout), participant)
except Exception as e:
error = repr_exception(e)
return record_exchange_result(db, e_id, 'failed', error, participant)
def charge(db, participant, amount, return_url):
"""Charge the participant's credit card.
Amount should be the nominal amount. We'll compute fees below this function
and add it to amount to end up with charge_amount.
"""
typecheck(amount, Decimal)
route = ExchangeRoute.from_network(participant, 'mango-cc')
assert route
charge_amount, fee, vat = upcharge_card(amount)
amount = charge_amount - fee
e_id = record_exchange(db, route, amount, fee, vat, participant, 'pre')
payin = PayIn()
payin.AuthorId = participant.mangopay_user_id
if not participant.mangopay_wallet_id:
create_wallet(db, participant)
payin.CreditedWalletId = participant.mangopay_wallet_id
payin.DebitedFunds = Money(int(charge_amount * 100), 'EUR')
payin.ExecutionDetails = PayInExecutionDetailsDirect(
CardId=route.address,
SecureModeReturnURL=return_url,
)
payin.Fees = Money(int(fee * 100), 'EUR')
payin.PaymentDetails = PayInPaymentDetailsCard(CardType='CB_VISA_MASTERCARD')
payin.Tag = str(e_id)
try:
test_hook()
payin = mangoapi.payIns.Create(payin)
except Exception as e:
error = repr_exception(e)
return record_exchange_result(db, e_id, 'failed', error, participant)
if payin.ExecutionDetails.SecureModeRedirectURL:
raise Response(302, headers={'Location': payin.ExecutionDetails.SecureModeRedirectURL})
return record_exchange_result(db, e_id, payin.Status.lower(), repr_error(payin), participant)
def payin_bank_wire(db, participant, debit_amount):
"""Prepare to receive a bank wire payin.
The amount should be how much the user intends to send, not how much will
arrive in the wallet.
"""
route = ExchangeRoute.from_network(participant, 'mango-bw')
if not route:
route = ExchangeRoute.insert(participant, 'mango-bw', 'x')
amount, fee, vat = skim_amount(debit_amount, FEE_PAYIN_BANK_WIRE)
e_id = record_exchange(db, route, amount, fee, vat, participant, 'pre')
payin = PayIn()
payin.AuthorId = participant.mangopay_user_id
if not participant.mangopay_wallet_id:
create_wallet(db, participant)
payin.CreditedWalletId = participant.mangopay_wallet_id
payin.ExecutionDetails = PayInExecutionDetailsDirect()
payin.PaymentDetails = PayInPaymentDetailsBankWire(
DeclaredDebitedFunds=Money(int(debit_amount * 100), 'EUR'),
DeclaredFees=Money(int(fee * 100), 'EUR'),
)
payin.Tag = str(e_id)
try:
test_hook()
payin = mangoapi.payIns.Create(payin)
except Exception as e:
error = repr_exception(e)
return None, record_exchange_result(db, e_id, 'failed', error, participant)
e = record_exchange_result(db, e_id, payin.Status.lower(), repr_error(payin), participant)
return payin, e
def record_exchange(db, route, amount, fee, vat, participant, status, error=None):
"""Given a Bunch of Stuff, return an int (exchange_id).
Records in the exchanges table have these characteristics:
amount It's negative for credits (representing an outflow from
Liberapay to you) and positive for charges.
The sign is how we differentiate the two in, e.g., the
history page.
fee The payment processor's fee. It's always positive.
vat The amount of VAT included in the fee. Always positive.
"""
with db.get_cursor() as cursor:
e = cursor.one("""
INSERT INTO exchanges
(amount, fee, vat, participant, status, route, note)
VALUES (%s, %s, %s, %s, %s, %s, %s)
RETURNING *
""", (amount, fee, vat, participant.id, status, route.id, error))
if status == 'failed':
propagate_exchange(cursor, participant, e, route, error, 0)
elif amount < 0:
amount -= fee
propagate_exchange(cursor, participant, e, route, '', amount)
return e.id
def record_exchange_result(db, exchange_id, status, error, participant):
"""Updates the status of an exchange.
"""
with db.get_cursor() as cursor:
e = cursor.one("""
UPDATE exchanges e
SET status=%(status)s
, note=%(error)s
WHERE id=%(exchange_id)s
AND status <> %(status)s
RETURNING id, amount, fee, vat, participant, recorder, note, status, timestamp
, ( SELECT r.*::exchange_routes
FROM exchange_routes r
WHERE r.id = e.route
) AS route
""", locals())
if not e:
return
assert participant.id == e.participant
assert isinstance(e.route, ExchangeRoute)
amount = e.amount
if amount < 0:
amount = -amount + e.fee if status == 'failed' else 0
else:
amount = amount if status == 'succeeded' else 0
propagate_exchange(cursor, participant, e, e.route, error, amount)
return e
def propagate_exchange(cursor, participant, exchange, route, error, amount):
"""Propagates an exchange's result to the participant's balance and the
route's status.
"""
route.update_error(error or '')
new_balance = cursor.one("""
UPDATE participants
SET balance=(balance + %s)
WHERE id=%s
RETURNING balance
""", (amount, participant.id))
if amount < 0 and new_balance < 0:
raise NegativeBalance
if amount < 0:
bundles = cursor.all("""
LOCK TABLE cash_bundles IN EXCLUSIVE MODE;
SELECT *
FROM cash_bundles
WHERE owner = %s
AND ts < now() - INTERVAL %s
ORDER BY ts
""", (participant.id, QUARANTINE))
withdrawable = sum(b.amount for b in bundles)
x = -amount
if x > withdrawable:
raise NotEnoughWithdrawableMoney(Money(withdrawable, 'EUR'))
for b in bundles:
if x >= b.amount:
cursor.run("""
INSERT INTO e2e_transfers
(origin, withdrawal, amount)
VALUES (%s, %s, %s)
""", (b.origin, exchange.id, b.amount))
cursor.run("DELETE FROM cash_bundles WHERE id = %s", (b.id,))
x -= b.amount
if x == 0:
break
else:
assert x > 0
cursor.run("""
INSERT INTO e2e_transfers
(origin, withdrawal, amount)
VALUES (%s, %s, %s)
""", (b.origin, exchange.id, x))
cursor.run("""
UPDATE cash_bundles
SET amount = (amount - %s)
WHERE id = %s
""", (x, b.id))
break
elif amount > 0:
cursor.run("""
INSERT INTO cash_bundles
(owner, origin, amount, ts)
VALUES (%s, %s, %s, %s)
""", (participant.id, exchange.id, amount, exchange.timestamp))
participant.set_attributes(balance=new_balance)
if amount != 0:
participant.update_giving_and_tippees(cursor)
def transfer(db, tipper, tippee, amount, context, **kw):
t_id = db.one("""
INSERT INTO transfers
(tipper, tippee, amount, context, team, status)
VALUES (%s, %s, %s, %s, %s, 'pre')
RETURNING id
""", (tipper, tippee, amount, context, kw.get('team')))
get = lambda id, col: db.one("SELECT {0} FROM participants WHERE id = %s".format(col), (id,))
tr = Transfer()
tr.AuthorId = kw.get('tipper_mango_id') or get(tipper, 'mangopay_user_id')
tr.CreditedUserId = kw.get('tippee_mango_id') or get(tippee, 'mangopay_user_id')
tr.CreditedWalletID = kw.get('tippee_wallet_id') or get(tippee, 'mangopay_wallet_id')
if not tr.CreditedWalletID:
tr.CreditedWalletID = create_wallet(db, Participant.from_id(tippee))
tr.DebitedFunds = Money(int(amount * 100), 'EUR')
tr.DebitedWalletID = kw.get('tipper_wallet_id') or get(tipper, 'mangopay_wallet_id')
tr.Fees = Money(0, 'EUR')
tr.Tag = str(t_id)
tr = mangoapi.transfers.Create(tr)
return record_transfer_result(db, t_id, tr)
def record_transfer_result(db, t_id, tr):
error = repr_error(tr)
status = tr.Status.lower()
assert (not error) ^ (status == 'failed')
return _record_transfer_result(db, t_id, status)
def _record_transfer_result(db, t_id, status):
with db.get_cursor() as c:
tipper, tippee, amount = c.one("""
UPDATE transfers
SET status = %s
WHERE id = %s
RETURNING tipper, tippee, amount
""", (status, t_id))
if status == 'succeeded':
balance = c.one("""
UPDATE participants
SET balance = balance + %(amount)s
WHERE id = %(tippee)s;
UPDATE participants
SET balance = balance - %(amount)s
WHERE id = %(tipper)s
AND balance - %(amount)s >= 0
RETURNING balance;
""", locals())
if balance is None:
raise NegativeBalance
bundles = c.all("""
LOCK TABLE cash_bundles IN EXCLUSIVE MODE;
SELECT *
FROM cash_bundles
WHERE owner = %s
ORDER BY ts
""", (tipper,))
x = amount
for b in bundles:
if x >= b.amount:
c.run("""
UPDATE cash_bundles
SET owner = %s
WHERE id = %s
""", (tippee, b.id))
x -= b.amount
if x == 0:
break
else:
c.run("""
UPDATE cash_bundles
SET amount = (amount - %s)
WHERE id = %s;
INSERT INTO cash_bundles
(owner, origin, amount, ts)
VALUES (%s, %s, %s, %s);
""", (x, b.id, tippee, b.origin, x, b.ts))
break
return balance
raise LazyResponse(500, lambda _: _("Transfering the money failed, please try again."))
def sync_with_mangopay(db):
"""We can get out of sync with MangoPay if record_exchange_result wasn't
completed. This is where we fix that.
"""
check_db(db)
exchanges = db.all("SELECT * FROM exchanges WHERE status = 'pre'")
for e in exchanges:
p = Participant.from_id(e.participant)
transactions = mangoapi.users.GetTransactions(p.mangopay_user_id)
transactions = [x for x in transactions if x.Tag == str(e.id)]
assert len(transactions) < 2
if transactions:
t = transactions[0]
error = repr_error(t)
status = t.Status.lower()
assert (not error) ^ (status == 'failed')
record_exchange_result(db, e.id, status, error, p)
else:
# The exchange didn't happen
if e.amount < 0:
# Mark it as failed if it was a credit
record_exchange_result(db, e.id, 'failed', 'interrupted', p)
else:
# Otherwise forget about it
db.run("DELETE FROM exchanges WHERE id=%s", (e.id,))
transfers = db.all("SELECT * FROM transfers WHERE status = 'pre'")
for t in transfers:
tipper = Participant.from_id(t.tipper)
transactions = mangoapi.wallets.GetTransactions(tipper.mangopay_wallet_id)
transactions = [x for x in transactions if x.Type == 'TRANSFER' and x.Tag == str(t.id)]
assert len(transactions) < 2
if transactions:
record_transfer_result(db, t.id, transactions[0])
else:
# The transfer didn't happen, remove it
db.run("DELETE FROM transfers WHERE id = %s", (t.id,))
check_db(db)
| [
"[email protected]"
]
| |
d56411141515e59e13c8e7158958d63b0b9c075f | 6a1afd25fd19e24eecf2e7f233027681a05903b8 | /backend/tchr_3855/urls.py | a774b8dbe6339003537a5be0b1f3e8aadddee690 | []
| no_license | crowdbotics-apps/tchr-3855 | 85e0c264c9b5adef9ae782cf9b23c0754a268564 | 5c9ef3ff5e4662bf23ca10532eeafd4e55f847b0 | refs/heads/master | 2022-12-10T22:10:23.132461 | 2019-05-26T10:22:49 | 2019-05-26T10:22:49 | 188,668,403 | 0 | 0 | null | 2022-12-09T03:58:32 | 2019-05-26T10:22:34 | Python | UTF-8 | Python | false | false | 1,034 | py | """tchr_3855 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url('', include('home.urls')),
url(r'^accounts/', include('allauth.urls')),
url(r'^api/v1/', include('home.api.v1.urls')),
url(r'^admin/', admin.site.urls),
]
admin.site.site_header = 'Tchr'
admin.site.site_title = 'Tchr Admin Portal'
admin.site.index_title = 'Tchr Admin'
| [
"[email protected]"
]
| |
e81eb9f98632ada8bf853b3971d0d91c0cf15163 | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/signalrservice/signal_r_private_endpoint_connection.py | 49b758f6a41d13686debc173c54642140e57c45e | [
"Apache-2.0",
"BSD-3-Clause"
]
| permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 13,202 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['SignalRPrivateEndpointConnectionArgs', 'SignalRPrivateEndpointConnection']
@pulumi.input_type
class SignalRPrivateEndpointConnectionArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
resource_name: pulumi.Input[str],
private_endpoint: Optional[pulumi.Input['PrivateEndpointArgs']] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']] = None):
"""
The set of arguments for constructing a SignalRPrivateEndpointConnection resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param pulumi.Input[str] resource_name: The name of the SignalR resource.
:param pulumi.Input['PrivateEndpointArgs'] private_endpoint: Private endpoint associated with the private endpoint connection
:param pulumi.Input[str] private_endpoint_connection_name: The name of the private endpoint connection associated with the SignalR resource.
:param pulumi.Input['PrivateLinkServiceConnectionStateArgs'] private_link_service_connection_state: Connection state
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "resource_name", resource_name)
if private_endpoint is not None:
pulumi.set(__self__, "private_endpoint", private_endpoint)
if private_endpoint_connection_name is not None:
pulumi.set(__self__, "private_endpoint_connection_name", private_endpoint_connection_name)
if private_link_service_connection_state is not None:
pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> pulumi.Input[str]:
"""
The name of the SignalR resource.
"""
return pulumi.get(self, "resource_name")
@resource_name.setter
def resource_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_name", value)
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional[pulumi.Input['PrivateEndpointArgs']]:
"""
Private endpoint associated with the private endpoint connection
"""
return pulumi.get(self, "private_endpoint")
@private_endpoint.setter
def private_endpoint(self, value: Optional[pulumi.Input['PrivateEndpointArgs']]):
pulumi.set(self, "private_endpoint", value)
@property
@pulumi.getter(name="privateEndpointConnectionName")
def private_endpoint_connection_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the private endpoint connection associated with the SignalR resource.
"""
return pulumi.get(self, "private_endpoint_connection_name")
@private_endpoint_connection_name.setter
def private_endpoint_connection_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_endpoint_connection_name", value)
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]:
"""
Connection state
"""
return pulumi.get(self, "private_link_service_connection_state")
@private_link_service_connection_state.setter
def private_link_service_connection_state(self, value: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]):
pulumi.set(self, "private_link_service_connection_state", value)
class SignalRPrivateEndpointConnection(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
private_endpoint: Optional[pulumi.Input[pulumi.InputType['PrivateEndpointArgs']]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
A private endpoint connection to SignalR resource
API Version: 2020-05-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['PrivateEndpointArgs']] private_endpoint: Private endpoint associated with the private endpoint connection
:param pulumi.Input[str] private_endpoint_connection_name: The name of the private endpoint connection associated with the SignalR resource.
:param pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']] private_link_service_connection_state: Connection state
:param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param pulumi.Input[str] resource_name_: The name of the SignalR resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SignalRPrivateEndpointConnectionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A private endpoint connection to SignalR resource
API Version: 2020-05-01.
:param str resource_name: The name of the resource.
:param SignalRPrivateEndpointConnectionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SignalRPrivateEndpointConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
private_endpoint: Optional[pulumi.Input[pulumi.InputType['PrivateEndpointArgs']]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
private_link_service_connection_state: Optional[pulumi.Input[pulumi.InputType['PrivateLinkServiceConnectionStateArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name_: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SignalRPrivateEndpointConnectionArgs.__new__(SignalRPrivateEndpointConnectionArgs)
__props__.__dict__["private_endpoint"] = private_endpoint
__props__.__dict__["private_endpoint_connection_name"] = private_endpoint_connection_name
__props__.__dict__["private_link_service_connection_state"] = private_link_service_connection_state
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if resource_name_ is None and not opts.urn:
raise TypeError("Missing required property 'resource_name_'")
__props__.__dict__["resource_name"] = resource_name_
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:signalrservice:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-native:signalrservice/v20200501:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:signalrservice/v20200501:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-native:signalrservice/v20200701preview:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:signalrservice/v20200701preview:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-native:signalrservice/v20210401preview:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:signalrservice/v20210401preview:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-native:signalrservice/v20210601preview:SignalRPrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:signalrservice/v20210601preview:SignalRPrivateEndpointConnection")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(SignalRPrivateEndpointConnection, __self__).__init__(
'azure-native:signalrservice:SignalRPrivateEndpointConnection',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'SignalRPrivateEndpointConnection':
"""
Get an existing SignalRPrivateEndpointConnection resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = SignalRPrivateEndpointConnectionArgs.__new__(SignalRPrivateEndpointConnectionArgs)
__props__.__dict__["name"] = None
__props__.__dict__["private_endpoint"] = None
__props__.__dict__["private_link_service_connection_state"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
return SignalRPrivateEndpointConnection(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> pulumi.Output[Optional['outputs.PrivateEndpointResponse']]:
"""
Private endpoint associated with the private endpoint connection
"""
return pulumi.get(self, "private_endpoint")
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> pulumi.Output[Optional['outputs.PrivateLinkServiceConnectionStateResponse']]:
"""
Connection state
"""
return pulumi.get(self, "private_link_service_connection_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Provisioning state of the private endpoint connection
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource - e.g. "Microsoft.SignalRService/SignalR"
"""
return pulumi.get(self, "type")
| [
"[email protected]"
]
| |
fdb52e61ad56ed5ce29737ced83eb1ee158f29fe | 56cd86b5438db288a2b602166071b079ffc5c22f | /backend/windy_utils_rest.py | d582f5031a4b730fbd668b3001fb53e9644edd9f | [
"MIT"
]
| permissive | mmmaaaggg/QABAT | d789921151533fd0c9fd4b89dc5eee2137b67e80 | d6f20d926de047af6857e466cf28084d0ba69993 | refs/heads/master | 2022-07-13T10:10:28.985442 | 2021-01-27T07:05:46 | 2021-01-27T07:05:46 | 139,661,275 | 4 | 1 | MIT | 2022-06-21T22:03:11 | 2018-07-04T02:58:18 | Python | UTF-8 | Python | false | false | 7,528 | py | # -*- coding: utf-8 -*-
"""
Created on 2016-12-22
@author: MG
"""
import pandas as pd
import requests
import json
from datetime import datetime, date
import logging
logger = logging.getLogger()
STR_FORMAT_DATE = '%Y-%m-%d'
STR_FORMAT_DATETIME_WIND = '%Y-%m-%d %H:%M:%S' # 2017-03-06 00:00:00.005000
UN_AVAILABLE_DATETIME = datetime.strptime('1900-01-01', STR_FORMAT_DATE)
UN_AVAILABLE_DATE = UN_AVAILABLE_DATETIME.date()
def format_2_date_str(dt) -> str:
if dt is None:
return None
dt_type = type(dt)
if dt_type == str:
return dt
elif dt_type == date:
if dt > UN_AVAILABLE_DATE:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
elif dt_type == datetime:
if dt > UN_AVAILABLE_DATETIME:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
else:
return dt
def format_2_datetime_str(dt) -> str:
if dt is None:
return None
dt_type = type(dt)
if dt_type == str:
return dt
elif dt_type == date:
if dt > UN_AVAILABLE_DATE:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
elif dt_type == datetime:
if dt > UN_AVAILABLE_DATETIME:
return dt.strftime(STR_FORMAT_DATETIME_WIND)
else:
return None
else:
return dt
class APIError(Exception):
def __init__(self, status, ret_dic):
self.status = status
self.ret_dic = ret_dic
def __str__(self):
return "APIError:status=POST / {} {}".format(self.status, self.ret_dic)
class WindRest:
def __init__(self, url_str):
self.url = url_str
self.header = {'Content-Type': 'application/json'}
def _url(self, path: str) -> str:
return self.url + path
def public_post(self, path: str, req_data: str) -> list:
# print('self._url(path):', self._url(path))
ret_data = requests.post(self._url(path), data=req_data, headers=self.header)
ret_dic = ret_data.json()
if ret_data.status_code != 200:
raise APIError(ret_data.status_code, ret_dic)
else:
return ret_data.status_code, ret_dic
def wset(self, table_name, options) -> pd.DataFrame:
path = 'wset/'
req_data_dic = {"table_name": table_name, "options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def wss(self, codes, fields, options="") -> pd.DataFrame:
path = 'wss/'
req_data_dic = {"codes": codes, "fields": fields, "options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def wsd(self, codes, fields, begin_time, end_time, options="") -> pd.DataFrame:
path = 'wsd/'
req_data_dic = {"codes": codes, "fields": fields,
"begin_time": format_2_date_str(begin_time),
"end_time": format_2_date_str(end_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def wsi(self, codes, fields, begin_time, end_time, options="") -> pd.DataFrame:
path = 'wsi/'
req_data_dic = {"codes": codes, "fields": fields,
"begin_time": format_2_date_str(begin_time),
"end_time": format_2_date_str(end_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def wst(self, codes, fields, begin_time, end_time, options="") -> pd.DataFrame:
path = 'wst/'
req_data_dic = {"codes": codes, "fields": fields,
"begin_time": format_2_datetime_str(begin_time),
"end_time": format_2_datetime_str(end_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def wsq(self, codes, fields, options="") -> pd.DataFrame:
path = 'wsq/'
req_data_dic = {"codes": codes, "fields": fields, "options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
def tdaysoffset(self, offset, begin_time, options="") -> dict:
path = 'tdaysoffset/'
req_data_dic = {"offset": offset,
"begin_time": format_2_date_str(begin_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
date_str = json_dic['Date']
return date_str
def tdays(self, begin_time, end_time, options="") -> dict:
path = 'tdays/'
req_data_dic = {"begin_time": format_2_date_str(begin_time),
"end_time": format_2_date_str(end_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
# df = pd.DataFrame(json_dic)
return json_dic
def edb(self, codes, begin_time, end_time, options) -> pd.DataFrame:
path = 'edb/'
req_data_dic = {"codes": codes,
"begin_time": format_2_date_str(begin_time),
"end_time": format_2_date_str(end_time),
"options": options}
req_data = json.dumps(req_data_dic)
_, json_dic = self.public_post(path, req_data)
df = pd.DataFrame(json_dic).T
return df
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s: %(levelname)s [%(name)s:%(funcName)s] %(message)s')
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
# url_str = "http://10.0.5.65:5000/wind/"
url_str = "http://10.0.5.62:5000/wind/" # "http://10.0.3.78:5000/wind/"
rest = WindRest(url_str)
# data_df = rest.wset(table_name="sectorconstituent", options="date=2017-03-21;sectorid=1000023121000000")
# data_df = rest.wss(codes="QHZG160525.OF", fields="fund_setupdate,fund_maturitydate,fund_mgrcomp,fund_existingyear,fund_ptmyear,fund_type,fund_fundmanager")
# data_df = rest.wsd("601398.SH", "open,high,low,close,volume", "2017-01-04", "2017-02-28", "PriceAdj=F")
# data_df = rest.tdays(begin_time="2017-01-04", end_time="2017-02-28")
# data_df = rest.wst("600000.SH", "ask1,bid1,asize1,bsize1,volume,amt,pre_close,open,high,low,last", "2017-10-20 09:15:00", "2017-10-20 09:26:00", "")
# data_df = rest.wsi("RU1801.SHF", "open,high,low,close,volume,amt,oi", "2017-12-8 09:00:00", "2017-12-8 11:30:00", "")
try:
data_df = rest.wsd("000987.SZ", "open,high,low,close,volume", "2017-12-18", "2017-12-19", "")
print(data_df)
except APIError as exp:
if exp.status == 500:
print('APIError.status:', exp.status, exp.ret_dic['message'])
else:
print(exp.ret_dic.setdefault('error_code', ''), exp.ret_dic['message'])
# date_str = rest.tdaysoffset(1, '2017-3-31')
# print(date_str)
| [
"[email protected]"
]
| |
4a142a01a45fa51f9c863eef0f039791d863a013 | 9f884a3584eef771f8c010e296c5d763098be243 | /povary/apps/seo_v2/migrations/0001_initial.py | bce4b67bee7aa622577573aa590abe5b27575f0d | [
"BSD-3-Clause"
]
| permissive | TorinAsakura/cooking | fc8658ce2ac21c2e00dc307399a5fa24971a20c1 | cf0c78f613fa9ce0fcd4ec7a397ab880d9dd631a | refs/heads/master | 2023-01-24T13:07:38.529811 | 2020-12-08T22:14:33 | 2020-12-08T22:14:33 | 319,773,012 | 0 | 0 | BSD-3-Clause | 2020-12-08T22:14:34 | 2020-12-08T22:08:34 | null | UTF-8 | Python | false | false | 4,096 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SeoTarget'
db.create_table(u'seo_v2_seotarget', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=128, unique=True, null=True, blank=True)),
))
db.send_create_signal(u'seo_v2', ['SeoTarget'])
# Adding model 'SeoTemplate'
db.create_table(u'seo_v2_seotemplate', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('target', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['seo_v2.SeoTarget'], null=True, blank=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], null=True, blank=True)),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('keywords', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('title', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('bottom_desc', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal(u'seo_v2', ['SeoTemplate'])
# Adding unique constraint on 'SeoTemplate', fields ['object_id', 'content_type']
db.create_unique(u'seo_v2_seotemplate', ['object_id', 'content_type_id'])
def backwards(self, orm):
# Removing unique constraint on 'SeoTemplate', fields ['object_id', 'content_type']
db.delete_unique(u'seo_v2_seotemplate', ['object_id', 'content_type_id'])
# Deleting model 'SeoTarget'
db.delete_table(u'seo_v2_seotarget')
# Deleting model 'SeoTemplate'
db.delete_table(u'seo_v2_seotemplate')
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'seo_v2.seotarget': {
'Meta': {'object_name': 'SeoTarget'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'seo_v2.seotemplate': {
'Meta': {'unique_together': "(('object_id', 'content_type'),)", 'object_name': 'SeoTemplate'},
'bottom_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['seo_v2.SeoTarget']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['seo_v2'] | [
"[email protected]"
]
| |
66407c0b6c977324006de7c5c228296df7783a34 | 818e5e78f84596a7c086b218fd4aa9e8ea912afe | /hackatons/materials/algo/source/T5_LinearStructure/P3_List/ListWithCurrentElement.py | fdfd273b72742e9271e6be2dcdf2c5b282946b4b | []
| no_license | davendiy/forpythonanywhere | 44fbc63651309598b58391667f0fead40e8fad91 | 1b9292ca33b06b17cd516e4e9913479edb6d35cd | refs/heads/master | 2020-08-10T04:24:02.665635 | 2019-10-25T07:05:46 | 2019-10-25T07:05:46 | 214,255,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,289 | py | class Node:
""" Допоміжний клас - вузол списку. """
def __init__(self, item):
""" Конструктор """
self.mItem = item # навантаження вузла
self.mNext = None # посилання на наступний вузол списку
class ListWithCurrent:
def __init__(self):
""" Конструктор - створює новий порожній список.
"""
self.mHead = None # Перший вузол списку
self.mPrev = None # Вузол, що передує поточному елементу списку
self.mCurr = None # Поточний вузол списку
def empty(self):
""" Перевіряє чи список порожній
:return: True, якщо список не містить жодного елемента
"""
return self.mHead is None
def reset(self):
""" Зробити поточний елемент першим.
"""
self.mCurr = self.mHead
self.mPrev = None
def next(self):
""" Перейти до наступного елемента.
Породжує виключення StopIteration, якщо наступний елемент порожній
:return: None
"""
if self.mCurr is not None:
self.mPrev = self.mCurr
self.mCurr = self.mCurr.mNext
else:
raise StopIteration
def current(self):
""" Отримати поточний елемент
:return: Навантаження поточного елементу
"""
if self.mCurr is not None:
return self.mCurr.mItem
else:
return None
def insert(self, item):
""" Вставити новий елемент у список перед поточним
:param item: елемент, що вставляється у спиоск
:return: None
"""
node = Node(item)
node.mNext = self.mCurr
if self.mCurr == self.mHead:
self.mHead = node
if self.mPrev is not None:
self.mPrev.mNext = node
self.mPrev = node
def remove(self):
""" Видалити поточний елемент у списку
Видалення переставляє вказівник на поточний елемент на наступний
"""
pass # TODO: Implement by yourself
def __str__(self):
return str(self.current())
def __iter__(self):
self.mIterator = self.mHead
return self
def __next__(self):
if self.mIterator is not None:
cur = self.mIterator.mItem
self.mIterator = self.mIterator.mNext
return cur
else:
raise StopIteration
l = ListWithCurrent()
l.insert(11)
l.insert(12)
l.insert(13)
l.insert(14)
l.insert(15)
l.insert(16)
l.reset()
l.next()
print(l)
it = iter(l)
while True:
try:
print(next(l))
except StopIteration:
break
# l.reset()
# print(l)
# l.next()
# l.next()
# l.next()
# print(l)
# l.next()
#
#
# l.insert(555)
# #
# for el in l:
# print(el)
| [
"[email protected]"
]
| |
ad767fd0acf09ffa4bb1040d65480632d58051cd | b4361173b0605ad5efb9381610fb985ad27ee00b | /tests/test_manager.py | 291831d621e28387c96f8c599a68d7e0728d0c8c | [
"MIT"
]
| permissive | gardleopard/rhea | 24bd204735da0915f27b8cc060e377e37b610dba | 36a8e908281ca9af232c5ce2e2cf64259221c3a6 | refs/heads/master | 2020-04-09T18:15:06.731235 | 2018-10-08T15:35:46 | 2018-10-08T15:35:46 | 160,506,155 | 0 | 0 | MIT | 2018-12-05T11:10:19 | 2018-12-05T11:10:19 | null | UTF-8 | Python | false | false | 18,042 | py | import os
from unittest import TestCase
from rhea.exceptions import RheaError
from rhea.manager import Rhea
from rhea.specs import UriSpec
class TestRhea(TestCase):
def setUp(self):
os.environ['FOO_BAR_KEY'] = 'foo_bar'
self.config = Rhea.read_configs(
[os.environ,
'./tests/fixtures/configs/config_tests.json'])
def test_get_from_os_env(self):
assert self.config.get_string('FOO_BAR_KEY') == 'foo_bar'
def test_reading_invalid_config_raises_error(self):
with self.assertRaises(RheaError):
Rhea.read_configs(
['./tests/fixtures/configs/invalid_config_tests.json'])
def test_get_boolean(self):
value = self.config.get_boolean('bool_key_1')
self.assertEqual(value, True)
value = self.config.get_boolean('bool_key_2')
self.assertEqual(value, True)
value = self.config.get_boolean('bool_key_3')
self.assertEqual(value, False)
value = self.config.get_boolean('bool_key_4')
self.assertEqual(value, False)
value = self.config.get_boolean('bool_list_key_1', is_list=True)
self.assertEqual(value, [False, False, True, True, True, False])
with self.assertRaises(RheaError):
self.config.get_boolean('bool_error_key_1')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_error_key_2')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_error_key_3')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_error_key_4')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_error_key_5')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_list_key_1')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_boolean('bool_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_boolean('bool_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_boolean('bool_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_boolean('bool_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_boolean('bool_non_existing_key', is_list=True)
self.assertEqual(self.config.get_boolean('bool_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_boolean(
'bool_non_existing_key', is_optional=True, default=True), True)
self.assertEqual(self.config.get_boolean(
'bool_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(self.config.get_boolean(
'bool_non_existing_key', is_list=True, is_optional=True, default=[True, False]),
[True, False])
def test_get_int(self):
value = self.config.get_int('int_key_1')
self.assertEqual(value, 123)
value = self.config.get_int('int_key_2')
self.assertEqual(value, 123)
value = self.config.get_int('int_list_key_1', is_list=True)
self.assertEqual(value, [123, 124, 125, 125])
with self.assertRaises(RheaError):
self.config.get_int('int_error_key_1')
with self.assertRaises(RheaError):
self.config.get_int('int_error_key_2')
with self.assertRaises(RheaError):
self.config.get_int('int_error_key_3')
with self.assertRaises(RheaError):
self.config.get_int('int_list_key_1')
with self.assertRaises(RheaError):
self.config.get_int('int_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_int('int_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_int('int_list_error_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_int('int_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_int('int_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_int('int_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_int('int_non_existing_key', is_list=True)
self.assertEqual(self.config.get_int(
'int_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_int(
'int_non_existing_key', is_optional=True, default=34), 34)
self.assertEqual(self.config.get_int(
'int_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(self.config.get_int(
'int_non_existing_key', is_list=True, is_optional=True, default=[34, 1]), [34, 1])
def test_get_float(self):
value = self.config.get_float('float_key_1')
self.assertEqual(value, 1.23)
value = self.config.get_float('float_key_2')
self.assertEqual(value, 1.23)
value = self.config.get_float('float_key_3')
self.assertEqual(value, 123)
value = self.config.get_float('float_list_key_1', is_list=True)
self.assertEqual(value, [1.23, 13.3, 4.4, 555., 66.])
with self.assertRaises(RheaError):
self.config.get_float('float_error_key_1')
with self.assertRaises(RheaError):
self.config.get_float('float_error_key_2')
with self.assertRaises(RheaError):
self.config.get_float('float_error_key_3')
with self.assertRaises(RheaError):
self.config.get_float('float_error_key_4')
with self.assertRaises(RheaError):
self.config.get_float('float_list_key_1')
with self.assertRaises(RheaError):
self.config.get_float('float_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_float('float_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_float('float_list_error_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_float('float_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_float('float_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_float('float_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_float('float_non_existing_key', is_list=True)
self.assertEqual(self.config.get_float(
'float_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_float(
'float_non_existing_key', is_optional=True, default=3.4), 3.4)
self.assertEqual(self.config.get_float(
'float_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(self.config.get_float(
'float_non_existing_key', is_list=True, is_optional=True, default=[3.4, 1.2]),
[3.4, 1.2])
def test_get_string(self):
value = self.config.get_string('string_key_1')
self.assertEqual(value, "123")
value = self.config.get_string('string_key_2')
self.assertEqual(value, "1.23")
value = self.config.get_string('string_key_3')
self.assertEqual(value, "foo")
value = self.config.get_string('string_key_4')
self.assertEqual(value, "")
value = self.config.get_string('string_list_key_1', is_list=True)
self.assertEqual(value, ["123", "1.23", "foo", ""])
with self.assertRaises(RheaError):
self.config.get_string('string_error_key_1')
with self.assertRaises(RheaError):
self.config.get_string('string_error_key_2')
with self.assertRaises(RheaError):
self.config.get_string('string_error_key_3')
with self.assertRaises(RheaError):
self.config.get_string('string_error_key_4')
with self.assertRaises(RheaError):
self.config.get_string('string_list_key_1')
with self.assertRaises(RheaError):
self.config.get_string('string_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_list_error_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_list_error_key_4', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_key_4', is_list=True)
with self.assertRaises(RheaError):
self.config.get_string('string_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_string('string_non_existing_key', is_list=True)
self.assertEqual(self.config.get_string(
'string_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_string(
'string_non_existing_key', is_optional=True, default='foo'), 'foo')
self.assertEqual(self.config.get_string(
'string_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(self.config.get_string(
'string_non_existing_key', is_list=True, is_optional=True, default=['foo', 'bar']),
['foo', 'bar'])
def test_get_dict(self):
value = self.config.get_dict('dict_key_1')
self.assertEqual(value, {"key1": "foo", "key2": 2, "key3": False, "key4": "1"})
value = self.config.get_dict('dict_list_key_1', is_list=True)
self.assertEqual(value, [
{"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
{"key3": True, "key4": "2"},
{"key1": False, "key2": "3"}
])
with self.assertRaises(RheaError):
self.config.get_dict('dict_error_key_1')
with self.assertRaises(RheaError):
self.config.get_dict('dict_error_key_2')
with self.assertRaises(RheaError):
self.config.get_dict('dict_error_key_3')
with self.assertRaises(RheaError):
self.config.get_dict('dict_error_key_4')
with self.assertRaises(RheaError):
self.config.get_dict('dict_list_key_1')
with self.assertRaises(RheaError):
self.config.get_dict('dict_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_dict('dict_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_dict('dict_list_error_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_dict('dict_list_error_key_4', is_list=True)
with self.assertRaises(RheaError):
self.config.get_dict('dict_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_dict('dict_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_dict('dict_non_existing_key', is_list=True)
self.assertEqual(self.config.get_dict('dict_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_dict(
'dict_non_existing_key', is_optional=True, default={'foo': 'bar'}), {'foo': 'bar'})
self.assertEqual(self.config.get_dict(
'dict_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(self.config.get_dict(
'dict_non_existing_key', is_list=True, is_optional=True, default=[
{'foo': 'bar'}, {'foo': 'boo'}]),
[{'foo': 'bar'}, {'foo': 'boo'}])
def test_get_uri(self):
value = self.config.get_uri('uri_key_1')
self.assertEqual(value, UriSpec("user", "pass", "siteweb.ca"))
value = self.config.get_uri('uri_key_2')
self.assertEqual(value, UriSpec("user2", "pass", "localhost:8080"))
value = self.config.get_uri('uri_key_3')
self.assertEqual(value, UriSpec("user2", "pass", "https://quay.io"))
value = self.config.get_uri('uri_list_key_1', is_list=True)
self.assertEqual(value, [
UriSpec("user", "pass", "siteweb.ca"),
UriSpec("user2", "pass", "localhost:8080"),
UriSpec("user2", "pass", "https://quay.io")
])
with self.assertRaises(RheaError):
self.config.get_uri('uri_error_key_1')
with self.assertRaises(RheaError):
self.config.get_uri('uri_error_key_2')
with self.assertRaises(RheaError):
self.config.get_uri('uri_error_key_3')
with self.assertRaises(RheaError):
self.config.get_uri('uri_error_key_4')
with self.assertRaises(RheaError):
self.config.get_uri('uri_list_key_1')
with self.assertRaises(RheaError):
self.config.get_uri('uri_list_error_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_uri('uri_list_error_key_2', is_list=True)
with self.assertRaises(RheaError):
self.config.get_uri('uri_list_error_key_3', is_list=True)
with self.assertRaises(RheaError):
self.config.get_uri('uri_list_error_key_4', is_list=True)
with self.assertRaises(RheaError):
self.config.get_uri('uri_key_1', is_list=True)
with self.assertRaises(RheaError):
self.config.get_uri('uri_non_existing_key')
with self.assertRaises(RheaError):
self.config.get_uri('uri_non_existing_key', is_list=True)
self.assertEqual(self.config.get_uri('uri_non_existing_key', is_optional=True), None)
self.assertEqual(
self.config.get_uri('uri_non_existing_key',
is_optional=True,
default=UriSpec("user2", "pass", "localhost:8080")),
UriSpec("user2", "pass", "localhost:8080"))
self.assertEqual(
self.config.get_uri('uri_non_existing_key', is_list=True, is_optional=True), None)
self.assertEqual(
self.config.get_uri('uri_non_existing_key',
is_list=True,
is_optional=True,
default=[UriSpec("user", "pass", "siteweb.ca"),
UriSpec("user2", "pass", "localhost:8080")]),
[UriSpec("user", "pass", "siteweb.ca"), UriSpec("user2", "pass", "localhost:8080")])
def test_get_list(self):
value = self.config.get_list('list_key_1')
self.assertEqual(value, ['user:[email protected]', "'pp'", '0.1', "'foo'"])
value = self.config.get_list('list_key_2')
self.assertEqual(value, ['user1', 'user2', 'user3', 'user4', 'user5'])
value = self.config.get_list('list_key_3')
self.assertEqual(value, [False])
value = self.config.get_list('list_key_4')
self.assertEqual(value, ['foo'])
value = self.config.get_list('list_key_5')
self.assertEqual(value, [])
with self.assertRaises(RheaError):
self.config.get_list('list_error_key_1')
with self.assertRaises(RheaError):
self.config.get_list('list_error_key_2')
with self.assertRaises(RheaError):
self.config.get_list('list_error_key_3')
with self.assertRaises(RheaError):
self.config.get_list('list_error_key_4')
with self.assertRaises(RheaError):
self.config.get_list('list_non_existing_key')
self.assertEqual(self.config.get_list(
'list_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_list(
'list_non_existing_key', is_optional=True, default=['foo']), ['foo'])
def test_get_dict_of_dicts(self):
value = self.config.get_dict_of_dicts('dict_dicts_key_1')
self.assertEqual(value, {'data1': {'mountPath': '/data/21', 'existingClaim': 'data-1-pvc'}})
value = self.config.get_dict_of_dicts('dict_dicts_key_2')
self.assertEqual(value, {
'outputs1': {'mountPath': '/output/2', 'existingClaim': 'outputs-1-pvc'},
'outputs2': {'mountPath': '/output/2', 'existingClaim': 'output-2-pvc'}})
value = self.config.get_dict_of_dicts('dict_dicts_key_3')
self.assertEqual(value, None)
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_1')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_2')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_3')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_4')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_4')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_5')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_6')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_error_key_7')
with self.assertRaises(RheaError):
self.config.get_dict_of_dicts('dict_dicts_non_existing_key')
self.assertEqual(self.config.get_dict_of_dicts(
'dict_dicts_non_existing_key', is_optional=True), None)
self.assertEqual(self.config.get_dict_of_dicts(
'dict_dicts_non_existing_key', is_optional=True, default={}), {})
| [
"[email protected]"
]
| |
d80a97bb723810380e758ac7493d3b6fade462e5 | afd957cf224d7c0b1b9c7216c9767fbea0dbcfea | /Python codebetter training/class&objects/program6.py | f3921d4e010c7788eb403b820c09daf12f21f848 | []
| no_license | Chandu8817/python_code | d7fea1b81cbb84b98f5527beaa7350884f5e2ab8 | 2827ebeb463a764b67ba6621d08a58a3783d26e4 | refs/heads/master | 2023-05-03T01:09:37.199935 | 2021-05-28T13:07:39 | 2021-05-28T13:07:39 | 299,087,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | class Employe:
name="rohan"
def __init__(self,id,salary):
self.id=id
self.salary=salary
def show(self):
print(self.name,self.id,self.salary)
obj=Employe(123,40000)
obj.show()
| [
"[email protected]"
]
| |
c4dc8e90148e5925afe99b51bdd1f6a3b9dc57b6 | 956b70a8904fbbab3686f1b11e7ff1b6402caa48 | /codecademy/student_become_teacher/lesson1.py | 07e1de2ac66539c8d743b0f8c411c7d0c43c8a27 | []
| no_license | udoyen/andela-homestead | 3b839eec813084c48b8588f3d4977801077e360e | 74405ae893d5f3b0548f840c6ca76a4b9315760f | refs/heads/master | 2020-08-05T16:14:13.831835 | 2016-10-16T12:12:00 | 2016-10-16T12:12:00 | 66,066,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | lloyd = {
"name": "Lloyd",
"homework": [90.0, 97.0, 75.0, 92.0],
"quizzes": [88.0, 40.0, 94.0],
"tests": [75.0, 90.0]
}
alice = {
"name": "Alice",
"homework": [100.0, 92.0, 98.0, 100.0],
"quizzes": [82.0, 83.0, 91.0],
"tests": [89.0, 97.0]
}
tyler = {
"name": "Tyler",
"homework": [0.0, 87.0, 75.0, 22.0],
"quizzes": [0.0, 75.0, 78.0],
"tests": [100.0, 100.0]
}
students = [lloyd, alice, tyler]
for student in students:
print(student['name'])
print(student['homework'])
print(student['quizzes'])
print(student['tests'])
print()
def average(numbers):
total = sum(numbers)
total = float(total)
return total / len(numbers)
def get_average(student):
homework = average(student['homework'])
quizzes = average(student['quizzes'])
tests = average(student['tests'])
return (0.1 * homework) + (0.3 * quizzes) + (0.6 * tests)
def get_letter_grade(score):
if score >= 90:
return "A"
elif score >= 80:
return "B"
elif score >= 70:
return "C"
elif score >= 60:
return "D"
else:
return "F"
def get_class_average(students):
results = []
for student in students:
results.append(get_average(student))
return average(results)
print(get_letter_grade(get_average(lloyd)))
print(get_class_average(students))
print(get_letter_grade(get_class_average(students)))
| [
"[email protected]"
]
| |
257b3908e7b755a7f4c0fafd5ad2fd16de5664cd | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/169/usersdata/268/72565/submittedfiles/divisores.py | 1d0ddbb1446164b961708d65187171252ce94f5d | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # -*- coding: utf-8 -*-
import math
n=int(input('Digite a quantidade de multiplos: '))
a=int(input('Digite o numero a : '))
b=int(input('DIgite o numero b : '))
i=1
if (n%2)==0:
if (a<b):
while(i<=n/2):
multiploa=a*i
print(multiploa)
multiplob=b*i
print(multiplob)
i= i + 1
else:
while(i<=n/2):
multiplob=b*i
print(multiplob)
multiplob=a*i
print(multiploa)
i= i + 1
else:
if (a<b):
while(i<=(n//2)):
multiploa=a*i
print(multiploa)
multiplob=b*i
print(multiplob)
i= i + 1
else:
while(i<=n//2):
multiplob=b*i
print(multiplob)
multiplob=a*i
print(multiploa)
i= i + 1
if(a%2!=0):
if (a<b):
multiploa=a*i
print(multiploa)
else :
multiplob=b*i
print(multiplob)
| [
"[email protected]"
]
| |
f3ab348e0be0a838a775ac8a6672141ced7a15de | ad206aa0d228d5d3e41261316b88e190437e21c4 | /contrib/devtools/test-security-check.py | e179401c7ed762f5c793294d304d0b9f67e0dd66 | [
"MIT"
]
| permissive | gtacoin-dev/gtacoin | a0188517948afb4458913d87b2f600ffaf9b6803 | f66f063b47ba973856c200074db1b95abf5ab794 | refs/heads/master | 2021-01-22T10:59:35.068066 | 2017-02-15T15:29:16 | 2017-02-15T15:29:16 | 82,058,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,651 | py | #!/usr/bin/python2
# Copyright (c) 2015-2016 The Gtacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Test script for security-check.py
'''
from __future__ import division,print_function
import subprocess
import sys
import unittest
def write_testcode(filename):
with open(filename, 'w') as f:
f.write('''
#include <stdio.h>
int main()
{
printf("the quick brown fox jumps over the lazy god\\n");
return 0;
}
''')
def call_security_check(cc, source, executable, options):
subprocess.check_call([cc,source,'-o',executable] + options)
p = subprocess.Popen(['./security-check.py',executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout.rstrip())
class TestSecurityChecks(unittest.TestCase):
def test_ELF(self):
source = 'test1.c'
executable = 'test1'
cc = 'gcc'
write_testcode(source)
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-zexecstack','-fno-stack-protector','-Wl,-znorelro']),
(1, executable+': failed PIE NX RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fno-stack-protector','-Wl,-znorelro']),
(1, executable+': failed PIE RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro']),
(1, executable+': failed PIE RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-pie','-fPIE']),
(1, executable+': failed RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE']),
(0, ''))
def test_PE(self):
source = 'test1.c'
executable = 'test1.exe'
cc = 'i686-w64-mingw32-gcc'
write_testcode(source)
self.assertEqual(call_security_check(cc, source, executable, []),
(1, executable+': failed PIE NX'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat']),
(1, executable+': failed PIE'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--dynamicbase']),
(0, ''))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
7152ebbf5e92af8028c28d7c542e1b6f87c1066b | f183df1dcdfee18a77b95ba395ab1f7cc15a5a5b | /python/flask/ex03/env/bin/pygmentize | 97ff469afb30e69e141002889a8e8a7f9789d71f | []
| no_license | ltakuno/arquivos | 7686578fbdefaf04d11e738325d7a34631b4c113 | c04198264cc9f32fd472453fea3b627a03794008 | refs/heads/master | 2023-01-24T13:11:29.861084 | 2021-10-26T10:52:03 | 2021-10-26T10:52:03 | 98,174,425 | 0 | 1 | null | 2023-01-11T12:02:14 | 2017-07-24T09:38:12 | Python | UTF-8 | Python | false | false | 266 | #!/home/leo/Desktop/Pessoal/arquivos/python/flask/ex03/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pygments.cmdline import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
698c24e7ea2ec00d6d552bae36318d0420665ece | 36b46e4c1d6ea1294269d57fc5467be600748db0 | /batch2/day15/image.py | 44f34193a29679072d7f522a0a466e3a6c53499d | []
| no_license | shaadomanthra/cbpython | 5a4f6b588d59e99d4d01ae19b018efe964f6a1c4 | 57e855b49221ff1a502c3f80a3ee62815f619c51 | refs/heads/master | 2022-09-26T09:19:17.168692 | 2020-06-04T12:15:08 | 2020-06-04T12:15:08 | 262,913,907 | 0 | 7 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | from cv2 import cv2
img = cv2.imread('f1.jpg')
cv2.imshow('image',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
# pip install opencv-python
# pip uninstall opencv-python
| [
"[email protected]"
]
| |
226587a22ab6d94abd1eaa631c9cf7d4c94bcbe3 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/GTop/glibtop_fsusage.py | ce0b9f44de9aed3e9066212f3111af5827cdf720 | []
| no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 5,569 | py | # encoding: utf-8
# module gi.repository.GTop
# from /usr/lib64/girepository-1.0/GTop-2.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
class glibtop_fsusage(__gi.Struct):
"""
:Constructors:
::
glibtop_fsusage()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
bavail = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
bfree = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
blocks = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
block_size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
ffree = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
files = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
flags = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
read = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
write = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(glibtop_fsusage), '__module__': 'gi.repository.GTop', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'glibtop_fsusage' objects>, '__weakref__': <attribute '__weakref__' of 'glibtop_fsusage' objects>, '__doc__': None, 'flags': <property object at 0x7f9700c73770>, 'blocks': <property object at 0x7f9700c73860>, 'bfree': <property object at 0x7f9700c73950>, 'bavail': <property object at 0x7f9700c73a40>, 'files': <property object at 0x7f9700c73b30>, 'ffree': <property object at 0x7f9700c73c20>, 'block_size': <property object at 0x7f9700c73d10>, 'read': <property object at 0x7f9700c73e00>, 'write': <property object at 0x7f9700c73ef0>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(glibtop_fsusage)
| [
"[email protected]"
]
| |
05e941b211ab3c58e344cee9ddbef30df1cc5c81 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-compute/azure/mgmt/compute/v2018_04_01/models/virtual_machine_capture_result.py | 2972e1f2fcda79359a1c924857d0281796beedb2 | [
"MIT"
]
| permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 1,895 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualMachineCaptureResult(SubResource):
"""Output of virtual machine capture operation.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource Id
:type id: str
:ivar schema: the schema of the captured virtual machine
:vartype schema: str
:ivar content_version: the version of the content
:vartype content_version: str
:ivar parameters: parameters of the captured virtual machine
:vartype parameters: object
:ivar resources: a list of resource items of the captured virtual machine
:vartype resources: list[object]
"""
_validation = {
'schema': {'readonly': True},
'content_version': {'readonly': True},
'parameters': {'readonly': True},
'resources': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'schema': {'key': '$schema', 'type': 'str'},
'content_version': {'key': 'contentVersion', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'object'},
'resources': {'key': 'resources', 'type': '[object]'},
}
def __init__(self, **kwargs):
super(VirtualMachineCaptureResult, self).__init__(**kwargs)
self.schema = None
self.content_version = None
self.parameters = None
self.resources = None
| [
"[email protected]"
]
| |
ef806be5869ec78751bf85efc29d25df572e1eb4 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_385/ch19_2019_03_12_19_38_14_663227.py | d013924ef9a73fc01e5311fa6f7f4983c515a29b | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import math
def calcula_distancia_do_projetil(v,a,y):
d=(v**2/(2*9.8))*((1+(1+(2*(9.8)*y)/((v**2)*(math.sin(a))**2)))**(0.5))*math.sin(2*a)
return d | [
"[email protected]"
]
| |
d796be626c577ac549f8adc4a444f94bcb301597 | 0d24036dcf8736c0392a1ee1c2f3b45633221d8a | /etc/src/genpy-mpls-ldp-oper/cisco_ios_xr_mpls_ldp_oper/mpls_ldp/global/standby/bindings_summary_all/ldp_binding_summary_pb2.py | c426f8c3156829c6c44f31a013afca2915718b26 | []
| no_license | mspiez/telemetry_collector | c4b97c6686748fc20748898a25e9fc756d2d0b63 | 52ed12c06debfe04181f0bfea9854a66ed8bb3df | refs/heads/master | 2020-12-19T23:28:08.358956 | 2020-05-02T19:54:38 | 2020-05-02T19:54:38 | 235,883,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 15,058 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cisco_ios_xr_mpls_ldp_oper/mpls_ldp/global/standby/bindings_summary_all/ldp_binding_summary.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='cisco_ios_xr_mpls_ldp_oper/mpls_ldp/global/standby/bindings_summary_all/ldp_binding_summary.proto',
package='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all',
syntax='proto3',
serialized_pb=_b('\nacisco_ios_xr_mpls_ldp_oper/mpls_ldp/global/standby/bindings_summary_all/ldp_binding_summary.proto\x12Gcisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all\"\x1a\n\x18ldp_binding_summary_KEYS\"\x9f\x04\n\x13ldp_binding_summary\x12\x62\n\x03vrf\x18\x32 \x01(\x0b\x32U.cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_vrf_info\x12\x16\n\x0e\x61\x64\x64ress_family\x18\x33 \x01(\t\x12p\n\x07\x62ind_af\x18\x34 \x03(\x0b\x32_.cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af\x12\x18\n\x10\x62inding_no_route\x18\x35 \x01(\r\x12\x1e\n\x16\x62inding_local_no_route\x18\x36 \x01(\r\x12\x1a\n\x12\x62inding_local_null\x18\x37 \x01(\r\x12#\n\x1b\x62inding_local_implicit_null\x18\x38 \x01(\r\x12#\n\x1b\x62inding_local_explicit_null\x18\x39 \x01(\r\x12\x1e\n\x16\x62inding_local_non_null\x18: \x01(\r\x12\x19\n\x11\x62inding_local_oor\x18; \x01(\r\x12\x1e\n\x16lowest_allocated_label\x18< \x01(\r\x12\x1f\n\x17highest_allocated_label\x18= \x01(\r\"(\n\x0cldp_vrf_info\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\r\"\xb6\x01\n\x16ldp_binding_summary_af\x12\x16\n\x0e\x61\x64\x64ress_family\x18\x01 \x01(\t\x12\x17\n\x0flast_lib_update\x18\x02 \x01(\r\x12%\n\x1dlib_minimum_revision_sent_all\x18\x03 \x01(\r\x12\x15\n\rbinding_total\x18\x04 \x01(\r\x12\x15\n\rbinding_local\x18\x05 \x01(\r\x12\x16\n\x0e\x62inding_remote\x18\x06 \x01(\rb\x06proto3')
)
_LDP_BINDING_SUMMARY_KEYS = _descriptor.Descriptor(
name='ldp_binding_summary_KEYS',
full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_KEYS',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=174,
serialized_end=200,
)
_LDP_BINDING_SUMMARY = _descriptor.Descriptor(
name='ldp_binding_summary',
full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vrf', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.vrf', index=0,
number=50, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='address_family', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.address_family', index=1,
number=51, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bind_af', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.bind_af', index=2,
number=52, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_no_route', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_no_route', index=3,
number=53, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_no_route', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_no_route', index=4,
number=54, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_null', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_null', index=5,
number=55, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_implicit_null', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_implicit_null', index=6,
number=56, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_explicit_null', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_explicit_null', index=7,
number=57, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_non_null', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_non_null', index=8,
number=58, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local_oor', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.binding_local_oor', index=9,
number=59, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lowest_allocated_label', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.lowest_allocated_label', index=10,
number=60, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='highest_allocated_label', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary.highest_allocated_label', index=11,
number=61, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=203,
serialized_end=746,
)
_LDP_VRF_INFO = _descriptor.Descriptor(
name='ldp_vrf_info',
full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_vrf_info',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_vrf_info.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='id', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_vrf_info.id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=748,
serialized_end=788,
)
_LDP_BINDING_SUMMARY_AF = _descriptor.Descriptor(
name='ldp_binding_summary_af',
full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='address_family', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.address_family', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_lib_update', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.last_lib_update', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lib_minimum_revision_sent_all', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.lib_minimum_revision_sent_all', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_total', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.binding_total', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_local', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.binding_local', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='binding_remote', full_name='cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af.binding_remote', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=791,
serialized_end=973,
)
_LDP_BINDING_SUMMARY.fields_by_name['vrf'].message_type = _LDP_VRF_INFO
_LDP_BINDING_SUMMARY.fields_by_name['bind_af'].message_type = _LDP_BINDING_SUMMARY_AF
DESCRIPTOR.message_types_by_name['ldp_binding_summary_KEYS'] = _LDP_BINDING_SUMMARY_KEYS
DESCRIPTOR.message_types_by_name['ldp_binding_summary'] = _LDP_BINDING_SUMMARY
DESCRIPTOR.message_types_by_name['ldp_vrf_info'] = _LDP_VRF_INFO
DESCRIPTOR.message_types_by_name['ldp_binding_summary_af'] = _LDP_BINDING_SUMMARY_AF
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ldp_binding_summary_KEYS = _reflection.GeneratedProtocolMessageType('ldp_binding_summary_KEYS', (_message.Message,), dict(
DESCRIPTOR = _LDP_BINDING_SUMMARY_KEYS,
__module__ = 'cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_KEYS)
))
_sym_db.RegisterMessage(ldp_binding_summary_KEYS)
ldp_binding_summary = _reflection.GeneratedProtocolMessageType('ldp_binding_summary', (_message.Message,), dict(
DESCRIPTOR = _LDP_BINDING_SUMMARY,
__module__ = 'cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary)
))
_sym_db.RegisterMessage(ldp_binding_summary)
ldp_vrf_info = _reflection.GeneratedProtocolMessageType('ldp_vrf_info', (_message.Message,), dict(
DESCRIPTOR = _LDP_VRF_INFO,
__module__ = 'cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_vrf_info)
))
_sym_db.RegisterMessage(ldp_vrf_info)
ldp_binding_summary_af = _reflection.GeneratedProtocolMessageType('ldp_binding_summary_af', (_message.Message,), dict(
DESCRIPTOR = _LDP_BINDING_SUMMARY_AF,
__module__ = 'cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_pb2'
# @@protoc_insertion_point(class_scope:cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.standby.bindings_summary_all.ldp_binding_summary_af)
))
_sym_db.RegisterMessage(ldp_binding_summary_af)
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
]
| |
badaf0e123f718073e3bea705aaaaec24180f8a9 | 430bd23decf16dc572a587b7af9f5c8e7dea5e6b | /clients/python/swagger_client/apis/leaderboard_api.py | e0906cb597edb1fa6abaa8f8a43e89f91389cf1f | [
"Apache-2.0"
]
| permissive | jltrade/api-connectors | 332d4df5e7e60bd27b6c5a43182df7d99a665972 | fa2cf561b414e18e9d2e1b5d68e94cc710d315e5 | refs/heads/master | 2020-06-19T10:20:46.022967 | 2016-09-24T13:12:17 | 2016-09-24T13:12:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,571 | py | # coding: utf-8
"""
BitMEX API
REST API for the BitMEX.com trading platform.<br><br><a href=\"/app/restAPI\">REST Documentation</a><br><a href=\"/app/wsAPI\">Websocket Documentation</a>
OpenAPI spec version: 1.2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class LeaderboardApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def leaderboard_get(self, **kwargs):
"""
Get current leaderboard.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.leaderboard_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str method: Ranking type. Options: \"notional\", \"ROE\"
:return: list[Leaderboard]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.leaderboard_get_with_http_info(**kwargs)
else:
(data) = self.leaderboard_get_with_http_info(**kwargs)
return data
def leaderboard_get_with_http_info(self, **kwargs):
"""
Get current leaderboard.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.leaderboard_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str method: Ranking type. Options: \"notional\", \"ROE\"
:return: list[Leaderboard]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['method']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method leaderboard_get" % key
)
params[key] = val
del params['kwargs']
resource_path = '/leaderboard'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'method' in params:
query_params['method'] = params['method']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded'])
# Authentication setting
auth_settings = []
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Leaderboard]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
| [
"[email protected]"
]
| |
7cc31d11d80087d2904c8f28fcb8dd61c8dc7740 | dab68b742da7945b75ac957deed6e9a72283934f | /Golf-Report/config/settings.py | 26159dff1d88930d1b129440dcef5b3d7dd6faba | []
| no_license | hyunmin0317/Stock-Insight | 90dd03665c8c5edbc041284ccefa78e877f9c3c3 | 558f4da73e62aa064994e680d923ba68d5b8ca4f | refs/heads/master | 2023-06-29T23:56:42.979878 | 2021-08-02T02:15:08 | 2021-08-02T02:15:08 | 389,097,513 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,463 | py | """
Django settings for config project.
Generated by 'django-admin startproject' using Django 3.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-1v-(2=2!&i%)-iti4z!_%f-kahyn0zj)@dndhorz@qrt1px()u'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'sass_processor',
'stock.apps.StockConfig',
]
SASS_PROCESSOR_ENABLED = True
SASS_PROCESSOR_ROOT = os.path.join(BASE_DIR, 'static')
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'ko-kr'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
BASE_DIR / 'static',
]
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
]
| |
408b33ccab6b8d5c16cf60ae37e714dabbbb9d32 | 57120090948f99de2258a6f01a0cc65443441ce9 | /hyperclass/graph/exe/pygsp.py | 0f58c1bf6917edac5157eac592fec080a30215f0 | []
| no_license | MysteriousSonOfGod/hyperclass | c67eff91f6f0f64fa4a92f8567243ef5cd8fa3c8 | e8cec11b364e8b049e7432b95ce20a2c5de94235 | refs/heads/master | 2023-01-28T16:42:09.289664 | 2020-12-07T22:54:50 | 2020-12-07T22:54:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 499 | py | from pygsp import graphs
import xarray as xa
import numpy as np
from hyperclass.data.spatial.tile import Tile, Block
import os, math, sys
block_shape = (500, 500)
block_indices = (0,0)
image_name = "ang20170720t004130_corr_v2p9"
N_neighbors = 8
dm = DataManager( image_name, block_shape=block_shape )
tile: Tile = dm.getTile()
block = tile.getBlock( *block_indices )
data: np.ndarray = block.getPointData().values
graph = graphs.NNGraph( data, 'knn', True, True, True, N_neighbors )
print (".") | [
"[email protected]"
]
| |
22e8481e0326ea50c9064e5eb25279ea2c83211d | e2c79931c43a1a6e566d05aafa2655ba8d128657 | /triv/io/mimetypes/__init__.py | b4a7c6465896d004c2a48d6e9ebe9d57bcfa4de6 | []
| no_license | pombredanne/trivio.datasources | c45c772270ba6ad80ade84d9b4be6b024e4a8f2d | 00f0c341e776c77516b17be036569d0eac7cdced | refs/heads/master | 2021-01-12T22:38:35.572615 | 2013-07-02T21:20:47 | 2013-07-02T21:20:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from . import application_json
from . import application_x_arc
from . import application_x_json_stream
from . import application_x_hadoop_sequence
from . import text_csv
| [
"[email protected]"
]
| |
d3d9bf07d206c7c98ab748808ca1ca6ff279dd7a | 37438771565238194ea997fa65619bd32c823706 | /detect_tracking/18.5.31_first_success/wyz_ws/devel/lib/python2.7/dist-packages/image_geometry/__init__.py | cfa550544b8f9c86f4e2a7cf963edf5e2583218a | []
| no_license | Aaron9477/restore | b040b8be695c513946c0243c4acb735f427d8bba | 8dc13ed7cf0c4e5cde911169d11e330d826f40bd | refs/heads/master | 2021-09-15T10:50:59.969952 | 2018-05-31T03:11:55 | 2018-05-31T03:11:55 | 110,834,815 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,050 | py | # -*- coding: utf-8 -*-
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/home/ubuntu/WYZ/wyz_ws/src/vision_opencv-kinetic/image_geometry/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles
| [
"[email protected]"
]
| |
7a3fe8d03675f0f2f25deafd18430d4b97b6da39 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/sets_20200605203258.py | f16a7163f3eec97f0550e2e5a9c3cfa32aa6775a | []
| no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | import json
def Strings(str):
values = {}
newArray = []
keys = []
finalArr = []
for i in str:
newArray.append(i.split(":"))
for j in range(len(newArray)):
if newArray[j][0] in values:
values[newArray[j][0]] += int(newArray[j][1])
else:
values[newArray[j][0]] = int(newArray[j][1])
for k in values:
keys.append(k)
keys = sorted(keys)
newString = " "
for i in range(len(keys)-1):
if keys[i] in values:
newString += keys[i] + ":"+ json.dumps(values[keysi]) + ","
print(newString)
Strings(["Z:1","B:3","C:3","Z:4","B:2"])
| [
"[email protected]"
]
| |
e338718887a056636b2427531d8b2b6041d3fd9f | 6e68584f2819351abe628b659c01184f51fec976 | /Centre_College/CSC_117/CSC_117_Python_Files/monteCarloGUI.py | 0b71697ca698fbc152b4b222c923e09586afee51 | []
| no_license | DanSGraham/code | 0a16a2bfe51cebb62819cd510c7717ae24b12d1b | fc54b6d50360ae12f207385b5d25adf72bfa8121 | refs/heads/master | 2020-03-29T21:09:18.974467 | 2017-06-14T04:04:48 | 2017-06-14T04:04:48 | 36,774,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,178 | py | #
# An approximation of pi
#Daniel Graham
#This method should give an approximation of pi due to the ratio of circle to rectangle.
#Since the area of the circle = pi and the area of the rectangle will = 4 by multiplying by 4,
#the function returns an approximation of pi
import math
import random
from graphics import *
def distance_from_center(x,y):
distance1 = math.sqrt((x)**2+(y)**2)
return distance1
def mainGUI():
win = GraphWin("Pi Approximation", 1000,1000)
win.setCoords( -1.5,-1.5,1.5,1.5)
square = Rectangle(Point(-1,1), Point(1,-1))
circle = Circle(Point(0,0),1)
title = Text(Point(0,1.25), "Pi Approximation")
prompt = Text(Point(-.2,-1.25), "How many points would you like to use? \n Click after Entry!")
prompt_entry = Entry(Point(0.4,-1.25), 8)
title.draw(win)
prompt.draw(win)
prompt_entry.draw(win)
circle.draw(win)
square.draw(win)
win.getMouse()
while prompt_entry.getText() == "":
win.getMouse()
prompt.undraw()
prompt_entry.undraw()
number_of_dots = int(prompt_entry.getText())
inside_circle = 0
outside_circle = 0
for a in xrange(number_of_dots):
pointx = random.uniform(-1,1)
pointy = random.uniform(-1,1)
point = Point(pointx,pointy)
point.draw(win)
distance = distance_from_center(pointx, pointy)
if distance <= 1.0:
inside_circle += 1
point.setFill('red')
else:
outside_circle +=1
point.setFill('blue')
pi_approx = inside_circle/float((inside_circle+outside_circle))*4
text_string = "According to this approximation, pi = " + str(pi_approx)
points_string = "You used " + str(number_of_dots) + " points to approximate pi. There were " + str(inside_circle) + " points inside the circle \n Click when Finished!"
end_text = Text(Point(0,-1.2), text_string)
points_text = Text(Point(0, -1.30), points_string)
points_text.draw(win)
end_text.draw(win)
win.getMouse()
win.close()
mainGUI()
| [
"[email protected]"
]
| |
48551fb220fdc14d1dfe72e50fcfacfa205e2e76 | e005e5fa3fdf18cf5a72d14379568b97dfd4754c | /lect02_codes/lect02_codes/proj/py_version/main.py | 997ea3e164e26dc651460daca9e531d6e62e75f9 | []
| no_license | evaseemefly/learn_sourcecode_DataAnalysis | 1962b23fa46987c671025c157be542fcd2eef60f | ec73d3962966d94ccb2e2f0a82486e8ac615c449 | refs/heads/master | 2018-10-25T16:56:48.367272 | 2018-10-08T04:18:38 | 2018-10-08T04:18:38 | 107,554,982 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,623 | py | # -*- coding: utf-8 -*-
"""
作者: 梁斌
版本: 1.0
日期: 2017/10
实战案例2:麦当劳菜单营养成分分析
该案例有配套的讲解版本,在jupyter演示版中可找到
声明:小象学院拥有完全知识产权的权利;只限于善意学习者在本课程使用,
不得在课程范围外向任何第三方散播。任何其他人或机构不得盗版、复制、仿造其中的创意,
我们将保留一切通过法律手段追究违反者的权利
"""
import os
import pandas as pd
# 指定数据集路径
dataset_path = '../data'
datafile = os.path.join(dataset_path, 'menu.csv')
# 分析的数据列
used_cols = ['Calories', 'Calories from Fat', 'Total Fat', 'Cholesterol', 'Sugars']
def inspect_data(df_data):
"""
查看数据集基本信息
"""
print('\n===================== 数据预览: =====================')
print(df_data.head())
print('\n===================== 数据信息: =====================')
print(df_data.info())
print('\n===================== 数据基本统计信息: =====================')
print(df_data.describe())
def main():
"""
主函数
"""
# 读入数据
menu_data = pd.read_csv(datafile)
# 查看数据集基本信息
inspect_data(menu_data)
# 任务1. 按单品类型分析查看数据
print('\n===================== 任务1. 按单品类型分析查看数据 =====================')
print('\n===================== 营养成分最高的单品: =====================')
max_idxs = [menu_data[col].argmax() for col in used_cols]
for col, max_idx in zip(used_cols, max_idxs):
print('{} 最高的单品:{}'.format(col, menu_data.iloc[max_idx]['Item']))
print('\n===================== 营养成分最低的单品: =====================')
min_idxs = [menu_data[col].argmin() for col in used_cols]
for col, min_idx in zip(used_cols, min_idxs):
print('{} 最低的单品:{}'.format(col, menu_data.iloc[min_idx]['Item']))
# 任务2. 按菜单类型分析查看数据
print('\n===================== 任务2. 按菜单类型分析查看数据 =====================')
print('\n===================== 菜单类型的单品数目分布: =====================')
cat_grouped = menu_data.groupby('Category')
print('菜单类型的单品数目:')
print(cat_grouped.size().sort_values(ascending=False))
# 菜单类型的营养成分分布
print('\n===================== 菜单类型的营养成分分布: =====================')
print(cat_grouped[used_cols].mean())
print('\n===================== 营养成分最高的菜单类型: =====================')
max_cats = [cat_grouped[col].mean().argmax() for col in used_cols]
for col, cat in zip(used_cols, max_cats):
print('{} 最高的菜单类型:{}'.format(col, cat))
print('\n===================== 营养成分最低的菜单类型: =====================')
min_cats = [cat_grouped[col].mean().argmin() for col in used_cols]
for col, cat in zip(used_cols, min_cats):
print('{} 最低的菜单类型:{}'.format(col, cat))
# 任务3. 查看分析单品及菜单的份量
print('\n===================== 任务3. 查看分析单品及菜单的份量 =====================')
# 过滤数据,只保留包含 'g'的单品
sel_menu_data = menu_data[menu_data['Serving Size'].str.contains('g')].copy()
def proc_size_str(size_str):
"""
处理serving size字符串,返回g
"""
start_idx = size_str.index('(') + 1
end_idx = size_str.index('g')
size_val = size_str[start_idx: end_idx]
return float(size_val)
sel_menu_data['Size'] = sel_menu_data['Serving Size'].apply(proc_size_str)
inspect_data(sel_menu_data)
max_idx = sel_menu_data['Size'].argmax()
print('份量最多的单品:{},{}g'.format(sel_menu_data.iloc[max_idx]['Item'], sel_menu_data['Size'].max()))
min_idx = sel_menu_data['Size'].argmin()
print('份量最少的单品:{},{}g'.format(sel_menu_data.iloc[min_idx]['Item'], sel_menu_data['Size'].min()))
sel_cat_grouped = sel_menu_data.groupby('Category')
print('份量最多的类别:{},{}g'.format(sel_cat_grouped['Size'].mean().argmax(),
sel_cat_grouped['Size'].mean().max()))
print('份量最少的类别:{},{}g'.format(sel_cat_grouped['Size'].mean().argmin(),
sel_cat_grouped['Size'].mean().min()))
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
21f20cba381b35b90be5ac12499ce8e389d2d0bd | 8e03374062754d568a75f6a0938224c1de3baf3c | /news_api/news/migrations/0002_auto_20210505_0040.py | 4911de0d05217fd93a635da0b7fa7a442a90b9aa | []
| no_license | PythonDjangoJavascript/news_api | dbad588cea338cedb085241713a1d7fe4e162d77 | 493f0ccc6dea577fe5a1fbeb03520ff2066bc12a | refs/heads/main | 2023-04-22T17:21:53.520734 | 2021-05-09T06:00:16 | 2021-05-09T06:00:16 | 364,386,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | # Generated by Django 3.2.1 on 2021-05-05 00:40
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('news', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='loacation',
field=models.CharField(default=django.utils.timezone.now, max_length=100),
preserve_default=False,
),
migrations.AlterField(
model_name='article',
name='publication_date',
field=models.DateField(blank=True, null=True),
),
]
| [
"[email protected]"
]
| |
a6ddac7dbb84f364bb7a1c24bb39dc942613b16a | f850e0f75a76c500f5ba8a9ab6fa6d5f40d22b23 | /cutecharts_demo/__init__.py | c6223d8fde18a448c03c6f8cbd0affbf3a225cd8 | [
"MIT"
]
| permissive | jay20161013/pywebio-chart-gallery | 805afa2643b0d330a4a2f80f1e0a8827e8f61afe | 11fd8a70b2e9ff5482cf5924b110a11f3469edfc | refs/heads/master | 2023-03-20T01:58:30.979109 | 2021-03-18T12:48:31 | 2021-03-18T12:48:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,503 | py | from collections import OrderedDict
from pywebio.output import *
from pywebio.session import hold, get_info
from .demos.example_bar import main as bar
from .demos.example_line import main as line
from .demos.example_page import main as page
from .demos.example_pie import main as pie
from .demos.example_radar import main as radar
from .demos.example_scatter import main as scatter
all_demos = OrderedDict([
("Bar", bar),
("Line", line),
("Pie", pie),
("Radar", radar),
("Scatter", scatter),
("Page", page),
])
def t(eng, chinese):
"""return English or Chinese text according to the user's browser language"""
return chinese if 'zh' in get_info().user_language else eng
@use_scope('demo', clear=True)
def show_demo(name):
if name not in all_demos:
return
all_demos[name]()
put_html('<a href="https://github.com/wang0618/pywebio-chart-gallery/blob/master'
'/cutecharts_demo/demos/example_%s.py" target="_blank">%s</a>' % (name.lower(), t('Source code', '源码')))
scroll_to('demo-list', 'top')
async def cutecharts():
"""PyWebIO cutechart Demo
Demo of using cutechart.py for data visualization in PyWebIO.
在PyWebIO中使用 cutechart.py 进行数据可视化示例"""
put_markdown(t(r"""## Cutecharts.py
[cutecharts.py](https://github.com/cutecharts/cutecharts.py) is a hand drawing style charts library for Python which uses [chart.xkcd](https://github.com/timqian/chart.xkcd) as underlying implementation.
In PyWebIO, you can use the following code to output the cutecharts.py chart instance:
```python
# `chart` is cutecharts chart instance
pywebio.output.put_html(chart.render_notebook())
```
For details, please refer to the source code of the demo below.
## Demos List
""", r"""## Cutecharts.py
[cutecharts.py](https://github.com/cutecharts/cutecharts.py) 是一个可以创建具有卡通风格的可视化图表的python库。底层使用了 [chart.xkcd](https://github.com/timqian/chart.xkcd) Javascript库。
PyWebIO 支持输出使用 cutecharts.py 库创建的图表。使用方式为在PyWebIO会话中调用
```python
# chart 为 cutecharts 的图表实例
pywebio.output.put_html(chart.render_notebook())
```
具体可以参考下面demo中的源码。
## Demos List
"""), strip_indent=4)
set_scope('demo-list')
put_buttons(list(all_demos.keys()), onclick=show_demo)
await hold()
| [
"[email protected]"
]
| |
99a358a0f5eecbab63327e91984945a6ce554457 | 6b791247919f7de90c8402abcca64b32edd7a29b | /multiprocessing/managers.py | e5683440caccf121433a5b534bb9999f17bba11c | [
"Apache-2.0"
]
| permissive | theclashingfritz/Cog-Invasion-Online-Dump | a9bce15c9f37b6776cecd80b309f3c9ec5b1ec36 | 2561abbacb3e2e288e06f3f04b935b5ed589c8f8 | refs/heads/master | 2021-01-04T06:44:04.295001 | 2020-02-14T05:23:01 | 2020-02-14T05:23:01 | 240,434,213 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,874 | py | # uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: multiprocessing.managers
__all__ = [
'BaseManager', 'SyncManager', 'BaseProxy', 'Token']
import os, sys, weakref, threading, array, Queue
from traceback import format_exc
from multiprocessing import Process, current_process, active_children, Pool, util, connection
from multiprocessing.process import AuthenticationString
from multiprocessing.forking import exit, Popen, assert_spawning, ForkingPickler
from multiprocessing.util import Finalize, info
try:
from cPickle import PicklingError
except ImportError:
from pickle import PicklingError
def reduce_array(a):
return (
array.array, (a.typecode, a.tostring()))
ForkingPickler.register(array.array, reduce_array)
view_types = [ type(getattr({}, name)()) for name in ('items', 'keys', 'values') ]
class Token(object):
__slots__ = ('typeid', 'address', 'id')
def __init__(self, typeid, address, id):
self.typeid, self.address, self.id = typeid, address, id
def __getstate__(self):
return (
self.typeid, self.address, self.id)
def __setstate__(self, state):
self.typeid, self.address, self.id = state
def __repr__(self):
return 'Token(typeid=%r, address=%r, id=%r)' % (
self.typeid, self.address, self.id)
def dispatch(c, id, methodname, args=(), kwds={}):
c.send((id, methodname, args, kwds))
kind, result = c.recv()
if kind == '#RETURN':
return result
raise convert_to_error(kind, result)
def convert_to_error(kind, result):
if kind == '#ERROR':
return result
if kind == '#TRACEBACK':
return RemoteError(result)
if kind == '#UNSERIALIZABLE':
return RemoteError('Unserializable message: %s\n' % result)
return ValueError('Unrecognized message type')
class RemoteError(Exception):
def __str__(self):
return '\n' + '-' * 75 + '\n' + str(self.args[0]) + '-' * 75
def all_methods(obj):
temp = []
for name in dir(obj):
func = getattr(obj, name)
if hasattr(func, '__call__'):
temp.append(name)
return temp
def public_methods(obj):
return [ name for name in all_methods(obj) if name[0] != '_' ]
class Server(object):
public = [
'shutdown', 'create', 'accept_connection', 'get_methods',
'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
def __init__(self, registry, address, authkey, serializer):
self.registry = registry
self.authkey = AuthenticationString(authkey)
Listener, Client = listener_client[serializer]
self.listener = Listener(address=address, backlog=16)
self.address = self.listener.address
self.id_to_obj = {'0': (None, ())}
self.id_to_refcount = {}
self.mutex = threading.RLock()
self.stop = 0
return
def serve_forever(self):
current_process()._manager_server = self
try:
while 1:
try:
c = self.listener.accept()
except (OSError, IOError):
continue
t = threading.Thread(target=self.handle_request, args=(c,))
t.daemon = True
t.start()
except (KeyboardInterrupt, SystemExit):
pass
finally:
self.stop = 999
self.listener.close()
def handle_request(self, c):
funcname = result = request = None
try:
connection.deliver_challenge(c, self.authkey)
connection.answer_challenge(c, self.authkey)
request = c.recv()
ignore, funcname, args, kwds = request
func = getattr(self, funcname)
except Exception:
msg = (
'#TRACEBACK', format_exc())
else:
try:
result = func(c, *args, **kwds)
except Exception:
msg = (
'#TRACEBACK', format_exc())
else:
msg = (
'#RETURN', result)
try:
c.send(msg)
except Exception as e:
try:
c.send(('#TRACEBACK', format_exc()))
except Exception:
pass
util.info('Failure to send message: %r', msg)
util.info(' ... request was %r', request)
util.info(' ... exception was %r', e)
c.close()
return
def serve_client(self, conn):
util.debug('starting server thread to service %r', threading.current_thread().name)
recv = conn.recv
send = conn.send
id_to_obj = self.id_to_obj
while not self.stop:
try:
methodname = obj = None
request = recv()
ident, methodname, args, kwds = request
obj, exposed, gettypeid = id_to_obj[ident]
if methodname not in exposed:
raise AttributeError('method %r of %r object is not in exposed=%r' % (
methodname, type(obj), exposed))
function = getattr(obj, methodname)
try:
res = function(*args, **kwds)
except Exception as e:
msg = (
'#ERROR', e)
else:
typeid = gettypeid and gettypeid.get(methodname, None)
if typeid:
rident, rexposed = self.create(conn, typeid, res)
token = Token(typeid, self.address, rident)
msg = ('#PROXY', (rexposed, token))
else:
msg = (
'#RETURN', res)
except AttributeError:
if methodname is None:
msg = (
'#TRACEBACK', format_exc())
else:
try:
fallback_func = self.fallback_mapping[methodname]
result = fallback_func(self, conn, ident, obj, *args, **kwds)
msg = (
'#RETURN', result)
except Exception:
msg = (
'#TRACEBACK', format_exc())
except EOFError:
util.debug('got EOF -- exiting thread serving %r', threading.current_thread().name)
sys.exit(0)
except Exception:
msg = (
'#TRACEBACK', format_exc())
try:
try:
send(msg)
except Exception as e:
send(('#UNSERIALIZABLE', repr(msg)))
except Exception as e:
util.info('exception in thread serving %r', threading.current_thread().name)
util.info(' ... message was %r', msg)
util.info(' ... exception was %r', e)
conn.close()
sys.exit(1)
return
def fallback_getvalue(self, conn, ident, obj):
return obj
def fallback_str(self, conn, ident, obj):
return str(obj)
def fallback_repr(self, conn, ident, obj):
return repr(obj)
fallback_mapping = {'__str__': fallback_str,
'__repr__': fallback_repr,
'#GETVALUE': fallback_getvalue}
def dummy(self, c):
pass
def debug_info(self, c):
self.mutex.acquire()
try:
result = []
keys = self.id_to_obj.keys()
keys.sort()
for ident in keys:
if ident != '0':
result.append(' %s: refcount=%s\n %s' % (
ident, self.id_to_refcount[ident],
str(self.id_to_obj[ident][0])[:75]))
return ('\n').join(result)
finally:
self.mutex.release()
def number_of_objects(self, c):
return len(self.id_to_obj) - 1
def shutdown(self, c):
try:
util.debug('manager received shutdown message')
c.send(('#RETURN', None))
if sys.stdout != sys.__stdout__:
util.debug('resetting stdout, stderr')
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
util._run_finalizers(0)
for p in active_children():
util.debug('terminating a child process of manager')
p.terminate()
for p in active_children():
util.debug('terminating a child process of manager')
p.join()
util._run_finalizers()
util.info('manager exiting with exitcode 0')
except:
import traceback
traceback.print_exc()
finally:
exit(0)
return
def create(self, c, typeid, *args, **kwds):
self.mutex.acquire()
try:
callable, exposed, method_to_typeid, proxytype = self.registry[typeid]
if callable is None:
obj = args[0]
else:
obj = callable(*args, **kwds)
if exposed is None:
exposed = public_methods(obj)
if method_to_typeid is not None:
exposed = list(exposed) + list(method_to_typeid)
ident = '%x' % id(obj)
util.debug('%r callable returned object with id %r', typeid, ident)
self.id_to_obj[ident] = (
obj, set(exposed), method_to_typeid)
if ident not in self.id_to_refcount:
self.id_to_refcount[ident] = 0
self.incref(c, ident)
return (
ident, tuple(exposed))
finally:
self.mutex.release()
return
def get_methods(self, c, token):
return tuple(self.id_to_obj[token.id][1])
def accept_connection(self, c, name):
threading.current_thread().name = name
c.send(('#RETURN', None))
self.serve_client(c)
return
def incref(self, c, ident):
self.mutex.acquire()
try:
self.id_to_refcount[ident] += 1
finally:
self.mutex.release()
def decref(self, c, ident):
self.mutex.acquire()
try:
self.id_to_refcount[ident] -= 1
if self.id_to_refcount[ident] == 0:
del self.id_to_obj[ident]
del self.id_to_refcount[ident]
util.debug('disposing of obj with id %r', ident)
finally:
self.mutex.release()
class State(object):
__slots__ = [
'value']
INITIAL = 0
STARTED = 1
SHUTDOWN = 2
listener_client = {'pickle': (
connection.Listener, connection.Client),
'xmlrpclib': (
connection.XmlListener, connection.XmlClient)}
class BaseManager(object):
_registry = {}
_Server = Server
def __init__(self, address=None, authkey=None, serializer='pickle'):
if authkey is None:
authkey = current_process().authkey
self._address = address
self._authkey = AuthenticationString(authkey)
self._state = State()
self._state.value = State.INITIAL
self._serializer = serializer
self._Listener, self._Client = listener_client[serializer]
return
def __reduce__(self):
return (
type(self).from_address,
(
self._address, self._authkey, self._serializer))
def get_server(self):
return Server(self._registry, self._address, self._authkey, self._serializer)
def connect(self):
Listener, Client = listener_client[self._serializer]
conn = Client(self._address, authkey=self._authkey)
dispatch(conn, None, 'dummy')
self._state.value = State.STARTED
return
def start(self, initializer=None, initargs=()):
if initializer is not None and not hasattr(initializer, '__call__'):
raise TypeError('initializer must be a callable')
reader, writer = connection.Pipe(duplex=False)
self._process = Process(target=type(self)._run_server, args=(
self._registry, self._address, self._authkey,
self._serializer, writer, initializer, initargs))
ident = (':').join((str(i) for i in self._process._identity))
self._process.name = type(self).__name__ + '-' + ident
self._process.start()
writer.close()
self._address = reader.recv()
reader.close()
self._state.value = State.STARTED
self.shutdown = util.Finalize(self, type(self)._finalize_manager, args=(
self._process, self._address, self._authkey,
self._state, self._Client), exitpriority=0)
return
@classmethod
def _run_server(cls, registry, address, authkey, serializer, writer, initializer=None, initargs=()):
if initializer is not None:
initializer(*initargs)
server = cls._Server(registry, address, authkey, serializer)
writer.send(server.address)
writer.close()
util.info('manager serving at %r', server.address)
server.serve_forever()
return
def _create(self, typeid, *args, **kwds):
conn = self._Client(self._address, authkey=self._authkey)
try:
id, exposed = dispatch(conn, None, 'create', (typeid,) + args, kwds)
finally:
conn.close()
return (Token(typeid, self._address, id), exposed)
def join(self, timeout=None):
self._process.join(timeout)
def _debug_info(self):
conn = self._Client(self._address, authkey=self._authkey)
try:
return dispatch(conn, None, 'debug_info')
finally:
conn.close()
return
def _number_of_objects(self):
conn = self._Client(self._address, authkey=self._authkey)
try:
return dispatch(conn, None, 'number_of_objects')
finally:
conn.close()
return
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown()
@staticmethod
def _finalize_manager(process, address, authkey, state, _Client):
if process.is_alive():
util.info('sending shutdown message to manager')
try:
conn = _Client(address, authkey=authkey)
try:
dispatch(conn, None, 'shutdown')
finally:
conn.close()
except Exception:
pass
process.join(timeout=0.2)
if process.is_alive():
util.info('manager still alive')
if hasattr(process, 'terminate'):
util.info('trying to `terminate()` manager process')
process.terminate()
process.join(timeout=0.1)
if process.is_alive():
util.info('manager still alive after terminate')
state.value = State.SHUTDOWN
try:
del BaseProxy._address_to_local[address]
except KeyError:
pass
return
address = property(lambda self: self._address)
@classmethod
def register(cls, typeid, callable=None, proxytype=None, exposed=None, method_to_typeid=None, create_method=True):
if '_registry' not in cls.__dict__:
cls._registry = cls._registry.copy()
if proxytype is None:
proxytype = AutoProxy
exposed = exposed or getattr(proxytype, '_exposed_', None)
method_to_typeid = method_to_typeid or getattr(proxytype, '_method_to_typeid_', None)
if method_to_typeid:
for key, value in method_to_typeid.items():
pass
cls._registry[typeid] = (callable, exposed, method_to_typeid, proxytype)
if create_method:
def temp(self, *args, **kwds):
util.debug('requesting creation of a shared %r object', typeid)
token, exp = self._create(typeid, *args, **kwds)
proxy = proxytype(token, self._serializer, manager=self, authkey=self._authkey, exposed=exp)
conn = self._Client(token.address, authkey=self._authkey)
dispatch(conn, None, 'decref', (token.id,))
return proxy
temp.__name__ = typeid
setattr(cls, typeid, temp)
return
class ProcessLocalSet(set):
def __init__(self):
util.register_after_fork(self, lambda obj: obj.clear())
def __reduce__(self):
return (
type(self), ())
class BaseProxy(object):
_address_to_local = {}
_mutex = util.ForkAwareThreadLock()
def __init__(self, token, serializer, manager=None, authkey=None, exposed=None, incref=True):
BaseProxy._mutex.acquire()
try:
tls_idset = BaseProxy._address_to_local.get(token.address, None)
if tls_idset is None:
tls_idset = (
util.ForkAwareLocal(), ProcessLocalSet())
BaseProxy._address_to_local[token.address] = tls_idset
finally:
BaseProxy._mutex.release()
self._tls = tls_idset[0]
self._idset = tls_idset[1]
self._token = token
self._id = self._token.id
self._manager = manager
self._serializer = serializer
self._Client = listener_client[serializer][1]
if authkey is not None:
self._authkey = AuthenticationString(authkey)
else:
if self._manager is not None:
self._authkey = self._manager._authkey
else:
self._authkey = current_process().authkey
if incref:
self._incref()
util.register_after_fork(self, BaseProxy._after_fork)
return
def _connect(self):
util.debug('making connection to manager')
name = current_process().name
if threading.current_thread().name != 'MainThread':
name += '|' + threading.current_thread().name
conn = self._Client(self._token.address, authkey=self._authkey)
dispatch(conn, None, 'accept_connection', (name,))
self._tls.connection = conn
return
def _callmethod(self, methodname, args=(), kwds={}):
try:
conn = self._tls.connection
except AttributeError:
util.debug('thread %r does not own a connection', threading.current_thread().name)
self._connect()
conn = self._tls.connection
conn.send((self._id, methodname, args, kwds))
kind, result = conn.recv()
if kind == '#RETURN':
return result
if kind == '#PROXY':
exposed, token = result
proxytype = self._manager._registry[token.typeid][-1]
proxy = proxytype(token, self._serializer, manager=self._manager, authkey=self._authkey, exposed=exposed)
conn = self._Client(token.address, authkey=self._authkey)
dispatch(conn, None, 'decref', (token.id,))
return proxy
raise convert_to_error(kind, result)
return
def _getvalue(self):
return self._callmethod('#GETVALUE')
def _incref(self):
conn = self._Client(self._token.address, authkey=self._authkey)
dispatch(conn, None, 'incref', (self._id,))
util.debug('INCREF %r', self._token.id)
self._idset.add(self._id)
state = self._manager and self._manager._state
self._close = util.Finalize(self, BaseProxy._decref, args=(
self._token, self._authkey, state,
self._tls, self._idset, self._Client), exitpriority=10)
return
@staticmethod
def _decref(token, authkey, state, tls, idset, _Client):
idset.discard(token.id)
if state is None or state.value == State.STARTED:
try:
util.debug('DECREF %r', token.id)
conn = _Client(token.address, authkey=authkey)
dispatch(conn, None, 'decref', (token.id,))
except Exception as e:
util.debug('... decref failed %s', e)
else:
util.debug('DECREF %r -- manager already shutdown', token.id)
if not idset and hasattr(tls, 'connection'):
util.debug('thread %r has no more proxies so closing conn', threading.current_thread().name)
tls.connection.close()
del tls.connection
return
def _after_fork(self):
self._manager = None
try:
self._incref()
except Exception as e:
util.info('incref failed: %s' % e)
return
def __reduce__(self):
kwds = {}
if Popen.thread_is_spawning():
kwds['authkey'] = self._authkey
if getattr(self, '_isauto', False):
kwds['exposed'] = self._exposed_
return (
RebuildProxy,
(
AutoProxy, self._token, self._serializer, kwds))
return (
RebuildProxy,
(
type(self), self._token, self._serializer, kwds))
def __deepcopy__(self, memo):
return self._getvalue()
def __repr__(self):
return '<%s object, typeid %r at %s>' % (
type(self).__name__, self._token.typeid, '0x%x' % id(self))
def __str__(self):
try:
return self._callmethod('__repr__')
except Exception:
return repr(self)[:-1] + "; '__str__()' failed>"
def RebuildProxy(func, token, serializer, kwds):
server = getattr(current_process(), '_manager_server', None)
if server and server.address == token.address:
return server.id_to_obj[token.id][0]
incref = kwds.pop('incref', True) and not getattr(current_process(), '_inheriting', False)
return func(token, serializer, incref=incref, **kwds)
return
def MakeProxyType(name, exposed, _cache={}):
exposed = tuple(exposed)
try:
return _cache[(name, exposed)]
except KeyError:
pass
dic = {}
for meth in exposed:
exec 'def %s(self, *args, **kwds):\n return self._callmethod(%r, args, kwds)' % (meth, meth) in dic
ProxyType = type(name, (BaseProxy,), dic)
ProxyType._exposed_ = exposed
_cache[(name, exposed)] = ProxyType
return ProxyType
def AutoProxy(token, serializer, manager=None, authkey=None, exposed=None, incref=True):
_Client = listener_client[serializer][1]
if exposed is None:
conn = _Client(token.address, authkey=authkey)
try:
exposed = dispatch(conn, None, 'get_methods', (token,))
finally:
conn.close()
if authkey is None and manager is not None:
authkey = manager._authkey
if authkey is None:
authkey = current_process().authkey
ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
proxy = ProxyType(token, serializer, manager=manager, authkey=authkey, incref=incref)
proxy._isauto = True
return proxy
class Namespace(object):
def __init__(self, **kwds):
self.__dict__.update(kwds)
def __repr__(self):
items = self.__dict__.items()
temp = []
for name, value in items:
if not name.startswith('_'):
temp.append('%s=%r' % (name, value))
temp.sort()
return 'Namespace(%s)' % str.join(', ', temp)
class Value(object):
def __init__(self, typecode, value, lock=True):
self._typecode = typecode
self._value = value
def get(self):
return self._value
def set(self, value):
self._value = value
def __repr__(self):
return '%s(%r, %r)' % (type(self).__name__, self._typecode, self._value)
value = property(get, set)
def Array(typecode, sequence, lock=True):
return array.array(typecode, sequence)
class IteratorProxy(BaseProxy):
_exposed_ = ('__next__', 'next', 'send', 'throw', 'close')
def __iter__(self):
return self
def __next__(self, *args):
return self._callmethod('__next__', args)
def next(self, *args):
return self._callmethod('next', args)
def send(self, *args):
return self._callmethod('send', args)
def throw(self, *args):
return self._callmethod('throw', args)
def close(self, *args):
return self._callmethod('close', args)
class AcquirerProxy(BaseProxy):
_exposed_ = ('acquire', 'release')
def acquire(self, blocking=True):
return self._callmethod('acquire', (blocking,))
def release(self):
return self._callmethod('release')
def __enter__(self):
return self._callmethod('acquire')
def __exit__(self, exc_type, exc_val, exc_tb):
return self._callmethod('release')
class ConditionProxy(AcquirerProxy):
_exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
def wait(self, timeout=None):
return self._callmethod('wait', (timeout,))
def notify(self):
return self._callmethod('notify')
def notify_all(self):
return self._callmethod('notify_all')
class EventProxy(BaseProxy):
_exposed_ = ('is_set', 'set', 'clear', 'wait')
def is_set(self):
return self._callmethod('is_set')
def set(self):
return self._callmethod('set')
def clear(self):
return self._callmethod('clear')
def wait(self, timeout=None):
return self._callmethod('wait', (timeout,))
class NamespaceProxy(BaseProxy):
_exposed_ = ('__getattribute__', '__setattr__', '__delattr__')
def __getattr__(self, key):
if key[0] == '_':
return object.__getattribute__(self, key)
callmethod = object.__getattribute__(self, '_callmethod')
return callmethod('__getattribute__', (key,))
def __setattr__(self, key, value):
if key[0] == '_':
return object.__setattr__(self, key, value)
callmethod = object.__getattribute__(self, '_callmethod')
return callmethod('__setattr__', (key, value))
def __delattr__(self, key):
if key[0] == '_':
return object.__delattr__(self, key)
callmethod = object.__getattribute__(self, '_callmethod')
return callmethod('__delattr__', (key,))
class ValueProxy(BaseProxy):
_exposed_ = ('get', 'set')
def get(self):
return self._callmethod('get')
def set(self, value):
return self._callmethod('set', (value,))
value = property(get, set)
BaseListProxy = MakeProxyType('BaseListProxy', (
'__add__', '__contains__', '__delitem__', '__delslice__',
'__getitem__', '__getslice__', '__len__', '__mul__',
'__reversed__', '__rmul__', '__setitem__', '__setslice__',
'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
'reverse', 'sort', '__imul__'))
class ListProxy(BaseListProxy):
def __iadd__(self, value):
self._callmethod('extend', (value,))
return self
def __imul__(self, value):
self._callmethod('__imul__', (value,))
return self
DictProxy = MakeProxyType('DictProxy', (
'__contains__', '__delitem__', '__getitem__', '__len__',
'__setitem__', 'clear', 'copy', 'get', 'has_key', 'items',
'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'))
ArrayProxy = MakeProxyType('ArrayProxy', (
'__len__', '__getitem__', '__setitem__', '__getslice__', '__setslice__'))
PoolProxy = MakeProxyType('PoolProxy', (
'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
'map', 'map_async', 'terminate'))
PoolProxy._method_to_typeid_ = {'apply_async': 'AsyncResult',
'map_async': 'AsyncResult',
'imap': 'Iterator',
'imap_unordered': 'Iterator'}
class SyncManager(BaseManager):
pass
SyncManager.register('Queue', Queue.Queue)
SyncManager.register('JoinableQueue', Queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore, AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Pool', Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False) | [
"[email protected]"
]
| |
ea9458c6ac74bdd22e6377f815c1fe355c5d392b | 6d8c61b72db2afb6c6e7689ccf8efc8ea22a58b1 | /backend/manage.py | 13ed3e7ef0dd4a308ffbffd6fc7b8c72ed547b1b | []
| no_license | crowdbotics-apps/msm-sjshjs34-dev-12857 | 56f9eb4428489fb8141063badaaf8ec5a9380a10 | b8b82ad77d0dc7b5e3227183bb514d51f5084945 | refs/heads/master | 2022-12-18T21:14:01.968255 | 2020-10-07T06:39:26 | 2020-10-07T06:39:26 | 301,946,361 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'msm_sjshjs34_dev_12857.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
439395335e5c739d4423a055f98518f3acc20b6f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02580/s760103061.py | 925ab9a2461c29ace7811e08760b425431497890 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 872 | py | def resolve():
#n=int(input())
#a,b=map(int,input().split())
#x=list(map(int,input().split()))
#a=[list(map(lambda x:int(x)%2,input().split())) for _ in range(h)]
import sys
input = sys.stdin.readline
H,W,m=map(int,input().split())
bomb=set()
cnt1=[0 for i in range(H)]
cnt2=[0 for i in range(W)]
for i in range(m):
h,w=map(int,input().split())
bomb.add((h-1,w-1))
cnt1[h-1]+=1
cnt2[w-1]+=1
m1=max(cnt1)
m2=max(cnt2)
hk,wk=[],[]
for i,x in enumerate(cnt1):
if x==m1:
hk.append(i)
for i,x in enumerate(cnt2):
if x==m2:
wk.append(i)
ans=m1+m2
for i in hk:
for j in wk:
if (i,j) in bomb:
continue
print(ans)
exit()
print(ans-1)
if __name__ == '__main__':
resolve() | [
"[email protected]"
]
| |
391d9a14de710453f5a3315da0ab8bf862bcd5f2 | 93e5b82332af9f0d3e203d086e30794fb90a2086 | /ForKids/chapter11/dark_green_circle.py | e339c298cdebe175e39607c47f1ada7d0798a7ae | []
| no_license | swell1009/ex | cfaae0b5fe917f12416170dce60f7dea8194f368 | 29b274fb51adbdc43af6ebecaec89c97bc58be6f | refs/heads/master | 2020-04-04T10:15:20.578932 | 2018-11-22T06:27:30 | 2018-11-22T06:27:30 | 155,848,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import turtle
t = turtle.Pen()
def mycircle(red, green, blue):
t.color(red, green, blue)
t.begin_fill()
t.circle(50)
t.end_fill()
mycircle(0, 0.5, 0)
| [
"[email protected]"
]
| |
18e21f4ddcedd079fc47959f47eab0660531d78c | d326cd8d4ca98e89b32e6a6bf6ecb26310cebdc1 | /BioinformaticsStronghold/rna/rna.py | adb1292a38f53d25bd1f4e80d516beb0647b67f3 | []
| no_license | dswisher/rosalind | d6af5195cdbe03adb5a19ed60fcbf8c05beac784 | 4519740350e47202f7a45ce70e434f7ee15c6afc | refs/heads/master | 2021-08-09T02:58:17.131164 | 2017-11-12T01:26:26 | 2017-11-12T01:26:26 | 100,122,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | import sys
if len(sys.argv) < 2:
print 'You must specify the name of the file to load.'
sys.exit(1)
s = file(sys.argv[1]).read()
rna = ''
for c in s:
if c == 'T':
rna += 'U'
else:
rna += c
print rna
| [
"[email protected]"
]
| |
1c05115db2ff9b5a220b165aa2ded0b8753fb727 | 5c72ec8fb05492ebbb97c2a2a1b24e85c4a2b03e | /Algorithms and data structures/2 Data structures/Arrays/Reverse/reverse.py | f0e937528bea08b1ba71b9194fbe5c14007a6914 | []
| no_license | mxmaslin/Shultais-education | 2b732a9dfd713fcc4c7bd29101fae4e4c2d7ffae | b7e82aa34b304a090a1acd16d309f95550c59dca | refs/heads/master | 2020-08-31T21:21:28.801596 | 2019-11-05T16:24:43 | 2019-11-05T16:24:43 | 218,789,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,764 | py | class Array:
"""
Линейный статический массив.
"""
def __init__(self, size):
# Данные массива, изначально массив пустой и все его элементы заполнены None.
# То есть сразу выделяем массив фиксированного объема.
self.data = [None] * size
# Длина заполненного массива.
# По умолчанию 0, так как массив пустой.
self.length = 0
# Полный размер массива.
self.size = size
def append(self, value):
"""
Добавление нового элемента в конец линейного массива.
Время работы O(1).
"""
if self.length == self.size:
raise OverflowError
self.data[self.length] = value
self.length += 1
def reverse(self):
"""
Разворачивает массив.
"""
half_arr_length = self.length // 2
for i in range(half_arr_length):
self.data[i], self.data[self.length-i-1] = self.data[self.length-i-1], self.data[i]
def __str__(self):
"""
Возвращает все элементы массива в виде строки.
"""
return "[" + ", ".join(map(str, self.data[:self.length])) + "]"
array = Array(4)
array.append(6)
array.append(2)
array.append(1)
array.append(9)
array.reverse()
assert str(array) == '[9, 1, 2, 6]'
array = Array(5)
array.append(6)
array.append(2)
array.append(1)
array.append(9)
array.append(10)
array.reverse()
assert str(array) == '[10, 9, 1, 2, 6]'
| [
"[email protected]"
]
| |
8598a3824baa01b0f2e672df3a660f0fa4b48861 | 40e9169343968444c764b41f5945c6e00ad6ecd6 | /test_for_calc_test.py | 115d2e221016755bd9f2368e9c04786f6923ff2c | []
| no_license | a-bautista/ci-python | 62ff4b3a19f9c293f665a4e62a44936c9ac001ce | 19f9ef9db261897a3ef5977e710ca42ee98de662 | refs/heads/master | 2020-04-15T01:37:31.290988 | 2019-01-06T07:15:21 | 2019-01-06T07:15:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | import calc_test
class TestCalculator:
def test_addition(self):
assert 4 == calc_test.add(2,2)
def test_subtraction(self):
assert 2 == calc_test.subtract(4,2)
| [
"[email protected]"
]
| |
953c91eff3d40c25d9b77ced31dd89cc264c21d9 | 6982c3c54ee9199d93fb89c61cfdcba15b9b7012 | /fluentpython/chapter11/demo06.py | 94b5b1596302e2ffce45b78a46450e090bfe2b27 | []
| no_license | gzgdouru/python_study | a640e1097ebc27d12049ded53fb1af3ba9729bac | e24b39e82e39ee5a5e54566781457e18c90a122a | refs/heads/master | 2020-03-29T11:33:13.150869 | 2019-03-08T09:24:29 | 2019-03-08T09:24:29 | 149,858,658 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | '''
多继承和方法解析顺序
'''
class A:
def ping(self):
print("ping:", self)
class B(A):
def pong(self):
print("pong:", self)
class C(A):
def pong(self):
print("PONG:", self)
class D(B, C):
def ping(self):
super().ping()
print("pong-ping:", self)
def pingpong(self):
self.ping()
super().ping()
self.pong()
super().pong()
C.pong(self)
if __name__ == "__main__":
d = D()
d.pingpong() | [
"[email protected]"
]
| |
6d7d036f9849749bfb43e031add73277c67f841f | b2ba670818623f8ab18162382f7394baed97b7cb | /test-data/AndroidSlicer/Carnote/DD/17.py | 32b94fd9d389f16b7824dc82e3080ae2e9a2589b | [
"MIT"
]
| permissive | hsumyatwin/ESDroid-artifact | 012c26c40537a79b255da033e7b36d78086b743a | bff082c4daeeed62ceda3d715c07643203a0b44b | refs/heads/main | 2023-04-11T19:17:33.711133 | 2022-09-30T13:40:23 | 2022-09-30T13:40:23 | 303,378,286 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,385 | py | #start monkey test seedNo 0
import os;
from subprocess import Popen
from subprocess import PIPE
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice, MonkeyImage
from com.android.monkeyrunner.MonkeyDevice import takeSnapshot
from com.android.monkeyrunner.easy import EasyMonkeyDevice
from com.android.monkeyrunner.easy import By
from com.android.chimpchat.hierarchyviewer import HierarchyViewer
from com.android.monkeyrunner import MonkeyView
import random
import sys
import subprocess
from sys import exit
from random import randint
device = MonkeyRunner.waitForConnection()
package = 'com.spisoft.quicknote'
activity ='com.spisoft.quicknote.MainActivity'
runComponent = package+'/'+activity
device.startActivity(component=runComponent)
MonkeyRunner.sleep(0.5)
MonkeyRunner.sleep(0.5)
device.touch(945,1127, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(982,153, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(699,932, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(923,1695, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(963,1730, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(62,124, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(165,437, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(432,463, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(660,748, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(467,678, 'DOWN_AND_UP')
| [
"[email protected]"
]
| |
e4c2776272edbecaf19e7be9738e39202d4db21b | bf1257f42fbef87d3e2c41414c2215e46c3b7fa0 | /day3/db_demo_01/manage.py | 8a1ccc29497f268c38e449e6c4eae5e4df59509d | [
"Apache-2.0"
]
| permissive | gaohj/django1903 | 29746e32bdb56451ccde0a56534ded8da311f035 | af14b0e46831f64b25a791053be05e1bea457b73 | refs/heads/master | 2020-09-22T17:09:20.703440 | 2019-12-18T09:36:03 | 2019-12-18T09:36:03 | 225,280,582 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'db_demo_01.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
da294679cccbfb84c52a9e21d94a08a84ffd77b2 | 723e7aab2dddb92655801bd7cfe1469d08e664c6 | /reveal_fp7_module/reveal-popularity-prediction/reveal_popularity_prediction/output/wp5_output.py | 72312b2ee91f439c1e552471e58e9c0c7dd361a5 | [
"Apache-2.0"
]
| permissive | elceespatial/news-popularity-prediction | 2fdf1186782da7a7604aeffcbb3eeed46214b47e | 5f66982c659de017665116297bb4fd29ca13f835 | refs/heads/master | 2022-02-09T03:27:49.179710 | 2017-12-15T17:08:10 | 2017-12-15T17:08:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,663 | py | # -*- coding: <UTF-8> -*-
__author__ = 'Georgios Rizos ([email protected])'
import json
from kombu.utils import uuid
from reveal_popularity_prediction.output.rabbitmq_util import rabbitmq_server_service,\
establish_rabbitmq_connection,\
simple_notification, simpler_notification
def publish_to_wp5(prediction_json,
rabbitmq_dict,
assessment_id):
rabbitmq_uri = rabbitmq_dict["rabbitmq_uri"]
rabbitmq_queue = rabbitmq_dict["rabbitmq_queue"]
rabbitmq_exchange = rabbitmq_dict["rabbitmq_exchange"]
rabbitmq_routing_key = rabbitmq_dict["rabbitmq_routing_key"]
rabbitmq_channel = rabbitmq_dict["channel"]
# rabbitmq_server_service("restart")
# rabbitmq_connection = establish_rabbitmq_connection(rabbitmq_uri)
# Make wp5 json report.
json_report = make_w5_json_report(prediction_json,
assessment_id)
json_report_string = json.dumps(json_report)
# print("wp5", json_report_string)
# simple_notification(rabbitmq_connection, rabbitmq_queue, rabbitmq_exchange, rabbitmq_routing_key, json_report_string)
simpler_notification(rabbitmq_channel, rabbitmq_queue, rabbitmq_exchange, rabbitmq_routing_key, json_report_string)
def make_w5_json_report(prediction_json,
assessment_id):
json_report = dict()
tweet_url = form_tweet_url(prediction_json)
highly_controversial = is_highly_controversial(prediction_json)
json_report["certh:tweet_url"] = tweet_url
json_report["certh:highly_controversial"] = highly_controversial
json_report["certh:item_url"] = prediction_json["url"]
json_report["certh:time_posted"] = prediction_json["snapshots"][-1]["timestamp_list"][0]
json_report["certh:assessment_timestamp"] = prediction_json["assessment_timestamp"]
json_report["certh:assessment_id"] = assessment_id
json_report["certh:platform"] = prediction_json["platform_name"]
json_report["certh:current_time_stats"] = form_current_time_stats(prediction_json)
json_report["certh:prediction_stats"] = form_prediction_stats(prediction_json)
return json_report
def form_tweet_url(item):
user_screen_name = item["user_screen_name"]
tweet_id = item["tweet_id"]
tweet_url = "https://twitter.com/" + user_screen_name + "/status/" + repr(tweet_id)
return tweet_url
def is_highly_controversial(item):
if item["predictions"]["controversiality"] > 0.1:
highly_controversial = True
else:
highly_controversial = False
return highly_controversial
def form_features_dict(item):
features_dict = dict()
for feature_name, feature_value in item["snapshots"][-1]["features"].items():
features_dict["certh:" + feature_name] = feature_value
return features_dict
def form_current_time_stats(item):
current_time_stats_dict = dict()
current_time_stats_dict["certh:time_collected"] = item["tweet_timestamp"]
current_time_stats_dict["certh:features"] = form_features_dict(item)
if item["platform_name"] == "YouTube":
current_time_stats_dict["certh:user_set"] = ["https://www.youtube.com/channel/" + user_url for user_url in item["snapshots"][-1]["user_set"]]
elif item["platform_name"] == "Reddit":
current_time_stats_dict["certh:user_set"] = ["https://www.reddit.com/user/" + user_url for user_url in item["snapshots"][-1]["user_set"]]
else:
print("Invalid platform name.")
raise RuntimeError
current_time_stats_dict["certh:comment_count"] = item["targets"]["comment_count"]
current_time_stats_dict["certh:user_count"] = item["targets"]["user_count"]
current_time_stats_dict["certh:upvote_count"] = item["targets"]["upvote_count"]
current_time_stats_dict["certh:downvote_count"] = item["targets"]["downvote_count"]
current_time_stats_dict["certh:score"] = item["targets"]["score"]
current_time_stats_dict["certh:controversiality"] = item["targets"]["controversiality"]
return current_time_stats_dict
def form_prediction_stats(item):
prediction_stats_dict = dict()
prediction_stats_dict["certh:prediction_window"] = [item["prediction_window"]["prediction_lower_timestamp"],
item["prediction_window"]["prediction_upper_timestamp"]]
prediction_stats_dict["certh:comment_count_prediction"] = item["predictions"]["comments"]
prediction_stats_dict["certh:user_count_prediction"] = item["predictions"]["users"]
prediction_stats_dict["certh:score_prediction"] = item["predictions"]["score"]
prediction_stats_dict["certh:controversiality_prediction"] = item["predictions"]["controversiality"]
return prediction_stats_dict
def check_wp5_rabbitmq_connection(wp5_rabbitmq_connection,
wp5_rabbitmq_queue,
wp5_rabbitmq_exchange,
wp5_rabbitmq_routing_key,
rabbitmq_connection,
rabbitmq_queue,
rabbitmq_exchange,
rabbitmq_routing_key,
assessment_id):
wp5_rabbitmq_exchange = assessment_id + "_certh_popularity_prediction"
wp5_rabbitmq_queue = "certh_popularity_prediction.gen-%s" % uuid()
wp5_rabbitmq_routing_key = "reveal_routing"
if wp5_rabbitmq_connection is None:
wp5_rabbitmq_connection = rabbitmq_connection
return wp5_rabbitmq_connection,\
wp5_rabbitmq_queue,\
wp5_rabbitmq_exchange,\
wp5_rabbitmq_routing_key
| [
"[email protected]"
]
| |
ca2302979f2083dfd6d0626801152c39e5a0d6b1 | 8c2f8c6a355a0a0514ebeb8c686be43e38624c3a | /checkModels.py | 51dca65172a5dca80ff316af71b487d63c8284a3 | []
| no_license | nickvandewiele/RMG-tests | 0e5e16f904164444ded845a8784f18f31c8394ff | e07fafe5a6cb19c2f8df109a22445f34a45a0af4 | refs/heads/master | 2020-12-24T19:04:51.047741 | 2016-01-24T20:51:12 | 2016-01-24T20:51:12 | 50,184,904 | 0 | 0 | null | 2016-01-24T20:51:12 | 2016-01-22T14:05:46 | null | UTF-8 | Python | false | false | 10,094 | py | #!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2009-2011 by the RMG Team ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import sys
import os
import os.path
import math
import logging
import argparse
from rmgpy.tools.diff_models import execute
def parseCommandLineArguments():
parser = argparse.ArgumentParser()
parser.add_argument('name', metavar='NAME', type=str, nargs=1,
help='Name of test target model')
parser.add_argument('chemkin', metavar='CHEMKIN', type=str, nargs=1,
help='the Chemkin file of the tested model')
parser.add_argument('speciesDict', metavar='SPECIESDICT', type=str, nargs=1,
help='the species dictionary file of the tested model')
args = parser.parse_args()
return args
def main():
"""
Driver function that parses command line arguments and passes them to the execute function.
"""
args = parseCommandLineArguments()
initializeLog(logging.WARNING, 'comparison.log')
name = args.name[0]
chemkin = os.path.join(os.getcwd(), args.chemkin[0])
speciesDict = os.path.join(os.getcwd(), args.speciesDict[0])
check(name, chemkin, speciesDict)
def check(name, chemkin, speciesDict):
"""
Compare the provided chemkin model to the
default chemkin model.
"""
filename_chemkin = os.path.split(chemkin)[-1]
filename_spcDict = os.path.split(speciesDict)[-1]
folder = os.path.join(os.getcwd(),'testing/check/', name)
chemkinOrig = os.path.join(folder,filename_chemkin)
speciesDictOrig = os.path.join(folder,filename_spcDict)
kwargs = {
'wd': os.getcwd(),
'web': True,
}
thermo, thermoOrig = None, None
commonSpecies, uniqueSpeciesTest, uniqueSpeciesOrig, commonReactions, uniqueReactionsTest, uniqueReactionsOrig = \
execute(chemkin, speciesDict, thermo, chemkinOrig, speciesDictOrig, thermoOrig, **kwargs)
errorModel = checkModel(commonSpecies, uniqueSpeciesTest, uniqueSpeciesOrig, commonReactions, uniqueReactionsTest, uniqueReactionsOrig)
errorSpecies = checkSpecies(commonSpecies, uniqueSpeciesTest, uniqueSpeciesOrig)
errorReactions = checkReactions(commonReactions, uniqueReactionsTest, uniqueReactionsOrig)
def checkModel(commonSpecies, uniqueSpeciesTest, uniqueSpeciesOrig, commonReactions, uniqueReactionsTest, uniqueReactionsOrig):
"""
Compare the species and reaction count of both models.
"""
testModelSpecies = len(commonSpecies) + len(uniqueSpeciesTest)
origModelSpecies = len(commonSpecies) + len(uniqueSpeciesOrig)
logging.error('Test model has {} species.'.format(testModelSpecies))
logging.error('Original model has {} species.'.format(origModelSpecies))
testModelRxns = len(commonReactions) + len(uniqueReactionsTest)
origModelRxns = len(commonReactions) + len(uniqueReactionsOrig)
logging.error('Test model has {} reactions.'.format(testModelRxns))
logging.error('Original model has {} reactions.'.format(origModelRxns))
return (testModelSpecies != origModelSpecies) or (testModelRxns != origModelRxns)
def checkSpecies(commonSpecies, uniqueSpeciesTest, uniqueSpeciesOrig):
error = False
# check for unique species in one of the models:
if uniqueSpeciesOrig:
error = True
logging.error(
'The original model has {} species that the tested model does not have.'
.format(len(uniqueSpeciesOrig))
)
printSpecies(uniqueSpeciesOrig)
if uniqueSpeciesTest:
error = True
logging.error(
'The tested model has {} species that the original model does not have.'
.format(len(uniqueSpeciesTest))
)
printSpecies(uniqueSpeciesTest)
# check for different thermo among common species::
if commonSpecies:
for spec1, spec2 in commonSpecies:
logging.info(' {0!s}'.format(spec1))
if spec1.thermo and spec2.thermo:
if not spec1.thermo.isIdenticalTo(spec2.thermo):
error = True
logging.error('Non-identical thermo for tested {} and original species {}.'
.format(spec1.label, spec2.label)
)
printThermo(spec1)
printThermo(spec2)
return error
def checkReactions(commonReactions, uniqueReactionsTest, uniqueReactionsOrig):
error = False
# check for unique reactions in one of the models:
if uniqueReactionsOrig:
error = True
logging.error(
'The original model has {} reactions that the tested model does not have.'
.format(len(uniqueReactionsOrig))
)
printReactions(uniqueReactionsOrig)
if uniqueReactionsTest:
error = True
logging.error(
'The tested model has {} reactions that the original model does not have.'
.format(len(uniqueReactionsTest))
)
printReactions(uniqueReactionsTest)
if commonReactions:
for rxn1, rxn2 in commonReactions:
logging.info(' {0!s}'.format(rxn1))
if rxn1.kinetics and rxn2.kinetics:
if not rxn1.kinetics.isIdenticalTo(rxn2.kinetics):
error = True
logging.error('Non-identical kinetics for\ntested {}\nand original {} reaction.'
.format(rxn1, rxn2)
)
printKinetics(rxn1)
printKinetics(rxn2)
return error
def printReactions(reactions):
"""
"""
for rxn in reactions:
logging.error(
'rxn: {}'.format(rxn)
)
def printSpecies(spcs):
"""
"""
for spc in spcs:
logging.error(
'spc: {}'.format(spc)
)
def printKinetics(rxn):
"""
"""
logging.error(' k(300K,1bar) k(400K,1bar) k(500K,1bar) k(600K,1bar) k(800K,1bar) k(1000K,1bar) k(1500K,1bar) k(2000K,1bar) ')
logging.error(' {0:7.2f} {1:7.2f} {2:7.2f} {3:7.2f} {4:7.2f} {5:7.2f} {6:7.2f} {7:7.2f}'.format(
math.log10(rxn.kinetics.getRateCoefficient(300, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(400, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(500, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(600, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(800, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(1000, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(1500, 1e5)),
math.log10(rxn.kinetics.getRateCoefficient(2000, 1e5)),
))
def printThermo(spec):
"""
"""
logging.error(' Hf(300K) S(300K) Cp(300K) Cp(400K) Cp(500K) Cp(600K) Cp(800K) Cp(1000K) Cp(1500K)')
logging.error(' {0:7.2f} {1:7.2f} {2:7.2f} {3:7.2f} {4:7.2f} {5:7.2f} {6:7.2f} {7:7.2f} {8:7.2f}'.format(
spec.thermo.getEnthalpy(300) / 4184.,
spec.thermo.getEntropy(300) / 4.184,
spec.thermo.getHeatCapacity(300) / 4.184,
spec.thermo.getHeatCapacity(400) / 4.184,
spec.thermo.getHeatCapacity(500) / 4.184,
spec.thermo.getHeatCapacity(600) / 4.184,
spec.thermo.getHeatCapacity(800) / 4.184,
spec.thermo.getHeatCapacity(1000) / 4.184,
spec.thermo.getHeatCapacity(1500) / 4.184,
))
def initializeLog(verbose, log_file_name):
"""
Set up a logger for RMG to use to print output to stdout. The
`verbose` parameter is an integer specifying the amount of log text seen
at the console; the levels correspond to those of the :data:`logging` module.
"""
# Create logger
logger = logging.getLogger()
logger.setLevel(verbose)
# Create console handler and set level to debug; send everything to stdout
# rather than stderr
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(verbose)
logging.addLevelName(logging.CRITICAL, 'Critical: ')
logging.addLevelName(logging.ERROR, 'Error: ')
logging.addLevelName(logging.WARNING, 'Warning: ')
logging.addLevelName(logging.INFO, '')
logging.addLevelName(logging.DEBUG, '')
logging.addLevelName(1, '')
# Create formatter and add to console handler
formatter = logging.Formatter('%(levelname)s%(message)s')
ch.setFormatter(formatter)
# create file handler
fh = logging.FileHandler(filename=log_file_name) #, backupCount=3)
fh.setLevel(min(logging.DEBUG,verbose)) # always at least VERBOSE in the file
fh.setFormatter(formatter)
# remove old handlers!
while logger.handlers:
logger.removeHandler(logger.handlers[0])
# Add console and file handlers to logger
logger.addHandler(ch)
logger.addHandler(fh)
if __name__ == '__main__':
main() | [
"[email protected]"
]
| |
fbe8c840dba4c6a96ac98a3b1fa2ad50500dd26c | 763841bf8447c5490ebc3bf74523fd5470944a80 | /forms.py | eb7db2f2490088671fcd6c907f89330646e2a20b | []
| no_license | sumy7/little-bolg | bad7e4ad2755cd1e77edb44a3b1a52780714059f | 13d825ad4fb1ad6fc97f41259ff094664a4664bf | refs/heads/app_hxn_1 | 2021-01-10T23:21:22.062356 | 2016-10-08T14:40:36 | 2016-10-08T14:40:36 | 70,610,888 | 1 | 0 | null | 2016-10-11T16:01:40 | 2016-10-11T16:01:40 | null | UTF-8 | Python | false | false | 780 | py | from flask_wtf import FlaskForm
from wtforms import StringField,TextField,IntegerField
from wtforms.validators import DataRequired
class ArticleForm(FlaskForm):
title = StringField('title',validators = [DataRequired()])
content = TextField('content',validators = [DataRequired()])
class UserForm(FlaskForm):
username = StringField('username',validators = [DataRequired()])
userpass = StringField('password',validators = [DataRequired()])
class SignUpForm(FlaskForm):
username = StringField('username',validators = [DataRequired()])
userpass = StringField('password',validators = [DataRequired()])
email = StringField('email',validators=[DataRequired()])
class ReplyForm(FlaskForm):
content = TextField('content',validators = [DataRequired()])
| [
"[email protected]"
]
| |
11d131e67824ef499030da4c17db0ece1ce20b21 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_21956.py | a1a29e05be6d20512499012515acee2bcd1e343a | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | # Why can't my setup.py-generated script find my also-installed module?
>>> from mkdocs.mkdocs import main_entry_point
| [
"[email protected]"
]
| |
d980b7457b1a48b0e8331322105ae6eda61bfad7 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Projects/twilio/twilio/rest/events/v1/subscription/__init__.py | 2518ccf20431928bb5df6b6532f2ee53c8234e95 | [
"LicenseRef-scancode-other-permissive"
]
| permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:0255d8652c98e97ffafdedd4dd83a7f814b0062c7bb3a6ea44f2491fa9a04f72
size 15857
| [
"[email protected]"
]
| |
77757d4c47da531054934f91c32859169ad5780d | 29fa274ae2bf847df8d6f0b03bc28a78f52119aa | /dndsearch/web.py | 9c3dff4f39ba88c1cb4422dbc1f2a7e49bd38744 | []
| no_license | pranav/dndsearch | a6b42d354ea0a3d59dc9dc47f6ec511a820399d7 | 4f63c6b86fb2d53021b409f6ff6527ca8d286799 | refs/heads/master | 2020-12-24T14:45:41.684076 | 2014-12-21T07:31:19 | 2014-12-21T07:31:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | import os
import json
from flask import Flask, render_template
from sql import Page
app = Flask(__name__)
@app.route('/query/<query>', methods=['GET'])
def simple_query(query):
return json.dumps([{'book': p[0], 'page': p[1]} for p in Page.search(query)])
@app.route('/')
def render_home():
return render_template('home.html')
if __name__ == '__main__':
app.debug = False
try:
PORT = int(os.getenv('PORT'))
except IndexError:
PORT = 80
app.run(host='0.0.0.0', port=PORT)
| [
"[email protected]"
]
| |
e5ba59f2b9321764767fe0a59be34fd36dd50f78 | d6d0a751d2093b86f733648f9ba0af28e757662b | /serializer/mydev/myapp/models.py | 7e710ed20c5f36054f8faf6c86b19401adc1399c | []
| no_license | nivyashri05/Django_REST_API | 2d2460896d8f61eacfd873dd5d657a46a2f13eeb | 884037e18d11d13921b15f652833f2da10581beb | refs/heads/master | 2022-12-30T21:41:43.707944 | 2020-10-26T11:33:19 | 2020-10-26T11:33:19 | 307,348,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | from django.db import models
class Collegetb(models.Model):
deptid=models.IntegerField()
deptname=models.CharField(max_length=256)
depthod=models.CharField(max_length=256)
location=models.CharField(max_length=256)
| [
"[email protected]"
]
| |
6770180f243b6a6f2e41b662fcd4326032622ecb | fde950cc136ac38f9bd7e3e3c4a2e469df6c320a | /tests/core/test_commands.py | 4e655f9f3dbbac70f726f768cc6cda2fe954e119 | [
"MIT"
]
| permissive | Ilgrim/cwmud | 47180185f7462a1bb9fa3e338c167ffa87c87f63 | bee8b126a5e70edd0593dae9753a6be8d52357cf | refs/heads/master | 2022-03-12T02:39:35.550565 | 2019-10-19T21:51:56 | 2019-10-19T21:51:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,758 | py | # -*- coding: utf-8 -*-
"""Test for command management and processing."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2017 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
import pytest
from cwmud.core.commands import AlreadyExists, Command, CommandManager
class TestCommands:
"""A collection of tests for command management."""
commands = None
command_class = None
command = None
class _FakeSession:
pass
session = _FakeSession()
def test_command_manager_create(self):
"""Test that we can create a command manager.
This is currently redundant, importing the commands package already
creates one, but we can keep it for symmetry and in case that
isn't always so.
"""
type(self).commands = CommandManager()
assert self.commands
def test_command_manager_get_name(self):
"""Test that we can figure out the name for an argument."""
assert self.commands._get_name(Command) == "Command"
assert self.commands._get_name("TestCommand") == "TestCommand"
def test_command_manager_register(self):
"""Test that we can register new commands through a command manager."""
@self.commands.register
class TestCommand(Command):
"""A test command."""
def __init__(self, session, args):
super().__init__(session, args)
self.called = False
def _action(self):
self.called = True
type(self).command_class = TestCommand
assert "TestCommand" in self.commands
def test_command_manager_register_by_argument(self):
"""Test that we can register a new command by argument."""
self.commands.register(command=Command)
assert "Command" in self.commands
def test_command_manager_register_not_command(self):
"""Test that trying to register a non-command fails."""
with pytest.raises(TypeError):
self.commands.register(command=object())
def test_command_manager_register_already_exists(self):
"""Test that trying to register an existing command name fails."""
with pytest.raises(AlreadyExists):
self.commands.register(command=self.command_class)
def test_command_manager_contains(self):
"""Test that we can see if a command manager contains a command."""
assert "TestCommand" in self.commands
assert Command in self.commands
assert "some_nonexistent_command" not in self.commands
assert CommandManager not in self.commands
def test_command_manager_get_command(self):
"""Test that we can get a command from a command manager."""
assert self.commands["TestCommand"] is self.command_class
with pytest.raises(KeyError):
self.commands["some_nonexistent_command"].process()
def test_command_instance(self):
"""Test that we can create a command instance."""
type(self).command = self.command_class(None, ())
assert self.command
def test_command_execute_no_session(self):
"""Test that a command instance without a session won't execute."""
self.command.execute()
assert not self.command.called
def test_command_session_property(self):
"""Test that we can get and set the session property of a command."""
assert self.command.session is None
self.command.session = self.session
assert self.command.session is self.session
def test_command_execute(self):
"""Test that we can execute a command."""
self.command.execute()
assert self.command.called
| [
"[email protected]"
]
| |
0f21947ee7c2b9045e2bafdf343d409245ab9b40 | fa3e527114cd5799dddb0a25067da4923eae354e | /DataPrepare/FastSim/GAN/BES/Dedx/makeDataSet.py | 65596edf23a6fa591fe64774ab00af09397a15d7 | []
| no_license | wenxingfang/FastSim_ML | e64c6b56ce2afd703d1ddda0ada2de6f65fde049 | d2f1abbb2f6879313d5f4f137b64c4d8bf10fe83 | refs/heads/master | 2022-11-28T01:35:39.727895 | 2020-08-03T15:47:37 | 2020-08-03T15:47:37 | 284,734,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,755 | py | import ROOT as rt
import numpy as np
import h5py
import sys
import gc
import math
import argparse
rt.gROOT.SetBatch(rt.kTRUE)
from sklearn.utils import shuffle
#######################################
# use digi step data and use B field ##
# use cell ID for ECAL ##
# add HCAL
# add HoE cut
#######################################
def get_parser():
parser = argparse.ArgumentParser(
description='root to hdf5',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('--input', action='store', type=str,
help='input root file')
parser.add_argument('--output', action='store', type=str,
help='output root file')
parser.add_argument('--tag', action='store', type=str,
help='tag name for plots')
parser.add_argument('--str_particle', action='store', type=str,
help='e^{-}')
return parser
def plot_gr(gr,out_name,title):
canvas=rt.TCanvas("%s"%(out_name),"",800,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.1)
canvas.SetLeftMargin(0.13)
canvas.SetRightMargin(0.15)
if 'logy' in out_name:
canvas.SetLogy()
#gr.GetXaxis().SetTitle("#phi(AU, 0 #rightarrow 2#pi)")
#gr.GetYaxis().SetTitle("Z(AU) (-19.5 #rightarrow 19.5 m)")
#gr.SetTitle(title)
#gr.Draw("pcol")
gr.Draw("hist")
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
del canvas
gc.collect()
def plot_hist(hist,out_name,title):
canvas=rt.TCanvas("%s"%(out_name),"",800,800)
canvas.cd()
canvas.SetTopMargin(0.13)
canvas.SetBottomMargin(0.1)
canvas.SetLeftMargin(0.13)
canvas.SetRightMargin(0.15)
canvas.SetGridy()
canvas.SetGridx()
#h_corr.Draw("COLZ")
#h_corr.LabelsDeflate("X")
#h_corr.LabelsDeflate("Y")
#h_corr.LabelsOption("v")
hist.SetStats(rt.kFALSE)
#hist.GetXaxis().SetTitle("#Delta Z (mm)")
if 'x_z' in out_name:
#hist.GetYaxis().SetTitle("X (mm)")
hist.GetYaxis().SetTitle("cell X")
hist.GetXaxis().SetTitle("cell Z")
elif 'y_z' in out_name:
#hist.GetYaxis().SetTitle("#Delta Y (mm)")
hist.GetYaxis().SetTitle("cell Y")
hist.GetXaxis().SetTitle("cell Z")
elif 'z_r' in out_name:
hist.GetYaxis().SetTitle("bin R")
hist.GetXaxis().SetTitle("bin Z")
elif 'z_phi' in out_name:
hist.GetYaxis().SetTitle("bin #phi")
hist.GetXaxis().SetTitle("bin Z")
hist.SetTitle(title)
#hist.SetTitleSize(0.1)
#hist.Draw("COLZ TEXT")
hist.Draw("COLZ")
canvas.SaveAs("%s/%s.png"%(plot_path,out_name))
del canvas
gc.collect()
if __name__ == '__main__':
test_percent = 0.5
for_em = True
for_ep = False
plot_path = './plots/'
f_in = rt.TFile("/besfs/groups/cal/dedx/zhuk/calib/663/26577-27090/Simulation/hadron_track/electron/electron.root","READ")
tree = f_in.Get('n103')
print('entries=',tree.GetEntries())
h_pt = rt.TH1F('H_pt' , '', 220, 0, 2.2)
h_pt0 = rt.TH1F('H_pt0' , '', 220, 0, 2.2)
h_charge = rt.TH1F('H_charge' , '', 20 , -2, 2)
h_costheta = rt.TH1F('H_costheta' , '', 20 , -2, 2)
h_theta = rt.TH1F('H_theta' , '', 200 , 0, 200)
h_dEdx_meas = rt.TH1F('H_dEdx_meas' , '', 901,-1, 900)
h_dedx_theta= rt.TH2F('H_dedx_theta', '', 900, 0, 900, 200 , 0, 200)
h_dedx_pt = rt.TH2F('H_dedx_pt' , '', 900, 0, 900, 220 , 0, 2.2)
maxEvent = tree.GetEntries()
Data = np.full((maxEvent, 3), 0 ,dtype=np.float32)#init
for i in range(maxEvent):
tree.GetEntry(i)
ptrk = getattr(tree, 'ptrk')
charge = getattr(tree, 'charge')
costheta = getattr(tree, 'costheta')
dEdx_meas = getattr(tree, 'dEdx_meas')
if for_em and charge != -1: continue
if for_ep and charge != 1 : continue
Data[i,0] = ptrk/2.0
#Data[i,1] = charge
Data[i,1] = costheta
Data[i,2] = (dEdx_meas - 546)/(3*32)
h_pt .Fill(ptrk)
h_pt0 .Fill(math.sqrt(ptrk))
h_charge .Fill(charge)
h_costheta .Fill(costheta)
tmp_theta = math.acos(costheta)*180/math.pi
h_theta .Fill(tmp_theta)
h_dEdx_meas.Fill(dEdx_meas)
h_dedx_theta.Fill(dEdx_meas, tmp_theta)
h_dedx_pt .Fill(dEdx_meas, ptrk)
if True:
dele_list = []
for i in range(Data.shape[0]):
if Data[i,0]==0:
dele_list.append(i) ## remove the empty event
Data = np.delete(Data, dele_list, axis = 0)
print('final size=', Data.shape[0])
plot_gr(h_pt , "h_pt_track" ,"")
plot_gr(h_pt0 , "h_pt_track0","")
plot_gr(h_charge , "h_charge" ,"")
plot_gr(h_costheta , "h_costheta" ,"")
plot_gr(h_theta , "h_theta" ,"")
plot_gr(h_dEdx_meas, "h_dEdx_meas","")
plot_gr(h_dEdx_meas, "h_dEdx_meas_logy","")
plot_hist(h_dedx_theta, "h_dedx_theta","")
plot_hist(h_dedx_pt , "h_dedx_pt" ,"")
Data = shuffle(Data)
all_evt = Data.shape[0]
training_data = Data[0:int((1-test_percent)*all_evt) ,:]
test_data = Data[int((1-test_percent)*all_evt):all_evt,:]
theta_range0 = 35
theta_range1 = 145
training_data_barrel = training_data [ np.logical_and( np.arccos(training_data[:,1])*180/math.pi < 145, np.arccos(training_data[:,1])*180/math.pi > 35), : ]
test_data_barrel = test_data [ np.logical_and( np.arccos(test_data [:,1])*180/math.pi < 145, np.arccos(test_data [:,1])*180/math.pi > 35), : ]
training_data_endcap = training_data [ np.logical_or ( np.arccos(training_data[:,1])*180/math.pi > 145, np.arccos(training_data[:,1])*180/math.pi < 35), : ]
test_data_endcap = test_data [ np.logical_or ( np.arccos(test_data [:,1])*180/math.pi > 145, np.arccos(test_data [:,1])*180/math.pi < 35), : ]
hf = h5py.File('electron_train_barrel.h5', 'w')
hf.create_dataset('dataset', data=training_data_barrel)
print ('training_data_barrel shape=',training_data_barrel.shape)
hf.close()
hf = h5py.File('electron_train_endcap.h5', 'w')
hf.create_dataset('dataset', data=training_data_endcap)
print ('training_data_endcap shape=',training_data_endcap.shape)
hf.close()
hf = h5py.File('electron_test_barrel.h5', 'w')
hf.create_dataset('dataset' , data=test_data_barrel)
print ('test_data_barrel shape=',test_data_barrel.shape)
hf.close()
hf = h5py.File('electron_test_endcap.h5', 'w')
hf.create_dataset('dataset' , data=test_data_endcap)
print ('test_data_endcap shape=',test_data_endcap.shape)
hf.close()
print ('Done')
| [
"[email protected]"
]
| |
0d830f5647c98236a85d3ab95cba2544e777ab52 | ba916d93dfb8074241b0ea1f39997cb028509240 | /problems/min_cost_buy_candies.py | 43dde065bba3be50479abf9804d66f154e3f7138 | []
| no_license | satojkovic/algorithms | ecc1589898c61d2eef562093d3d2a9a2d127faa8 | f666b215bc9bbdab2d2257c83ff1ee2c31c6ff8e | refs/heads/master | 2023-09-06T08:17:08.712555 | 2023-08-31T14:19:01 | 2023-08-31T14:19:01 | 169,414,662 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | def min_cost_buy_candies(cost):
cost = sorted(cost, reverse=True)
return sum(cost) - sum(cost[2::3])
def test_min_cost_buy_candies():
assert min_cost_buy_candies([1]) == 1
assert min_cost_buy_candies([1, 2]) == 3
assert min_cost_buy_candies([3, 4, 10]) == 14
assert min_cost_buy_candies([1, 1, 1, 1, 1]) == 4
| [
"[email protected]"
]
| |
dfe69914c2a842af060ce7b0b27c48cba80fe47e | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-3675.py | 9191ecf1404a4163be64fc5579e07c3a47a98432 | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,757 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: $Type) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
| [
"[email protected]"
]
| |
5f8cb45c7040fd7e59a958d419df1cbcb228ccc9 | 8a4bc47685427204365b1668b3d7b5a6fd7546f1 | /packages/example/common/environment.py | 19665e4adcd19fef2aa761bcb6b45bee6664d2dc | []
| no_license | myronww/hello-service | 3b1705ad8c25a6763d5a9673086b01d388b7817a | 8b59054dd4cb09fb5f1697e14a050d8251b3ada8 | refs/heads/master | 2020-04-07T03:43:08.728638 | 2019-04-10T17:12:24 | 2019-04-10T17:12:24 | 158,027,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 289 | py |
import os
DEBUG = False
if "DEBUG" in os.environ:
if os.environ["DEBUG"].lower() in ["1", "true", "on"]:
DEBUG = True
DEVELOPER_MODE = False
if "DEVELOPER_MODE" in os.environ:
if os.environ["DEVELOPER_MODE"].lower() in ["1", "true", "on"]:
DEVELOPER_MODE = True
| [
"[email protected]"
]
| |
c6f4b6bb73e281de8c4aa2b5cececbe8004e9fc1 | 4c7eea9d402dbda526c880cb55339e73d783568f | /ch02/input_number.py | d2ea8590366c875de74036c781b035647bf2f81b | []
| no_license | kks4866/pyworks | 9dd8e9e66ba419c50e6111c42b9fb7311ce402fa | a3f599f96ae367e2cdc0da0ab9429186629f3c1c | refs/heads/master | 2023-06-28T23:54:18.640460 | 2021-07-06T04:08:52 | 2021-07-06T04:08:52 | 378,803,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | # 숫자를 입력 받기
print("숫자를 입력해 주세요: ")
num = int(input()) #num의 타입이 정수로 변환됨 - 형변환
print("제곱수: ", num*num)
print(type(num))
| [
"[email protected]"
]
| |
6c98862cef6bebdb92b27e051d1e0386d84cef74 | c8296b9479cbefd26bb3ebaaf6ab55bd090f9735 | /mitest/views/module.py | fa86e862af1f5372fd34923fcbb84eca9d020404 | []
| no_license | WangYongjun1990/test-core | 81db49f54e291bd09d329c7c2c560adcbb3b3466 | 0ec13174b58a41f35fce2bd2b895b8ac441dfd37 | refs/heads/master | 2022-12-12T11:45:48.502241 | 2018-08-17T08:38:55 | 2018-08-17T08:40:32 | 138,014,671 | 0 | 0 | null | 2022-12-08T02:10:28 | 2018-06-20T10:05:28 | Python | UTF-8 | Python | false | false | 4,878 | py | # -*- coding:utf-8 -*-
"""
File Name: `module`.py
Version:
Description:
Author: wangyongjun
Date: 2018/6/21 13:44
"""
import json
from flask import Blueprint
from flask_restful import Resource
from mitest.api.comm_log import logger
from mitest.views.wrappers import timer
from mitest.utils.common import get_request_json, make_response
from mitest.api.mysql_manager import ModuleInfoManager,TestsuiteInfoManager
module = Blueprint('module_interface', __name__)
import mitest.config.sit
from flask import Flask
Flask(__name__).config.from_object(mitest.config.sit)
class Module(Resource):
def __init__(self):
pass
@timer
def post(self, action):
data = get_request_json()
mim = ModuleInfoManager()
if action == 'add':
try:
system_id = data["systemId"]
module_name = data["moduleName"]
except KeyError:
return make_response({"code": "100", "desc": "入参校验失败"})
module_name_list = mim.query_module(system_id,module_name)
if len(module_name_list) != 0:
return make_response({"code": "201", "desc": "这个名称的测试模块已经存在"})
mim.insert_module(module_name = module_name,system_id = system_id)
return make_response({"code": "000", "desc": "{}模块添加成功".format(module_name)})
elif action == 'edit':
try:
id = data["id"]
module_name = data["moduleName"]
except KeyError:
return make_response({"code": "100", "desc": "入参校验失败"})
res_module_name = mim.query_module_id(id=id)[0].module_name
if res_module_name == module_name:
return make_response({"code": "201", "desc": "您修改的模块名称已存在"})
mim.update_module(id_=id,module_name=module_name)
return make_response({"code": "000", "desc": "操作成功"})
elif action == 'delete':
try:
id = data["id"]
except KeyError:
return make_response({"code": "100", "desc": "入参校验失败"})
mim.delete_module(id_=id)
return make_response({"code": "000", "desc": "操作成功"})
elif action == 'detail':
pass
elif action == 'list':
try:
system_id = data["systemId"]
except KeyError:
return make_response({"code": "100", "desc": "入参校验失败"})
module_list = mim.query_all_module(system_id=system_id)
res = list()
id = 0
for i in module_list:
module_dict = dict()
if module_list:
id += 1
module_dict["id"] = id
module_dict["moduleId"] = i.id
module_dict["label"] = i.module_name
testsuite_list = TestsuiteInfoManager.query_all_testsuite(module_id=i.id)
testsuite = list()
for j in testsuite_list:
testsuite_dict = dict()
if testsuite_list:
id += 1
testsuite_dict["id"] = id
testsuite_dict["testsuiteId"] = j.id
testsuite_dict["label"] = j.testsuite_name
testsuite.append(testsuite_dict)
module_dict["children"] = testsuite
res.append(module_dict)
return make_response({"code": "000", "desc": res})
elif action == 'queryBySystemId':
""" 根据SystemId查询系统下的所有模块
url: /module/queryBySystemId
input:
{"systemId":"9"}
output:
{
"code": "000",
"data": [
{
"systemId": 4,
"systemName": "申请"
}
]
}
"""
try:
system_id = data.pop('systemId')
except KeyError:
return make_response({"code": "100", "desc": "入参校验失败"})
obj = ModuleInfoManager.query_all_module(system_id)
module_list = []
for m in obj:
module_info_dic = {
"systemName": m.module_name,
"systemId": m.id,
}
module_list.append(module_info_dic)
return make_response({"code": "000", "data": module_list})
else:
return make_response({"code": "100", "desc": "url错误,不存在的接口动作<{action}>".format(action=action)})
if __name__ == '__main__':
Module = Module()
res = Module.post("list")
print(res) | [
"[email protected]"
]
| |
16343f081959d18ba076d1ee9ba2fc91db0493bf | a50bd0cbf51b0578fd8249785a35796dfbcb5728 | /poem/Poem/api/urls_internal.py | c664b83878667a62275dcf8f5dd34587c667d063 | [
"Apache-2.0"
]
| permissive | ARGOeu/poem | d24cc8126abec7dbae687ca0f854cabb54f982a4 | 40c17484c6184fe3cf6547401d258b95644fa18f | refs/heads/master | 2020-12-25T17:24:36.601759 | 2019-04-05T11:51:34 | 2019-04-05T11:51:34 | 26,312,136 | 0 | 2 | NOASSERTION | 2020-09-17T09:31:12 | 2014-11-07T09:08:48 | Python | UTF-8 | Python | false | false | 889 | py | from django.urls import path
from . import views_internal
app_name = 'poem'
urlpatterns = [
path('metrics/<str:group>', views_internal.ListMetricsInGroup.as_view(), name='metrics'),
path('tokens/', views_internal.ListTokens.as_view(), name='tokens'),
path('tokens/<str:name>', views_internal.ListTokenForTenant.as_view(), name='tokens'),
path('users/', views_internal.ListUsers.as_view(), name='users'),
path('groups/', views_internal.ListGroupsForUser.as_view(), name='groups'),
path('groups/<str:group>', views_internal.ListGroupsForUser.as_view(), name='groups'),
path('probes/<str:probe_name>', views_internal.ListProbes.as_view(), name='probes'),
path('aggregations/', views_internal.ListAggregations.as_view(), name='aggregations'),
path('aggregations/<str:aggregation_name>', views_internal.ListAggregations.as_view(), name='aggregations'),
]
| [
"[email protected]"
]
| |
c8f5b570ac0f747bd85d400b8384633527b4849d | 0a47f736deacb7e8d55adb807575617f873c4787 | /simulate_place_cell_Type_A_shape_inh.py | 1607594ee48b303dade8a14abf3460f419e13b91 | []
| no_license | domni/CA1Sim | 9bd23c746c325d3387b54303dccc0dcbb4f5ba1f | 37b90d15e7eec7073736460ac0f5ab26ad97b70c | refs/heads/master | 2021-01-15T15:23:47.726695 | 2016-07-18T02:18:02 | 2016-07-18T02:18:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,790 | py | __author__ = 'milsteina'
from specify_cells import *
from plot_results import *
import random
import sys
"""
In this version of the simulation, phase precession of CA3 inputs is implemented using the method from Chadwick et al.,
Elife, 2015, which uses a circular gaussian with a phase sensitivity factor that effectively compresses the range of
phases within each theta cycle that each input is active, which will reduce jitter across within-cycle input sequences.
"""
morph_filename = 'EB2-late-bifurcation.swc'
#mech_filename = '020516 altered km2 rinp - ampa nmda_kin5'
mech_filename = '043016 Type A - km2_NMDA_KIN5_Pr'
if len(sys.argv) > 1:
synapses_seed = int(sys.argv[1])
else:
synapses_seed = 0
if len(sys.argv) > 2:
num_exc_syns = int(sys.argv[2])
else:
num_exc_syns = 3200
if len(sys.argv) > 3:
num_inh_syns = int(sys.argv[3])
else:
num_inh_syns = 500
# whether to modulate the peak rate of all inhibitory inputs (0 = no, 1 = out of field at track start, 2 = in field)
# input_field_width)
if len(sys.argv) > 4:
mod_inh = int(sys.argv[4])
else:
mod_inh = 0
# the inhibitory conductances in-field are multiplied by a factor with this value at the peak of the
# field, and decays with cosine spatial modulation away from the field
if len(sys.argv) > 5:
shape_inh = float(sys.argv[5])
else:
shape_inh = 1.
# allows parallel computation of multiple trials for the same spines with the same peak_locs, but with different
# input spike trains and stochastic synapses for each trial
if len(sys.argv) > 6:
trial_seed = int(sys.argv[6])
else:
trial_seed = 0
rec_filename = 'output'+datetime.datetime.today().strftime('%m%d%Y%H%M')+'-pid'+str(os.getpid())+'-seed'+\
str(synapses_seed)+'-e'+str(num_exc_syns)+'-i'+str(num_inh_syns)+'-mod_inh'+str(mod_inh)+\
'-shape_inh_'+str(shape_inh)+'_'+str(trial_seed)
def get_dynamic_theta_phase_force(phase_ranges, peak_loc, input_field_duration, stim_t, dt):
"""
Expects a list of tuples containing times and phases relative to peak_loc and the non-modulated phase preference
(zero degrees). Returns a waveform of phase vs time.
:param phase_ranges: list of tuple (ms, degrees)
:param peak_loc:
:param input_field_duration:
:param stim_t:
:param dt:
:return: :class: 'np.array'
"""
start_phase_val = phase_ranges[0][1] * 2. * np.pi / 360. # convert degrees to radians
end_phase_val = phase_ranges[-1][1] * 2. * np.pi / 360. # convert degrees to radians
phase_force = np.ones_like(stim_t) * start_phase_val
phase_gradient = np.array([])
for i in range(len(phase_ranges)-1):
t0 = phase_ranges[i][0]
t1 = phase_ranges[i+1][0]
phase0 = phase_ranges[i][1] * 2. * np.pi / 360. # convert degrees to radians
phase1 = phase_ranges[i+1][1] * 2. * np.pi / 360.
del_t = t1 - t0
del_phase = phase1 - phase0
if abs(del_phase) > 0.:
del_phase = del_phase / del_t * dt
this_range_piece = np.arange(phase0, phase1, del_phase)
else:
this_range_piece = np.ones(del_t / dt) * phase0
phase_gradient = np.append(phase_gradient, this_range_piece)
if stim_t[0] <= peak_loc-input_field_duration*0.5 <= stim_t[-1]:
phase_start = np.where(peak_loc-input_field_duration*0.5 >= stim_t)[0]
if np.any(phase_start):
phase_start = phase_start[-1]
phase_end = min(len(stim_t), phase_start+len(phase_gradient))
phase_force[:phase_start] = start_phase_val
phase_force[phase_start:phase_end] = phase_gradient[:phase_end-phase_start]
phase_force[phase_end:] = end_phase_val
elif stim_t[0] <= peak_loc+input_field_duration*0.5 <= stim_t[-1]:
phase_end = np.where(peak_loc+input_field_duration*0.5 >= stim_t)[0]
if np.any(phase_end):
phase_end = phase_end[-1]
phase_start = max(0, phase_end-len(phase_gradient))
phase_force[:phase_start] = start_phase_val
phase_force[phase_start:phase_end] = phase_gradient[-(phase_end-phase_start):]
phase_force[phase_end:] = end_phase_val
return phase_force
def run_trial(simiter):
"""
:param simiter: int
"""
local_random.seed(simiter)
global_phase_offset = local_random.uniform(-np.pi, np.pi)
with h5py.File(data_dir+rec_filename+'-working.hdf5', 'a') as f:
f.create_group(str(simiter))
f[str(simiter)].create_group('train')
f[str(simiter)].create_group('inh_train')
f[str(simiter)].attrs['phase_offset'] = global_phase_offset / 2. / np.pi * global_theta_cycle_duration
if mod_inh > 0:
if mod_inh == 1:
mod_inh_start = int(track_equilibrate / dt)
mod_inh_stop = mod_inh_start + int(inhibitory_manipulation_duration * input_field_duration / dt)
elif mod_inh == 2:
mod_inh_start = int((track_equilibrate + modulated_field_center - 0.3 * input_field_duration) / dt)
mod_inh_stop = mod_inh_start + int(inhibitory_manipulation_duration * input_field_duration / dt)
elif mod_inh == 3:
mod_inh_start = 0
mod_inh_stop = len(stim_t)
sim.parameters['mod_inh_start'] = stim_t[mod_inh_start]
sim.parameters['mod_inh_stop'] = stim_t[mod_inh_stop-1]
index = 0
for group in stim_exc_syns:
for i, syn in enumerate(stim_exc_syns[group]):
# the stochastic sequence used for each synapse is unique for each trial,
# up to 1000 input spikes per spine
if excitatory_stochastic:
syn.randObj.seq(rand_exc_seq_locs[group][i]+int(simiter*1e3))
gauss_force = excitatory_peak_rate[group] * np.exp(-((stim_t - peak_locs[group][i]) / gauss_sigma)**2.)
if group in excitatory_precession_range:
phase_force = get_dynamic_theta_phase_force(excitatory_precession_range[group], peak_locs[group][i],
input_field_duration, stim_t, stim_dt)
theta_force = np.exp(excitatory_theta_phase_tuning_factor[group] * np.cos(phase_force +
excitatory_theta_phase_offset[group] - 2. * np.pi * stim_t /
global_theta_cycle_duration + global_phase_offset))
else:
theta_force = np.exp(excitatory_theta_phase_tuning_factor[group] *
np.cos(excitatory_theta_phase_offset[group] - 2. * np.pi * stim_t /
global_theta_cycle_duration + global_phase_offset))
theta_force -= np.min(theta_force)
theta_force /= np.max(theta_force)
theta_force *= excitatory_theta_modulation_depth[group]
theta_force += 1. - excitatory_theta_modulation_depth[group]
stim_force = np.multiply(gauss_force, theta_force)
train = get_inhom_poisson_spike_times(stim_force, stim_t, dt=stim_dt, generator=local_random)
syn.source.play(h.Vector(np.add(train, equilibrate + track_equilibrate)))
with h5py.File(data_dir+rec_filename+'-working.hdf5', 'a') as f:
f[str(simiter)]['train'].create_dataset(str(index), compression='gzip', compression_opts=9, data=train)
f[str(simiter)]['train'][str(index)].attrs['group'] = group
f[str(simiter)]['train'][str(index)].attrs['index'] = syn.node.index
f[str(simiter)]['train'][str(index)].attrs['type'] = syn.node.parent.parent.type
f[str(simiter)]['train'][str(index)].attrs['peak_loc'] = peak_locs[group][i]
index += 1
index = 0
for group in stim_inh_syns:
for syn in stim_inh_syns[group]:
inhibitory_theta_force = np.exp(inhibitory_theta_phase_tuning_factor[group] *
np.cos(inhibitory_theta_phase_offset[group] - 2. * np.pi * stim_t /
global_theta_cycle_duration + global_phase_offset))
inhibitory_theta_force -= np.min(inhibitory_theta_force)
inhibitory_theta_force /= np.max(inhibitory_theta_force)
inhibitory_theta_force *= inhibitory_theta_modulation_depth[group]
inhibitory_theta_force += 1. - inhibitory_theta_modulation_depth[group]
inhibitory_theta_force *= inhibitory_peak_rate[group]
stim_force = np.multiply(inhibitory_theta_force, cos_mod_inh)
if mod_inh > 0 and group in inhibitory_manipulation_fraction and syn in manipulated_inh_syns[group]:
if mod_inh == 3:
train = []
else:
stim_force[mod_inh_start:mod_inh_stop] = 0.
train = get_inhom_poisson_spike_times(stim_force, stim_t, dt=stim_dt,
generator=local_random)
else:
train = get_inhom_poisson_spike_times(stim_force, stim_t, dt=stim_dt,
generator=local_random)
syn.source.play(h.Vector(np.add(train, equilibrate + track_equilibrate)))
with h5py.File(data_dir+rec_filename+'-working.hdf5', 'a') as f:
f[str(simiter)]['inh_train'].create_dataset(str(index), compression='gzip', compression_opts=9,
data=train)
f[str(simiter)]['inh_train'][str(index)].attrs['group'] = group
f[str(simiter)]['inh_train'][str(index)].attrs['index'] = syn.node.index
f[str(simiter)]['inh_train'][str(index)].attrs['loc'] = syn.loc
f[str(simiter)]['inh_train'][str(index)].attrs['type'] = syn.node.type
index += 1
sim.run(v_init)
with h5py.File(data_dir+rec_filename+'-working.hdf5', 'a') as f:
sim.export_to_file(f, simiter)
if excitatory_stochastic:
f[str(simiter)].create_group('successes')
index = 0
for group in stim_exc_syns:
for syn in stim_exc_syns[group]:
f[str(simiter)]['successes'].create_dataset(str(index), compression='gzip', compression_opts=9,
data=np.subtract(syn.netcon('AMPA_KIN').get_recordvec().to_python(),
equilibrate + track_equilibrate))
index += 1
# save the spike output of the cell, removing the equilibration offset
f[str(simiter)].create_dataset('output', compression='gzip', compression_opts=9,
data=np.subtract(cell.spike_detector.get_recordvec().to_python(),
equilibrate + track_equilibrate))
NMDA_type = 'NMDA_KIN5'
equilibrate = 250. # time to steady-state
global_theta_cycle_duration = 150. # (ms)
input_field_width = 20 # (theta cycles per 6 standard deviations)
input_field_duration = input_field_width * global_theta_cycle_duration
track_length = 2.5 # field widths
track_duration = track_length * input_field_duration
track_equilibrate = 2. * global_theta_cycle_duration
duration = equilibrate + track_equilibrate + track_duration # input_field_duration
excitatory_peak_rate = {'CA3': 40., 'ECIII': 40.}
excitatory_theta_modulation_depth = {'CA3': 0.7, 'ECIII': 0.7}
# From Chadwick et al., ELife 2015
excitatory_theta_phase_tuning_factor = {'CA3': 0.8, 'ECIII': 0.8}
excitatory_precession_range = {}
excitatory_precession_range['CA3'] = [(-input_field_duration*0.5, 180.), (-input_field_duration*0.35, 180.),
(input_field_duration*0.35, -180.), (input_field_duration*0.5, -180.)] # (ms, degrees)
excitatory_theta_phase_offset = {}
excitatory_theta_phase_offset['CA3'] = 165. / 360. * 2. * np.pi # radians
excitatory_theta_phase_offset['ECIII'] = 0. / 360. * 2. * np.pi # radians
excitatory_stochastic = 1
inhibitory_manipulation_fraction = {'perisomatic': 0.325, 'axo-axonic': 0.325, 'apical dendritic': 0.325,
'distal apical dendritic': 0.325, 'tuft feedback': 0.325}
inhibitory_manipulation_duration = 0.6 # Ratio of input_field_duration
inhibitory_peak_rate = {'perisomatic': 40., 'axo-axonic': 40., 'apical dendritic': 40., 'distal apical dendritic': 40.,
'tuft feedforward': 40., 'tuft feedback': 40.}
inhibitory_theta_modulation_depth = {'perisomatic': 0.5, 'axo-axonic': 0.5, 'apical dendritic': 0.5,
'distal apical dendritic': 0.5, 'tuft feedforward': 0.5, 'tuft feedback': 0.5}
inhibitory_theta_phase_tuning_factor = {'perisomatic': 0.6, 'axo-axonic': 0.6, 'apical dendritic': 0.6,
'distal apical dendritic': 0.6, 'tuft feedforward': 0.6, 'tuft feedback': 0.6}
inhibitory_precession_range = {}
inhibitory_theta_phase_offset = {}
inhibitory_theta_phase_offset['perisomatic'] = 135. / 360. * 2. * np.pi # Like PV+ Basket
inhibitory_theta_phase_offset['axo-axonic'] = 45. / 360. * 2. * np.pi # Vargas et al., ELife, 2014
inhibitory_theta_phase_offset['apical dendritic'] = 200. / 360. * 2. * np.pi # Like PYR-layer Bistratified
inhibitory_theta_phase_offset['distal apical dendritic'] = 180. / 360. * 2. * np.pi # Like SR/SLM Border Cells
inhibitory_theta_phase_offset['tuft feedforward'] = 340. / 360. * 2. * np.pi # Like Neurogliaform
inhibitory_theta_phase_offset['tuft feedback'] = 200. / 360. * 2. * np.pi # Like SST+ O-LM
stim_dt = 0.02
dt = 0.02
v_init = -67.
syn_types = ['AMPA_KIN', NMDA_type]
local_random = random.Random()
# choose a subset of synapses to stimulate with inhomogeneous poisson rates
local_random.seed(synapses_seed)
cell = CA1_Pyr(morph_filename, mech_filename, full_spines=True)
#cell.set_terminal_branch_na_gradient()
cell.zero_na()
cell.insert_inhibitory_synapses_in_subset()
trunk_bifurcation = [trunk for trunk in cell.trunk if cell.is_bifurcation(trunk, 'trunk')]
if trunk_bifurcation:
trunk_branches = [branch for branch in trunk_bifurcation[0].children if branch.type == 'trunk']
# get where the thickest trunk branch gives rise to the tuft
trunk = max(trunk_branches, key=lambda node: node.sec(0.).diam)
trunk = (node for node in cell.trunk if cell.node_in_subtree(trunk, node) and 'tuft' in (child.type
for child in node.children)).next()
else:
trunk_bifurcation = [node for node in cell.trunk if 'tuft' in (child.type for child in node.children)]
trunk = trunk_bifurcation[0]
all_exc_syns = {sec_type: [] for sec_type in ['basal', 'trunk', 'apical', 'tuft']}
all_inh_syns = {sec_type: [] for sec_type in ['soma', 'ais', 'basal', 'trunk', 'apical', 'tuft']}
stim_exc_syns = {'CA3': [], 'ECIII': []}
stim_inh_syns = {'perisomatic': [], 'axo-axonic': [], 'apical dendritic': [], 'distal apical dendritic': [],
'tuft feedforward': [], 'tuft feedback': []}
stim_successes = []
peak_locs = {'CA3': [], 'ECIII': []}
# place synapses in trunk for inheritance of mechanisms (for testing)
if 'trunk' not in all_exc_syns:
for node in cell.trunk:
for spine in node.spines:
syn = Synapse(cell, spine, syn_types, stochastic=excitatory_stochastic)
# place synapses in every spine
for sec_type in all_exc_syns:
for node in cell.get_nodes_of_subtype(sec_type):
for spine in node.spines:
syn = Synapse(cell, spine, syn_types, stochastic=excitatory_stochastic)
all_exc_syns[sec_type].append(syn)
cell.init_synaptic_mechanisms()
# collate inhibitory synapses
for sec_type in all_inh_syns:
for node in cell.get_nodes_of_subtype(sec_type):
for syn in node.synapses:
if 'GABA_A_KIN' in syn._syn:
all_inh_syns[sec_type].append(syn)
sim = QuickSim(duration, cvode=0, dt=0.01)
sim.parameters['equilibrate'] = equilibrate
sim.parameters['track_equilibrate'] = track_equilibrate
sim.parameters['global_theta_cycle_duration'] = global_theta_cycle_duration
sim.parameters['input_field_duration'] = input_field_duration
sim.parameters['track_length'] = track_length
sim.parameters['duration'] = duration
sim.parameters['stim_dt'] = stim_dt
sim.append_rec(cell, cell.tree.root, description='soma', loc=0.)
sim.append_rec(cell, trunk_bifurcation[0], description='proximal_trunk', loc=1.)
sim.append_rec(cell, trunk, description='distal_trunk', loc=1.)
spike_output_vec = h.Vector()
cell.spike_detector.record(spike_output_vec)
# get the fraction of total spines contained in each sec_type
total_exc_syns = {sec_type: len(all_exc_syns[sec_type]) for sec_type in ['basal', 'trunk', 'apical', 'tuft']}
fraction_exc_syns = {sec_type: float(total_exc_syns[sec_type]) / float(np.sum(total_exc_syns.values())) for sec_type in
['basal', 'trunk', 'apical', 'tuft']}
for sec_type in all_exc_syns:
for i in local_random.sample(range(len(all_exc_syns[sec_type])), int(num_exc_syns*fraction_exc_syns[sec_type])):
syn = all_exc_syns[sec_type][i]
if sec_type == 'tuft':
stim_exc_syns['ECIII'].append(syn)
else:
stim_exc_syns['CA3'].append(syn)
# get the fraction of inhibitory synapses contained in each sec_type
total_inh_syns = {sec_type: len(all_inh_syns[sec_type]) for sec_type in ['soma', 'ais', 'basal', 'trunk', 'apical',
'tuft']}
fraction_inh_syns = {sec_type: float(total_inh_syns[sec_type]) / float(np.sum(total_inh_syns.values())) for sec_type in
['soma', 'ais', 'basal', 'trunk', 'apical', 'tuft']}
num_inh_syns = min(num_inh_syns, int(np.sum(total_inh_syns.values())))
for sec_type in all_inh_syns:
for i in local_random.sample(range(len(all_inh_syns[sec_type])), int(num_inh_syns*fraction_inh_syns[sec_type])):
syn = all_inh_syns[sec_type][i]
if syn.node.type == 'tuft':
if cell.is_terminal(syn.node):
# GABAergic synapses on terminal tuft branches are about 25% feedforward
group = local_random.choice(['tuft feedforward', 'tuft feedback', 'tuft feedback', 'tuft feedback'])
else:
# GABAergic synapses on intermediate tuft branches are about 50% feedforward
group = local_random.choice(['tuft feedforward', 'tuft feedback'])
elif syn.node.type == 'trunk':
distance = cell.get_distance_to_node(cell.tree.root, syn.node, syn.loc)
if distance <= 50.:
group = 'perisomatic'
elif distance <= 150.:
group = local_random.choice(['apical dendritic', 'apical dendritic', 'distal apical dendritic'])
else:
group = local_random.choice(['apical dendritic', 'distal apical dendritic', 'distal apical dendritic'])
elif syn.node.type == 'basal':
distance = cell.get_distance_to_node(cell.tree.root, syn.node, syn.loc)
group = 'perisomatic' if distance <= 50. and not cell.is_terminal(syn.node) else 'apical dendritic'
elif syn.node.type == 'soma':
group = 'perisomatic'
elif syn.node.type == 'apical':
distance = cell.get_distance_to_node(cell.tree.root, cell.get_dendrite_origin(syn.node), loc=1.)
if distance <= 150.:
group = local_random.choice(['apical dendritic', 'apical dendritic', 'distal apical dendritic'])
else:
group = local_random.choice(['apical dendritic', 'distal apical dendritic', 'distal apical dendritic'])
elif syn.node.type == 'ais':
group = 'axo-axonic'
stim_inh_syns[group].append(syn)
stim_t = np.arange(-track_equilibrate, track_duration, dt)
gauss_sigma = global_theta_cycle_duration * input_field_width / 3. / np.sqrt(2.) # contains 99.7% gaussian area
rand_exc_seq_locs = {}
for group in stim_exc_syns:
rand_exc_seq_locs[group] = []
if stim_exc_syns[group]:
peak_locs[group] = np.arange(-0.75 * input_field_duration, (0.75 + track_length) * input_field_duration,
(1.5 + track_length) * input_field_duration / int(len(stim_exc_syns[group])))
peak_locs[group] = peak_locs[group][:len(stim_exc_syns[group])]
for group in stim_exc_syns:
for syn in stim_exc_syns[group]:
#peak_loc = local_random.uniform(-0.75 * input_field_duration, (0.75 + track_length) * input_field_duration)
#peak_locs.append(peak_loc)
if excitatory_stochastic:
success_vec = h.Vector()
stim_successes.append(success_vec)
syn.netcon('AMPA_KIN').record(success_vec)
rand_exc_seq_locs[group].append(syn.randObj.seq())
# if syn.node.parent.parent not in [rec['node'] for rec in sim.rec_list]:
# sim.append_rec(cell, syn.node.parent.parent)
# sim.append_rec(cell, syn.node, object=syn.target('AMPA_KIN'), param='_ref_i', description='i_AMPA')
# sim.append_rec(cell, syn.node, object=syn.target(NMDA_type), param='_ref_i', description='i_NMDA')
# remove this synapse from the pool, so that additional "modulated" inputs
# can be selected from those that remain
all_exc_syns[syn.node.parent.parent.type].remove(syn)
# rand_inh_seq_locs = [] will need this when inhibitory synapses become stochastic
# stim_inh_successes = [] will need this when inhibitory synapses become stochastic
# modulate the weights of inputs with peak_locs along this stretch of the track
modulated_field_center = track_duration * 0.6
cos_mod_weight = {}
peak_mod_weight = 2.5
tuning_amp = (peak_mod_weight - 1.) / 2.
tuning_offset = tuning_amp + 1.
for group in stim_exc_syns:
this_cos_mod_weight = tuning_amp * np.cos(2. * np.pi / (input_field_duration * 1.2) * (peak_locs[group] -
modulated_field_center)) + tuning_offset
left = np.where(peak_locs[group] >= modulated_field_center - input_field_duration * 1.2 / 2.)[0][0]
right = np.where(peak_locs[group] > modulated_field_center + input_field_duration * 1.2 / 2.)[0][0]
cos_mod_weight[group] = np.array(this_cos_mod_weight)
cos_mod_weight[group][:left] = 1.
cos_mod_weight[group][right:] = 1.
peak_locs[group] = list(peak_locs[group])
cos_mod_weight[group] = list(cos_mod_weight[group])
indexes = range(len(peak_locs[group]))
local_random.shuffle(indexes)
peak_locs[group] = map(peak_locs[group].__getitem__, indexes)
cos_mod_weight[group] = map(cos_mod_weight[group].__getitem__, indexes)
for i, syn in enumerate(stim_exc_syns[group]):
syn.netcon('AMPA_KIN').weight[0] = cos_mod_weight[group][i]
manipulated_inh_syns = {}
for group in inhibitory_manipulation_fraction:
num_syns = int(len(stim_inh_syns[group]) * inhibitory_manipulation_fraction[group])
manipulated_inh_syns[group] = local_random.sample(stim_inh_syns[group], num_syns)
inh_tuning_amp = (shape_inh - 1.) / 2.
inh_tuning_offset = inh_tuning_amp + 1.
left = np.where(stim_t >= modulated_field_center - input_field_duration * 1.2 / 2.)[0][0]
right = np.where(stim_t > modulated_field_center + input_field_duration * 1.2 / 2.)[0][0]
cos_mod_inh = inh_tuning_amp * np.cos(2. * np.pi / (input_field_duration * 1.2) * (stim_t - modulated_field_center)) \
+ inh_tuning_offset
cos_mod_inh[:left] = 1.
cos_mod_inh[right:] = 1.
run_trial(trial_seed)
if os.path.isfile(data_dir+rec_filename+'-working.hdf5'):
os.rename(data_dir+rec_filename+'-working.hdf5', data_dir+rec_filename+'.hdf5')
| [
"[email protected]"
]
| |
83f39d930bf9b33f24dae1eff820a417779e8ba2 | a7b6741f345aad73117bc747c4e93e148f5fe769 | /Basic-Course/07-Flask-JWT-Extended/resources/user.py | 80d17e2cd51af9d4af3b4d76b35812651d483995 | [
"MIT"
]
| permissive | suzynakayama/python-flask-udemy | e07f2d30d1f4b66aae06d1dcd775bd58ed5d2083 | 95d2c5fa328e2f50d0893d73fd386fb713d1f12b | refs/heads/master | 2022-12-28T16:51:30.507702 | 2020-10-09T22:52:59 | 2020-10-09T22:52:59 | 299,960,039 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,889 | py | from flask_restful import Resource, reqparse
from werkzeug.security import safe_str_cmp
from flask_jwt_extended import (
create_access_token,
create_refresh_token,
jwt_refresh_token_required,
get_jwt_identity,
jwt_required,
get_raw_jwt
)
from models.user import UserModel
from blacklist import BLACKLIST
_user_parser = reqparse.RequestParser()
_user_parser.add_argument('username',
type=str,
required=True,
help="This field cannot be blank."
)
_user_parser.add_argument('password',
type=str,
required=True,
help="This field cannot be blank."
)
class UserRegister(Resource):
def post(self):
data = _user_parser.parse_args()
if UserModel.find_by_username(data['username']):
return {"message": "A user with that username already exists"}, 400
user = UserModel(**data)
user.save_to_db()
return {"message": "User created successfully."}, 201
class User(Resource):
@classmethod
def get(cls, user_id):
user = UserModel.find_by_id(user_id)
if not user:
return {'message': 'User not found.'}, 404
return user.json()
@classmethod
def delete(cls, user_id):
user = UserModel.find_by_id(user_id)
if not user:
return {'message': 'User not found.'}, 404
user.delete_from_db()
return {'message': 'User deleted from db.'}, 200
class UserLogin(Resource):
@classmethod
def post(cls):
# get data from parser
data = _user_parser.parse_args()
# find user in Db
user = UserModel.find_by_username(data['username'])
# check password / create access token / create refresh token / return tokens
if user and safe_str_cmp(user.password, data['password']):
access_token = create_access_token(identity=user.id, fresh=True) # `identity=` is the same as `identity()`
refresh_token = create_refresh_token(user.id)
return {
'access_token': access_token,
'refresh_token': refresh_token
}, 200
return {'message': 'Invalid credentials.'}, 401
class UserLogout(Resource):
@jwt_required
def get(self):
# we only want to blacklist the token, not the user itself, so they will have to login again
jti = get_raw_jwt()['jti'] # jwt id, a unique identifier for a jwt
BLACKLIST.add(jti)
return {'message': 'Successfully logged out.'}, 200
class TokenRefresh(Resource):
@jwt_refresh_token_required
def post(self):
current_user = get_jwt_identity()
new_token = create_access_token(identity=current_user, fresh=False)
return {'access_token': new_token}, 200 | [
"[email protected]"
]
| |
3e2a3416e9043be473464f5178214bc1d21ac811 | 2dfbb97b47fd467f29ffb26faf9a9f6f117abeee | /leetcode/151.py | 5ed59c72ca216a929c05706680799872276990c7 | []
| no_license | liuweilin17/algorithm | 0e04b2d36dfb6b7b1b0e0425daf69b62273c54b5 | d3e8669f932fc2e22711e8b7590d3365d020e189 | refs/heads/master | 2020-12-30T11:03:40.085105 | 2020-04-10T03:46:01 | 2020-04-10T03:46:01 | 98,844,919 | 3 | 1 | null | 2018-10-05T03:01:02 | 2017-07-31T03:35:14 | C++ | UTF-8 | Python | false | false | 688 | py | ###########################################
# Let's Have Some Fun
# File Name: 151.py
# Author: Weilin Liu
# Mail: [email protected]
# Created Time: Mon Aug 26 22:38:31 2019
###########################################
#coding=utf-8
#!/usr/bin/python
# 151. Reverse Words in a String
class Solution:
def reverseWords(self, s: str) -> str:
s = s.strip()
N = len(s)
ret = []
t = ''
for i in range(N-1, -1, -1):
if s[i] != ' ':
t = s[i] + t
elif t != '':
ret.append(t)
t = ''
else: pass
if t:
ret.append(t)
return ' '.join(ret)
| [
"[email protected]"
]
| |
d27a7cc8735ca16e17b623666767925f67dc0aa9 | 3bdf35a266547425501fdfe684ba6049f38b6690 | /Paddy/api/serializers.py | 7f93d5a7ecb0205720eb071b89616eab5485c1cb | []
| no_license | SIBU99/ChhatraViswaKarmaOurServerCode | a7b95a2a3b27c97b4488b1825721cc2b4063568d | b36e0f9e866f76b7d6a104897f62eacceb2faa22 | refs/heads/master | 2023-01-07T09:22:24.428522 | 2020-11-10T09:02:13 | 2020-11-10T09:02:13 | 311,376,593 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | from rest_framework import serializers
from ..models import Paddy
from Account.api.serializers import FarmerSerializer
from Account.models import Farmer
from rest_framework.exceptions import ValidationError
class PaddySerializer(serializers.ModelSerializer):
"This the serializer for the model : Corn"
farmer = FarmerSerializer(read_only = True)
class Meta:
model = Paddy
fields = [
"id",
"farmer",
"image",
"result_tag",
"disease1",
"disease2",
"disease3",
"when",
"map_disease",
]
| [
"[email protected]"
]
| |
0438f4c787caade1a5b8e1a3ef8e3b498ef03a8d | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Fisher/trend_Lag1Trend/cycle_30/ar_12/test_artificial_32_Fisher_Lag1Trend_30_12_0.py | fb2a9442dbc8a4731f3e87bf0942881517feb6bd | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 266 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "Fisher", sigma = 0.0, exog_count = 0, ar_order = 12); | [
"[email protected]"
]
| |
bda7687bc1bb236ab785aa669b8d8f79f222c32c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_coffined.py | 156a15c662f97716fc2ceae82faeba7aa8cdc092 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
#calss header
class _COFFINED():
def __init__(self,):
self.name = "COFFINED"
self.definitions = coffin
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['coffin']
| [
"[email protected]"
]
| |
1d4a679d7e01b82f721f9675a3b7087c03fc0dfe | b000127408f96db7411f301553585f5da0e426cd | /code/Letter-Tile-Possibilities.py | faf5ee09133f12527c8ee128149d054486b16934 | []
| no_license | SaiVK/Leetcode-Archive | 5f758faf97d1ab559c4c75d26ae5cf7a256baef8 | 56bafeaaced2d0fd3b3d2f1a0365d24d5b41e504 | refs/heads/master | 2022-11-23T08:50:17.610247 | 2020-07-27T02:09:53 | 2020-07-27T02:09:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | import itertools
class Solution(object):
def numTilePossibilities(self, tiles):
a = 0
for e in range(1, (len(tiles) + 1)):
for val in set(itertools.permutations(tiles, e)):
a += 1
return a
| [
"[email protected]"
]
| |
c63f96624ea7308bc8b1af5b80a4d286f0439386 | 06476bc4cb7fc3ce378beb357fac7d5aacb87b3b | /Prototype/env/lib/python3.8/site-packages/pyfilterbank/rbj_audio_eq.py | ea96d6f1ca70909b6076a14e89b3e8c77d95a2cb | [
"MIT"
]
| permissive | marc-ortuno/VOPEC | 44d3a74d3e0686474dd57fcb21e845fd5fd48897 | e7ed1f13cc1868a824f4036dd08ec6bed4266c08 | refs/heads/main | 2023-06-12T19:15:18.060897 | 2021-07-01T17:15:03 | 2021-07-01T17:15:03 | 344,433,646 | 0 | 0 | MIT | 2021-06-14T19:15:47 | 2021-03-04T10:22:05 | Python | UTF-8 | Python | false | false | 5,621 | py | from numpy import sqrt, pi, cos, sin, sinh, log
def rbj_sos(filtertype, sample_rate, f0, gain_db=None,
q_factor=None, band_width=None, shelf_slope=None):
if 'shelf' in filtertype and not shelf_slope:
raise(ValueError('shelf_slope mus be specified.'))
w0 = 2*pi * f0/sample_rate
amplitude = None if not gain_db else sqrt(10**(gain_db/20.0))
alpha = _compute_alpha(amplitude, w0, q_factor, band_width, shelf_slope)
params = {'amplitude': amplitude, 'w0': w0, 'alpha': alpha}
filterfun = _filtertype_to_filterfun_dict[filtertype]
sos = filterfun(**params)
return sos
class RbjEqCascade:
def __init__(self, sample_rate):
self._sample_rate = sample_rate
self._sosmat = []
self._filterlist = []
def add(self, filtertype):
self._filtertypelist += [filtertype]
filtobj = RbjEq(filtertype, self._sample_rate)
self._filterlist += [filtobj]
self._sosmat += [filtobj.sos]
class RbjEq:
def __init__(self, filtertype, sample_rate, params=None):
self._filtertype = filtertype
self._sample_rate = sample_rate
self._filterfun = _filtertype_to_filterfun_dict[filtertype]
if not params:
params, param_names = _get_params_filtertype(filtertype)
self._params = params
self._update(**params)
def update(self, f0,
gain_db=None,
q_factor=None,
band_width=None,
shelf_slope=None):
w0 = 2*pi * f0/self.sample_rate
amplitude = None if not gain_db else sqrt(10**(gain_db/20.0))
alpha = _compute_alpha(amplitude, w0, q_factor, band_width, shelf_slope)
params = {'amplitude': amplitude, 'w0': w0, 'alpha': alpha}
self._sos = self._filterfun(**params)
@property
def sos(self):
return self._sos
@property
def params(self):
return self._params
@params.setter
def params(self, value):
self._params = value
self.update(**self.params)
def _compute_alpha(amplitude=None, w0=None, q_factor=None,
band_width=None,
shelf_slope=None):
if q_factor:
return sin(w0) / (2*q_factor)
elif band_width:
return sin(w0) * sinh(0.5*log(2.0) * band_width * w0/sin(w0))
elif shelf_slope:
return sin(w0) / 2 * sqrt((amplitude + 1/alpha) * (1/shelf_slope - 1) +2)
else:
raise(ValueError(
'''You need to specify at least one of:
q_factor, band_width or shelf_slope.'''))
def _lowpass(w0, alpha):
b0 = (1 - cos(w0)) / 2.0
b1 = 1 - cos(w0)
b2 = (1 - cos(w0)) / 2.0
a0 = 1 + alpha
a1 = -2 * cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _highpass(w0, alpha):
b0 = (1 + cos(w0)) / 2.0
b1 = -(1 + cos(w0))
b2 = (1 + cos(w0)) / 2.0
a0 = 1 + alpha
a1 = -2 * cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _bandpassQ(w0, alpha):
b0 = sin(w0) / 2.0 # = Q*alpha
b1 = 0.0
b2 = -sin(w0) / 2.0 # = -Q*alpha
a0 = 1 + alpha
a1 = -2 * cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _bandpass(w0, alpha):
b0 = alpha
b1 = 0.0
b2 = -alpha
a0 = 1 + alpha
a1 = -2 * cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _notch(w0, alpha):
b0 = 1.0
b1 = -2 * cos(w0)
b2 = 1.0
a0 = 1 + alpha
a1 = -2 * cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _apf(w0, alpha):
b0 = 1 - alpha
b1 = -2 * cos(w0)
b2 = 1 + alpha
a0 = 1 + alpha
a1 = -2 *cos(w0)
a2 = 1 - alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _peq(amplitude, w0, alpha):
b0 = 1 + alpha*amplitude
b1 = -2 * cos(w0)
b2 = 1 - alpha*amplitude
a0 = 1 + alpha/amplitude
a1 = -2 * cos(w0)
a2 = 1 - alpha/amplitude
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _lowshelf(amplitude, w0, alpha):
b0 = amplitude*((amplitude+1) - (amplitude-1)*cos(w0) + 2*sqrt(amplitude)*alpha)
b1 = 2*amplitude*((amplitude-1) - (amplitude+1)*cos(w0))
b2 = amplitude*((amplitude+1) - (amplitude-1)*cos(w0) - 2*sqrt(amplitude)*alpha)
a0 = (amplitude+1) + (amplitude-1)*cos(w0) + 2*sqrt(amplitude)*alpha
a1 = -2*((amplitude-1) + (amplitude+1)*cos(w0))
a2 = (amplitude+1) + (amplitude-1)*cos(w0) - 2*sqrt(amplitude)*alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
def _highshelf(amplitude, w0, alpha):
b0 = amplitude*((amplitude+1) + (amplitude-1)*cos(w0) + 2*sqrt(amplitude)*alpha)
b1 = -2*amplitude*((amplitude-1) + (amplitude+1)*cos(w0))
b2 = amplitude*((amplitude+1) + (amplitude-1)*cos(w0) - 2*sqrt(amplitude)*alpha)
a0 = (amplitude+1) - (amplitude-1)*cos(w0) + 2*sqrt(amplitude)*alpha
a1 = 2*((amplitude-1) - (amplitude+1)*cos(w0))
a2 = (amplitude+1) - (amplitude-1)*cos(w0) - 2*sqrt(amplitude)*alpha
sos = array([b0, b1, b2, a0, a1, a2]) / a0
return sos
_filtertype_to_filterfun_dict = {
'lowpass': _lowpass,
'highpass': _highpass,
'bandpassQ': _bandpassQ,
'bandpass': _bandpass,
'notch': _notch,
'apf': _apf,
'peq': _peq,
'lowshelf': _lowshelf,
'highshelf': _highshelf,
}
available_filtertypes = list(_filtertype_to_filterfun_dict.keys())
| [
"[email protected]"
]
| |
c9886d0249b8595088fa4db7338186268e2c81b8 | 8c77b0d14dd720a89470f2aa6243a8e8d4167424 | /py4e/CodeUp/1072_repeat_execute.py | 619fba19b76681461ab606ae0c0cac0991d2d90d | []
| no_license | oshsage/Python_Pandas | 64909c10fd98b0f2290c081dde9e6b87e17032e4 | a131598d62c834d63979eda56ea15763e35fab4e | refs/heads/master | 2022-12-19T19:32:20.436935 | 2020-10-24T07:17:10 | 2020-10-24T07:17:10 | 298,832,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | # n개의 정수가 순서대로 입력된다.
# -2147483648 ~ +2147483647, 단 n의 최대 개수는 알 수 없다.
# n개의 입력된 정수를 순서대로 출력해보자.
# while( ), for( ), do~while( ) 등의 반복문을 사용할 수 없다.
cnt = int(input())
i = 1
list = list(input().split(' '))
while i < cnt+1:
print(list[i-1])
i += 1
# 사용한 개념: while, +=, list[n]
# list[n]: 리스트의 n번째 항. n은 0부터 시작한다! | [
"[email protected]"
]
| |
0908c5536c97723509afee287b17b5b5981324ec | 5cec1ff43bf38cf31316254dabe3f972d38744ad | /src/hydrat/classifier/scikits_learn.py | f471c9e8b1665a01199d88be5270701ded21ba07 | []
| no_license | eyadsibai/hydrat | 7fb63f3c54f1fca25d04ab7266712c1077ffa2e3 | 5a68c6b8f32bc6bad59c3f002340bf7ef62e868c | refs/heads/master | 2016-09-06T14:16:46.082697 | 2013-08-06T05:14:02 | 2013-08-06T05:14:02 | 33,199,904 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,423 | py | """
hydrat's interface to scikit.learn
http://scikit-learn.sourceforge.net
Marco Lui <[email protected]> October 2010
"""
import numpy
from hydrat.task.sampler import isOneofM
from hydrat.classifier.abstract import Learner, Classifier, NotInstalledError
class ScikitL(Learner):
"""
Lightweight wrapper for scikit's learner interface
"""
__name__ = 'scikit'
def __init__(self, learn_class, **kwargs):
Learner.__init__(self)
self.learn_class = learn_class
self.kwargs = kwargs
def _check_installed(self):
try:
import sklearn
except ImportError:
raise NotInstalledError("sklearn not installed")
def is_pickleable(self):
# TODO: Mark as false until we look into this more closely
return False
def _params(self):
md = dict(self.kwargs)
md['learner'] = self.learn_class.__name__
return md
def _learn(self, feature_map, class_map):
if not isOneofM(class_map):
raise ValueError, "can only use one-of-m classmaps"
learner = self.learn_class(**self.kwargs)
targets = class_map.argmax(axis=1)
learner.fit(feature_map.todense(), targets)
return ScikitC(learner, class_map.shape[1])
class ScikitC(Classifier):
__name__ = 'scikits'
def __init__(self, learner, num_class):
Classifier.__init__(self)
self.learner = learner
self.num_class = num_class
def _classify(self, feature_map):
if hasattr(self.learner, 'predict_proba'):
# use probabilistic output
classif = self.learner.predict_proba(feature_map.todense())
else:
pred = self.learner.predict(feature_map.todense())
classif = numpy.zeros((feature_map.shape[0], self.num_class), dtype='bool')
for i,p in enumerate(pred):
classif[i,p] = True
return classif
# Convenience methods
from sklearn import svm
def SVC(**kwargs): return ScikitL(svm.sparse.SVC, **kwargs)
def NuSVC(**kwargs): return ScikitL(svm.sparse.NuSVC, **kwargs)
def LinearSVC(**kwargs): return ScikitL(svm.sparse.LinearSVC, **kwargs)
from sklearn.ensemble import RandomForestClassifier
def RandomForest(**kwargs): return ScikitL(RandomForestClassifier, **kwargs)
# TODO: There are generalized linear models available for sparse features
# TODO: Some of the classifiers are only implemented for dense features, could investigate
# using them but would need to be careful of very large spaces.
# TODO: Warn if scikits.learn is not installed
| [
"[email protected]"
]
| |
156830a676d14598a35ddbffe19483abee65d4ef | a4a63eedacd544872fbfa33fc58d7cf1558829b7 | /backend/manage.py | d16b861497712dfed65ee38b10618872047b7a96 | []
| no_license | crowdbotics-apps/revil-18107 | 3d9bd52855e33debaa60f4f5c801629fb1aa60da | 2671f3410b43cd8ed2ccc51780a80366fb594684 | refs/heads/master | 2022-10-17T09:34:39.097853 | 2020-06-15T00:05:02 | 2020-06-15T00:05:02 | 272,301,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "revil_18107.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
8c1f992909d87f31cc7c977d65ad54a7e5165a10 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03200/s727157767.py | 3bb8da266eec20606492b32fc65225b1b0ffa5ff | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | s = list(input())
ans = 0
cntB = 0
for i in range(len(s)):
if s[i] == 'B':
cntB += 1
else:
ans += cntB
print(ans) | [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.