blob_id (string, len 40-40) | directory_id (string, len 40-40) | path (string, len 3-616) | content_id (string, len 40-40) | detected_licenses (sequence, len 0-112) | license_type (2 classes) | repo_name (string, len 5-115) | snapshot_id (string, len 40-40) | revision_id (string, len 40-40) | branch_name (777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable ⌀) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable ⌀) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable ⌀) | gha_language (149 classes) | src_encoding (26 classes) | language (1 class) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 to 10.2M) | extension (188 classes) | content (string, len 3 to 10.2M) | authors (sequence, len 1-1) | author_id (string, len 1-132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a468ac84eca5711fba49ccbc853f7c7e6841ca2f | 0953f9aa0606c2dfb17cb61b84a4de99b8af6d2c | /python/ray/tests/test_component_failures_2.py | e0bebf7bd94d429633105b75055895a0c3c07d53 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | oscarknagg/ray | da3dc03e24945ff4d5718fd35fc1b3408d8907eb | 20d47873c9e8f5bbb80fe36e5d16256c337c4db3 | refs/heads/master | 2023-09-01T01:45:26.364731 | 2021-10-21T07:46:52 | 2021-10-21T07:46:52 | 382,402,491 | 2 | 1 | Apache-2.0 | 2021-09-15T12:34:41 | 2021-07-02T16:25:05 | Python | UTF-8 | Python | false | false | 5,689 | py | import os
import signal
import sys
import time

import pytest

import ray
import ray.ray_constants as ray_constants
from ray.cluster_utils import Cluster
from ray._private.test_utils import (
    RayTestTimeoutException,
    get_other_nodes,
    wait_for_condition,
)

SIGKILL = signal.SIGKILL if sys.platform != "win32" else signal.SIGTERM


@pytest.fixture(params=[(1, 4), (4, 4)])
def ray_start_workers_separate_multinode(request):
    num_nodes = request.param[0]
    num_initial_workers = request.param[1]
    # Start the Ray processes.
    cluster = Cluster()
    for _ in range(num_nodes):
        cluster.add_node(num_cpus=num_initial_workers)
    ray.init(address=cluster.address)

    yield num_nodes, num_initial_workers

    # The code after the yield will run as teardown code.
    ray.shutdown()
    cluster.shutdown()


def test_worker_failed(ray_start_workers_separate_multinode):
    num_nodes, num_initial_workers = ray_start_workers_separate_multinode
    if num_nodes == 4 and sys.platform == "win32":
        pytest.skip("Failing on Windows.")

    @ray.remote
    def get_pids():
        time.sleep(0.25)
        return os.getpid()

    start_time = time.time()
    pids = set()
    while len(pids) < num_nodes * num_initial_workers:
        new_pids = ray.get([
            get_pids.remote()
            for _ in range(2 * num_nodes * num_initial_workers)
        ])
        for pid in new_pids:
            pids.add(pid)
        if time.time() - start_time > 60:
            raise RayTestTimeoutException(
                "Timed out while waiting to get worker PIDs.")

    @ray.remote
    def f(x):
        time.sleep(0.5)
        return x

    # Submit more tasks than there are workers so that all workers and
    # cores are utilized.
    object_refs = [f.remote(i) for i in range(num_initial_workers * num_nodes)]
    object_refs += [f.remote(object_ref) for object_ref in object_refs]

    # Allow the tasks some time to begin executing.
    time.sleep(0.1)

    # Kill the workers as the tasks execute.
    for pid in pids:
        try:
            os.kill(pid, SIGKILL)
        except OSError:
            # The process may have already exited due to worker capping.
            pass
        time.sleep(0.1)

    # Make sure that we either get the object or we get an appropriate
    # exception.
    for object_ref in object_refs:
        try:
            ray.get(object_ref)
        except (ray.exceptions.RayTaskError,
                ray.exceptions.WorkerCrashedError):
            pass


def _test_component_failed(cluster, component_type):
    """Kill a component on all worker nodes and check workload succeeds."""
    # Submit many tasks with many dependencies.
    @ray.remote
    def f(x):
        return x

    @ray.remote
    def g(*xs):
        return 1

    # Kill the component on all nodes except the head node as the tasks
    # execute. Do this in a loop while submitting tasks between each
    # component failure.
    time.sleep(0.1)
    worker_nodes = get_other_nodes(cluster)
    assert len(worker_nodes) > 0
    for node in worker_nodes:
        process = node.all_processes[component_type][0].process
        # Submit a round of tasks with many dependencies.
        x = 1
        for _ in range(1000):
            x = f.remote(x)

        xs = [g.remote(1)]
        for _ in range(100):
            xs.append(g.remote(*xs))
            xs.append(g.remote(1))

        # Kill a component on one of the nodes.
        process.terminate()
        time.sleep(1)
        process.kill()
        process.wait()
        assert not process.poll() is None

        # Make sure that we can still get the objects after the
        # executing tasks died.
        ray.get(x)
        ray.get(xs)


def check_components_alive(cluster, component_type, check_component_alive):
    """Check that a given component type is alive on all worker nodes."""
    worker_nodes = get_other_nodes(cluster)
    assert len(worker_nodes) > 0
    for node in worker_nodes:
        process = node.all_processes[component_type][0].process
        if check_component_alive:
            assert process.poll() is None
        else:
            print("waiting for " + component_type + " with PID " +
                  str(process.pid) + " to terminate")
            process.wait()
            print("done waiting for " + component_type + " with PID " +
                  str(process.pid) + " to terminate")
            assert not process.poll() is None


@pytest.mark.parametrize(
    "ray_start_cluster", [{
        "num_cpus": 8,
        "num_nodes": 4,
        "_system_config": {
            "num_heartbeats_timeout": 10
        },
    }],
    indirect=True)
def test_raylet_failed(ray_start_cluster):
    cluster = ray_start_cluster
    # Kill all raylets on worker nodes.
    _test_component_failed(cluster, ray_constants.PROCESS_TYPE_RAYLET)


def test_get_node_info_after_raylet_died(ray_start_cluster_head):
    cluster = ray_start_cluster_head

    def get_node_info():
        return ray._private.services.get_node_to_connect_for_driver(
            cluster.redis_address,
            cluster.head_node.node_ip_address,
            redis_password=cluster.redis_password)

    assert get_node_info(
    ).raylet_socket_name == cluster.head_node.raylet_socket_name

    cluster.head_node.kill_raylet()
    wait_for_condition(
        lambda: not cluster.global_state.node_table()[0]["Alive"], timeout=30)
    with pytest.raises(RuntimeError):
        get_node_info()

    node2 = cluster.add_node()
    assert get_node_info().raylet_socket_name == node2.raylet_socket_name


if __name__ == "__main__":
    import pytest
    sys.exit(pytest.main(["-v", __file__]))
| [
"[email protected]"
] | |
08cdc3d43b17366cc761a55a2a4f143677cdb5ab | bd300a8b8a0cd370e514580992c9480f64d076da | /django_schoolweb/django_schoolweb/urls.py | 4ee5649975f5eaf43d0a9d52fd0387b89f82197c | [] | no_license | 33Da/schoolweb | 546a19046e42da59d082e0f1f492a14a21a17078 | 9d92bab3b1590180231efb8e74a68c375149599e | refs/heads/main | 2023-02-08T12:48:35.250628 | 2021-01-05T06:58:55 | 2021-01-05T06:58:55 | 326,911,447 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py |
from django.urls import path
from django_schoolweb.settings import MEDIA_ROOT
from django.conf.urls import url, include
from django.views.static import serve

urlpatterns = [
    path('admin/', include("adminuser.urls")),
    path('', include("school.urls")),
    url(r"^media/(?P<path>.*)$", serve, {"document_root": MEDIA_ROOT}),
]
| [
"[email protected]"
] | |
7bf83f3656a04903bcc4167925b70c4650a67487 | e7be64135aac3cbc931489485b1107c594f8d9e8 | /manage.py | 2e16268719e7dff610dfe646c3459d0388e4d6ca | [] | no_license | velinovasen/bethub_rebuild | 86c10217354a1794aab829dce823b569cc451c19 | 41c2256e373848b376d42e91b12dcd61b679ca5f | refs/heads/main | 2023-02-23T09:36:57.501632 | 2021-01-27T13:26:54 | 2021-01-27T13:26:54 | 317,016,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bethub.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
35a43d22bca52737c7a041fd115e08514fb30e46 | e953679220ff59b58eb964b97a98ef026283c8e6 | /Ch26/2603.py | 61dc80014e773ba476aeef0ea4b76edae8f61819 | [] | no_license | lhy0807/A2CS | 9e440b85b53c79eb0367f3c478f866911422b8d8 | 6d793c1cc4989b123ba8ff1676e376681531c7d2 | refs/heads/master | 2021-04-15T06:10:36.178244 | 2018-03-23T02:54:55 | 2018-03-23T02:54:55 | 125,968,982 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,856 | py | # Chapter 26.03 for normal file
# Exception Handling
# Tianhe Zhang
import pickle
import random


class CarRecord(object):
    def __init__(self):
        self.vhicleID = ""
        self.registration = ""
        self.dateRegist = None
        self.engineSize = 0
        self.purchasePrice = 0.00

    def __repr__(self):
        return \
            "vhicleID: {};\
            registration: {};\
            dateRegist: {};\
            engineSize: {};\
            purchasePrice: {}".format(self.vhicleID, self.registration,
                                      self.dateRegist, self.engineSize,
                                      self.purchasePrice)


def write():
    cars = []
    length = 5
    for i in range(length):
        newCar = CarRecord()
        newCar.vhicleID = str(hash(i))
        newCar.registration = str(random.randint(0, 1000))
        newCar.dateRegist = "12/28/1999"
        newCar.engineSize = random.randrange(5) * 10
        newCar.purchasePrice = float(random.randint(10000, 99999))
        cars.append(newCar)
    file = open("LOGS.car", "wb")
    for i in range(length):
        pickle.dump(cars[i], file)
    file.close()


def read():
    try:
        file = open("LOGS.car", "rb")
    except:
        raise NameError("File Not Found")
    cars = []
    length = 5
    i = 0
    while i < length:
        cars.append(pickle.load(file))
        i += 1
    return cars


def out():
    write()
    c = read()
    for i in range(len(c)):
        print(c[i])


out()

#############
# TASK 26.02
'''
import pickle

class CarRecord(object):
    def __init__(self):
        self.vhicleID = ""
        self.registration = ""
        self.dateRegist = None
        self.engineSize = 0
        self.purchasePrice = 0.00

    def __repr__(self):
        return \
            "vhicleID: {};\
            registration: {};\
            dateRegist: {};\
            engineSize: {};\
            purchasePrice: {}".format(self.vhicleID, self.registration,
                                      self.dateRegist, self.engineSize,
                                      self.purchasePrice)

def createFile():
    newCar1 = CarRecord()
    newCar1.vhicleID = "499500"
    newCar1.registration = "abc"
    newCar1.dateRegist = "1999/12/28"
    newCar1.engineSize = 100
    newCar1.purchasePrice = 1000.02

    newCar2 = CarRecord()
    newCar2.vhicleID = "100112"
    newCar2.registration = "flk"
    newCar2.dateRegist = "1989/06/04"
    newCar2.engineSize = 200
    newCar2.purchasePrice = 13200.02

    newCar3 = CarRecord()
    newCar3.vhicleID = "549123"
    newCar3.registration = "grs"
    newCar3.dateRegist = "2001/09/11"
    newCar3.engineSize = 400
    newCar3.purchasePrice = 4569.78

    l1 = [newCar1, newCar2, newCar3]
    car_file = open('RAND.car', "wb")###
    car_file.close()
    car_file = open('RAND.car', "ab+")
    for i in range(len(l1)):
        cur_car = l1[i]
        addr = abs(hash(cur_car.vhicleID))
        car_file.seek(addr)
        print(len(pickle.dumps(cur_car)))
        pickle.dump(cur_car, car_file)
    car_file.close()

def find(vhicleID):
    r = []
    try:
        file = open("RAND.car", "rb")
    except:
        raise KeyError("File Not Found")
    for i in range(3):
        addr = abs(hash(vhicleID))
        print(addr)
        file.seek(addr)
        cur_car = pickle.load(file)
        r.append(cur_car)
    file.close()
    for i in range(len(r)):
        print(r[i])

createFile()
find('499500')
''' | [
"[email protected]"
] | |
379cd0a110dc1777f690b55254f5e937043c2108 | 41e57fc5a59200a6734418be363b15a3c3cd47a5 | /Pense_em_Python/capitulo_3/q_3_2_1.py | 79cb8fb1a4ec64b14b3fe0510971e99c35562068 | [] | no_license | dapazjunior/ifpi-ads-algoritmos2020 | cae6f95c10beed488d09bd03f2487cbbc040cf08 | a6f03b13595d527fe9759f62f216f671e57d8552 | refs/heads/master | 2023-01-08T04:39:17.200152 | 2020-11-03T12:16:45 | 2020-11-03T12:16:45 | 245,900,901 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | def do_twice(f):
    f()
    f()


def print_spam():
    print('spam')


do_twice(print_spam)
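
# A possible extension (my addition, following the exercise's own pattern):
# reuse do_twice to run a function four times.
def do_four(f):
    do_twice(f)
    do_twice(f)


do_four(print_spam)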
| [
"[email protected]"
] | |
fd95b47ac6d809723b958ffb0b5efb3258c2147e | d8edd97f8f8dea3f9f02da6c40d331682bb43113 | /networks349.py | 768b477ea4b88f0a007d8e534888113870d95ca9 | [] | no_license | mdubouch/noise-gan | bdd5b2fff3aff70d5f464150443d51c2192eeafd | 639859ec4a2aa809d17eb6998a5a7d217559888a | refs/heads/master | 2023-07-15T09:37:57.631656 | 2021-08-27T11:02:45 | 2021-08-27T11:02:45 | 284,072,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,512 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np

__version__ = 205

# Number of wires in the CDC
n_wires = 3606
# Number of continuous features (E, t, dca)
n_features = 3


class Gen(nn.Module):
    def __init__(self, ngf, latent_dims, seq_len, encoded_dim):
        super().__init__()

        self.ngf = ngf
        self.seq_len = seq_len
        self.version = __version__

        # Input: (B, latent_dims, 1)
        self.act = nn.ReLU()

        self.lin0 = nn.Linear(latent_dims, seq_len//64*4096, bias=True)

        class GBlock(nn.Module):
            def __init__(self, in_channels, out_channels):
                super().__init__()
                self.convp = nn.ConvTranspose1d(in_channels, out_channels, 1, 1, 0)
                self.convu = nn.ConvTranspose1d(in_channels, out_channels, 4, 2, 1)
                self.conv1 = nn.ConvTranspose1d(out_channels, out_channels, 3, 1, 1)
                self.bnu = nn.BatchNorm1d(out_channels)
                self.bn1 = nn.BatchNorm1d(out_channels)
                self.act = nn.ReLU()

            def forward(self, x):
                y0 = F.interpolate(self.convp(x), scale_factor=2, mode='linear')
                y = self.act(self.bnu(self.convu(x)))
                y = self.act(y0 + self.bn1(self.conv1(y)))
                return y

        self.gb1 = GBlock(4096, 3072)
        self.gb2 = GBlock(3072, 2048)
        self.gb3 = GBlock(2048, 1024)
        self.gb4 = GBlock(1024, 768)
        self.gb5 = GBlock(768, 512)
        self.gb6 = GBlock(512, 256)

        self.convw1 = nn.ConvTranspose1d(256, n_wires, 1, 1, 0)

        self.bnp0 = nn.BatchNorm1d(n_wires)
        self.convp1 = nn.ConvTranspose1d(n_wires, 256, 3, 1, 1)
        self.bnp1 = nn.BatchNorm1d(256)
        self.convp2 = nn.ConvTranspose1d(256, 64, 3, 1, 1)
        self.bnp2 = nn.BatchNorm1d(64)
        self.convp3 = nn.ConvTranspose1d(64, n_features, 1, 1, 0)

        self.out = nn.Tanh()

    def forward(self, z):
        # z: random point in latent space
        x = self.act(self.lin0(z).view(-1, 4096, self.seq_len // 64))

        x = self.gb1(x)
        x = self.gb2(x)
        x = self.gb3(x)
        x = self.gb4(x)
        x = self.gb5(x)
        x = self.gb6(x)

        w = self.convw1(x)

        p = self.act(self.bnp1(self.convp1(self.act(self.bnp0(w)))))
        p = self.act(self.bnp2(self.convp2(p)))
        p = self.convp3(p)

        return torch.cat([self.out(p), w], dim=1)


class Disc(nn.Module):
    def __init__(self, ndf, seq_len, encoded_dim):
        super().__init__()

        self.version = __version__

        # (B, n_features, 256)
        self.act = nn.LeakyReLU(0.2)

        class DBlock(nn.Module):
            def __init__(self, in_channels, out_channels):
                super().__init__()
                self.convd = nn.Conv1d(in_channels, out_channels, 4, 2, 1)
                self.act = nn.LeakyReLU(0.2)

            def forward(self, x):
                y = self.act(self.convd(x))
                return y

        self.conv0 = nn.Conv1d(n_features+2, 64, 1, 1, 0)
        self.conv1 = nn.Conv1d(64, 128, 9, 1, 4)

        self.dbw1 = DBlock(128, 128)
        self.dbw2 = DBlock(128, 256)
        self.dbw3 = DBlock(256, 512)

        self.lin0 = nn.Linear(512 * seq_len // 8, 512, bias=True)
        self.lin1 = nn.Linear(512, 1, bias=True)

        self.out = nn.Identity()

    def forward(self, x_):
        # x_ is concatenated tensor of p_ and w_, shape (batch, features+n_wires, seq_len)
        # p_ shape is (batch, features, seq_len),
        # w_ is AE-encoded wire (batch, encoded_dim, seq_len)
        seq_len = x_.shape[2]
        x = x_

        #dist = ((xy - nn.ConstantPad1d((1, 0), 0.0)(xy[:,:,:-1]))**2).sum(dim=1).unsqueeze(1)

        p = x[:, :n_features]
        w = x[:, n_features:]

        #x = torch.cat([p, w], dim=1)
        x = self.act(self.conv0(x))
        x = self.act(self.conv1(x))

        x = self.dbw1(x)
        x = self.dbw2(x)
        x = self.dbw3(x)

        x = self.lin0(x.flatten(1, 2))
        x = self.lin1(self.act(x))

        return self.out(x).squeeze(1)


class VAE(nn.Module):
    def __init__(self, encoded_dim):
        super().__init__()

        class Enc(nn.Module):
            def __init__(self, hidden_size):
                super().__init__()
                self.act = nn.LeakyReLU(0.2)
                self.lin1 = nn.Linear(n_wires, hidden_size)
                self.lin2 = nn.Linear(hidden_size, encoded_dim)
                self.out = nn.Tanh()

            def forward(self, x):
                x = self.act(self.lin1(x))
                return self.out(self.lin2(x))

        class Dec(nn.Module):
            def __init__(self, hidden_size):
                super().__init__()
                self.act = nn.ReLU()
                self.lin1 = nn.Linear(encoded_dim, hidden_size)
                self.lin2 = nn.Linear(hidden_size, n_wires)

            def forward(self, x):
                x = self.act(self.lin1(x))
                return self.lin2(x)

        self.enc_net = Enc(512)
        self.dec_net = Dec(512)

    def enc(self, x):
        return self.enc_net(x.permute(0, 2, 1)).permute(0, 2, 1)

    def dec(self, x):
        return self.dec_net(x.permute(0, 2, 1)).permute(0, 2, 1)

    def forward(self, x):
        y = self.dec_net(self.enc_net(x))
        return y


def get_n_params(model):
    return sum(p.reshape(-1).shape[0] for p in model.parameters())
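

# Hypothetical smoke test (my addition, not part of the original training
# code). The batch size, latent_dims, seq_len and encoded_dim are assumptions
# chosen only so the shapes line up: Gen upsamples by 64x, Disc downsamples
# by 8x, and Disc expects n_features + 2 input channels (the continuous
# features plus a 2-dimensional AE-encoded wire signal).
if __name__ == '__main__':
    gen = Gen(ngf=16, latent_dims=64, seq_len=128, encoded_dim=2)
    disc = Disc(ndf=16, seq_len=128, encoded_dim=2)
    z = torch.randn(2, 64)
    fake = gen(z)  # (2, n_features + n_wires, 128)
    assert fake.shape == (2, n_features + n_wires, 128)
    x = torch.randn(2, n_features + 2, 128)  # features + encoded wires
    score = disc(x)  # (2,)
    assert score.shape == (2,)
    print('Gen params:', get_n_params(gen), 'Disc params:', get_n_params(disc))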
| [
"[email protected]"
] | |
2160c0622d58e13c8a5422b204dfb0198977bb42 | 61dfa0ac80a6979d135e969b5b7b78a370c16904 | /analysis/power_spectrum/power_spectrum.py | e8c551fdf69f535d6d6071493a7b91a78f082cb2 | [] | no_license | bvillasen/cosmo_tools | 574d84f9c18d92d2a9610d1d156113730d80f5a4 | 6bb54534f2242a15a6edcf696f29a3cf22edd342 | refs/heads/master | 2021-07-13T06:43:32.902153 | 2020-10-05T21:17:30 | 2020-10-05T21:17:30 | 207,036,538 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,991 | py | import numpy as np
import matplotlib.pyplot as plt
import h5py as h5
from scipy.interpolate import RegularGridInterpolator  # needed by get_power_spectrum_interp


def get_skewer_flux_fft_amplitude( vel_Hubble, delta_F ):
    n = len( vel_Hubble )
    dv = ( vel_Hubble[-1] - vel_Hubble[0] ) / n
    k_vals = 2 * np.pi * np.fft.fftfreq( n, d=dv )
    ft = 1./n * np.fft.fft( delta_F )
    ft_amp2 = ft.real * ft.real + ft.imag * ft.imag
    return k_vals, ft_amp2


def get_skewer_flux_power_spectrum( vel_Hubble, delta_F, d_log_k=None, n_bins=None, k_edges=None ):
    n = len(vel_Hubble)
    dv = vel_Hubble[1] - vel_Hubble[0]
    vel_max = n * dv
    k_vals, ft_amp2 = get_skewer_flux_fft_amplitude( vel_Hubble, delta_F )
    indices = k_vals > 0
    k_vals = k_vals[indices]
    ft_amp2 = ft_amp2[indices]
    # if d_log_k == None and n_bins == None:
    #     print("ERROR: Specify d_log_k or n_bins or k_edges for Power Spectrum binning.")
    #     return
    # if d_log_k != None and n_bins != None:
    #     print("ERROR: Both d_log_k and n_bins were specified, make up your mind!")
    #     return
    k_min = k_vals.min()
    k_max = k_vals.max()
    if d_log_k != None:
        intervals_log = np.arange( np.log10(k_min), np.log10(k_max), d_log_k )
        intervals = 10**(intervals_log)
    elif n_bins != None: intervals = np.logspace( np.log10(k_min), np.log10(k_max), n_bins )
    else: intervals = k_edges
    power, bin_edges = np.histogram( k_vals, bins=intervals, weights=ft_amp2 )
    n_in_bin, bin_edges = np.histogram( k_vals, bins=intervals )
    n_in_bin = n_in_bin.astype('float')
    bin_centers = np.sqrt(bin_edges[1:] * bin_edges[:-1])
    indices = n_in_bin > 0
    bin_centers = bin_centers[indices]
    power = power[indices]
    n_in_bin = n_in_bin[indices]
    power_avrg = power / n_in_bin * vel_max
    return bin_centers, power_avrg


def get_delta_k_1D( signal, nx, dx ):
    delta_signal = ( signal - signal.mean() ) / signal.mean()
    FT = np.fft.fftn( delta_signal )
    FT2 = FT.real*FT.real + FT.imag*FT.imag
    FT2 = np.fft.fftshift(FT2)
    fft_kx = 2*np.pi*np.fft.fftfreq( nx, d=dx )
    # fft_kx = 2*np.pi*np.fft.fftfreq( nx )
    # fft_ky = 2*np.pi*np.fft.fftfreq( ny )
    # fft_kz = 2*np.pi*np.fft.fftfreq( nz )
    kx = np.fft.fftshift( fft_kx )
    # Kz, Ky, Kx = np.mgrid[ kz.min():kz.max():nz*1j, ky.min():ky.max():ny*1j, kx.min():kx.max():nx*1j ]
    # K2 = Kx*Kx + Ky*Ky + Kz*Kz
    delta_k = np.sqrt(FT2)
    delta_k2 = FT2
    return delta_k2, kx


def get_power_spectrum_1D(signal, Lbox, nx, dx, n_kSamples=20, binning='log' ):
    delta_k2, kx = get_delta_k_1D( signal, nx, dx, )
    # keep only the positive wavenumbers, as in the 3D routines, so that the
    # logarithmic binning below is well defined
    indices = kx > 0
    kx = kx[indices]
    delta_k2 = delta_k2[indices]
    k_min = kx.min()
    k_max = kx.max()
    # print K_mag.max()
    nBins = n_kSamples
    intervals = np.logspace( np.log10(k_min), np.log10(k_max), nBins+1 )
    if binning == 'linear': intervals = np.linspace(k_min, k_max, nBins+1)
    power, bin_edges = np.histogram( kx, bins=intervals, weights=delta_k2 )
    n_in_bin, bin_edges = np.histogram( kx, bins=intervals )
    n_in_bin = n_in_bin.astype('float')
    bin_centers = np.sqrt(bin_edges[1:] * bin_edges[:-1])
    power = power / n_in_bin / Lbox**3
    error = power * np.sqrt(n_in_bin)
    return power, bin_centers, n_in_bin


def get_delta_k( dens, nx, ny, nz, dx, dy, dz ):
    delta_dens = ( dens - dens.mean() ) / dens.mean()
    FT = np.fft.fftn( delta_dens, )
    FT2 = FT.real*FT.real + FT.imag*FT.imag
    FT2 = np.fft.fftshift(FT2)
    fft_kx = 2*np.pi*np.fft.fftfreq( nx, d=dx )
    fft_ky = 2*np.pi*np.fft.fftfreq( ny, d=dy )
    fft_kz = 2*np.pi*np.fft.fftfreq( nz, d=dz )
    # fft_kx = 2*np.pi*np.fft.fftfreq( nx )
    # fft_ky = 2*np.pi*np.fft.fftfreq( ny )
    # fft_kz = 2*np.pi*np.fft.fftfreq( nz )
    kx = np.fft.fftshift( fft_kx )
    ky = np.fft.fftshift( fft_ky )
    kz = np.fft.fftshift( fft_kz )
    # Kz, Ky, Kx = np.mgrid[ kz.min():kz.max():nz*1j, ky.min():ky.max():ny*1j, kx.min():kx.max():nx*1j ]
    # K2 = Kx*Kx + Ky*Ky + Kz*Kz
    delta_k = np.sqrt(FT2)
    delta_k2 = FT2
    return delta_k2, kx, ky, kz


def get_delta_k_memory_save( dens, nx, ny, nz, dx, dy, dz ):
    dens_mean = dens.mean()
    dens = ( dens - dens_mean ) / dens_mean
    print(' Computing Fourier Transform')
    FT = np.fft.fftn( dens )
    print(' Computing FT Magnitude')
    FT = FT.real*FT.real + FT.imag*FT.imag
    print(' Shifting Fourier Transform')
    FT = np.fft.fftshift(FT)
    fft_kx = 2*np.pi*np.fft.fftfreq( nx, d=dx )
    fft_ky = 2*np.pi*np.fft.fftfreq( ny, d=dy )
    fft_kz = 2*np.pi*np.fft.fftfreq( nz, d=dz )
    kx = np.fft.fftshift( fft_kx )
    ky = np.fft.fftshift( fft_ky )
    kz = np.fft.fftshift( fft_kz )
    return FT, kx, ky, kz


def get_delta_k_fftw( dens, nx, ny, nz, dx, dy, dz, n_threads ):
    import pyfftw
    dens_mean = dens.mean()
    delta_dens = ( dens - dens_mean ) / dens_mean
    dens = None
    print(' Computing Fourier Transform')
    FT = pyfftw.interfaces.numpy_fft.fftn(delta_dens, overwrite_input=True, threads=n_threads)
    print(' Computing FT Magnitude')
    FT = FT.real*FT.real + FT.imag*FT.imag
    print(' Shifting Fourier Transform')
    FT = np.fft.fftshift(FT)
    print(' Computing k')
    fft_kx = 2*np.pi*np.fft.fftfreq( nx, d=dx )
    fft_ky = 2*np.pi*np.fft.fftfreq( ny, d=dy )
    fft_kz = 2*np.pi*np.fft.fftfreq( nz, d=dz )
    print(' Shifting k')
    kx = np.fft.fftshift( fft_kx )
    ky = np.fft.fftshift( fft_ky )
    kz = np.fft.fftshift( fft_kz )
    return FT, kx, ky, kz


def get_power_spectrum_fftw(dens, Lbox, nx, ny, nz, dx, dy, dz, n_kSamples=20, n_threads=1 ):
    # delta_k2, kx, ky, kz = get_delta_k( dens, nx, ny, nz, dx, dy, dz, n_threads=n_threads )
    delta_k2, kx, ky, kz = get_delta_k_fftw( dens, nx, ny, nz, dx, dy, dz, n_threads )
    print(' Computing k grid')
    Kz, Ky, Kx = np.meshgrid( kz, ky, kx )
    kx, ky, kz = None, None, None
    print(' Computing k mag')
    K_mag = np.sqrt( Kz*Kz + Ky*Ky + Kx*Kx )
    Kx, Ky, Kz = None, None, None
    K_mag = K_mag.reshape(K_mag.size)
    delta_k2 = delta_k2.reshape(delta_k2.size)
    k_min = (K_mag[np.where(K_mag>0)]).min() * 0.99
    k_max = K_mag.max()*0.99
    # print K_mag.max()
    nBins = n_kSamples
    print(' Computing Power Spectrum')
    intervals = np.logspace(np.log10(k_min), np.log10(k_max), nBins+1)
    power, bin_edges = np.histogram( K_mag, bins=intervals, weights=delta_k2 )
    n_in_bin, bin_edges = np.histogram( K_mag, bins=intervals )
    n_in_bin = n_in_bin.astype('float')
    bin_centers = np.sqrt(bin_edges[1:] * bin_edges[:-1])
    power = power / n_in_bin / Lbox**3
    # error = power * np.sqrt(n_in_bin)
    return power, bin_centers, n_in_bin


def get_power_spectrum(dens, Lbox, nx, ny, nz, dx, dy, dz, n_kSamples=20, n_threads=1 ):
    # delta_k2, kx, ky, kz = get_delta_k( dens, nx, ny, nz, dx, dy, dz, n_threads=n_threads )
    delta_k2, kx, ky, kz = get_delta_k_memory_save( dens, nx, ny, nz, dx, dy, dz, )
    Kz, Ky, Kx = np.meshgrid( kz, ky, kx )
    K_mag = np.sqrt( Kz*Kz + Ky*Ky + Kx*Kx )
    K_mag = K_mag.reshape(K_mag.size)
    delta_k2 = delta_k2.reshape(delta_k2.size)
    k_min = (K_mag[np.where(K_mag>0)]).min() * 0.99
    k_max = K_mag.max()*0.99
    # print K_mag.max()
    nBins = n_kSamples
    intervals = np.logspace(np.log10(k_min), np.log10(k_max), nBins+1)
    print(' Computing Histogram 1')
    power, bin_edges = np.histogram( K_mag, bins=intervals, weights=delta_k2 )
    print(' Computing Histogram 2')
    n_in_bin, bin_edges = np.histogram( K_mag, bins=intervals )
    n_in_bin = n_in_bin.astype('float')
    bin_centers = np.sqrt(bin_edges[1:] * bin_edges[:-1])
    power = power / n_in_bin / Lbox**3
    error = power * np.sqrt(n_in_bin)
    return power, bin_centers, n_in_bin


# L = 50.
# nx = 2048
# dx = L / nx
# z = 3
# dx /= ( z + 1 )
# kx = 2*np.pi*np.fft.fftfreq( nx, d=dx )
# ky = kx
# kz = kx
# K_mag = np.sqrt( kx*kx + ky*ky + kz*kz )
# print( K_mag.min(), K_mag.max() )


def get_power_spectrum_interp( dens, Lbox, nx, ny, nz, dx, dy, dz, k_start, k_end, n_kSamples=50, nSamples=500 ):
    # Lbox added to the signature: the normalization at the end needs it
    delta_k2, kx, ky, kz = get_delta_k( dens, nx, ny, nz, dx, dy, dz )
    get_interp_val = RegularGridInterpolator( (kx, ky, kz), delta_k2, method='linear' )
    d_tetha = np.pi/nSamples
    d_phi = 2*np.pi/nSamples
    # theta_linear = np.linspace(0, 1, nSamples)
    # theta_vals = np.arccos( 2*theta_linear - 1) - np.pi/2
    # theta_vals.sort()
    # grid = np.mgrid[-np.pi/2:np.pi/2:nSamples*1j, 0:2*np.pi:nSamples*1j ]
    theta_vals = np.linspace( -np.pi/2, np.pi/2, nSamples )
    phi_vals = np.linspace( 0, 2*np.pi, nSamples )
    grid = np.meshgrid( theta_vals, phi_vals )
    THETA, PHI = grid
    THETA = THETA.reshape(nSamples**2)
    PHI = PHI.reshape(nSamples**2)
    k_vals = np.logspace(np.log10(k_start), np.log10(k_end), n_kSamples)
    result = []
    for k_mag in k_vals:
        k_x = k_mag * np.cos(THETA) * np.cos(PHI)
        k_y = k_mag * np.cos(THETA) * np.sin(PHI)
        k_z = k_mag * np.sin(THETA)
        k_vecs = np.array([k_x, k_y, k_z]).T
        delta_k = get_interp_val( k_vecs )
        int_val = (delta_k * k_mag**2 * np.cos(THETA)*d_tetha*d_phi).sum() / (4*np.pi*k_mag**2 )
        # int_val = (delta_k * np.cos(THETA)*d_tetha*d_phi).sum() / (4*np.pi )
        # int_val = ( delta_k * d_tetha * d_phi).sum() / (4*np.pi*k_mag**2 )
        result.append(int_val)
    # result = np.array( result ) / Lbox**3 * h
    result = np.array( result ) / Lbox**3
    return result, k_vals
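

# Minimal usage sketch (my addition): measure the transmitted-flux power
# spectrum of one synthetic skewer. The velocity grid and flux values are
# made-up inputs, chosen only to exercise get_skewer_flux_power_spectrum.
if __name__ == '__main__':
    n_points = 1024
    vel_Hubble = np.linspace(0.0, 5000.0, n_points)  # km/s, assumed grid
    flux = np.exp(-np.random.rand(n_points))
    delta_F = flux / flux.mean() - 1
    bin_centers, power = get_skewer_flux_power_spectrum(vel_Hubble, delta_F, n_bins=20)
    print(bin_centers)
    print(power)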
| [
"[email protected]"
] | |
242cec66659f5a0202d4e14a03b74c217602d3c2 | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/Autodesk/Revit/DB/__init___parts/Connector.py | 0093d1d602dbfb7d29dbd8d06deb4bb55b4935db | [
"MIT"
] | permissive | shnlmn/Rhino-Grasshopper-Scripts | a9411098c5d1bbc55feb782def565d535b27b709 | 0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823 | refs/heads/master | 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,169 | py | class Connector(object,IConnector,IDisposable):
""" A connector in an Autodesk Revit MEP project document. """
def ConnectTo(self,connector):
"""
ConnectTo(self: Connector,connector: Connector)
Make connection between two connectors.
connector: Indicate the connector will be connected to.
"""
pass
def DisconnectFrom(self,connector):
"""
DisconnectFrom(self: Connector,connector: Connector)
Remove connection between two connectors.
connector: Indicate the connector,connection will be removed from.
"""
pass
def Dispose(self):
""" Dispose(self: Connector) """
pass
def GetFabricationConnectorInfo(self):
"""
GetFabricationConnectorInfo(self: Connector) -> FabricationConnectorInfo
Gets fabrication connectivity information.
Returns: Returns ll if there is no fabrication connector information associated.
"""
pass
def GetMEPConnectorInfo(self):
"""
GetMEPConnectorInfo(self: Connector) -> MEPConnectorInfo
Gets MEP connector information.
Returns: Returns ll if there is no MEP connector information associated.
"""
pass
def IsConnectedTo(self,connector):
"""
IsConnectedTo(self: Connector,connector: Connector) -> bool
Identifies if the connector is connected to the specified connector.
"""
pass
def ReleaseUnmanagedResources(self,*args):
""" ReleaseUnmanagedResources(self: Connector,disposing: bool) """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
AllowsSlopeAdjustments=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Indicates if the connector allows the slope adjustment.
Get: AllowsSlopeAdjustments(self: Connector) -> bool
"""
AllRefs=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""All references of the connector.
Get: AllRefs(self: Connector) -> ConnectorSet
"""
Angle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The angle of the Connector.
Get: Angle(self: Connector) -> float
Set: Angle(self: Connector)=value
"""
AssignedDuctFlowConfiguration=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned duct flow configuration of the connector.
Get: AssignedDuctFlowConfiguration(self: Connector) -> DuctFlowConfigurationType
"""
AssignedDuctLossMethod=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The duct loss method of the connector.
Get: AssignedDuctLossMethod(self: Connector) -> DuctLossMethodType
"""
AssignedFixtureUnits=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned fixture units of the connector.
Get: AssignedFixtureUnits(self: Connector) -> float
Set: AssignedFixtureUnits(self: Connector)=value
"""
AssignedFlow=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned flow of the connector.
Get: AssignedFlow(self: Connector) -> float
Set: AssignedFlow(self: Connector)=value
"""
AssignedFlowDirection=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned flow direction of the connector.
Get: AssignedFlowDirection(self: Connector) -> FlowDirectionType
"""
AssignedFlowFactor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned flow factor of this connector.
Get: AssignedFlowFactor(self: Connector) -> float
Set: AssignedFlowFactor(self: Connector)=value
"""
AssignedKCoefficient=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned kCoefficient of the connector.
Get: AssignedKCoefficient(self: Connector) -> float
Set: AssignedKCoefficient(self: Connector)=value
"""
AssignedLossCoefficient=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned loss coefficient of the connector.
Get: AssignedLossCoefficient(self: Connector) -> float
Set: AssignedLossCoefficient(self: Connector)=value
"""
AssignedPipeFlowConfiguration=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The pipe flow configuration type of the connector.
Get: AssignedPipeFlowConfiguration(self: Connector) -> PipeFlowConfigurationType
"""
AssignedPipeLossMethod=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The pipe loss method of the connector.
Get: AssignedPipeLossMethod(self: Connector) -> PipeLossMethodType
"""
AssignedPressureDrop=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The assigned pressure drop of the connector.
Get: AssignedPressureDrop(self: Connector) -> float
Set: AssignedPressureDrop(self: Connector)=value
"""
Coefficient=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The coefficient of the connector.
Get: Coefficient(self: Connector) -> float
"""
ConnectorManager=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The connector manager of the connector.
Get: ConnectorManager(self: Connector) -> ConnectorManager
"""
ConnectorType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The connector type of the connector.
Get: ConnectorType(self: Connector) -> ConnectorType
"""
CoordinateSystem=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The coordinate system of the connector.
Get: CoordinateSystem(self: Connector) -> Transform
"""
Demand=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The demand of the connector.
Get: Demand(self: Connector) -> float
"""
Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The description.
Get: Description(self: Connector) -> str
"""
Direction=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The direction of the connector.
Get: Direction(self: Connector) -> FlowDirectionType
"""
Domain=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The domain of the connector.
Get: Domain(self: Connector) -> Domain
"""
DuctSystemType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The duct system type of the connector.
Get: DuctSystemType(self: Connector) -> DuctSystemType
"""
ElectricalSystemType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The electrical system type of the connector.
Get: ElectricalSystemType(self: Connector) -> ElectricalSystemType
"""
EngagementLength=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Connector engagement length
Get: EngagementLength(self: Connector) -> float
"""
Flow=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The flow of the connector.
Get: Flow(self: Connector) -> float
"""
Height=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The height of the connector.
Get: Height(self: Connector) -> float
Set: Height(self: Connector)=value
"""
Id=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""A unique identifier to identify this connector.
Get: Id(self: Connector) -> int
"""
IsConnected=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Identifies if the connector is physically connected to a connector on another element.
Get: IsConnected(self: Connector) -> bool
"""
IsMovable=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""whether the connector can be moved.
Get: IsMovable(self: Connector) -> bool
"""
IsValidObject=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Specifies whether the .NET object represents a valid Revit entity.
Get: IsValidObject(self: Connector) -> bool
"""
MEPSystem=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The system of the connector belong to.
Get: MEPSystem(self: Connector) -> MEPSystem
"""
Origin=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The location of the connector.
Get: Origin(self: Connector) -> XYZ
Set: Origin(self: Connector)=value
"""
Owner=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The host of the connector.
Get: Owner(self: Connector) -> Element
"""
PipeSystemType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The pipe system type of the connector.
Get: PipeSystemType(self: Connector) -> PipeSystemType
"""
PressureDrop=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The pressure drop of the connector.
Get: PressureDrop(self: Connector) -> float
"""
Radius=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The radius of the connector.
Get: Radius(self: Connector) -> float
Set: Radius(self: Connector)=value
"""
Shape=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The shape of the connector.
Get: Shape(self: Connector) -> ConnectorProfileType
"""
Utility=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Indicates if the connector is a utility connector.
Get: Utility(self: Connector) -> bool
"""
VelocityPressure=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The velocity pressure of the connector.
Get: VelocityPressure(self: Connector) -> float
"""
Width=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The width of the connector.
Get: Width(self: Connector) -> float
Set: Width(self: Connector)=value
"""
| [
"[email protected]"
] | |
a72324c97d49222640aa847c71384a651ca6219c | d04d73bed28c366712103663d3e3be13622611b9 | /pactools/dar_model/stable_dar.py | 30eecbf63154b2b87defb16172559556714ed1c8 | [] | no_license | EtienneCmb/pactools | fcb13cde6f57a5c6abf4b033c24aec72c1201ca7 | 6e5a53deefc4dcede6a4a0293958e39a660dba97 | refs/heads/master | 2021-01-21T23:20:18.491959 | 2017-06-08T12:49:55 | 2017-06-08T12:49:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,217 | py | import numpy as np
from .baseLattice import BaseLattice


class StableDAR(BaseLattice):
    """
    A stable driven auto-regressive (DAR) model, as described in [1].
    This model is designed to have a stable instantaneous AR model at each
    time.

    This model uses the parametrization:

    .. math:: y(t) + \\sum_{i=1}^p a_i(t) y(t-i) = \\varepsilon(t)

    with:

    .. math:: a_p^{(p)} = k_p; \;\;\;\;
        a_i^{(p)} = a_i^{(p-1)} + k_p a_{p-i}^{(p-1)}

    .. math:: \\gamma_i = \\log\\left(\\frac{1+k_i}{1-k_i}\\right); \;\;\;\;
        \\gamma_{i}(t) = \\sum_{j=0}^{m} \\gamma_{ij} x(t)^j

    References
    ----------
    [1] Dupre la Tour, T., Grenier, Y., & Gramfort, A. (2017). Parametric
    estimation of spectrum driven by an exogenous signal. Acoustics, Speech
    and Signal Processing (ICASSP), 2017 IEEE International Conference on,
    4301--4305.
    """
    # ------------------------------------------------ #
    # Functions that overload abstract methods         #
    # ------------------------------------------------ #
    def decode(self, lar):
        """Extracts parcor coefficients from encoded version (e.g. LAR)

        lar : array containing the encoded coefficients

        returns:
        ki : array containing the decoded coefficients (same size as lar)
        """
        exp_lar = np.exp(lar)
        ki = (exp_lar - 1.0) / (exp_lar + 1.0)
        return ki

    def encode(self, ki):
        """Encodes parcor coefficients to LAR coefficients

        ki : array containing the original parcor coefficients

        returns:
        lar : array containing the encoded coefficients (same size as ki)
        """
        lar = np.log((1.0 + ki) / (1.0 - ki))
        return lar

    def common_gradient(self, p, ki):
        """Compute common factor in gradient. The gradient is computed as
            G[p] = sum from t=1 to T {g[p,t] * F(t)}
        where F(t) is the vector of the driving signal and its powers
            g[p,t] = (e_forward[p, t] * e_backward[p-1, t-1]
                      + e_backward[p, t] * e_forward[p-1, t]) * phi'[k[p,t]]
        phi is the encoding function, and phi' is its derivative.

        p  : order corresponding to the current lattice cell
        ki : array containing the original parcor coefficients

        returns:
        g : array containing the factors (size (n_epochs, n_points - 1))
        """
        e_forward = self.forward_residual
        e_backward = self.backward_residual
        _, n_epochs, n_points = e_forward.shape
        g = e_forward[p, :, 1:n_points] * e_backward[p - 1, :, 0:n_points - 1]
        g += e_backward[p, :, 1:n_points] * e_forward[p - 1, :, 1:n_points]
        g *= 0.5 * (1.0 - ki[:, 1:n_points] ** 2)  # phi'[k[p,t]]
        return np.reshape(g, (n_epochs, n_points - 1))

    def common_hessian(self, p, ki):
        """Compute common factor in Hessian. The Hessian is computed as
            H[p] = sum from t=1 to T {F(t) * h[p,t] * F(t).T}
        where F(t) is the vector of the driving signal and its powers
            h[p,t] = (e_forward[p, t-1]**2 + e_backward[p-1, t-1]**2)
                     * phi'[k[p,t]]**2
                     + (e_forward[p, t] * e_backward[p-1, t-1]
                        + e_backward[p, t] * e_forward[p-1, t]) * phi''[k[p,t]]
        phi is the encoding function, phi' is its first derivative,
        and phi'' is its second derivative.

        p  : order corresponding to the current lattice cell
        ki : array containing the original parcor coefficients

        returns:
        h : array containing the factors (size (n_epochs, n_points - 1))
        """
        e_forward = self.forward_residual
        e_backward = self.backward_residual
        _, n_epochs, n_points = e_forward.shape
        h1 = e_forward[p - 1, :, 1:n_points] ** 2
        h1 += e_backward[p - 1, :, 0:n_points - 1] ** 2
        h1 *= (0.5 * (1.0 - ki[:, 1:n_points] ** 2)) ** 2
        h2 = e_forward[p, :, 1:n_points] * e_backward[p - 1, :, 0:n_points - 1]
        h2 += e_backward[p, :, 1:n_points] * e_forward[p - 1, :, 1:n_points]
        h2 *= (-0.5 * ki[:, 1:n_points] * (1.0 - ki[:, 1:n_points] ** 2))
        return np.reshape(h1 + h2, (n_epochs, n_points - 1))
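

# Quick standalone check (my addition) of the LAR <-> parcor mapping used by
# encode/decode above: the two transforms are inverses on (-1, 1).
if __name__ == '__main__':
    k = np.linspace(-0.99, 0.99, 7)
    lar = np.log((1.0 + k) / (1.0 - k))
    k_back = (np.exp(lar) - 1.0) / (np.exp(lar) + 1.0)
    assert np.allclose(k, k_back)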
| [
"[email protected]"
] | |
200563d19042e582fedc874dc3439dc35a2edde5 | 925f2935b34042abc9161795413031ae68f45b9a | /multimodel_inference/fold_SC3imlsm.py | a33c67d65aca02ed924dbf5fb636eb928ca937a4 | [] | no_license | Farhad63/AFS-analysis-with-moments | 7e1d17f47c06ed97ebb7c9ec8245fe52a88622c3 | 7874b1085073e5f62d910ef2d79a22b29ff3be84 | refs/heads/master | 2022-04-09T22:11:12.341235 | 2020-03-11T21:15:42 | 2020-03-11T21:15:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,801 | py | #!/usr/bin/env python
# split, three epochs in each pop, symmetric migration in middle and late epochs
# genomic islands
# n(para): 12
import matplotlib
matplotlib.use('PDF')
import moments
import pylab
import random
import matplotlib.pyplot as plt
import numpy as np
from numpy import array
from moments import Misc, Spectrum, Numerics, Manips, Integration, Demographics1D, Demographics2D
import sys

infile = sys.argv[1]
pop_ids = [sys.argv[2], sys.argv[3]]
projections = [int(sys.argv[4]), int(sys.argv[5])]
#params=[float(sys.argv[6]),float(sys.argv[7]),float(sys.argv[8]),float(sys.argv[9]),float(sys.argv[10]),float(sys.argv[11])]
params = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0.5]
# mutation rate per sequenced portion of genome per generation: for A.millepora, 0.02
mu = float(sys.argv[6])
# generation time, in thousand years: 0.005 (5 years)
gtime = float(sys.argv[7])

dd = Misc.make_data_dict(infile)
# set Polarized=False below for folded AFS analysis
data = Spectrum.from_data_dict(dd, pop_ids, projections, polarized=False)
ns = data.sample_sizes
np.set_printoptions(precision=3)

#-------------------
# split into unequal pop sizes with asymmetrical migration

def sc3imsm(params, ns):
    # p_misid: proportion of misidentified ancestral states
    nu1_1, nu2_1, nu1_2, nu2_2, nu1_3, nu2_3, T1, T2, T3, m, mi, P = params
    sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1])
    fs = moments.Spectrum(sts)
    fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1])
    fs.integrate([nu1_1, nu2_1], T1, m=np.array([[0, 0], [0, 0]]))
    fs.integrate([nu1_2, nu2_2], T2, m=np.array([[0, m], [m, 0]]))
    fs.integrate([nu1_3, nu2_3], T3, m=np.array([[0, m], [m, 0]]))
    stsi = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1])
    fsi = moments.Spectrum(stsi)
    fsi = moments.Manips.split_1D_to_2D(fsi, ns[0], ns[1])
    fsi.integrate([nu1_1, nu2_1], T1, m=np.array([[0, 0], [0, 0]]))
    fsi.integrate([nu1_2, nu2_2], T2, m=np.array([[0, mi], [mi, 0]]))
    fsi.integrate([nu1_3, nu2_3], T3, m=np.array([[0, mi], [mi, 0]]))
    fs2 = P*fsi + (1-P)*fs
    return fs2

func = sc3imsm
upper_bound = [100, 100, 100, 100, 100, 100, 100, 100, 100, 200, 200, 0.999]
lower_bound = [1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-5, 1e-5, 0.001]
params = moments.Misc.perturb_params(params, fold=2, upper_bound=upper_bound,
                                     lower_bound=lower_bound)

poptg = moments.Inference.optimize_log(params, data, func,
                                       lower_bound=lower_bound,
                                       upper_bound=upper_bound,
                                       verbose=False, maxiter=30)
# extracting model predictions, likelihood and theta
model = func(poptg, ns)
ll_model = moments.Inference.ll_multinom(model, data)
theta = moments.Inference.optimal_sfs_scaling(model, data)

# random index for this replicate
ind = str(random.randint(0, 999999))

# plotting demographic model
plot_mod = moments.ModelPlot.generate_model(func, poptg, ns)
moments.ModelPlot.plot_model(plot_mod, save_file="sc3imlsm_"+ind+".png", pop_labels=pop_ids, nref=theta/(4*mu), draw_scale=False, gen_time=gtime, gen_time_units="KY", reverse_timeline=True)

# bootstrapping for SDs of params and theta
all_boot = moments.Misc.bootstrap(dd, pop_ids, projections)
uncert = moments.Godambe.GIM_uncert(func, all_boot, poptg, data)

# printing parameters and their SDs
print "RESULT", "sc3imlsm", ind, len(params), ll_model, sys.argv[1], sys.argv[2], sys.argv[3], poptg, theta, uncert

# plotting quad-panel figure with AFS, model, residuals:
moments.Plotting.plot_2d_comp_multinom(model, data, vmin=1, resid_range=3,
                                       pop_ids=pop_ids)
plt.savefig("sc3imlsm_"+ind+"_"+sys.argv[1]+"_"+sys.argv[2]+"_"+sys.argv[3]+"_"+sys.argv[4]+"_"+sys.argv[5]+'.pdf')
| [
"[email protected]"
] | |
9d8fad03fbcd9b26dbe40081fb48b3d40e173dda | 6f3a7844321241ab2c46215a1ed3d1a246727c18 | /MPs/MP1/common_friends.py | 8e3b328d057d2b475f67ec5bb733adb500384c0c | [] | no_license | gouthamp900/cs199-fa17 | 899fa345bf3c1c3d5eb745820a2c884216d7f657 | 4eb635c6a064dd2c61253654ca729769d995f563 | refs/heads/master | 2021-06-24T21:44:10.021054 | 2017-09-11T00:58:30 | 2017-09-11T00:58:30 | 103,450,202 | 4 | 0 | null | 2017-09-13T21:03:27 | 2017-09-13T21:03:27 | null | UTF-8 | Python | false | false | 676 | py | from map_reducer import MapReduce
def friend_mapper(line):
    ''' write your code here! '''
    # One possible solution sketch. The MapReduce contract assumed here
    # (the mapper returns (key, value) pairs; each input line is
    # "person friend1 friend2 ...") is an assumption, not part of the
    # assignment statement.
    parts = line.split()
    person, friends = parts[0], parts[1:]
    pairs = []
    for friend in friends:
        # key both people the same way so the pair's two emissions meet
        key = tuple(sorted((person, friend)))
        pairs.append((key, set(friends)))
    return pairs


def friend_reducer(friend_tuples):
    ''' write your code here! '''
    # Sketch under the same assumed contract: receive (key, [friend sets]),
    # intersect them, and drop the pair members themselves.
    key, friend_sets = friend_tuples
    common = set.intersection(*friend_sets) - set(key)
    return key, sorted(common)


def _run_common_friend_finder(filename):
    with open(filename) as f:
        lines = f.readlines()
    mr = MapReduce(friend_mapper, friend_reducer)
    common_friends = mr(lines)
    for relationship, friends in common_friends:
        print('{}\t{}'.format(relationship, friends))


if __name__ == '__main__':
    print('friend_graph_example.txt')
    _run_common_friend_finder('friend_graph_example.txt')
    print('friend_graph.txt')
    _run_common_friend_finder('friend_graph.txt')
| [
"[email protected]"
] | |
fc8dc6ce2041692e59638cacdceb23346d7a52db | f8da830331428a8e1bbeadf23345f79f1750bd98 | /msgraph-cli-extensions/beta/education_beta/azext_education_beta/generated/action.py | c30154d6d1aaa55e6339e1b49cb9af08c8bcd2ba | [
"MIT"
] | permissive | ezkemboi/msgraph-cli | e023e1b7589461a738e42cbad691d9a0216b0779 | 2ceeb27acabf7cfa219c8a20238d8c7411b9e782 | refs/heads/main | 2023-02-12T13:45:03.402672 | 2021-01-07T11:33:54 | 2021-01-07T11:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,034 | py | # --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=protected-access

import argparse
from collections import defaultdict
from knack.util import CLIError


class AddCourse(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.course = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'course-number':
                d['course_number'] = v[0]
            elif kl == 'description':
                d['description'] = v[0]
            elif kl == 'display-name':
                d['display_name'] = v[0]
            elif kl == 'external-id':
                d['external_id'] = v[0]
            elif kl == 'subject':
                d['subject'] = v[0]
        return d
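

# Hypothetical wiring sketch (my addition, not generated code): each Action
# here collects space-separated KEY=VALUE tokens into a snake_case dict, e.g.
#   parser.add_argument('--course', action=AddCourse, nargs='*')
# turns `--course display-name=Math subject=math` into
#   namespace.course == {'display_name': 'Math', 'subject': 'math'}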
class AddTerm(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.term = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'display-name':
                d['display_name'] = v[0]
            elif kl == 'end-date':
                d['end_date'] = v[0]
            elif kl == 'external-id':
                d['external_id'] = v[0]
            elif kl == 'start-date':
                d['start_date'] = v[0]
        return d


class AddAssignmentCategories(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddAssignmentCategories, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'display-name':
                d['display_name'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddAddress(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.address = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'city':
                d['city'] = v[0]
            elif kl == 'country-or-region':
                d['country_or_region'] = v[0]
            elif kl == 'postal-code':
                d['postal_code'] = v[0]
            elif kl == 'post-office-box':
                d['post_office_box'] = v[0]
            elif kl == 'state':
                d['state'] = v[0]
            elif kl == 'street':
                d['street'] = v[0]
            elif kl == 'type':
                d['type'] = v[0]
        return d


class AddAdministrativeUnitMembers(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddAdministrativeUnitMembers, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'deleted-date-time':
                d['deleted_date_time'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddAdministrativeUnitExtensions(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddAdministrativeUnitExtensions, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'id':
                d['id'] = v[0]
        return d


class AddLicensesToAssign(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddLicensesToAssign, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'applies-to':
                d['applies_to'] = v[0]
            elif kl == 'sku-ids':
                d['sku_ids'] = v
        return d


class AddErrors(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddErrors, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'entry-type':
                d['entry_type'] = v[0]
            elif kl == 'error-code':
                d['error_code'] = v[0]
            elif kl == 'error-message':
                d['error_message'] = v[0]
            elif kl == 'joining-value':
                d['joining_value'] = v[0]
            elif kl == 'recorded-date-time':
                d['recorded_date_time'] = v[0]
            elif kl == 'reportable-identifier':
                d['reportable_identifier'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddProfileStatus(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.profile_status = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'last-synchronization-date-time':
                d['last_synchronization_date_time'] = v[0]
            elif kl == 'status':
                d['status'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddInstructions(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        namespace.instructions = action

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'content':
                d['content'] = v[0]
            elif kl == 'content-type':
                d['content_type'] = v[0]
        return d


class AddCategories(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddCategories, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'display-name':
                d['display_name'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddEducationClassesResources(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddEducationClassesResources, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'distribute-for-student-work':
                d['distribute_for_student_work'] = v[0]
            elif kl == 'created-date-time':
                d['created_date_time'] = v[0]
            elif kl == 'display-name-resource-display-name':
                d['display_name_resource_display_name'] = v[0]
            elif kl == 'last-modified-date-time':
                d['last_modified_date_time'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-user-display-name':
                d['display_name_resource_last_modified_by_user_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-user-id':
                d['id_resource_last_modified_by_user_id'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-device-display-name':
                d['display_name_resource_last_modified_by_device_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-device-id':
                d['id_resource_last_modified_by_device_id'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-application-display-name':
                d['display_name_resource_last_modified_by_application_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-application-id':
                d['id_resource_last_modified_by_application_id'] = v[0]
            elif kl == 'display-name-resource-created-by-user-display-name':
                d['display_name_resource_created_by_user_display_name'] = v[0]
            elif kl == 'id-resource-created-by-user-id':
                d['id_resource_created_by_user_id'] = v[0]
            elif kl == 'display-name-resource-created-by-device-display-name':
                d['display_name_resource_created_by_device_display_name'] = v[0]
            elif kl == 'id-resource-created-by-device-id':
                d['id_resource_created_by_device_id'] = v[0]
            elif kl == 'display-name-resource-created-by-application-display-name':
                d['display_name_resource_created_by_application_display_name'] = v[0]
            elif kl == 'id-resource-created-by-application-id':
                d['id_resource_created_by_application_id'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddOutcomes(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddOutcomes, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'last-modified-date-time':
                d['last_modified_date_time'] = v[0]
            elif kl == 'display-name-last-modified-by-user-display-name':
                d['display_name_last_modified_by_user_display_name'] = v[0]
            elif kl == 'id-last-modified-by-user-id':
                d['id_last_modified_by_user_id'] = v[0]
            elif kl == 'display-name-last-modified-by-device-display-name':
                d['display_name_last_modified_by_device_display_name'] = v[0]
            elif kl == 'id-last-modified-by-device-id':
                d['id_last_modified_by_device_id'] = v[0]
            elif kl == 'display-name-last-modified-by-application-display-name':
                d['display_name_last_modified_by_application_display_name'] = v[0]
            elif kl == 'id-last-modified-by-application-id':
                d['id_last_modified_by_application_id'] = v[0]
            elif kl == 'id':
                d['id'] = v[0]
        return d


class AddEducationClassesAssignmentsResources(argparse._AppendAction):
    def __call__(self, parser, namespace, values, option_string=None):
        action = self.get_action(values, option_string)
        super(AddEducationClassesAssignmentsResources, self).__call__(parser, namespace, action, option_string)

    def get_action(self, values, option_string):  # pylint: disable=no-self-use
        try:
            properties = defaultdict(list)
            for (k, v) in (x.split('=', 1) for x in values):
                properties[k].append(v)
            properties = dict(properties)
        except ValueError:
            raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
        d = {}
        for k in properties:
            kl = k.lower()
            v = properties[k]
            if kl == 'assignment-resource-url':
                d['assignment_resource_url'] = v[0]
            elif kl == 'created-date-time':
                d['created_date_time'] = v[0]
            elif kl == 'display-name-resource-display-name':
                d['display_name_resource_display_name'] = v[0]
            elif kl == 'last-modified-date-time':
                d['last_modified_date_time'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-user-display-name':
                d['display_name_resource_last_modified_by_user_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-user-id':
                d['id_resource_last_modified_by_user_id'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-device-display-name':
                d['display_name_resource_last_modified_by_device_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-device-id':
                d['id_resource_last_modified_by_device_id'] = v[0]
            elif kl == 'display-name-resource-last-modified-by-application-display-name':
                d['display_name_resource_last_modified_by_application_display_name'] = v[0]
            elif kl == 'id-resource-last-modified-by-application-id':
                d['id_resource_last_modified_by_application_id'] = v[0]
            elif kl == 'display-name-resource-created-by-user-display-name':
                d['display_name_resource_created_by_user_display_name'] = v[0]
elif kl == 'id-resource-created-by-user-id':
d['id_resource_created_by_user_id'] = v[0]
elif kl == 'display-name-resource-created-by-device-display-name':
d['display_name_resource_created_by_device_display_name'] = v[0]
elif kl == 'id-resource-created-by-device-id':
d['id_resource_created_by_device_id'] = v[0]
elif kl == 'display-name-resource-created-by-application-display-name':
d['display_name_resource_created_by_application_display_name'] = v[0]
elif kl == 'id-resource-created-by-application-id':
d['id_resource_created_by_application_id'] = v[0]
elif kl == 'id':
d['id'] = v[0]
return d
class AddSubmittedResources(argparse._AppendAction):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
super(AddSubmittedResources, self).__call__(parser, namespace, action, option_string)
def get_action(self, values, option_string): # pylint: disable=no-self-use
try:
properties = defaultdict(list)
for (k, v) in (x.split('=', 1) for x in values):
properties[k].append(v)
properties = dict(properties)
except ValueError:
raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
d = {}
for k in properties:
kl = k.lower()
v = properties[k]
if kl == 'assignment-resource-url':
d['assignment_resource_url'] = v[0]
elif kl == 'created-date-time':
d['created_date_time'] = v[0]
elif kl == 'display-name-resource-display-name':
d['display_name_resource_display_name'] = v[0]
elif kl == 'last-modified-date-time':
d['last_modified_date_time'] = v[0]
elif kl == 'display-name-resource-last-modified-by-user-display-name':
d['display_name_resource_last_modified_by_user_display_name'] = v[0]
elif kl == 'id-resource-last-modified-by-user-id':
d['id_resource_last_modified_by_user_id'] = v[0]
elif kl == 'display-name-resource-last-modified-by-device-display-name':
d['display_name_resource_last_modified_by_device_display_name'] = v[0]
elif kl == 'id-resource-last-modified-by-device-id':
d['id_resource_last_modified_by_device_id'] = v[0]
elif kl == 'display-name-resource-last-modified-by-application-display-name':
d['display_name_resource_last_modified_by_application_display_name'] = v[0]
elif kl == 'id-resource-last-modified-by-application-id':
d['id_resource_last_modified_by_application_id'] = v[0]
elif kl == 'display-name-resource-created-by-user-display-name':
d['display_name_resource_created_by_user_display_name'] = v[0]
elif kl == 'id-resource-created-by-user-id':
d['id_resource_created_by_user_id'] = v[0]
elif kl == 'display-name-resource-created-by-device-display-name':
d['display_name_resource_created_by_device_display_name'] = v[0]
elif kl == 'id-resource-created-by-device-id':
d['id_resource_created_by_device_id'] = v[0]
elif kl == 'display-name-resource-created-by-application-display-name':
d['display_name_resource_created_by_application_display_name'] = v[0]
elif kl == 'id-resource-created-by-application-id':
d['id_resource_created_by_application_id'] = v[0]
elif kl == 'id':
d['id'] = v[0]
return d
| [
"[email protected]"
] | |
8c32175a2770eff6b1c971db34e742e17ff04c5e | 5d74051293a4740c597abb016870a56a58cecf5b | /modules/shared/infrastructure/passwords/django/__init__.py | d5c02785b58d9ae5462b7f2646e11fb08dc3e688 | [
"BSD-3-Clause"
] | permissive | eduardolujan/hexagonal_architecture_django | 98e707148745f5a36f166c0584cfba21cca473f0 | 8055927cb460bc40f3a2651c01a9d1da696177e8 | refs/heads/develop | 2023-02-21T22:46:20.614779 | 2021-01-16T02:48:37 | 2021-01-16T02:48:37 | 305,813,872 | 5 | 2 | BSD-3-Clause | 2021-01-16T18:00:26 | 2020-10-20T19:32:46 | Python | UTF-8 | Python | false | false | 171 | py | # -*- coding: utf-8 -*-
from .password_creator import PasswordCreator
from .password_checker import PasswordChecker
__all__ = ('PasswordCreator', 'PasswordChecker', )
| [
"[email protected]"
] | |
036514a7caa71a638bee78a085ccf1cb5efe4853 | 0326f06f68fb0d919f8467f4744dfd60a654836a | /eggs/django_lfs-0.9.0a1-py2.7.egg/lfs/manage/manufacturers/products.py | 09654cc08032b8bfccd2f21f7e286e87edb4a7a9 | [] | no_license | ethirajit/onlinepos | 67de6023241339ae08c3b88a9e7b62b837ec17a3 | 186ba6585d0b29f96a5c210462764515cccb3b47 | refs/heads/master | 2021-01-17T13:23:36.490727 | 2014-07-01T10:30:17 | 2014-07-01T10:30:17 | 34,388,218 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,843 | py | import json
# django imports
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.db.models import Q
from django.http import HttpResponse
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# lfs imports
from lfs.caching.utils import lfs_get_object_or_404
from lfs.core.signals import product_changed
from lfs.core.signals import manufacturer_changed
from lfs.core.utils import LazyEncoder
from lfs.catalog.settings import VARIANT
from lfs.catalog.models import Category
from lfs.catalog.models import Product
# Views
from lfs.manufacturer.models import Manufacturer
@permission_required("core.manage_shop")
def manage_products(request, manufacturer_id, template_name="manage/manufacturers/products.html"):
"""
"""
manufacturer = Manufacturer.objects.get(pk=manufacturer_id)
inline = products_inline(request, manufacturer_id, True)
# amount options
amount_options = []
for value in (10, 25, 50, 100):
amount_options.append({
"value": value,
"selected": value == request.session.get("manufacturer-products-amount")
})
return render_to_string(template_name, RequestContext(request, {
"manufacturer": manufacturer,
"products_inline": inline,
"amount_options": amount_options,
}))
# Parts
@permission_required("core.manage_shop")
def products_inline(request, manufacturer_id, as_string=False, template_name="manage/manufacturers/products_inline.html"):
"""Displays the products-tab of a manufacturer.
This is called at start from the manage_products view to assemble the
whole manage manufacturer view and is subsequently called via ajax requests to
update this part independent of others.
"""
manufacturer = Manufacturer.objects.get(pk=manufacturer_id)
if request.REQUEST.get("keep-session"):
page = request.REQUEST.get("manufacturer_page", request.session.get("manufacturer_page", 1))
filter_ = request.REQUEST.get("manufacturer_filter", request.session.get("manufacturer_filter", ""))
category_filter = request.REQUEST.get("manufacturer_category_filter", request.session.get("manufacturer_category_filter", ""))
else:
page = 1
filter_ = ""
category_filter = ""
s = request.session
s["manufacturer_page"] = page
s["manufacturer_filter"] = filter_
s["manufacturer_category_filter"] = category_filter
try:
s["manufacturer-products-amount"] = int(request.REQUEST.get("manufacturer-products-amount",
s.get("manufacturer-products-amount")))
except TypeError:
s["manufacturer-products-amount"] = 25
filters = Q()
if filter_:
filters &= (Q(name__icontains=filter_) | Q(sku__icontains=filter_))
if category_filter:
if category_filter == "None":
filters &= Q(categories=None)
elif category_filter == "All":
pass
else:
category_temp = lfs_get_object_or_404(Category, pk=category_filter)
categories_temp = [category_temp]
categories_temp.extend(category_temp.get_all_children())
filters &= Q(categories__in=categories_temp)
selectable_products = Product.objects.filter(
filters).exclude(sub_type=VARIANT).exclude(manufacturer=manufacturer).distinct()
paginator = Paginator(selectable_products, s["manufacturer-products-amount"])
try:
page = paginator.page(page)
except (EmptyPage, InvalidPage):
page = paginator.page(1)
result = render_to_string(template_name, RequestContext(request, {
"manufacturer": manufacturer,
"paginator": paginator,
"page": page,
"selected_products": selected_products(request, manufacturer_id, as_string=True),
}))
if as_string:
return result
else:
return HttpResponse(json.dumps({
"html": [["#products-inline", result]],
}), mimetype='application/json')
# Actions
@permission_required("core.manage_shop")
def products_tab(request, manufacturer_id):
"""Returns the products tab for given manufacturer id.
"""
result = manage_products(request, manufacturer_id)
return HttpResponse(result)
@permission_required("core.manage_shop")
def selected_products(request, manufacturer_id, as_string=False, template_name="manage/manufacturers/selected_products.html"):
"""The selected products part of the products-tab of a manufacturer.
This is called at start from the products_inline method to assemble the
whole manage category view and is later called via ajax requests to update
this part independent of others.
"""
manufacturer = Manufacturer.objects.get(pk=manufacturer_id)
if request.REQUEST.get("keep-session"):
page_2 = request.REQUEST.get("manufacturer_page_2", request.session.get("manufacturer_page_2", 2))
filter_2 = request.REQUEST.get("manufacturer_filter_2", request.session.get("manufacturer_filter_2", ""))
else:
page_2 = 1
filter_2 = ""
request.session["manufacturer_page_2"] = page_2
request.session["manufacturer_filter_2"] = filter_2
try:
request.session["manufacturer-products-amount"] = int(request.REQUEST.get("manufacturer-products-amount", request.session.get("manufacturer-products-amount")))
except TypeError:
request.session["manufacturer-products-amount"] = 25
filters = Q(manufacturer=manufacturer)
if filter_2:
filters &= (Q(name__icontains=filter_2) | Q(sku__icontains=filter_2))
products = Product.objects.filter(filters).exclude(sub_type=VARIANT).distinct()
paginator_2 = Paginator(products, request.session["manufacturer-products-amount"])
try:
page_2 = paginator_2.page(page_2)
except (EmptyPage, InvalidPage):
page_2 = paginator_2.page(1)
result = render_to_string(template_name, RequestContext(request, {
"manufacturer": manufacturer,
"products": products,
"paginator_2": paginator_2,
"page_2": page_2,
"filter_2": filter_2,
}))
if as_string:
return result
else:
return HttpResponse(json.dumps({
"html": [["#selected-products", result]],
}), mimetype='application/json')
@permission_required("core.manage_shop")
def add_products(request, manufacturer_id):
"""Adds products (passed via request body) to category with passed id.
"""
manufacturer = Manufacturer.objects.get(pk=manufacturer_id)
for product_id in request.POST.keys():
if product_id.startswith("manufacturer_page") or product_id.startswith("manufacturer_filter") or \
product_id.startswith("keep-session") or product_id.startswith("action"):
continue
try:
product = Product.objects.get(pk=product_id)
product.manufacturer = manufacturer
product.save()
product_changed.send(product)
except Product.DoesNotExist:
continue
manufacturer_changed.send(manufacturer)
html = [["#products-inline", products_inline(request, manufacturer_id, as_string=True)]]
result = json.dumps({
"html": html,
"message": _(u"Selected products have been assigned to manufacturer.")
}, cls=LazyEncoder)
return HttpResponse(result, mimetype='application/json')
@permission_required("core.manage_shop")
def remove_products(request, manufacturer_id):
"""Removes product (passed via request body) from category with passed id.
"""
manufacturer = Manufacturer.objects.get(pk=manufacturer_id)
for product_id in request.POST.keys():
if product_id.startswith("manufacturer_page") or product_id.startswith("manufacturer_filter") or \
product_id.startswith("keep-session") or product_id.startswith("action"):
continue
product = Product.objects.get(pk=product_id)
product.manufacturer = None
product.save()
product_changed.send(product)
manufacturer_changed.send(manufacturer)
html = [["#products-inline", products_inline(request, manufacturer_id, as_string=True)]]
result = json.dumps({
"html": html,
"message": _(u"Selected products are no longer assigned to manufacturer.")
}, cls=LazyEncoder)
return HttpResponse(result, mimetype='application/json')
| [
"[email protected]"
] | |
bf02de413f3d6e40eb57f713c624fff6d8fbd472 | 87e60b0504be11c6997f1b20b72e9428cc128342 | /python/cowbells/geom/surfaces.py | 3e835c423e358ade9f4e13d82437578dd2b758dd | [] | no_license | brettviren/cowbells | 70a85856fdfc54526c847f115d5dc01ec85ec215 | 1ceca86383f4f774d56c3f159658518242875bc6 | refs/heads/master | 2021-01-10T18:44:41.531525 | 2014-04-09T15:17:29 | 2014-04-09T15:17:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,484 | py | #!/usr/bin/env python
'''
Describe optical surfaces
'''
import base, volumes
store = []
class OpticalSurface(base.Base):
# Known parameters
known_parameters = ['type', 'model', 'finish', 'first', 'second',
'polish', 'sigmaalpha']
# Known properties
known_properties = ['RINDEX','REALRINDEX','IMAGINARYRINDEX',
'REFLECTIVITY','EFFICIENCY','TRANSMITTANCE',
'SPECULARLOBECONSTANT','SPECULARSPIKECONSTANT',
'BACKSCATTERCONSTANT']
def __init__(self, name, **parameters):
self.name = name
self.parameters = {}
self.properties = {}
for k,v in parameters.iteritems():
self.add_parameter(k,v)
continue
store.append(self)
return
def add_parameter(self, key, value):
assert key in self.known_parameters, \
'Unknown parameter given to surface %s: "%s"' % (self.name, key)
if key in ['first','second']:
if isinstance(value, volumes.LogicalVolume):
value = value.name
self.parameters[key] = value
return
def add_property(self, propname, x, y):
self.properties[propname] = {'x':x, 'y':y}
return
pass
def get(surf):
if isinstance(surf, OpticalSurface):
return surf
for s in store:
if s.name == surf:
return s
return None
def pod(): return base.pod(store)
| [
"[email protected]"
] | |
8210e6228b034876d6073be5b96b8126496060ab | 1af78033850e5bbe7a66ad83a238b96e7e2f2778 | /app/models/post.py | fd56443c27624b53a2c22c678039e88e9560e1e4 | [
"MIT"
] | permissive | Sean10/flask_demo | e7c0aed4a0633f03ded079cadec322dc4bdc6076 | a04b284a1e812f5d291b67fbd04e3073063003f1 | refs/heads/master | 2020-03-27T22:22:30.677486 | 2018-09-03T15:55:10 | 2018-09-03T15:55:10 | 147,225,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,971 | py | import datetime
import html
from bson.objectid import ObjectId
from ..utils import *
class Post:
def __init__(self, default_config):
self.collection = default_config['POSTS_COLLECTION']
self.response = {'error': None, 'data': None}
self.debug_mode = default_config['DEBUG']
def get_posts(self, limit, skip, tag=None, search=None):
'''
        :param limit: maximum number of posts to return
        :param skip: number of posts to skip (for pagination)
        :param tag: if given, return only posts carrying this tag
        :param search: if given, return only posts whose title, body or
                       preview matches this string (case-insensitive)
        :return: response dict with 'error' and 'data' keys
'''
self.response['error'] = None
cond = {}
if tag is not None:
cond = {'tags': tag}
elif search is not None:
cond = {'$or': [
{'title': {'$regex': search, '$options': 'i'}},
{'body': {'$regex': search, '$options': 'i'}},
{'preview': {'$regex': search, '$options': 'i'}}]}
try:
cursor = self.collection.find(cond).sort(
'date', direction=-1).skip(skip).limit(limit)
self.response['data'] = []
for post in cursor:
if 'tags' not in post:
post['tags'] = []
if 'comments' not in post:
post['comments'] = []
if 'preview' not in post:
post['preview'] = ''
self.response['data'].append({'id': post['_id'],
'title': post['title'],
'body': post['body'],
'preview': post['preview'],
'date': post['date'],
'permalink': post['permalink'],
'tags': post['tags'],
'author': post['author'],
'comments': post['comments']})
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Posts not found..'
return self.response
def get_post_by_permalink(self, permalink):
self.response['error'] = None
try:
self.response['data'] = self.collection.find_one(
{'permalink': permalink})
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post not found..'
return self.response
def get_post_by_id(self, post_id):
self.response['error'] = None
try:
self.response['data'] = self.collection.find_one(
{'_id': ObjectId(post_id)})
if self.response['data']:
if 'tags' not in self.response['data']:
self.response['data']['tags'] = ''
else:
self.response['data']['tags'] = ','.join(
self.response['data']['tags'])
if 'preview' not in self.response['data']:
self.response['data']['preview'] = ''
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post not found..'
return self.response
def get_total_count(self, tag=None, search=None):
cond = {}
if tag is not None:
cond = {'tags': tag}
elif search is not None:
cond = {'$or': [
{'title': {'$regex': search, '$options': 'i'}},
{'body': {'$regex': search, '$options': 'i'}},
{'preview': {'$regex': search, '$options': 'i'}}]}
return self.collection.find(cond).count()
def get_tags(self):
self.response['error'] = None
try:
self.response['data'] = list(self.collection.aggregate([
{'$unwind': '$tags'},
{'$group': {'_id': '$tags', 'count': {'$sum': 1}}},
{'$sort': {'count': -1}},
{'$limit': 10},
{'$project': {'title': '$_id', 'count': 1, '_id': 0}}
]))
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Get tags error..'
return self.response
def create_new_post(self, post_data):
self.response['error'] = None
try:
self.response['data'] = self.collection.insert(post_data)
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Adding post error..'
return self.response
def edit_post(self, post_id, post_data):
self.response['error'] = None
del post_data['date']
del post_data['permalink']
try:
self.collection.update(
{'_id': ObjectId(post_id)}, {"$set": post_data}, upsert=False)
self.response['data'] = True
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Post update error..'
return self.response
def delete_post(self, post_id):
self.response['error'] = None
try:
if self.get_post_by_id(post_id) and self.collection.remove({'_id': ObjectId(post_id)}):
self.response['data'] = True
else:
self.response['data'] = False
except Exception as e:
self.print_debug_info(e, self.debug_mode)
self.response['error'] = 'Deleting post error..'
return self.response
@staticmethod
def validate_post_data(post_data):
print("while")
permalink = random_string(12)
print(permalink)
#exp = re.compile('\W')
#whitespace = re.compile('\s')
#temp_title = whitespace.sub("_", post_data['title'])
#permalink = exp.sub('', temp_title)
post_data['title'] = html.escape(post_data['title'])
post_data['preview'] = html.escape(post_data['preview'], quote=True)
post_data['body'] = html.escape(post_data['body'], quote=True)
post_data['date'] = datetime.datetime.utcnow()
post_data['permalink'] = permalink
return post_data
@staticmethod
def print_debug_info(msg, show=False):
if show:
import sys
import os
error_color = '\033[32m'
error_end = '\033[0m'
error = {'type': sys.exc_info()[0].__name__,
'file': os.path.basename(sys.exc_info()[2].tb_frame.f_code.co_filename),
'line': sys.exc_info()[2].tb_lineno,
'details': str(msg)}
print(error_color)
print('\n\n---\nError type: %s in file: %s on line: %s\nError details: %s\n---\n\n'\
% (error['type'], error['file'], error['line'], error['details']))
print(error_end)
| [
"[email protected]"
] | |
ee8ca2cdad8861221f07769c684b849247fb52ab | 5e20e9281c15587e8de2cce5b8eb342cae6b8645 | /astrohut/examples/collision3d.py | 3c0b384bc89b24bcdcfb12a8ce04744406d87dc6 | [] | no_license | jsbarbosa/astrohut | b6d0a76328d09f205a711b607e7fca4e9a51e178 | c2f8b721ec3ea8396ce321d44d881aa92dfa94f3 | refs/heads/master | 2021-09-18T11:51:10.142641 | 2018-07-13T18:50:18 | 2018-07-13T18:50:18 | 85,311,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 848 | py | import numpy as np
import astrohut as ah
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
G = 1.0
m = 1.0
N = 50
pos1 = np.zeros((N, 3))
pos2 = np.zeros_like(pos1)
pos1[:, :2] = np.random.normal(size = (N, 2))
pos2[:, :2] = np.random.normal(loc = 3.0, size = (N, 2))
pos2[:, 2] = 5.0
speeds1 = ah.generateSpeeds(pos1, G, m)
speeds2 = ah.generateSpeeds(pos2, G, m)
pos = np.vstack((pos1, pos2))
speeds = np.vstack((speeds1, speeds2))
system = ah.createArray(pos, speeds)
sim = ah.Simulation(system, dim = 3, dt = 1e-3, G = G, mass_unit = m, epsilon = 1e-2)
sim.start(5000, save_to_array_every = 125, print_progress = True)
# if boxes are wanted: boxed = True
ani = sim.makeAnimation()
sim.ax.set_xlim(-3, 5)
sim.ax.set_ylim(-3, 5)
# ani.save("collision3d.gif", writer="imagemagick", dpi = 72, fps = 12)
plt.show()
| [
"[email protected]"
] | |
71e5f0f9c68ae0973f094f30416e50780b207773 | dea85fb5330baf4ed9a185f040b258ef02bfa844 | /projecteuler/problem_005.py | 813f0d0ce716ad19ed0a0c6c0d538c799d34d4af | [] | no_license | icejoywoo/school | 595018d08bb971171106101dcd463bc435f29eff | bfc9ac94d67e02264da28055a932a86602efb2dc | refs/heads/master | 2022-07-24T02:43:37.747264 | 2016-04-29T07:14:42 | 2016-04-29T07:14:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 988 | py | #!/usr/bin/env python2.7
# encoding: utf-8
from __future__ import division
import math
def prime(end):
for i in xrange(2, end+1):
flag = True
for j in xrange(2, int(math.sqrt(i))+1):
if i % j == 0:
flag = False
if flag:
yield i
def prime_factor(number):
n = number
k = 1
while n != 1:
for k in prime(number):
if n % k == 0:
yield k
                n = n // k  # floor division: '/' yields a float here because of the __future__ import
break
if __name__ == '__main__':
all_prime = {}
for i in range(2, 21):
prime_counter = {}
for j in prime_factor(i):
prime_counter.setdefault(j, 0)
prime_counter[j] += 1
for k, v in prime_counter.items():
if all_prime.get(k, 0) < v:
all_prime[k] = v
print all_prime
r = reduce(lambda x, y: x * y, [k**v for k, v in all_prime.items()])
for i in range(1, 21):
print i, r / i
print r
| [
"[email protected]"
] | |
b8ae343b776a8e117360d6f81ec56f8bb36bde1c | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /sARz4TDdxCuqK6pja_12.py | 60b354a533deca46411ecac892304d607e16cdd0 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,842 | py | """
**Mubashir** needs your help to identify the spread of a deadly virus. He can
provide you with the following parameters:
* A two-dimensional array `persons`, containing **affected persons 'V'** and **unaffected persons 'P'**.
* Number of hours `n`, each infected person is spreading the virus to one person _up, down, left and right_ **each hour**.
Your function should return the updated array containing affected and
unaffected persons after `n` hours.
### Examples
persons = [
["P", "P", "P", "P", "P"],
["V", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"]
]
deadly_virus(persons, 0) ➞ [
["P", "P", "P", "P", "P"],
["V", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"]
]
deadly_virus(persons, 1) ➞ [
["V", "P", "P", "P", "P"],
["V", "V", "P", "P", "P"],
["V", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"]
]
deadly_virus(persons, 2) ➞ [
["V", "V", "P", "P", "P"],
["V", "V", "V", "P", "P"],
["V", "V", "P", "P", "P"],
["V", "P", "P", "P", "P"],
["P", "P", "P", "P", "P"]
]
### Notes
N/A
"""
def deadly_virus(people, n):
len_x = len(people); len_y = len(people[0])
for hour in range(n):
infected = set()
for i in range(len_x):
for j in range(len_y):
if people[i][j] == 'V':
if i > 0:
infected.add((i - 1, j))
if i < (len_x - 1):
infected.add((i + 1, j))
if j > 0:
infected.add((i, j - 1))
if j < (len_y - 1):
infected.add((i, j + 1))
for i, j in infected:
people[i][j] = 'V'
return people
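# A small usage sketch (added for illustration, not part of the original
# solution): replays the docstring's one-hour example and prints the grid.
if __name__ == '__main__':
    grid = [
        ["P", "P", "P", "P", "P"],
        ["V", "P", "P", "P", "P"],
        ["P", "P", "P", "P", "P"],
        ["P", "P", "P", "P", "P"],
        ["P", "P", "P", "P", "P"],
    ]
    for row in deadly_virus(grid, 1):
        print(row)  # the infected cell's up, down and right neighbours turn 'V'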
| [
"[email protected]"
] | |
0d32643ef83c8d8d718272d65f217f90ed5bc4bf | 9505e191cb287507c7df05212ab562bea1eda553 | /python_fishc/14.0.py | a439b8f61d38e52515082a5498ca97088d36971d | [
"MIT"
] | permissive | iisdd/Courses | c7a662305f3efe7d61eb23f766381290b1107bb8 | a47d202e0d7e1ba85a38c6fe3dd9619eceb1045c | refs/heads/main | 2023-04-15T17:40:36.474322 | 2021-04-27T14:31:42 | 2021-04-27T14:31:42 | 316,904,233 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,479 | py | '''0. 请写一个密码安全性检查的代码代码:check.py'''
# Password security check program
#
# Low-level password requirements:
# 1. The password consists purely of digits or letters
# 2. The password is at most 8 characters long
#
# Medium-level password requirements:
# 1. The password must combine any two of: digits, letters, special characters (only: ~!@#$%^&*()_=-/,.?<>;:[]{}|\)
# 2. The password must be at least 8 characters long
#
# High-level password requirements:
# 1. The password must combine all three of: digits, letters and special characters (only: ~!@#$%^&*()_=-/,.?<>;:[]{}|\)
# 2. The password must start with a letter
# 3. The password must be at least 16 characters long
def check():
symbol = '~!@#$%^&*()_=-/,.?<>;:[]{}|\\'
    test = input('Enter the password to check: ')
length = len(test)
flag = 0
    notice = '''Please raise your password security level as follows:
    1. The password must combine digits, letters and special characters
    2. The password must start with a letter
    3. The password must be at least 16 characters long'''
    print('Your password security level: ', end='')
for each in test:
if each in symbol:
flag = 1
break
if test.isalnum() or length <= 8:
        print('low')
print(notice)
elif test[0].isalpha() and length >= 16 and flag == 1 :
        print('high')
        print('Keep it up')
return True
else:
        print('medium')
print(notice)
while 1 :
if check():
break
| [
"[email protected]"
] | |
6b2fb25453491c119aaf9cf115995aecb0ca1840 | c077ee590d003ebada9e292bed0de8cc27fc1c7b | /other/sqllite.py | dccef0327b666dbc654e328b29879e2898378103 | [] | no_license | pwchen21/pros | bb744bc451a0ede2a31a6a5c74f7cda7f5abf206 | 9b48e2ec40a3eea12c79b89a00b2be60d65cc8d1 | refs/heads/master | 2023-07-07T11:50:52.599589 | 2023-06-24T19:46:02 | 2023-06-24T19:46:02 | 140,359,861 | 0 | 0 | null | 2020-10-19T17:53:52 | 2018-07-10T01:17:30 | Python | UTF-8 | Python | false | false | 786 | py | import sqlite3
conn=sqlite3.connect(r'D:\se\py\db\test.db')
# Create Table
#conn.execute('CREATE TABLE USER ( `ID` INTEGER PRIMARY KEY AUTOINCREMENT, `NAME` TEXT NOT NULL, `NICKNAME` TEXT, `PASSWORD` TEXT, `MAIL` TEXT )')
# Insert Data
'''
conn.execute('INSERT INTO USER (NAME, NICKNAME) VALUES ("Tester1", "N1");')
conn.execute('INSERT INTO USER (NAME, NICKNAME) VALUES ("Tester2", "N2");')
conn.execute('INSERT INTO USER (NAME, NICKNAME) VALUES ("Tester2", "N2");')
'''
# Commit Insert
conn.commit()
# Get User Data
cursor=conn.execute('SELECT * FROM USER')  # select all columns: the loop below reads NAME (x[1]) and NICKNAME (x[2])
# Print Data in ROW
for x in cursor:
#print('ID: ', x[0],' ','NAME:', x[1],' ', 'NICKNAME: ', x[2])
if x[1] == 'Tester1':
print('Nickname:', x[2])
conn.close()
#, (idr.get(), nic.get(), mailr.get(), pwr.get())) | [
"[email protected]"
] | |
3321af51db5e0bf76d7c034134aa3971bf647c1d | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/lobby/prb_windows/__init__.py | 126fc2da1e4c41925328ad15223c68599c5addbe | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 6,865 | py | # 2017.05.04 15:23:45 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/prb_windows/__init__.py
from gui.Scaleform.daapi.settings.views import VIEW_ALIAS
from gui.Scaleform.framework import ScopeTemplates
from gui.Scaleform.framework import ViewSettings, GroupedViewSettings, ViewTypes
from gui.Scaleform.framework.package_layout import PackageBusinessHandler
from gui.Scaleform.genConsts.CONTEXT_MENU_HANDLER_TYPE import CONTEXT_MENU_HANDLER_TYPE
from gui.Scaleform.genConsts.PREBATTLE_ALIASES import PREBATTLE_ALIASES
from gui.app_loader.settings import APP_NAME_SPACE
from gui.shared import EVENT_BUS_SCOPE
from gui.shared.utils.functions import getViewName
def getContextMenuHandlers():
from gui.Scaleform.daapi.view.lobby.prb_windows.PrebattleUserCMHandler import PrebattleUserCMHandler
return ((CONTEXT_MENU_HANDLER_TYPE.PREBATTLE_USER, PrebattleUserCMHandler),)
def getViewSettings():
from gui.Scaleform.daapi.view.lobby.prb_windows import invite_windows
from gui.Scaleform.daapi.view.lobby.prb_windows.BattleSessionList import BattleSessionList
from gui.Scaleform.daapi.view.lobby.prb_windows.BattleSessionWindow import BattleSessionWindow
from gui.Scaleform.daapi.view.lobby.prb_windows.CompanyListView import CompanyListView
from gui.Scaleform.daapi.view.lobby.prb_windows.CompanyMainWindow import CompanyMainWindow
from gui.Scaleform.daapi.view.lobby.prb_windows.CompanyRoomView import CompanyRoomView
from gui.Scaleform.daapi.view.lobby.SendInvitesWindow import SendInvitesWindow
from gui.Scaleform.daapi.view.lobby.prb_windows.SquadPromoWindow import SquadPromoWindow
from gui.Scaleform.daapi.view.lobby.prb_windows.squad_view import SquadView, FalloutSquadView
from gui.Scaleform.daapi.view.lobby.prb_windows.squad_view import EventSquadView
from gui.Scaleform.daapi.view.lobby.prb_windows.squad_window import SquadWindow, FalloutSquadWindow, EventSquadWindow
from gui.Scaleform.daapi.view.lobby.prb_windows.SwitchPeripheryWindow import SwitchPeripheryWindow
return (GroupedViewSettings(PREBATTLE_ALIASES.SEND_INVITES_WINDOW_PY, SendInvitesWindow, 'sendInvitesWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.SEND_INVITES_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.AUTO_INVITE_WINDOW_PY, invite_windows.AutoInviteWindow, 'receivedInviteWindow.swf', ViewTypes.WINDOW, 'receivedInviteWindow', None, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.SQUAD_WINDOW_PY, SquadWindow, 'squadWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.SQUAD_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.FALLOUT_SQUAD_WINDOW_PY, FalloutSquadWindow, 'squadWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.FALLOUT_SQUAD_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.EVENT_SQUAD_WINDOW_PY, EventSquadWindow, 'squadWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.EVENT_SQUAD_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.COMPANY_WINDOW_PY, CompanyMainWindow, 'companyMainWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.COMPANY_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.BATTLE_SESSION_ROOM_WINDOW_PY, BattleSessionWindow, 'battleSessionWindow.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.BATTLE_SESSION_ROOM_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(PREBATTLE_ALIASES.BATTLE_SESSION_LIST_WINDOW_PY, BattleSessionList, 'battleSessionList.swf', ViewTypes.WINDOW, '', PREBATTLE_ALIASES.BATTLE_SESSION_LIST_WINDOW_PY, ScopeTemplates.DEFAULT_SCOPE, True),
GroupedViewSettings(VIEW_ALIAS.SQUAD_PROMO_WINDOW, SquadPromoWindow, 'squadPromoWindow.swf', ViewTypes.WINDOW, '', None, ScopeTemplates.DEFAULT_SCOPE),
GroupedViewSettings(VIEW_ALIAS.SWITCH_PERIPHERY_WINDOW, SwitchPeripheryWindow, 'switchPeripheryWindow.swf', ViewTypes.TOP_WINDOW, '', None, ScopeTemplates.DEFAULT_SCOPE),
ViewSettings(PREBATTLE_ALIASES.SQUAD_VIEW_PY, SquadView, None, ViewTypes.COMPONENT, None, ScopeTemplates.DEFAULT_SCOPE),
ViewSettings(PREBATTLE_ALIASES.EVENT_SQUAD_VIEW_PY, EventSquadView, None, ViewTypes.COMPONENT, None, ScopeTemplates.DEFAULT_SCOPE),
ViewSettings(PREBATTLE_ALIASES.FALLOUT_SQUAD_VIEW_PY, FalloutSquadView, None, ViewTypes.COMPONENT, None, ScopeTemplates.DEFAULT_SCOPE),
ViewSettings(PREBATTLE_ALIASES.COMPANY_LIST_VIEW_PY, CompanyListView, None, ViewTypes.COMPONENT, None, ScopeTemplates.DEFAULT_SCOPE),
ViewSettings(PREBATTLE_ALIASES.COMPANY_ROOM_VIEW_PY, CompanyRoomView, None, ViewTypes.COMPONENT, None, ScopeTemplates.DEFAULT_SCOPE))
def getBusinessHandlers():
return (_PrbPackageBusinessHandler(),)
class _PrbPackageBusinessHandler(PackageBusinessHandler):
def __init__(self):
listeners = ((PREBATTLE_ALIASES.SQUAD_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.EVENT_SQUAD_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.FALLOUT_SQUAD_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.COMPANY_WINDOW_PY, self.__showCompanyMainWindow),
(PREBATTLE_ALIASES.BATTLE_SESSION_ROOM_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.BATTLE_SESSION_LIST_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.SEND_INVITES_WINDOW_PY, self.__showPrebattleWindow),
(PREBATTLE_ALIASES.AUTO_INVITE_WINDOW_PY, self.__showAutoInviteWindow),
(VIEW_ALIAS.SQUAD_PROMO_WINDOW, self.loadViewByCtxEvent),
(VIEW_ALIAS.SWITCH_PERIPHERY_WINDOW, self.loadViewByCtxEvent))
super(_PrbPackageBusinessHandler, self).__init__(listeners, APP_NAME_SPACE.SF_LOBBY, EVENT_BUS_SCOPE.LOBBY)
def __showPrebattleWindow(self, event):
alias = name = event.eventType
self.loadViewWithDefName(alias, name, event.ctx)
def __showAutoInviteWindow(self, event):
alias = PREBATTLE_ALIASES.AUTO_INVITE_WINDOW_PY
name = getViewName(PREBATTLE_ALIASES.AUTO_INVITE_WINDOW_PY, event.ctx.get('prbID'))
self.loadViewWithDefName(alias, name, event.ctx)
def __showCompanyMainWindow(self, event):
alias = name = PREBATTLE_ALIASES.COMPANY_WINDOW_PY
window = self.findViewByAlias(ViewTypes.WINDOW, alias)
if window is not None:
window.updateWindowState(event.ctx)
else:
self.loadViewWithDefName(alias, name, event.ctx if event else None)
return
# okay decompiling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\lobby\prb_windows\__init__.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:23:45 Central Europe (daylight saving time)
| [
"[email protected]"
] | |
ab5ccec07088f8d9d3787c3d31cfc69fcc04f54f | bb8838e3eec624fd35a61d6d646f941eac1b266a | /saga/utils/threads.py | 69f6f1c07f22481e262743a916595b2708709f1f | [
"MIT"
] | permissive | agrill/saga-python | 55087c03e72635ffbb2fe1ca56b5cc02b7ff2094 | 35101e3a40d3cfcb39cb9f0d0c5f64c6f8de5930 | refs/heads/master | 2021-01-22T10:14:11.922145 | 2013-11-19T14:38:50 | 2013-11-19T14:38:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,604 | py |
__author__ = "Andre Merzky"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__ = "MIT"
import sys
import threading
import saga.exceptions as se
import saga.utils.misc as sumisc
_out_lock = threading.RLock ()
# ------------------------------------------------------------------------------
#
NEW = 'New'
RUNNING = 'Running'
FAILED = 'Failed'
DONE = 'Done'
# ------------------------------------------------------------------------------
#
def lout (txt, stream=sys.stdout) :
with _out_lock :
stream.write (txt)
stream.flush ()
# ------------------------------------------------------------------------------
#
class Thread (threading.Thread) : pass
def Event (*args, **kwargs) :
return threading.Event (*args, **kwargs)
# ------------------------------------------------------------------------------
#
class RLock (object) :
# see http://stackoverflow.com/questions/6780613/
# is-it-possible-to-subclass-lock-objects-in-python-if-not-other-ways-to-debug
# ------------------------------------------------------------------------------
#
def __init__ (self, obj=None) :
self._lock = threading.RLock ()
# with self._lock :
# self._obj = obj
# self._cnt = 0
# ------------------------------------------------------------------------------
#
def acquire (self) :
# ind = (self._cnt)*' '+'>'+(30-self._cnt)*' '
# lout ("%s -- %-10s %50s acquire - %s\n" % (ind, threading.current_thread().name, self, self._lock))
self._lock.acquire ()
# self._cnt += 1
# ind = (self._cnt)*' '+'|'+(30-self._cnt)*' '
# lout ("%s %-10s %50s acquired - %s\n" % (ind, threading.current_thread().name, self, self._lock))
# ------------------------------------------------------------------------------
#
def release (self) :
# ind = (self._cnt)*' '+'-'+(30-self._cnt)*' '
# lout ("%s %-10s %50s release - %s\n" % (ind, threading.current_thread().name, self, self._lock))
self._lock.release ()
# self._cnt -= 1
# ind = (self._cnt)*' '+'<'+(30-self._cnt)*' '
# lout ("%s -- %-10s %50s released - %s\n" % (ind, threading.current_thread().name, self, self._lock))
# ------------------------------------------------------------------------------
#
def __enter__ (self) : self.acquire ()
def __exit__ (self, type, value, traceback) : self.release ()
# ------------------------------------------------------------------------------
#
class SagaThread (Thread) :
def __init__ (self, call, *args, **kwargs) :
if not callable (call) :
raise se.BadParameter ("Thread requires a callable to function, not %s" \
% (str(call)))
Thread.__init__ (self)
self._call = call
self._args = args
self._kwargs = kwargs
self._state = NEW
self._result = None
self._exception = None
self.daemon = True
@classmethod
def Run (self, call, *args, **kwargs) :
t = self (call, *args, **kwargs)
t.start ()
return t
@property
def tid (self) :
        return self.ident  # thread id from threading.Thread; the original 'return self.tid' recursed into this property
def run (self) :
try :
self._state = RUNNING
self._result = self._call (*self._args, **self._kwargs)
self._state = DONE
except Exception as e :
print ' ========================================== '
print repr(e)
print ' ========================================== '
print str(e)
print ' ========================================== '
print sumisc.get_trace ()
print ' ========================================== '
self._exception = e
self._state = FAILED
def wait (self) :
if self.isAlive () :
self.join ()
def cancel (self) :
# FIXME: this is not really implementable generically, so we ignore
# cancel requests for now.
pass
def get_state (self) :
return self._state
state = property (get_state)
def get_result (self) :
if not self._state == DONE :
return None
return self._result
result = property (get_result)
def get_exception (self) :
if not self._state == FAILED :
return None
return self._exception
exception = property (get_exception)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| [
"[email protected]"
] | |
d2712d6ad0380ba56c92dbda2082dd9c9a137afa | c2f35e5d3cfbbb73188a0cd6c43d161738e63bd1 | /07-mini-web框架/06-通过传递字典实现浏览器请求的资源不一样得到响应不一样/web_server.py | d55807d19181b3f74fd963dd53e7597e6e885e4b | [] | no_license | yangh-zzf-itcast/Python_heima_Study | 2a7cd0d801d9d6f49548905d373bb409efc4b559 | 7d753c1cdd5c46a0e78032e12b1d2f5d9be0bf68 | refs/heads/master | 2020-04-30T06:59:04.000451 | 2019-04-19T12:15:30 | 2019-04-19T12:15:30 | 176,670,172 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,940 | py | import socket
import re
import multiprocessing
import mini_frame # module with the request-handling logic
class WSGIServer(object):
"""WSGI服务器类"""
def __init__(self):
        # 1. Create the socket
        self.tcp_server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Set a socket option so the address can be reused
        self.tcp_server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # 2. Bind
        self.tcp_server_socket.bind(("", 7890))
        # 3. Listen
        self.tcp_server_socket.listen(128)
def service_client(self, tcp_client_socket):
"""为客户端服务"""
# 1. 接收浏览器发送过来的 http 请求
# GET /index.html HTTP/1.1
# ......
#
# 请求数据内容,对数据内容进行解码
request = tcp_client_socket.recv(1024).decode("utf-8")
print(request)
try:
            # Split the received request string line by line;
            # splitlines() returns a list with one entry per line
            request_lines = request.splitlines()
            # The first line is the HTTP request line; it contains the file name the browser wants
            ret = re.match(r"[^/]+(/[^ ]*)", request_lines[0])
            # Extract the file name, e.g. /index.html
if ret:
file_name = ret.group(1)
if file_name == "/":
file_name = "/index.html"
else:
pass
except IndexError:
file_name = "/index.html"
        # 2. Return HTTP-formatted data to the browser
        # If the requested resource does not end in .py, treat it as a static resource (html/css/js/png/jpg, etc.)
if not file_name.endswith(".py"):
try:
f = open("./html" + file_name, "rb")
except:
response = "HTTP/1.1 404 NOT FOUND\r\n"
response += "\r\n"
response += "------file not found------"
tcp_client_socket.send(response.encode("utf-8"))
else:
html_content = f.read()
f.close()
                # 2.1 Data sent to the browser ---- header
                # Note: each header line must be terminated with \r\n
                response = "HTTP/1.1 200 OK\r\n"
                response += "\r\n" # a blank line separates the headers from the body
                # 2.2 Data sent to the browser ---- body
                # response += "<h1>YangHang love ZhangZifan</h1>"
                # Send the response header
                tcp_client_socket.send(response.encode("utf-8"))
                # Send the content the client requested
tcp_client_socket.send(html_content)
else:
            # If it ends in .py, treat it as a dynamic resource request
            # body = "hhhh"
            # if file_name == "/login.py":
            #     body = mini_frame.login()
            # Decoupling: the logic is handled inside the mini framework
            # WSGI protocol
            env = dict() # dict holding the information about what the browser requests
env['PATH_INFO'] = file_name
body = mini_frame.application(env, self.set_response_header)
header = "HTTP/1.1 %s\r\n" % self.status
            # Walk the response-header tuples
for temp in self.headers:
header +="%s:%s\r\n" % (temp[0], temp[1])
header += "\r\n"
response = header + body
tcp_client_socket.send(response.encode("utf-8"))
        # Close the serving socket
tcp_client_socket.close()
    # Passed by reference into the framework's application(); it receives the status and headers and stores them on the instance
def set_response_header(self, status, headers):
self.status = status
        # Server-related headers are added here inside the server, kept separate from the framework's headers
        self.headers = [('server', 'mini_web v1.0')]  # no trailing colon in the name; the "%s:%s" join below adds it
        # Merge the server headers with the framework headers
self.headers += headers
def run_forever(self):
"""完成服务器的整体控制,无限循环运行"""
while True:
            # 4. Wait for a new client connection
new_socket, client_addr = self.tcp_server_socket.accept()
            # 5. Create a child process to serve this client
p = multiprocessing.Process(target=self.service_client, args=(new_socket, ))
p.start()
            # Close new_socket in the parent process
new_socket.close()
        # Close the listening socket
self.tcp_server_socket.close()
def main():
"""控制整体,创建一个web服务器对象,然后调用这个对象的run_forever方法运行"""
wsgi_server = WSGIServer()
wsgi_server.run_forever()
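# For reference, a minimal sketch of the mini_frame.application() this server
# expects. The real mini_frame module is not shown in this file, so the body
# below is an assumption for illustration only -- it just mirrors the
# (env, set_response_header) calling convention used in service_client().
def _example_application(env, set_response_header):
    # hand the status line and extra header tuples back to the server
    set_response_header("200 OK", [("Content-Type", "text/html; charset=utf-8")])
    if env.get("PATH_INFO") == "/login.py":  # hypothetical route name
        return "<h1>login page</h1>"
    return "<h1>hello from the mini framework</h1>"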
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
470f082d4e4f775112b238965cc902c710b8d8b6 | 521efcd158f4c69a686ed1c63dd8e4b0b68cc011 | /airflow/api_connexion/endpoints/version_endpoint.py | 077d7f8a1cfe4dcc05b12aafbc14528af5d0c696 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | coutureai/RaWorkflowOrchestrator | 33fd8e253bfea2f9a82bb122ca79e8cf9dffb003 | cd3ea2579dff7bbab0d6235fcdeba2bb9edfc01f | refs/heads/main | 2022-10-01T06:24:18.560652 | 2021-12-29T04:52:56 | 2021-12-29T04:52:56 | 184,547,783 | 5 | 12 | Apache-2.0 | 2022-11-04T00:02:55 | 2019-05-02T08:38:38 | Python | UTF-8 | Python | false | false | 1,430 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import NamedTuple, Optional
import airflow
from airflow.api_connexion.schemas.version_schema import version_info_schema
from airflow.api_connexion.types import APIResponse
from airflow.utils.platform import get_airflow_git_version
class VersionInfo(NamedTuple):
"""Version information"""
version: str
git_version: Optional[str]
def get_version() -> APIResponse:
"""Get version information"""
airflow_version = airflow.__version__
git_version = get_airflow_git_version()
version_info = VersionInfo(version=airflow_version, git_version=git_version)
return version_info_schema.dump(version_info)
| [
"[email protected]"
] | |
a5fdff54dca575404713d802a9baf77ff4c1e16c | 72579db4299be6d512a766ce38ae50e3c7753368 | /.history/Pythonlearning/day9_20200802095738.py | 8bf2b42a5e3793092b534eb4601b7c71e24450db | [] | no_license | moteily/Python_Learning | f0d1abf360ad417112051ba52f32a141452adb2d | c294aa1e373254739fb372918507cd7dbe12c999 | refs/heads/master | 2022-11-26T11:09:48.145308 | 2020-08-04T08:47:15 | 2020-08-04T08:47:15 | 284,379,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,626 | py | # 接上一天的第九章
# Static methods and class methods:
# Definition and representation: static methods and class methods are wrapped in objects of
# the staticmethod and classmethod classes, respectively. A static method is defined without
# the self parameter and can be called directly on the class. A class method is defined with
# a self-like parameter, usually named cls. A class method can also be called on an instance,
# but the cls parameter is automatically bound to the class. For example:
class Myclass:
def smeth():
print('This is a static method')
smeth = staticmethod(smeth)
    def cmeth(cls):  # cls is the parameter specific to class methods
print('This is a class method of ', cls)
cmeth = classmethod(cmeth)
# Manually wrapping and replacing methods like this is a bit tedious. A mechanism called
# decorators was introduced for wrapping methods this way (in fact, decorators can wrap any
# callable and work on both methods and functions). One or more decorators are specified by
# listing them above the method (or function) with the @ operator (when several decorators
# are given, they are applied in the reverse of the order listed).
class Myclass:
@staticmethod
def smeth():
print('This is a static method')
@classmethod
def cmeth(cls):
print('This is a class method of',cls)
# After defining the methods this way, they can be used as follows (no instantiation needed):
Myclass.smeth()
Myclass.cmeth()
# The __getattr__, __setattr__ and related methods
# can intercept every attempt to access an object's attributes. One use is implementing
# properties in old-style classes (where the property function may not behave as expected).
# To run a piece of code whenever an attribute is accessed, these magic methods are required.
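# A minimal sketch (added for illustration, not from the book text) of the magic
# methods just mentioned: __setattr__ sees every assignment, while __getattr__
# fires only when normal attribute lookup fails.
class Wrapped:
    def __setattr__(self, name, value):
        print('setting', name)
        self.__dict__[name] = value  # write to __dict__ directly to avoid recursion
    def __getattr__(self, name):
        print('no attribute named', name, '- returning a default')
        return None
w = Wrapped()
w.size = 10       # goes through __setattr__
print(w.size)     # found normally, so __getattr__ is not called
print(w.missing)  # lookup fails, so __getattr__ supplies the default
| [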
"[email protected]"
] | |
9838043a9799e8e36f6c7fa133e5dbde8d92d24a | 34ef7e2955415e92806dd918df0013e39539b99c | /tests/test_scheduler.py | 919cedf0d2555b0e2251e3e1afc728d549fe3981 | [] | no_license | MasoniteFramework/scheduler | d2e843ba845e2cfe97197dbc50093f0b1ac9a07f | d783bef47df49893fdfc6dc761197fd5f1bb047c | refs/heads/3.0 | 2021-06-14T23:21:33.951389 | 2020-09-18T15:09:34 | 2020-09-18T15:09:34 | 135,189,744 | 1 | 2 | null | 2023-08-21T05:57:56 | 2018-05-28T17:17:51 | Python | UTF-8 | Python | false | false | 5,189 | py | import pytest
import pendulum
from src.masonite.scheduler.Task import Task
class MockTask(Task):
run_every = "5 minutes"
timezone = "America/New_York"
class TestScheduler:
def setup_method(self):
self.task = MockTask()
def test_scheduler_should_run(self):
assert self.task.run_every == "5 minutes"
time = pendulum.now().on(2018, 5, 21).at(22, 5, 5)
self.task._date = time
assert self.task.should_run(time) == True
time = pendulum.now().on(2018, 5, 21).at(22, 6, 5)
self.task._date = time
assert self.task.should_run(time) == False
def test_scheduler_should_run_every_minute(self):
self.task.run_every = "1 minute"
time = pendulum.now().on(2018, 5, 21).at(22, 5, 5)
self.task._date = time
assert self.task.should_run(time) == True
time = pendulum.now().on(2018, 5, 21).at(22, 6, 5)
self.task._date = time
assert self.task.should_run(time) == True
def test_scheduler_should_run_every_2_minutes(self):
self.task.run_every = "2 minutes"
time = pendulum.now().on(2018, 5, 21).at(14, 56, 5)
self.task._date = time
assert self.task.should_run(time) == True
time = pendulum.now().on(2018, 5, 21).at(14, 58, 5)
self.task._date = time
assert self.task.should_run(time) == True
def test_scheduler_should_run_every_hour(self):
self.task.run_every = "1 hour"
time = pendulum.now().on(2018, 5, 21).at(2, 0, 1)
self.task._date = time
assert self.task.should_run(time) == True
time = pendulum.now().on(2018, 5, 21).at(3, 0, 1)
self.task._date = time
assert self.task.should_run(time) == True
self.task.run_every = "2 hours"
time = pendulum.now().on(2018, 5, 21).at(2, 0, 1)
self.task._date = time
assert self.task.should_run(time) == True
self.task.run_every = "2 hours"
time = pendulum.now().on(2018, 5, 21).at(3, 0, 1)
self.task._date = time
assert self.task.should_run(time) == False
time = pendulum.now().on(2018, 5, 21).at(4, 0, 1)
self.task._date = time
assert self.task.should_run(time) == True
def test_scheduler_should_run_every_days(self):
self.task.run_every = "2 days"
time = pendulum.now().on(2018, 5, 21).at(0, 0, 1)
self.task._date = time
assert self.task.should_run(time) == False
time = pendulum.now().on(2018, 5, 23).at(0, 0, 1)
self.task._date = time
assert self.task.should_run(time) == False
self.task.run_at = "5:30"
time = pendulum.now().on(2018, 5, 22).at(5, 30, 0)
self.task._date = time
assert self.task.should_run(time) == True
self.task.run_at = "5:35"
time = pendulum.now().on(2018, 5, 22).at(5, 30, 0)
self.task._date = time
assert self.task.should_run(time) == False
def test_scheduler_should_run_every_months(self):
self.task.run_every = "2 months"
time = pendulum.now().on(2018, 1, 1).at(0, 0, 1)
self.task._date = time
assert self.task.should_run(time) == False
time = pendulum.now().on(2018, 2, 1).at(0, 0, 1)
self.task._date = time
assert self.task.should_run(time) == True
time = pendulum.now().on(2018, 2, 1).at(10, 0, 1)
self.task._date = time
assert self.task.should_run(time) == False
self.task.run_at = "5:30"
time = pendulum.now().on(2018, 2, 1).at(5, 30, 0)
self.task._date = time
assert self.task.should_run(time) == False
def test_twice_daily_at_correct_time(self):
time = pendulum.now().on(2018, 1, 1).at(1, 20, 5)
self.task.run_every = ""
self.task.twice_daily = (1, 13)
self.task._date = time
assert self.task.should_run()
time = pendulum.now().on(2018, 1, 1).at(13, 20, 5)
self.task._date = time
assert self.task.should_run()
def test_twice_daily_at_incorrect_time(self):
time = pendulum.now().on(2018, 1, 1).at(12, 20, 5)
self.task.run_every = ""
self.task.twice_daily = (1, 13)
self.task._date = time
assert self.task.should_run() is False
def test_run_at(self):
self.task.run_every = ""
self.task.run_at = None
self.task.run_at = "13:00"
time = pendulum.now().on(2018, 1, 1).at(13, 0, 5)
self.task._date = time
self.task.run_at = "13:05"
time = pendulum.now().on(2018, 1, 1).at(13, 5, 5)
self.task._date = time
assert self.task.should_run() is True
time = pendulum.now().on(2018, 1, 1).at(13, 6, 5)
self.task._date = time
assert self.task.should_run() is False
def test_method_calls(self):
task = MockTask()
task.at("13:00")
time = pendulum.now().on(2018, 1, 1).at(13, 0, 5)
task._date = time
task = MockTask()
task.every_minute()
time = pendulum.now().on(2018, 5, 21).at(22, 5, 5)
task._date = time
assert task.should_run(time) == True
| [
"[email protected]"
] | |
052968e050a51a8a22ec5d942182c99cb8f68f01 | 3d96cee3f0c986c7195e7677d85e91dc837d8dd4 | /Web/E/4/4.9/sql.py | 709d060270f8e011120ba344fdb61e742a11439c | [] | no_license | dannycrief/full-stack-web-dev-couse | 7faffe1c9e6c39baf03d6ee54f716e4f8b4c8733 | 0b22bc84742d8e78bd6a2e03adfbc44137f3d607 | refs/heads/master | 2023-01-12T09:25:16.378035 | 2021-03-21T16:51:18 | 2021-03-21T16:51:18 | 220,825,261 | 0 | 1 | null | 2023-01-05T12:57:14 | 2019-11-10T17:34:02 | Python | UTF-8 | Python | false | false | 181 | py | from sqlalchemy import create_engine, MetaData, Table, and_, or_, asc
engine = create_engine('postgresql+psycopg2://postgres:211217ns@localhost:5433/movies')
conn = engine.connect() | [
"[email protected]"
] | |
e223be854296cb648b6cd4f1db9b6eb064402213 | d780df6e068ab8a0f8007acb68bc88554a9d5b50 | /python/foreman/tests/testdata/path1/pkg1/pkg2/build.py | cda68e9a8ee9ba90409e8ffba39667c3487d8d67 | [
"MIT"
] | permissive | clchiou/garage | ed3d314ceea487b46568c14b51e96b990a50ed6f | 1d72863d3a5f5d620b170f4dd36f605e6b72054f | refs/heads/master | 2023-08-27T13:57:14.498182 | 2023-08-15T07:09:57 | 2023-08-15T19:53:52 | 32,647,497 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | from foreman import define_parameter, define_rule, get_relpath
if __name__ != 'pkg1.pkg2':
raise AssertionError('incorrect __name__: %s' % __name__)
if not __file__.endswith('tests/testdata/path1/pkg1/pkg2/build.py'):
raise AssertionError('incorrect __file__: %s' % __file__)
if str(get_relpath()) != 'pkg1/pkg2':
raise AssertionError('incorrect relpath: %s' % get_relpath())
COUNT = 0
if COUNT > 0:
raise AssertionError('load more than once')
COUNT += 1
define_parameter('par_x')
define_rule('rule_x').depend('//pkg1:pkg1')
| [
"[email protected]"
] | |
1c7e24b97e0bbeab4768fbcfa5cbbc723708b0a6 | a8fffbce7bd4d4e7e91f07b7aaaf0801ca64686e | /0x0F-python-object_relational_mapping/11-model_state_insert.py | f8589486bf72442bd5c4c25a9548a01450e5c593 | [] | no_license | bmuha1/holbertonschool-higher_level_programming | 8f603c07e4b3cb87d89c3a1fff9fd5cdef5bc9f5 | 79cca6ecb77ed8de65b55bcdd715a3a923c5cb3a | refs/heads/master | 2020-07-22T15:52:04.069523 | 2020-02-13T23:29:50 | 2020-02-13T23:29:50 | 207,251,416 | 2 | 7 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | #!/usr/bin/python3
"""
Write a script that adds the State object “Louisiana” to the database
hbtn_0e_6_usa
"""
if __name__ == "__main__":
from sys import argv
from model_state import Base, State
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
engine = create_engine(
'mysql+mysqldb://{}:{}@localhost/{}'.format(argv[1], argv[2], argv[3]),
pool_pre_ping=True)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
louisiana = State(name='Louisiana')
session.add(louisiana)
session.commit()
print(louisiana.id)
session.close()
| [
"[email protected]"
] | |
650489a082c5d10237a32919336dd5231d3ca41f | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/dms/scenario_inputs.py | 624f3bac2ee7f6b656a4e80f7643cb2464007bd6 | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 13,055 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.command_modules.dms.validators import throw_if_not_dictionary, throw_if_not_list
from azure.cli.core.azclierror import ValidationError
from azure.mgmt.datamigration.models import (MigrateSqlServerSqlDbTaskInput,
MigrateSqlServerSqlDbDatabaseInput,
MigrationValidationOptions,
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput,
MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput,
MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput,
MigrateMySqlAzureDbForMySqlOfflineTaskInput,
MigrateMySqlAzureDbForMySqlOfflineDatabaseInput)
def get_migrate_sql_to_sqldb_offline_input(database_options_json,
source_connection_info,
target_connection_info,
enable_schema_validation,
enable_data_integrity_validation,
enable_query_analysis_validation):
database_options = []
for d in database_options_json:
database_options.append(
MigrateSqlServerSqlDbDatabaseInput(
name=d.get('name', None),
target_database_name=d.get('target_database_name', None),
make_source_db_read_only=d.get('make_source_db_read_only', None),
table_map=d.get('table_map', None)))
validation_options = MigrationValidationOptions(enable_schema_validation=enable_schema_validation,
enable_data_integrity_validation=enable_data_integrity_validation,
enable_query_analysis_validation=enable_query_analysis_validation)
return MigrateSqlServerSqlDbTaskInput(source_connection_info=source_connection_info,
target_connection_info=target_connection_info,
selected_databases=database_options,
validation_options=validation_options)
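# A minimal sketch of the parsed database_options_json consumed above
# (values are illustrative assumptions; the keys mirror what this function reads):
# [{
#     'name': 'SourceDb',
#     'target_database_name': 'TargetDb',
#     'make_source_db_read_only': False,
#     'table_map': {'dbo.Source': 'dbo.Target'},
# }]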
def get_migrate_postgresql_to_azuredbforpostgresql_sync_input(database_options_json,
source_connection_info,
target_connection_info):
database_options = []
for d in database_options_json:
s_t = d.get('selectedTables', None)
t = None if s_t is None else [MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseTableInput(name=t) for t in s_t]
database_options.append(
MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInput(
name=d.get('name', None),
target_database_name=d.get('target_database_name', None),
migration_setting=d.get('migrationSetting', None),
source_setting=d.get('sourceSetting', None),
target_setting=d.get('targetSetting', None),
selected_tables=t))
return MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInput(source_connection_info=source_connection_info,
target_connection_info=target_connection_info,
selected_databases=database_options)
def get_migrate_mysql_to_azuredbformysql_sync_input(database_options_json,
source_connection_info,
target_connection_info):
return get_migrate_mysql_to_azuredbformysql_input(database_options_json,
source_connection_info,
target_connection_info,
has_schema_migration_options=True,
has_consistent_snapshot_options=True,
requires_consistent_snapshot=True,
has_binlog_position=False)
def get_migrate_mysql_to_azuredbformysql_offline_input(database_options_json,
source_connection_info,
target_connection_info):
return get_migrate_mysql_to_azuredbformysql_input(database_options_json,
source_connection_info,
target_connection_info,
has_schema_migration_options=True,
has_consistent_snapshot_options=True,
requires_consistent_snapshot=False,
has_binlog_position=False)
def get_migrate_mysql_to_azuredbformysql_cdc_input(database_options_json,
source_connection_info,
target_connection_info):
return get_migrate_mysql_to_azuredbformysql_input(database_options_json,
source_connection_info,
target_connection_info,
has_schema_migration_options=False,
has_consistent_snapshot_options=False,
requires_consistent_snapshot=False,
has_binlog_position=True)
def get_migrate_mysql_to_azuredbformysql_input(database_options_json,
source_connection_info,
target_connection_info,
has_schema_migration_options: bool,
has_consistent_snapshot_options: bool,
requires_consistent_snapshot: bool,
has_binlog_position: bool):
database_options = []
migration_level_settings = {}
make_source_server_read_only = False
migration_properties = {}
if not isinstance(database_options_json, dict):
raise ValidationError('Format of the database option file is wrong')
if 'selected_databases' not in database_options_json:
raise ValidationError('Database option file should contain at least one selected database for migration')
selected_databases = database_options_json.get('selected_databases')
for database in selected_databases:
if not isinstance(database, dict):
raise ValidationError('Format of the selected database file is wrong')
if 'name' not in database:
raise ValidationError('Selected database should have a name')
if 'target_database_name' not in database:
raise ValidationError('Selected database should have a target_database_name')
if 'table_map' in database and (not isinstance(database.get('table_map'), dict) or
len(database.get('table_map')) == 0):
raise ValidationError('table_map should be dictionary and non empty, to select all tables remove table_map')
db_input = create_db_input(database, has_schema_migration_options)
database_options.append(db_input)
set_optional(migration_properties, 'sourceServerResourceId', database_options_json, 'source_server_resource_id')
set_optional(migration_properties, 'targetServerResourceId', database_options_json, 'target_server_resource_id')
if 'migration_level_settings' in database_options_json:
migration_level_settings = database_options_json.get('migration_level_settings')
if not isinstance(migration_level_settings, dict):
raise ValidationError('migration_level_settings should be a dictionary')
if requires_consistent_snapshot:
migration_level_settings['enableConsistentBackup'] = 'true'
elif has_consistent_snapshot_options:
make_source_server_read_only = database_options_json.get('make_source_server_read_only', False)
set_optional(migration_level_settings, 'enableConsistentBackup', database_options_json,
'enable_consistent_backup')
if has_schema_migration_options:
extract_schema_migration_options(migration_properties, database_options_json)
if has_binlog_position:
set_required(migration_properties, 'binLogInfo', database_options_json, 'binlog_info', throw_if_not_dictionary)
task_input = MigrateMySqlAzureDbForMySqlOfflineTaskInput(source_connection_info=source_connection_info,
target_connection_info=target_connection_info,
selected_databases=database_options,
optional_agent_settings=migration_level_settings,
make_source_server_read_only=make_source_server_read_only)
if len(migration_properties) > 0:
task_input.additional_properties = migration_properties
task_input.enable_additional_properties_sending()
return task_input
def extract_schema_migration_options(migration_properties, database_options_json):
set_optional(migration_properties, 'migrateAllViews', database_options_json, 'migrate_all_views')
set_optional(migration_properties, 'migrateAllTriggers', database_options_json, 'migrate_all_triggers')
set_optional(migration_properties, 'migrateAllEvents', database_options_json, 'migrate_all_events')
set_optional(migration_properties, 'migrateAllRoutines', database_options_json, 'migrate_all_routines')
set_optional(migration_properties, 'migrateAllTablesSchema', database_options_json, 'migrate_all_tables_schema')
set_optional(migration_properties, 'migrateUserSystemTables', database_options_json, 'migrate_user_system_tables')
def create_db_input(database, has_schema_migration_options):
db_input = MigrateMySqlAzureDbForMySqlOfflineDatabaseInput(
name=database.get('name'),
target_database_name=database.get('target_database_name'),
table_map=database.get('table_map'))
if has_schema_migration_options:
db_properties = {}
set_optional(db_properties, 'tablesToMigrateSchema', database, 'tables_to_migrate_schema',
throw_if_not_dictionary)
set_optional(db_properties, 'selectedViews', database, 'selected_views', throw_if_not_list)
set_optional(db_properties, 'selectedTriggers', database, 'selected_triggers', throw_if_not_list)
set_optional(db_properties, 'selectedRoutines', database, 'selected_routines', throw_if_not_list)
set_optional(db_properties, 'selectedEvents', database, 'selected_events', throw_if_not_list)
set_optional(db_properties,
'selectDatabaseForSchemaMigration',
database,
'select_database_for_schema_migration')
if len(db_properties) > 0:
db_input.additional_properties = db_properties
db_input.enable_additional_properties_sending()
return db_input
def set_optional(target: dict,
target_property: str,
source: dict,
source_property: str,
                 validator: Callable[[Any, str], None] = None):
if source_property in source:
value = source[source_property]
if validator is not None:
validator(value, source_property)
target[target_property] = value
def set_required(target: dict,
target_property: str,
source: dict,
source_property: str,
                 validator: Callable[[Any, str], None] = None):
if source_property in source:
set_optional(target, target_property, source, source_property, validator)
else:
raise ValidationError("'%s' attribute is required but it is not found in the input json" % source_property)
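# Hedged usage sketch for the two helpers above (names are illustrative):
# props = {}
# set_optional(props, 'migrateAllViews', options, 'migrate_all_views')
# set_required(props, 'binLogInfo', options, 'binlog_info', throw_if_not_dictionary)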
| [
"[email protected]"
] | |
aa20facdce0abd3184f7a0b97c113bf2ae0b90f4 | 11656c882c83bb5ea364b7c92f763788fa4db5ae | /Pokhi/Pokhi/Rest/config.py | 3a97163750987d4bda5f85416a3424376b97920f | [] | no_license | abhijeetdtu/pokhiin | e28d22bd38975a1d25c5425c34a1ce6dce79b65e | 1eb512924da7c59e18dcf0c95819fd8d9e85c03d | refs/heads/master | 2021-05-14T04:03:07.621127 | 2018-04-28T18:08:51 | 2018-04-28T18:08:51 | 116,633,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | import os
class Config:
ENV = {}
ENV["DATA_DIR"] = os.path.abspath(os.path.join(os.path.realpath(__file__), "../../static/Data/"))
print(ENV["DATA_DIR"])
if('OPENSHIFT_DATA_DIR' in os.environ):
ENV["DATA_DIR"] = os.environ['OPENSHIFT_DATA_DIR']
ENV["UPLOAD_FOLDER"] = os.path.abspath(os.path.join( ENV["DATA_DIR"], "UploadedFiles/"))
print(ENV["DATA_DIR"])
print(ENV["UPLOAD_FOLDER"])
ENV["OPEN_CV_HOME"] = "C:\\Users\\Abhijeet\\Downloads\\OpenCv\\opencv\\sources\\data"
    # create the upload folder (and any missing parents) on first import
    if not os.path.exists(ENV["UPLOAD_FOLDER"]):
        os.makedirs(ENV["UPLOAD_FOLDER"])
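    # usage sketch: other modules are expected to read these paths via
    # Config.ENV["UPLOAD_FOLDER"] and Config.ENV["DATA_DIR"]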
"[email protected]"
] | |
d1da6360b081ce9719d4418c3a06f2e027120c06 | e44ff4069f5b559954e7a66685c86b054a70de7a | /MockVita 2/digit_pairs.py | 72aa08d4b84cb9d5d1e7233c8d5b7013e00f0f86 | [] | no_license | SayanDutta001/Competitive-Programming-Codes | 2912985e037f83bcde8e7fcb0036f1e31fa626df | 6dac061c0a4b1c5e82b99ec134e9e77606508e15 | refs/heads/master | 2023-03-17T04:25:47.507594 | 2021-03-05T16:23:09 | 2021-03-05T16:23:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 704 | py | def bitscore(s):
    # score a digit string: 11 * its largest digit + 7 * its smallest digit,
    # keeping only the last two digits once the score reaches 100
    score = int(max(s))*11 + int(min(s))*7
    if score >= 100:
        return str(score)[1:]
    return str(score)
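# worked example: bitscore("39") -> 9*11 + 3*7 = 120 -> "20"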
def pairs(a):
    # count pairs (i, j) of positions with the same parity (1-indexed)
    # whose scores start with the same digit; each leading digit is
    # allowed in at most two counted pairs (tracked via t)
    count = 0
    t = []
    for i in range(len(a)):
        for j in range(i+1, len(a)):
            same_parity = ((i+1) % 2) == ((j+1) % 2)
            if same_parity and t.count(a[i][0]) < 2 and a[i][0] == a[j][0]:
                count += 1
                t.append(a[i][0])
    return count
n = int(input())
a = []
s = list(input().split())
for i in s:
a.append(bitscore(i))
#print(a)
print(pairs(a))
| [
"[email protected]"
] | |
e080a1ca9f234923883169d8071f48e08ec53e81 | f6078890ba792d5734d289d7a0b1d429d945a03a | /hw4/submissions/duongmatthew/duongmatthew_24972_1303227_HW_4_3-1.py | 5dd52c46487e761c59542a30f9077f7a76a40c30 | [] | no_license | huazhige/EART119_Lab | 1c3d0b986a0f59727ee4ce11ded1bc7a87f5b7c0 | 47931d6f6a2c7bc053cd15cef662eb2f2027712c | refs/heads/master | 2020-05-04T23:40:53.709217 | 2019-06-11T18:30:45 | 2019-06-11T18:30:45 | 179,552,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,462 | py | # -*- coding: utf-8 -*-
"""
Created on Sat May 4 13:35:08 2019
- A new function, "my_Newton", that decides convergence by how small the
difference between the current and previous function values is, rather than
by how small the function value itself is.
author: maduong
"""
import numpy as np
#===================================================================================
# Fct Definitions
#===================================================================================
def my_Newton(fct, df_dx, x0):
"""
- implementation of Newton's method for solving f(x) = 0, when f'(x)
is known
"""
xn = float(x0)
eps = 1e-6
N = 20
i = 1
x_next = xn - fct(xn)/df_dx(xn)
# solved for the very first x_next term in order to define while loop
print(0 , 'fct_(x_next) -', abs(fct(x_next) - fct(xn)), x_next)
# printed out first set of values
while abs(fct(x_next) - fct(xn)) > eps and i < N:
xn = x_next # sets the first x_next term defined earlier to the new xn
x_next = xn - fct(xn)/df_dx(xn) # solved for new x_next
print(i , 'fct_(x_next) -', abs(fct(x_next) - fct(xn)), x_next)
i += 1
    if abs(fct(x_next) - fct(xn)) <= eps:
# now the loop stops if the difference of the fct values is less than eps
return x_next
else: #solution did not converge
return np.nan
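# hedged usage sketch (not part of the assignment): find sqrt(2) as the
# root of f(x) = x**2 - 2, with f'(x) = 2*x and starting guess 1.5
# root = my_Newton(lambda x: x**2 - 2, lambda x: 2*x, 1.5)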
| [
"[email protected]"
] | |
d02de539b71e1698a057d12c5f6f979c6ccada0e | 98a1c37ccda91f2c4be14683f5899393f6b97d29 | /04-Pygame/飞机大战.py | 57d629dd0b7fc805a276a487a6064cf6f7a621b2 | [] | no_license | yeshixuan/Python | 1b564d9e46b469f62f59a3a04b340abd68ea4419 | 98ba749ca9ea12004cdff1fdb7e002dea2f42096 | refs/heads/master | 2020-04-29T22:17:01.014401 | 2019-05-14T05:15:29 | 2019-05-14T05:15:29 | 176,442,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,413 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/4/3 22:00
# @Author : Yebiyun
# @Site :
# @File : 飞机大战.py
# @Software: PyCharm
import pygame
from pygame.locals import *
from sys import exit
from random import randint
# define the window resolution
SCREEN_WIDTH = 480
SCREEN_HEIGHT = 640
hero_is_hit = False
ticks = 0
offset = {pygame.K_LEFT:0, pygame.K_RIGHT:0, pygame.K_UP:0, pygame.K_DOWN:0}
# define the frame rate
FRAME_RATE = 60
# define the animation cycle length (in frames)
ANIMATE_CYCLE = 30
pos = [200, 500]
# Hero (player) class
class Hero(pygame.sprite.Sprite):
def __init__(self, hero_surface, hero_init_pos):
super(Hero, self).__init__()
self.image = hero_surface
self.rect = self.image.get_rect()
self.rect.topleft = hero_init_pos
self.speed = 6
self.down_index = 0
self.bullet_sprite = pygame.sprite.Group()
def move(self, offset):
x = self.rect[0] + offset[pygame.K_RIGHT] - offset[pygame.K_LEFT]
y = self.rect[1] + offset[pygame.K_DOWN] - offset[pygame.K_UP]
if x < 0:
self.rect[0] = 0
elif x > SCREEN_WIDTH - self.rect.width:
self.rect[0] = SCREEN_WIDTH - self.rect.width
else:
self.rect[0] = x
if y < 0:
self.rect[1] = 0
elif y > SCREEN_HEIGHT - self.rect.height:
self.rect[1] = SCREEN_HEIGHT - self.rect.height
else:
self.rect[1] = y
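    # e.g. holding the right arrow key sets offset[pygame.K_RIGHT] to the
    # hero's speed (6), moving the sprite 6 px per frame until key release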
# Bullet class
class Bullet(pygame.sprite.Sprite):
def __init__(self, bullet_surface, bullet_init_pos):
super().__init__()
self.image = bullet_surface
self.rect = self.image.get_rect()
self.rect.topleft = bullet_init_pos
self.speed = 8
def update(self):
self.rect.top -= self.speed
if self.rect.top < -self.rect.height:
self.kill()
# Enemy plane class
class Enemy(pygame.sprite.Sprite):
def __init__(self, enemy_surface, enemy_init_pos):
super().__init__()
self.image = enemy_surface
self.rect = self.image.get_rect()
self.rect.topleft = enemy_init_pos
self.speed = 2
self.down_index = 0
def update(self):
self.rect.top += self.speed
if self.rect.top > SCREEN_HEIGHT:
self.kill()
# game setup
pygame.init()
screen = pygame.display.set_mode([SCREEN_WIDTH, SCREEN_HEIGHT])
pygame.display.set_caption("飞机大战")
background = pygame.image.load("images/background.gif")
hero_surface = pygame.image.load("images/hero.gif")
hero_down_surface = []
hero_down_surface.append(pygame.image.load("images/hero1.gif"))
hero_down_surface.append(pygame.image.load("images/hero2.gif"))
hero_down_surface.append(pygame.image.load("images/hero3.gif"))
hero_down_surface.append(pygame.image.load("images/hero4.gif"))
bullet_surface = pygame.image.load("images/bullet.gif")
enemy_surface = pygame.image.load("images/smallplane.gif")
enemy_down_surface = []
enemy_down_surface.append(pygame.image.load("images/smallplane1.gif"))
enemy_down_surface.append(pygame.image.load("images/smallplane2.gif"))
enemy_down_surface.append(pygame.image.load("images/smallplane3.gif"))
enemy_down_surface.append(pygame.image.load("images/smallplane4.gif"))
gameover = pygame.image.load("images/gameover.gif")
hero = Hero(hero_surface, pos)
enemy_sprite = pygame.sprite.Group()
# group for enemy planes playing their destruction animation
enemy_down_group = pygame.sprite.Group()
clock = pygame.time.Clock()
print(len(hero_down_surface))
while True:
clock.tick(FRAME_RATE)
screen.blit(background,(0,0))
screen.blit(hero.image, hero.rect)
if ticks % 10 == 0:
hero.bullet_sprite.add(Bullet(bullet_surface, hero.rect.midtop))
hero.bullet_sprite.update()
hero.bullet_sprite.draw(screen)
if ticks % ANIMATE_CYCLE == 0:
enemy_sprite.add(Enemy(enemy_surface, (randint(0,SCREEN_WIDTH-enemy_surface.get_width()), -enemy_surface.get_height())))
enemy_sprite.update()
enemy_sprite.draw(screen)
enemy_down_group.add(pygame.sprite.groupcollide(enemy_sprite, hero.bullet_sprite, True, True))
for enemy in enemy_down_group:
if ticks % (ANIMATE_CYCLE//2) != 0:
screen.blit(enemy_down_surface[enemy.down_index], enemy.rect)
else:
if enemy.down_index < 3:
enemy.down_index += 1
else:
enemy_down_group.remove(enemy)
enemy_list = pygame.sprite.spritecollide(hero, enemy_sprite, True)
if len(enemy_list):
hero_is_hit = True
enemy_down_group.add(enemy_list)
if hero_is_hit:
if ticks % (ANIMATE_CYCLE//2) != 0:
hero.image = hero_down_surface[hero.down_index]
else:
if hero.down_index < 3:
hero.down_index += 1
else:
break
pygame.display.update()
ticks += 1
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.KEYDOWN:
if event.key in offset:
offset[event.key] = hero.speed
if event.type == pygame.KEYUP:
if event.key in offset:
offset[event.key] = 0
hero.move(offset)
screen.blit(gameover,(0,0))
ticks = 0
while True:
clock.tick(FRAME_RATE)
ticks += 1
pygame.display.update()
if ticks % (5*ANIMATE_CYCLE) == 0:
break
| [
"[email protected]"
] | |
b1322642c22f16e262b114f04965e50a992a34ee | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/devices/v20200801/list_iot_hub_resource_keys_for_key_name.py | 3135b5f75e15d808bf5dedb68d251d50c673538c | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 4,003 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListIotHubResourceKeysForKeyNameResult',
'AwaitableListIotHubResourceKeysForKeyNameResult',
'list_iot_hub_resource_keys_for_key_name',
]
@pulumi.output_type
class ListIotHubResourceKeysForKeyNameResult:
"""
The properties of an IoT hub shared access policy.
"""
def __init__(__self__, key_name=None, primary_key=None, rights=None, secondary_key=None):
if key_name and not isinstance(key_name, str):
raise TypeError("Expected argument 'key_name' to be a str")
pulumi.set(__self__, "key_name", key_name)
if primary_key and not isinstance(primary_key, str):
raise TypeError("Expected argument 'primary_key' to be a str")
pulumi.set(__self__, "primary_key", primary_key)
if rights and not isinstance(rights, str):
raise TypeError("Expected argument 'rights' to be a str")
pulumi.set(__self__, "rights", rights)
if secondary_key and not isinstance(secondary_key, str):
raise TypeError("Expected argument 'secondary_key' to be a str")
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> str:
"""
The name of the shared access policy.
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> Optional[str]:
"""
The primary key.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter
def rights(self) -> str:
"""
The permissions assigned to the shared access policy.
"""
return pulumi.get(self, "rights")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> Optional[str]:
"""
The secondary key.
"""
return pulumi.get(self, "secondary_key")
class AwaitableListIotHubResourceKeysForKeyNameResult(ListIotHubResourceKeysForKeyNameResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListIotHubResourceKeysForKeyNameResult(
key_name=self.key_name,
primary_key=self.primary_key,
rights=self.rights,
secondary_key=self.secondary_key)
def list_iot_hub_resource_keys_for_key_name(key_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListIotHubResourceKeysForKeyNameResult:
"""
The properties of an IoT hub shared access policy.
:param str key_name: The name of the shared access policy.
:param str resource_group_name: The name of the resource group that contains the IoT hub.
:param str resource_name: The name of the IoT hub.
"""
__args__ = dict()
__args__['keyName'] = key_name
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:devices/v20200801:listIotHubResourceKeysForKeyName', __args__, opts=opts, typ=ListIotHubResourceKeysForKeyNameResult).value
return AwaitableListIotHubResourceKeysForKeyNameResult(
key_name=__ret__.key_name,
primary_key=__ret__.primary_key,
rights=__ret__.rights,
secondary_key=__ret__.secondary_key)
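# Hedged usage sketch (resource names are illustrative assumptions):
# result = list_iot_hub_resource_keys_for_key_name(
#     key_name="iothubowner",
#     resource_group_name="my-rg",
#     resource_name="my-hub")
# pulumi.export("primaryKey", result.primary_key)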
| [
"[email protected]"
] | |
1da89dbcd832978c8723bebd5c2fe3a26ce58426 | 61ff94d2987b3bc95f82c5a58897f50d1efa1db8 | /hive/db/query_stats.py | 68a1b207535d39dcfcfcad00384343b91a5c69cf | [
"MIT"
] | permissive | arpwv/hivemind | ee77c9805731fda2bb95e1127a56152fe53b707a | a87e5578f9020be02c867021a8acdfff41f06777 | refs/heads/master | 2021-01-24T03:43:46.507207 | 2018-02-23T22:18:56 | 2018-02-23T22:18:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,811 | py | import time
import re
import atexit
class QueryStats:
stats = {}
ttl_time = 0.0
def __init__(self):
atexit.register(QueryStats.print)
def __call__(self, fn):
def wrap(*args, **kwargs):
time_start = time.perf_counter()
result = fn(*args, **kwargs)
time_end = time.perf_counter()
QueryStats.log(args[1], (time_end - time_start) * 1000)
return result
return wrap
@classmethod
def log(cls, sql, ms):
nsql = cls.normalize_sql(sql)
cls.add_nsql_ms(nsql, ms)
cls.check_timing(nsql, ms)
        # dump accumulated stats once total SQL time exceeds 30 minutes
        if cls.ttl_time > 30 * 60 * 1000:
cls.print()
@classmethod
def add_nsql_ms(cls, nsql, ms):
if nsql not in cls.stats:
cls.stats[nsql] = [ms, 1]
else:
cls.stats[nsql][0] += ms
cls.stats[nsql][1] += 1
cls.ttl_time += ms
@classmethod
def normalize_sql(cls, sql):
nsql = re.sub(r'\s+', ' ', sql).strip()[0:256]
nsql = re.sub(r'VALUES (\s*\([^)]+\),?)+', 'VALUES (...)', nsql)
return nsql
@classmethod
def check_timing(cls, nsql, ms):
if ms > 100:
print("\033[93m[SQL][%dms] %s\033[0m" % (ms, nsql[:250]))
@classmethod
def print(cls):
if not cls.stats:
return
ttl = cls.ttl_time
print("[DEBUG] total SQL time: {}s".format(int(ttl / 1000)))
for arr in sorted(cls.stats.items(), key=lambda x: -x[1][0])[0:40]:
sql, vals = arr
ms, calls = vals
print("% 5.1f%% % 7dms % 9.2favg % 8dx -- %s"
% (100 * ms/ttl, ms, ms/calls, calls, sql[0:180]))
cls.clear()
@classmethod
def clear(cls):
cls.stats = {}
cls.ttl_time = 0
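# hedged usage sketch: an instance decorates a method whose second positional
# argument is the SQL text (wrap logs args[1]), e.g.
# class Db:
#     @QueryStats()
#     def query(self, sql):
#         ...  # execute the statement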
| [
"[email protected]"
] | |
fa36261172803b39dd43394343057295f6489945 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/tags/2007-EOL/desktop/kde/transKode/actions.py | 19d1559602adac4f930b844d24d29a64980cf019 | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import kde
WorkDir = 'transkode'
def setup():
kde.configure()
def build():
kde.make()
def install():
kde.install()
| [
"[email protected]"
] | |
7ebdaf861d679f2335f1ded9db414c98a4108191 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Tensorflow_Pandas_Numpy/source3.6/tensorflow/python/framework/meta_graph.py | a8bc2d2e3fb1bdddf163ff226d6430a9222bb769 | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 33,960 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""MetaGraph and related functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os.path
import re
import six
from google.protobuf.any_pb2 import Any
from google.protobuf import text_format
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.eager import context
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import importer
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import versions
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
# Prefix to be added to unbound input names so they are easily identifiable.
_UNBOUND_INPUT_PREFIX = "$unbound_inputs_"
# List of collections that didn't register proto functions, as a result in
# a previously exported meta_graph the items are of a different data type.
_COMPAT_COLLECTION_LIST = [ops.GraphKeys.LOCAL_VARIABLES,
ops.GraphKeys.MODEL_VARIABLES]
def _node_def(from_node_def, export_scope, unbound_inputs, clear_devices=False):
"""Create a `NodeDef` proto with export_scope stripped.
Args:
from_node_def: A `node_def_pb2.NodeDef` protocol buffer.
export_scope: A `string` representing the name scope to remove.
unbound_inputs: An array of unbound input names if they exist.
clear_devices: Boolean which controls whether to clear device information
from node_def. Default false.
Returns:
A `node_def_pb2.NodeDef` protocol buffer.
"""
node_def = copy.deepcopy(from_node_def)
for i, v in enumerate(node_def.input):
if (export_scope and
not node_def.input[i].lstrip("^").startswith(export_scope)):
# Adds "$unbound_inputs_" prefix to the unbound name so they are easily
# identifiable.
node_def.input[i] = re.sub(r"([\^]|^)(.*)",
r"\1" + _UNBOUND_INPUT_PREFIX + r"\2",
compat.as_str(v))
unbound_inputs.append(node_def.input[i])
else:
node_def.input[i] = ops.strip_name_scope(v, export_scope)
node_def.name = compat.as_bytes(
ops.strip_name_scope(from_node_def.name, export_scope))
for k, v in six.iteritems(from_node_def.attr):
if k == "_class":
new_s = [compat.as_bytes(
ops.strip_name_scope(s, export_scope)) for s in v.list.s
if not export_scope or
compat.as_str(s).split("@")[1].startswith(export_scope)]
node_def.attr[k].CopyFrom(attr_value_pb2.AttrValue(
list=attr_value_pb2.AttrValue.ListValue(s=new_s)))
else:
node_def.attr[k].CopyFrom(v)
if clear_devices:
node_def.device = ""
return node_def
def _read_file(filename):
"""Reads a file containing `GraphDef` and returns the protocol buffer.
Args:
filename: `graph_def` filename including the path.
Returns:
A `GraphDef` protocol buffer.
Raises:
IOError: If the file doesn't exist, or cannot be successfully parsed.
"""
graph_def = graph_pb2.GraphDef()
if not file_io.file_exists(filename):
raise IOError("File %s does not exist." % filename)
# First try to read it as a binary file.
file_content = file_io.FileIO(filename, "rb").read()
try:
graph_def.ParseFromString(file_content)
return graph_def
except Exception: # pylint: disable=broad-except
pass
# Next try to read it as a text file.
try:
text_format.Merge(file_content, graph_def)
except text_format.ParseError as e:
raise IOError("Cannot parse file %s: %s." % (filename, str(e)))
return graph_def
def ops_used_by_graph_def(graph_def):
"""Collect the list of ops used by a graph.
Does not validate that the ops are all registered.
Args:
graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`.
Returns:
A list of strings, each naming an op used by the graph.
"""
# Map function names to definitions
name_to_function = {}
for fun in graph_def.library.function:
name_to_function[fun.signature.name] = fun
# Collect the list of op names. Since functions can reference functions, we
# need a recursive traversal.
used_ops = set() # Includes both primitive ops and functions
functions_to_process = [] # A subset of used_ops
def mark_op_as_used(op):
if op not in used_ops and op in name_to_function:
functions_to_process.append(name_to_function[op])
used_ops.add(op)
for node in graph_def.node:
mark_op_as_used(node.op)
while functions_to_process:
fun = functions_to_process.pop()
for node in fun.node_def:
mark_op_as_used(node.op)
return [op for op in used_ops if op not in name_to_function]
def stripped_op_list_for_graph(graph_def):
"""Collect the stripped OpDefs for ops used by a graph.
This function computes the `stripped_op_list` field of `MetaGraphDef` and
similar protos. The result can be communicated from the producer to the
consumer, which can then use the C++ function
`RemoveNewDefaultAttrsFromGraphDef` to improve forwards compatibility.
Args:
graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`.
Returns:
An `OpList` of ops used by the graph.
Raises:
ValueError: If an unregistered op is used.
"""
# This is the Python equivalent of StrippedOpListForGraph in C++.
# Unfortunately, since the Python op registry can differ from that in C++, we
# can't remove the duplication using swig (at least naively).
# TODO(irving): Support taking graphs directly.
used_ops = ops_used_by_graph_def(graph_def)
# Verify that all used ops are registered.
registered_ops = op_def_registry.get_registered_ops()
# These internal ops used by functions are not registered, so we need to
# whitelist them. # TODO(irving): Do something better here.
op_whitelist = ("_Arg", "_Retval", "_ListToArray", "_ArrayToList")
for op in used_ops:
if op not in registered_ops and op not in op_whitelist:
raise ValueError("Op %s is used by the graph, but is not registered" % op)
# Build the stripped op list in sorted order
return op_def_pb2.OpList(op=[registered_ops[op] for op in sorted(used_ops)
if op in registered_ops])
def _get_kind_name(item):
"""Returns the kind name in CollectionDef.
Args:
item: A data item.
Returns:
The string representation of the kind in CollectionDef.
"""
if isinstance(item, (six.string_types, six.binary_type)):
kind = "bytes_list"
elif isinstance(item, six.integer_types):
kind = "int64_list"
elif isinstance(item, float):
kind = "float_list"
elif isinstance(item, Any):
kind = "any_list"
else:
kind = "node_list"
return kind
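# e.g. _get_kind_name("x") -> "bytes_list"; _get_kind_name(3) -> "int64_list"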
SAVE_AND_RESTORE_OPS = ["SaveV2",
"Save", "SaveSlice",
"LegacySave", "LegacySaveSlice",
"RestoreV2",
"Restore", "RestoreSlice",
"LegacyRestore", "LegacyRestoreSlice"]
def _op_name(tensor_name):
"""Extract the Op name from a Tensor name.
The Op name is everything before a colon, if present,
not including any ^ prefix denoting a control dependency.
Args:
tensor_name: the full name of a Tensor in the graph.
Returns:
The name of the Op of which the given Tensor is an output.
Raises:
ValueError: if tensor_name is None or empty.
"""
if not tensor_name:
raise ValueError("Tensor name cannot be empty or None.")
# Control dependency inputs start with ^.
if tensor_name.startswith("^"):
tensor_name = tensor_name[1:]
if ":" in tensor_name:
op_name, _ = tensor_name.split(":")
return op_name
return tensor_name
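# e.g. _op_name("foo/bar:0") and _op_name("^foo/bar") both return "foo/bar"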
def _get_scope(node_name):
"""Extract the scope name from a node name.
The scope name is everything before the final slash,
not including any ^ prefix denoting a control dependency.
Args:
node_name: the full name of an Op or a Tensor in the graph.
Returns:
The deepest named scope containing the node.
Raises:
ValueError: if tensor_name is None or empty
"""
if not node_name:
raise ValueError("Node name cannot be empty or None.")
# Control dependency inputs start with ^.
if node_name.startswith("^"):
node_name = node_name[1:]
if "/" in node_name:
scope, _ = node_name.rsplit("/", 1)
return scope
return ""
def _find_extraneous_saver_nodes(graph_def, saver_def):
"""Identifies any nodes in the graph_def related to unused Savers.
This approach assumes that each Saver is cleanly isolated in its own name
scope, so we need only identify the scopes associated with extraneous Savers
and return all the nodes in those scopes.
Args:
graph_def: a GraphDef proto to evaluate.
saver_def: a SaverDef proto referencing Save/Restore ops to be retained.
Returns:
An iterable of node names that may be safely omitted.
"""
# TODO(soergel): confirm that the assumption of scope isolation is valid.
# If not, we need to walk up the graph from any restore_all nodes, and walk
# down the graph from any Save/Restore nodes. I drafted that approach too,
# but it seems unnecessarily complex given the name scope solution.
# load the graph DAG in minimal form, without initializing a full Graph object
nodes = {node_def.name:
(set([_op_name(x) for x in node_def.input]), node_def.op)
for node_def in graph_def.node}
retain_scope_save = None
retain_scope_restore = None
# It's possible to have no saver if the graph has no Variables
if saver_def is not None:
save_op_name = _op_name(saver_def.save_tensor_name)
restore_op_name = _op_name(saver_def.restore_op_name)
# The save and restore scopes should always be the same, but if they differ
# for some reason, we retain them both to be safe.
retain_scope_restore = _get_scope(restore_op_name) + "/"
retain_scope_save = _get_scope(save_op_name) + "/"
all_saver_node_names = set([name for name, (_, op) in nodes.items()
if op in SAVE_AND_RESTORE_OPS])
all_saver_scopes = (set([_get_scope(x) for x in all_saver_node_names])
- all_saver_node_names)
all_saver_scopes = set([x + "/" for x in all_saver_scopes])
extraneous_scopes = all_saver_scopes - set([retain_scope_save,
retain_scope_restore])
extraneous_node_names = set()
for name, _ in nodes.items():
for extraneous_scope in extraneous_scopes:
if name.startswith(extraneous_scope):
extraneous_node_names.add(name)
break
return extraneous_node_names
def _should_include_node(node_or_node_name, export_scope, exclude_nodes):
"""Returns `True` if a node should be included.
Args:
node_or_node_name: A node or `string` node name.
export_scope: `string`. Name scope under which to extract the subgraph. The
scope name will be stripped from the node definitions for easy import
later into new name scopes.
exclude_nodes: An iterable of nodes or `string` node names to omit from the
export, or None. Note no sanity-checking is done, so this list must be
carefully constructed to avoid producing an invalid graph.
Returns:
`True` if the node should be included.
"""
if not isinstance(node_or_node_name, six.string_types):
try:
node_name = node_or_node_name.name
except AttributeError:
# Keep the object that we don't know how to process.
return True
else:
node_name = node_or_node_name
if exclude_nodes and (node_or_node_name in exclude_nodes
or node_name in exclude_nodes):
return False
return (node_name.startswith(_UNBOUND_INPUT_PREFIX) or
(not export_scope or node_name.startswith(export_scope)))
def add_collection_def(meta_graph_def, key, graph=None,
export_scope=None, exclude_nodes=None,
override_contents=None):
"""Adds a collection to MetaGraphDef protocol buffer.
Args:
meta_graph_def: MetaGraphDef protocol buffer.
key: One of the GraphKeys or user-defined string.
graph: The `Graph` from which to get collections.
export_scope: Optional `string`. Name scope to remove.
exclude_nodes: An iterable of nodes or `string` node names to omit from the
collection, or None.
override_contents: An iterable of values to place in the collection,
ignoring the current values (if set).
"""
if graph and not isinstance(graph, ops.Graph):
raise TypeError("graph must be of type Graph, not %s", type(graph))
if not isinstance(key, six.string_types) and not isinstance(key, bytes):
logging.warning("Only collections with string type keys will be "
"serialized. This key has %s", type(key))
return
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
if override_contents:
collection_list = override_contents
else:
collection_list = graph.get_collection(key)
# Remove nodes that should not be exported from the collection list.
collection_list = [x for x in collection_list if
_should_include_node(x, export_scope, exclude_nodes)]
if not collection_list:
return
try:
col_def = meta_graph_def.collection_def[key]
to_proto = ops.get_to_proto_function(key)
proto_type = ops.get_collection_proto_type(key)
if to_proto:
kind = "bytes_list"
for x in collection_list:
# Additional type check to make sure the returned proto is indeed
# what we expect.
proto = to_proto(x, export_scope=export_scope)
if proto:
assert isinstance(proto, proto_type)
getattr(col_def, kind).value.append(proto.SerializeToString())
else:
kind = _get_kind_name(collection_list[0])
if kind == "node_list":
for x in collection_list:
if not export_scope or x.name.startswith(export_scope):
getattr(col_def, kind).value.append(
ops.strip_name_scope(x.name, export_scope))
elif kind == "bytes_list":
# NOTE(opensource): This force conversion is to work around the fact
# that Python3 distinguishes between bytes and strings.
getattr(col_def, kind).value.extend(
[compat.as_bytes(x) for x in collection_list])
else:
getattr(col_def, kind).value.extend([x for x in collection_list])
except Exception as e: # pylint: disable=broad-except
logging.warning("Error encountered when serializing %s.\n"
"Type is unsupported, or the types of the items don't "
"match field type in CollectionDef.\n%s", key, str(e))
if key in meta_graph_def.collection_def:
del meta_graph_def.collection_def[key]
return
def create_meta_graph_def(meta_info_def=None,
graph_def=None,
saver_def=None,
collection_list=None,
graph=None,
export_scope=None,
exclude_nodes=None,
clear_extraneous_savers=False):
"""Construct and returns a `MetaGraphDef` protocol buffer.
Args:
meta_info_def: `MetaInfoDef` protocol buffer.
graph_def: `GraphDef` protocol buffer.
saver_def: `SaverDef` protocol buffer.
collection_list: List of string keys to collect.
graph: The `Graph` to create `MetaGraphDef` out of.
export_scope: Optional `string`. Name scope to remove.
exclude_nodes: An iterable of nodes or `string` node names to omit from all
collection, or None.
clear_extraneous_savers: Remove any preexisting SaverDefs from the SAVERS
collection. Note this method does not alter the graph, so any
extraneous Save/Restore ops should have been removed already, as needed.
Returns:
MetaGraphDef protocol buffer.
Raises:
TypeError: If the arguments are not of the correct proto buffer type.
"""
# Type check.
if graph and not isinstance(graph, ops.Graph):
raise TypeError("graph must be of type Graph, not %s", type(graph))
if meta_info_def and not isinstance(meta_info_def,
meta_graph_pb2.MetaGraphDef.MetaInfoDef):
raise TypeError("meta_info_def must be of type MetaInfoDef, not %s",
type(meta_info_def))
if graph_def and not isinstance(graph_def, graph_pb2.GraphDef):
raise TypeError("graph_def must be of type GraphDef, not %s",
type(graph_def))
if saver_def and not isinstance(saver_def, saver_pb2.SaverDef):
raise TypeError("saver_def must be of type SaverDef, not %s",
type(saver_def))
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
# Creates a MetaGraphDef proto.
meta_graph_def = meta_graph_pb2.MetaGraphDef()
# Adds meta_info_def.
if not meta_info_def:
meta_info_def = meta_graph_pb2.MetaGraphDef.MetaInfoDef()
# Set the tf version strings to the current tf build.
meta_info_def.tensorflow_version = versions.__version__
meta_info_def.tensorflow_git_version = versions.__git_version__
meta_graph_def.meta_info_def.MergeFrom(meta_info_def)
# Adds graph_def or the default.
if not graph_def:
meta_graph_def.graph_def.MergeFrom(graph.as_graph_def(add_shapes=True))
else:
meta_graph_def.graph_def.MergeFrom(graph_def)
# Fills in meta_info_def.stripped_op_list using the ops from graph_def.
# pylint: disable=g-explicit-length-test
if len(meta_graph_def.meta_info_def.stripped_op_list.op) == 0:
meta_graph_def.meta_info_def.stripped_op_list.MergeFrom(
stripped_op_list_for_graph(meta_graph_def.graph_def))
# pylint: enable=g-explicit-length-test
# Adds saver_def.
if saver_def:
meta_graph_def.saver_def.MergeFrom(saver_def)
# Adds collection_list.
if collection_list is not None:
clist = collection_list
else:
clist = graph.get_all_collection_keys()
for ctype in clist:
if clear_extraneous_savers and ctype == ops.GraphKeys.SAVERS:
# Avoid importing Saver here
from_proto = ops.get_from_proto_function(ctype)
add_collection_def(meta_graph_def, ctype,
graph=graph,
export_scope=export_scope,
exclude_nodes=exclude_nodes,
override_contents=[from_proto(saver_def)])
else:
add_collection_def(meta_graph_def, ctype,
graph=graph,
export_scope=export_scope,
exclude_nodes=exclude_nodes)
return meta_graph_def
def read_meta_graph_file(filename):
"""Reads a file containing `MetaGraphDef` and returns the protocol buffer.
Args:
filename: `meta_graph_def` filename including the path.
Returns:
A `MetaGraphDef` protocol buffer.
Raises:
IOError: If the file doesn't exist, or cannot be successfully parsed.
"""
meta_graph_def = meta_graph_pb2.MetaGraphDef()
if not file_io.file_exists(filename):
raise IOError("File %s does not exist." % filename)
# First try to read it as a binary file.
file_content = file_io.FileIO(filename, "rb").read()
try:
meta_graph_def.ParseFromString(file_content)
return meta_graph_def
except Exception: # pylint: disable=broad-except
pass
# Next try to read it as a text file.
try:
text_format.Merge(file_content.decode("utf-8"), meta_graph_def)
except text_format.ParseError as e:
raise IOError("Cannot parse file %s: %s." % (filename, str(e)))
return meta_graph_def
def import_scoped_meta_graph(meta_graph_or_file,
clear_devices=False,
graph=None,
import_scope=None,
input_map=None,
unbound_inputs_col_name="unbound_inputs",
restore_collections_predicate=(lambda key: True)):
"""Recreates a `Graph` saved in a `MetaGraphDef` proto.
This function takes a `MetaGraphDef` protocol buffer as input. If
the argument is a file containing a `MetaGraphDef` protocol buffer ,
it constructs a protocol buffer from the file content. The function
then adds all the nodes from the `graph_def` field to the
current graph, recreates the desired collections, and returns a dictionary of
all the Variables imported into the name scope.
In combination with `export_scoped_meta_graph()`, this function can be used to
* Serialize a graph along with other Python objects such as `QueueRunner`,
`Variable` into a `MetaGraphDef`.
* Restart training from a saved graph and checkpoints.
* Run inference from a saved graph and checkpoints.
Args:
meta_graph_or_file: `MetaGraphDef` protocol buffer or filename (including
the path) containing a `MetaGraphDef`.
clear_devices: Boolean which controls whether to clear device information
from graph_def. Default false.
graph: The `Graph` to import into. If `None`, use the default graph.
import_scope: Optional `string`. Name scope into which to import the
subgraph. If `None`, the graph is imported to the root name scope.
input_map: A dictionary mapping input names (as strings) in `graph_def` to
`Tensor` objects. The values of the named input tensors in the imported
graph will be re-mapped to the respective `Tensor` values.
unbound_inputs_col_name: Collection name for looking up unbound inputs.
restore_collections_predicate: a predicate on collection names. A collection
named c (i.e whose key is c) will be restored iff
1) `restore_collections_predicate(c)` is True, and
2) `c != unbound_inputs_col_name`.
Returns:
A dictionary of all the `Variables` imported into the name scope.
Raises:
ValueError: If the graph_def contains unbound inputs.
"""
if context.in_eager_mode():
raise ValueError("Exporting/importing meta graphs is not supported when "
"eager execution is enabled.")
if isinstance(meta_graph_or_file, meta_graph_pb2.MetaGraphDef):
meta_graph_def = meta_graph_or_file
else:
meta_graph_def = read_meta_graph_file(meta_graph_or_file)
if unbound_inputs_col_name:
for key, col_def in meta_graph_def.collection_def.items():
if key == unbound_inputs_col_name:
kind = col_def.WhichOneof("kind")
field = getattr(col_def, kind)
if field.value and (
not input_map or
sorted([compat.as_str(v) for v in field.value]) !=
sorted(input_map)):
raise ValueError("Graph contains unbound inputs: %s. Must "
"provide these inputs through input_map." %
",".join([compat.as_str(v) for v in field.value
if not input_map or v not in input_map]))
break
# Sets graph to default graph if it's not passed in.
graph = graph or ops.get_default_graph()
# Gathers the list of nodes we are interested in.
with graph.as_default():
producer_op_list = None
if meta_graph_def.meta_info_def.HasField("stripped_op_list"):
producer_op_list = meta_graph_def.meta_info_def.stripped_op_list
input_graph_def = meta_graph_def.graph_def
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:
for node in input_graph_def.node:
node.device = ""
importer.import_graph_def(
input_graph_def, name=(import_scope or ""), input_map=input_map,
producer_op_list=producer_op_list)
scope_to_prepend_to_names = "/".join(
[part for part in [graph.get_name_scope(), import_scope] if part])
# Restores all the other collections.
for key, col_def in meta_graph_def.collection_def.items():
# Don't add unbound_inputs to the new graph.
if key == unbound_inputs_col_name:
continue
if not restore_collections_predicate(key):
continue
kind = col_def.WhichOneof("kind")
if kind is None:
logging.error("Cannot identify data type for collection %s. Skipping.",
key)
continue
from_proto = ops.get_from_proto_function(key)
if from_proto and kind == "bytes_list":
proto_type = ops.get_collection_proto_type(key)
for value in col_def.bytes_list.value:
proto = proto_type()
proto.ParseFromString(value)
graph.add_to_collection(
key, from_proto(proto, import_scope=scope_to_prepend_to_names))
else:
field = getattr(col_def, kind)
if key in _COMPAT_COLLECTION_LIST:
logging.warning(
"The saved meta_graph is possibly from an older release:\n"
"'%s' collection should be of type 'byte_list', but instead "
"is of type '%s'.", key, kind)
if kind == "node_list":
for value in field.value:
col_op = graph.as_graph_element(
ops.prepend_name_scope(value, scope_to_prepend_to_names))
graph.add_to_collection(key, col_op)
elif kind == "int64_list":
# NOTE(opensource): This force conversion is to work around the fact
# that Python2 distinguishes between int and long, while Python3 has
# only int.
for value in field.value:
graph.add_to_collection(key, int(value))
else:
for value in field.value:
graph.add_to_collection(
key, ops.prepend_name_scope(value, scope_to_prepend_to_names))
var_list = {}
variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
scope=scope_to_prepend_to_names)
for v in variables:
var_list[ops.strip_name_scope(v.name, scope_to_prepend_to_names)] = v
return var_list
def export_scoped_meta_graph(filename=None,
graph_def=None,
graph=None,
export_scope=None,
as_text=False,
unbound_inputs_col_name="unbound_inputs",
clear_devices=False,
saver_def=None,
clear_extraneous_savers=False,
**kwargs):
"""Returns `MetaGraphDef` proto. Optionally writes it to filename.
This function exports the graph, saver, and collection objects into
`MetaGraphDef` protocol buffer with the intention of it being imported
at a later time or location to restart training, run inference, or be
a subgraph.
Args:
filename: Optional filename including the path for writing the
generated `MetaGraphDef` protocol buffer.
graph_def: `GraphDef` protocol buffer.
graph: The `Graph` to export. If `None`, use the default graph.
export_scope: Optional `string`. Name scope under which to extract
the subgraph. The scope name will be stripped from the node definitions
for easy import later into new name scopes. If `None`, the whole graph
is exported.
as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto.
unbound_inputs_col_name: Optional `string`. If provided, a string collection
with the given name will be added to the returned `MetaGraphDef`,
containing the names of tensors that must be remapped when importing the
`MetaGraphDef`.
clear_devices: Boolean which controls whether to clear device information
before exporting the graph.
saver_def: `SaverDef` protocol buffer.
clear_extraneous_savers: Remove any Saver-related information from the
graph (both Save/Restore ops and SaverDefs) that are not associated
with the provided SaverDef.
**kwargs: Optional keyed arguments, including meta_info_def and
collection_list.
Returns:
A `MetaGraphDef` proto and dictionary of `Variables` in the exported
name scope.
Raises:
ValueError: When the `GraphDef` is larger than 2GB.
"""
if context.in_eager_mode():
raise ValueError("Exporting/importing meta graphs is not supported when "
"Eager Execution is enabled.")
graph = graph or ops.get_default_graph()
exclude_nodes = None
unbound_inputs = []
if export_scope or clear_extraneous_savers or clear_devices:
if graph_def:
new_graph_def = graph_pb2.GraphDef()
new_graph_def.versions.CopyFrom(graph_def.versions)
if clear_extraneous_savers:
exclude_nodes = _find_extraneous_saver_nodes(graph_def, saver_def)
for node_def in graph_def.node:
if _should_include_node(node_def.name, export_scope, exclude_nodes):
new_node_def = _node_def(node_def, export_scope, unbound_inputs,
clear_devices=clear_devices)
new_graph_def.node.extend([new_node_def])
graph_def = new_graph_def
else:
# Only do this complicated work if we want to remove a name scope.
graph_def = graph_pb2.GraphDef()
# pylint: disable=protected-access
graph_def.versions.CopyFrom(graph.graph_def_versions)
bytesize = 0
if clear_extraneous_savers:
exclude_nodes = _find_extraneous_saver_nodes(graph.as_graph_def(),
saver_def)
for key in sorted(graph._nodes_by_id):
if _should_include_node(graph._nodes_by_id[key].name,
export_scope,
exclude_nodes):
value = graph._nodes_by_id[key]
# pylint: enable=protected-access
node_def = _node_def(value.node_def, export_scope, unbound_inputs,
clear_devices=clear_devices)
graph_def.node.extend([node_def])
if value.outputs:
assert "_output_shapes" not in graph_def.node[-1].attr
graph_def.node[-1].attr["_output_shapes"].list.shape.extend([
output.get_shape().as_proto() for output in value.outputs])
bytesize += value.node_def.ByteSize()
if bytesize >= (1 << 31) or bytesize < 0:
raise ValueError("GraphDef cannot be larger than 2GB.")
# It's possible that not all the inputs are in the export_scope.
# If we would like such information included in the exported meta_graph,
# add them to a special unbound_inputs collection.
if unbound_inputs_col_name:
# Clears the unbound_inputs collections.
graph.clear_collection(unbound_inputs_col_name)
for k in unbound_inputs:
graph.add_to_collection(unbound_inputs_col_name, k)
var_list = {}
variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
scope=export_scope)
for v in variables:
if _should_include_node(v, export_scope, exclude_nodes):
var_list[ops.strip_name_scope(v.name, export_scope)] = v
scoped_meta_graph_def = create_meta_graph_def(
graph_def=graph_def,
graph=graph,
export_scope=export_scope,
exclude_nodes=exclude_nodes,
clear_extraneous_savers=clear_extraneous_savers,
saver_def=saver_def,
**kwargs)
if filename:
graph_io.write_graph(
scoped_meta_graph_def,
os.path.dirname(filename),
os.path.basename(filename),
as_text=as_text)
return scoped_meta_graph_def, var_list
def copy_scoped_meta_graph(from_scope, to_scope,
from_graph=None, to_graph=None):
"""Copies a sub-meta_graph from one scope to another.
Args:
from_scope: `String` name scope containing the subgraph to be copied.
to_scope: `String` name scope under which the copied subgraph will reside.
from_graph: Optional `Graph` from which to copy the subgraph. If `None`, the
default graph is use.
to_graph: Optional `Graph` to which to copy the subgraph. If `None`, the
default graph is used.
Returns:
A dictionary of `Variables` that has been copied into `to_scope`.
Raises:
ValueError: If `from_scope` and `to_scope` are the same while
`from_graph` and `to_graph` are also the same.
"""
from_graph = from_graph or ops.get_default_graph()
to_graph = to_graph or ops.get_default_graph()
if from_graph == to_graph and from_scope == to_scope:
raise ValueError("'from_scope' and 'to_scope' need to be different "
"when performing copy in the same graph.")
orig_meta_graph, var_list = export_scoped_meta_graph(
export_scope=from_scope, graph=from_graph)
var_list = import_scoped_meta_graph(orig_meta_graph,
graph=to_graph,
import_scope=to_scope)
return var_list
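# Hedged usage sketch (illustrative scope names, not part of this module):
# given a graph with variables under name scope "model", a scoped export and
# re-import could look like
#   meta_graph_def, var_list = export_scoped_meta_graph(export_scope="model")
#   copied = import_scoped_meta_graph(meta_graph_def, import_scope="copy")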
| [
"[email protected]"
] | |
3db4dccc53642460bd253d657b3e1e6860dc134f | 0fdb402809188c34702bc70e4d106e56ca8e2bd0 | /Algorithms/mobile.py | 7cc1afd0b2f037442a49adc901644212600de01c | [] | no_license | the07/Python | 356f2018a85caeb9dd6ccb251636ff697eb613b6 | af34cf3ffe01504632cf3654a0a5f89653e163cb | refs/heads/master | 2021-01-06T20:36:33.718087 | 2017-11-24T06:58:32 | 2017-11-24T06:58:32 | 90,789,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | phone_numbers = []
# keep only the last 10 digits of each number
for _ in range(int(input())):
    S = input()
    phone_numbers.append(S[-10:])
# print the numbers sorted, in the "+91 xxxxx xxxxx" format
for i in sorted(phone_numbers):
    print('+91 {} {}'.format(i[:5], i[5:]))
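# sample (hedged): input "+919875641230" keeps "9875641230" and prints
# "+91 98756 41230"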
| [
"[email protected]"
] | |
d174932faeffa0b07a3e4466044164eb769e3dc1 | 609ec378fadcbd81a8307064cd11c0e27b585cca | /setup.py | 57138ace5c78f29d4f361b9eb6d70b4b692207ea | [] | no_license | oboberg/QuickReduce | b2184c212774e61f430ba62dda024ce672bd6dca | 19f42ed8105a24b4191066915543ee70022b5bfb | refs/heads/master | 2021-01-19T07:02:45.877576 | 2016-08-09T19:15:58 | 2016-08-09T19:15:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
from Cython.Distutils import build_ext
import numpy
setup(
cmdclass = {'build_ext': build_ext},
ext_modules = [
Extension("podi_cython",
sources=['cython_src/podi_cython.pyx',
"cython_src/sigma_clip_mean.c",
"cython_src/sigma_clip_median.c",
"cython_src/lacosmics.c",
],
include_dirs=["cython_src", numpy.get_include()],
libraries=['gslcblas', "gsl", "m"]
)
]
)
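# typical build invocation for this kind of setup script (standard
# distutils/Cython usage):
#   python setup.py build_ext --inplace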
| [
"[email protected]"
] | |
c7b577d0088d48af19791f2282ca10b34db9bfcc | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/65/usersdata/201/32254/submittedfiles/investimento.py | 57cbb58258e023fea00db2eb9598bfa4a3514d35 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | # -*- coding: utf-8 -*-
from __future__ import division
# START YOUR CODE HERE
a = float(input('Initial investment:'))
b = float(input('Percentage growth rate:'))
x = a*b  # growth after one period (the rate is applied directly, not divided by 100)
Total = a + x
print('%.2f' % Total)
| [
"[email protected]"
] | |
bbca92b38602a503a8e5e884d77be44b3b03e2c0 | dd745566ceee1760c714b17cabd50d2a0a919747 | /Stream-Three/django_todo_project/env/lib/python3.5/site-packages/corsheaders/conf.py | e5b964ccffb7dcd50b8ca4ad678d32401bd435c0 | [] | no_license | hyohannesgithub/full_stack_solutions | 668fc9de020aa8aa18c64d38d13ca6bfcac12278 | e572d6154c3d63681c124698d7962905f1384671 | refs/heads/master | 2021-01-13T02:53:27.623809 | 2017-01-04T02:49:30 | 2017-01-04T02:49:30 | 77,099,093 | 1 | 0 | null | 2016-12-22T01:24:02 | 2016-12-22T01:24:01 | null | UTF-8 | Python | false | false | 1,515 | py | from django.conf import settings
from .defaults import default_headers, default_methods # Kept here for backwards compatibility
class Settings(object):
"""
Shadow Django's settings with a little logic
"""
@property
def CORS_ALLOW_HEADERS(self):
return getattr(settings, 'CORS_ALLOW_HEADERS', default_headers)
@property
def CORS_ALLOW_METHODS(self):
return getattr(settings, 'CORS_ALLOW_METHODS', default_methods)
@property
def CORS_ALLOW_CREDENTIALS(self):
return getattr(settings, 'CORS_ALLOW_CREDENTIALS', False)
@property
def CORS_PREFLIGHT_MAX_AGE(self):
return getattr(settings, 'CORS_PREFLIGHT_MAX_AGE', 86400)
@property
def CORS_ORIGIN_ALLOW_ALL(self):
return getattr(settings, 'CORS_ORIGIN_ALLOW_ALL', False)
@property
def CORS_ORIGIN_WHITELIST(self):
return getattr(settings, 'CORS_ORIGIN_WHITELIST', ())
@property
def CORS_ORIGIN_REGEX_WHITELIST(self):
return getattr(settings, 'CORS_ORIGIN_REGEX_WHITELIST', ())
@property
def CORS_EXPOSE_HEADERS(self):
return getattr(settings, 'CORS_EXPOSE_HEADERS', ())
@property
def CORS_URLS_REGEX(self):
return getattr(settings, 'CORS_URLS_REGEX', r'^.*$')
@property
def CORS_MODEL(self):
return getattr(settings, 'CORS_MODEL', None)
@property
def CORS_REPLACE_HTTPS_REFERER(self):
return getattr(settings, 'CORS_REPLACE_HTTPS_REFERER', False)
conf = Settings()
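# Usage sketch: other modules do `from corsheaders.conf import conf` and read
# e.g. conf.CORS_ALLOW_HEADERS; each property falls back to the package default
# unless the Django settings module overrides it.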
| [
"[email protected]"
] | |
e8fc466e3e5524ecb40f9b242aa5198d18146f25 | 02ce6d29fec0d68ca2a2a778d37d2f2cff1a590e | /Old/PythonOne/18.2.4-tcp-client.py | 786c1fd2479c368035b73888d47e2d549a49b923 | [] | no_license | CalvinCheungCoder/Python-100-Days | 605045122e40c119abc32466c32479559a4d4b9b | 0f9bec8893954d4afbe2037dad92885c7d4d31f8 | refs/heads/master | 2020-04-17T11:49:42.148478 | 2019-09-19T10:22:37 | 2019-09-19T10:22:37 | 166,556,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', 8880))
s.send(b'hello')
data = s.recv(1024)
print('从服务器接收的消息:{0}'.format(data.decode()))
s.close() | [
"[email protected]"
] | |
6bcf2b63c59525f3e7ccc1b4759864d27f05aae2 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5751500831719424_0/Python/Bremsstrahlung/repeater.py | d918e0d1d86b831dda2d3f18915cfd9f173d497b | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,858 | py | input = open("A-small-attempt1.in")
output = open("output.out","w")
t = int(input.readline())
for i in range(t):
n = int(input.readline())
r = 0
impossible = False
lines = []
iter = []
for x in range(n):
lines.append(input.readline().strip())
iter.append(0)
seq = ""
for x in range(len(lines)):
lineseq = ""
for y in range(len(lines[x])):
if len(lineseq) == 0 or lines[x][y] != lineseq[-1]:
lineseq += lines[x][y]
if x == 0:
seq = lineseq
elif lineseq != seq:
impossible = True
for x in range(len(seq)):
ns = []
for y in range(len(lines)):
ns.append(0)
if iter[y] >= len(lines[y]):
impossible = True
break
if lines[y][iter[y]] == seq[x]:
while True:
if lines[y][iter[y]] == seq[x]:
ns[y] += 1
iter[y] += 1
else:
break
if iter[y] >= len(lines[y]):
break
else:
impossible = True
break
if not impossible:
op = []
for y in range(len(ns)):
q = 0
for z in range(len(ns)):
if z != y:
q += abs(ns[y] - ns[z])
op.append(q)
r += min(op)
if impossible:
output.write("Case #{}: Fegla Won\n".format(i + 1))
print("Case #{}: Fegla Won".format(i + 1))
else:
output.write("Case #{}: {}\n".format(i + 1,r))
print("Case #{}: {}".format(i + 1,r))
output.close();
input.close()
| [
"[email protected]"
] | |
5a2cc545de9989dca3aee1cc209489094d2b6f8b | 74912c10f66e90195bf87fd71e9a78fa09f017ec | /execroot/syntaxnet/bazel-out/local-opt/bin/syntaxnet/parser_trainer_test.runfiles/org_tensorflow/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py | 9c20a33b637074f3c648dc8ba30cb9cf6712ee67 | [] | no_license | koorukuroo/821bda42e7dedbfae9d936785dd2d125- | 1f0b8f496da8380c6e811ed294dc39a357a5a8b8 | 237fcc152ff436f32b2b5a3752a4181d279b3a57 | refs/heads/master | 2020-03-17T03:39:31.972750 | 2018-05-13T14:35:24 | 2018-05-13T14:35:24 | 133,244,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | /root/.cache/bazel/_bazel_root/821bda42e7dedbfae9d936785dd2d125/external/org_tensorflow/tensorflow/contrib/grid_rnn/python/ops/grid_rnn_cell.py | [
"k"
] | k |
d9af2bf195ef94fdfd5718ec834efd154a5d6ec0 | 09b24540ee5337fa823d1e9858159474f399b8e0 | /exercises/CursoemVideo/ex007.py | db5b3178214c83b369a1908d539a0187d4dfdc54 | [
"MIT"
] | permissive | arthurguerra/cursoemvideo-python | eada3e2faf2a6a46f4e20f25a7d3e2851e113cb8 | 37f45ec25f422673fa9bbeee682e098f14d8ceab | refs/heads/main | 2023-03-22T04:51:20.320126 | 2021-03-16T14:38:45 | 2021-03-16T14:38:45 | 348,374,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | n1 = float(input('Grade 1: '))
n2 = float(input('Grade 2: '))
m = (n1 + n2)/2
print("The student's average is {:.2f}".format(m)) | [
"[email protected]"
] | |
265c7ab856b2c98d610440b3351232af034746b5 | 6fe9c6f3c5783f1ca0de480a2bd2b595e02b1af7 | /src/algorithms/score/spr2.py | 4add4462379775425a57d3e620d509aa492c9698 | [] | no_license | lgqiao/distributed-coordination | 019c51de705837547ce321e38a3fc29c780ea231 | db3df1d7cf63b833d41f74f730d0b8bff2d12f95 | refs/heads/master | 2023-09-06T08:50:45.993627 | 2020-11-03T09:47:34 | 2020-11-03T09:47:34 | 583,760,321 | 1 | 0 | null | 2022-12-30T20:36:33 | 2022-12-30T20:36:32 | null | UTF-8 | Python | false | false | 20,798 | py | import math
import time
import os
import logging
from collections import defaultdict
import networkx as nx
from auxiliary.link import Link
from auxiliary.placement import Placement
from siminterface.simulator import ExtendedSimulatorAction
from siminterface.simulator import Simulator
class NoCandidateException(Exception):
"""
Signal that no suitable routing/placement candidate could be determined
"""
pass
class SPR2Algo:
"""
SPR-2
Score: closeness + compound path length + remaining node cap + node mortality + path occupancy
Node cap requirement: hard
"""
def __init__(self, simulator: Simulator):
# Besides interaction we need the simulator reference to query all needed information. Not all information can
# conveniently put into the simulator state, nevertheless it is justified that the algorithm can access these.
self.simulator = simulator
# To evaluate if some operations are feasible we need to modify the network topology, that must not happen on
# the shared network instance
self.network_copy = None
# Timeout determines, after which period a unused vnf is removed from a node
self.vnf_timeout = 10
def init(self, network_path, service_functions_path, config_path, seed, output_path,
resource_functions_path=""):
init_state = self.simulator.init(network_path, service_functions_path, config_path, seed, output_path,
resource_functions_path=resource_functions_path,
interception_callbacks=
{'pass_flow': self.pass_flow,
'init_flow': self.init_flow,
'depart_flow': self.depart_flow,
'drop_flow': self.drop_flow,
'periodic': [(self.periodic_measurement, 100, 'Measurement'),
(self.periodic_remove, 10, 'Remove SF interception.')]})
self.network_copy = self.simulator.get_network_copy()
sum_of_degrees = sum(map(lambda x: x[1], self.network_copy.degree()))
self.avg_ceil_degree = int(math.ceil(sum_of_degrees / len(self.network_copy)))
# All pairs shortest path calculations
self.apsp = dict(nx.all_pairs_dijkstra_path(self.network_copy, weight='delay'))
self.apsp_length = dict(nx.all_pairs_dijkstra_path_length(self.network_copy, weight='delay'))
# Record how often a flow was passed to a node, used to calculate score
self.node_mortality = defaultdict(int)
# Record current general load, used to calculate score
self.occupancy_list = defaultdict(list)
# measure decisions
# decision in case of bjointsp = "init_flow". flow id --> (node id --> list of times)
# attention: needs lots of memory when running long!
self.decision_times = defaultdict(lambda: defaultdict(list))
def run(self):
placement = defaultdict(list)
processing_rules = defaultdict(lambda: defaultdict(list))
forwarding_rules = defaultdict(dict)
action = ExtendedSimulatorAction(placement=placement, scheduling={}, flow_forwarding_rules=forwarding_rules,
flow_processing_rules=processing_rules)
self.simulator.apply(action)
self.simulator.run()
self.simulator.write_state()
self.simulator.write_decisions()
self.simulator.writer.write_decision_times(self.decision_times)
def init_flow(self, flow):
"""
<Callback>
"""
start = time.time()
flow['state'] = 'transit'
flow['blocked_links'] = []
try:
self.plan_placement(flow)
self.try_set_new_path(flow)
except NoCandidateException:
flow['state'] = 'drop'
flow['path'] = []
# record decision time
decision_time = time.time() - start
# all done centrally at one logical global node for Bjointsp
self.decision_times[flow.flow_id][flow.current_node_id].append(decision_time)
def pass_flow(self, flow):
"""
<Callback>
        This is the main dynamic logic of the algorithm; whenever a flow is passed to a node, this function is called.
The associated node is determined and all actions and information are computed from its perspective.
"""
start = time.time()
# Get state information
state = self.simulator.get_state()
placement = state.placement
forwarding_rules = state.flow_forwarding_rules
processing_rules = state.flow_processing_rules
# The associated node
exec_node_id = flow.current_node_id
exec_node = state.network['nodes'][exec_node_id]
self.simulator.metrics.add_decision(exec_node_id)
if (flow.flow_id, flow.dr) not in self.occupancy_list[exec_node_id]:
self.occupancy_list[exec_node_id].append((flow.flow_id, flow.dr))
if flow.is_processed() and flow['state'] != 'departure':
# yes => switch to departure, forward to egress node
flow['state'] = 'departure'
flow['target_node_id'] = flow.egress_node_id
flow['blocked_links'] = []
self.try_set_new_path(flow)
if flow['state'] == 'transit':
demand, need_placement = Placement.calculate_demand(flow, flow.current_sf, exec_node['available_sf'],
state.service_functions)
if flow['target_node_id'] == exec_node_id:
if exec_node['capacity'] >= demand:
# process flow
if need_placement:
placement[exec_node_id].append(flow.current_sf)
processing_rules[exec_node_id][flow.flow_id] = [flow.current_sf]
else:
try:
self.plan_placement(flow, exclude=[flow.current_node_id])
assert flow['target_node_id'] != exec_node_id, \
'Flow cannot be processed here, why does it stay?'
flow['blocked_links'] = []
self.set_new_path(flow)
self.forward_flow(flow, state)
except (NoCandidateException, nx.NetworkXNoPath) as e:
flow['state'] = 'drop'
flow['path'] = []
else:
try:
self.forward_flow(flow, state)
except nx.NetworkXNoPath:
flow['state'] = 'drop'
flow['path'] = []
elif flow['state'] == 'departure':
# Return to destination as soon as possible, no more processing necessary
if exec_node_id != flow.egress_node_id:
self.forward_flow(flow, state)
if flow['state'] == 'drop':
            # Something went legitimately wrong => clear remaining rules => let it drop
processing_rules[exec_node_id].pop(flow.flow_id, None)
forwarding_rules[exec_node_id].pop(flow.flow_id, None)
self.node_mortality[exec_node_id] += 1
# record decision time
decision_time = time.time() - start
# all done centrally at one logical global node for Bjointsp
self.decision_times[flow.flow_id][flow.current_node_id].append(decision_time)
self.simulator.apply(state.derive_action())
def plan_placement(self, flow, exclude=[]):
try:
score_table = self.score(flow, exclude)
target = score_table[0][0]
flow['target_node_id'] = target
flow['state'] = 'transit'
except NoCandidateException:
raise
def score(self, flow, exclude=[]):
state = self.simulator.get_state()
exec_node_id = flow.current_node_id
candidates_nodes = []
candidates_path = []
rejected_nodes = []
rejected_path = []
for n in state.network['node_list']:
node_stats = self.node_stats(n, flow)
path_stats = self.path_stats(flow.current_node_id, n, flow)
if node_stats[1] == 1:
candidates_nodes.append(node_stats)
candidates_path.append(path_stats)
if len(candidates_nodes) == 0:
raise NoCandidateException
# Determine max min
# Nodes
# Closeness
minimum_closeness = min(candidates_nodes, key=lambda x: x[2])[2]
maximum_closeness = max(candidates_nodes, key=lambda x: x[2])[2]
# Compound path length
minimum_compound_path_length = min(candidates_nodes, key=lambda x: x[3])[3]
maximum_compound_path_length = max(candidates_nodes, key=lambda x: x[3])[3]
# Remaining node cap
minimum_remaining_node_cap = min(candidates_nodes, key=lambda x: x[4])[4]
maximum_remaining_node_cap = max(candidates_nodes, key=lambda x: x[4])[4]
# Node mortality
minimum_node_mortality = min(candidates_nodes, key=lambda x: x[5])[5]
maximum_node_mortality = max(candidates_nodes, key=lambda x: x[5])[5]
# Node occupancy
minimum_node_occupancy = min(candidates_nodes, key=lambda x: x[6])[6]
maximum_node_occupancy = max(candidates_nodes, key=lambda x: x[6])[6]
# Path
# Remaining link cap
minimum_remaining_link_cap = min(candidates_path, key=lambda x: x[2])[2]
maximum_remaining_link_cap = max(candidates_path, key=lambda x: x[2])[2]
# Number of unavailable links
minimum_unavailable_links = min(candidates_path, key=lambda x: x[3])[3]
maximum_unavailable_links = max(candidates_path, key=lambda x: x[3])[3]
# Path occupancy
minimum_path_occupancy = min(candidates_path, key=lambda x: x[4])[4]
maximum_path_occupancy = max(candidates_path, key=lambda x: x[4])[4]
# Determine value ranges
# Add delta to prevent zero division
delta = 0.0001
# Node range
range_closeness = maximum_closeness - minimum_closeness + delta
range_compound_path_length = maximum_compound_path_length - minimum_compound_path_length + delta
range_remaining_node_cap = maximum_remaining_node_cap - minimum_remaining_node_cap + delta
range_node_mortality = maximum_node_mortality - minimum_node_mortality + delta
range_node_occupancy = maximum_node_occupancy - minimum_node_occupancy + delta
# Path range
range_remaining_link_cap = maximum_remaining_link_cap - minimum_remaining_link_cap + delta
range_unavailable_links = maximum_unavailable_links - minimum_unavailable_links + delta
range_path_occupancy = maximum_path_occupancy - minimum_path_occupancy + delta
# Range scaling
# Nodes
for i in range(len(candidates_nodes)):
candidates_nodes[i][2] = maximum_closeness - candidates_nodes[i][2]
candidates_nodes[i][3] = maximum_compound_path_length - candidates_nodes[i][3]
candidates_nodes[i][4] = candidates_nodes[i][4] - minimum_remaining_node_cap
candidates_nodes[i][5] = maximum_node_mortality - candidates_nodes[i][5]
candidates_nodes[i][6] = maximum_node_occupancy - candidates_nodes[i][6]
# Links
for i in range(len(candidates_path)):
candidates_path[i][2] = candidates_path[i][2] - minimum_remaining_link_cap
candidates_path[i][3] = maximum_unavailable_links - candidates_path[i][3]
candidates_path[i][4] = maximum_path_occupancy - candidates_path[i][4]
# [0,1] scaling
# print('')
# Nodes
for i in range(len(candidates_nodes)):
candidates_nodes[i][2] = candidates_nodes[i][2] / range_closeness
candidates_nodes[i][3] = candidates_nodes[i][3] / range_compound_path_length
candidates_nodes[i][4] = candidates_nodes[i][4] / range_remaining_node_cap
candidates_nodes[i][5] = candidates_nodes[i][5] / range_node_mortality
candidates_nodes[i][6] = candidates_nodes[i][6] / range_node_occupancy
assert 0 <= candidates_nodes[i][2] <= 1
assert 0 <= candidates_nodes[i][3] <= 1
assert 0 <= candidates_nodes[i][4] <= 1
assert 0 <= candidates_nodes[i][5] <= 1
assert 0 <= candidates_nodes[i][6] <= 1
# Links
for i in range(len(candidates_path)):
candidates_path[i][2] = candidates_path[i][2] / range_remaining_link_cap
candidates_path[i][3] = candidates_path[i][3] / range_unavailable_links
candidates_path[i][4] = candidates_path[i][4] / range_path_occupancy
assert 0 <= candidates_path[i][2] <= 1
assert 0 <= candidates_path[i][3] <= 1
assert 0 <= candidates_path[i][4] <= 1
# Scoring
score_table = []
for i in range(len(candidates_nodes)):
node_score = candidates_nodes[i][2] + candidates_nodes[i][3] + candidates_nodes[i][4] + candidates_nodes[i][5]
path_score = candidates_path[i][4]
score_table.append((candidates_nodes[i][0], node_score + path_score))
score_table.sort(key=lambda x: x[1], reverse=True)
return score_table
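    # Reading the table (illustrative): after the [0, 1] scaling above, the four
    # node terms plus the path-occupancy term cap the score at 5.0, reached by a
    # close, undropped node with spare capacity on a short, idle compound path.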
def occupancy(self, node_id):
return sum(float(dr) for id, dr in self.occupancy_list[node_id])
def node_stats(self, node_id, flow):
"""
Returns node stats for score calculation as list
Index:
1. Can the flow be processed at this moment
2. Closeness to flows current node
3. Sum of path length from current node to target node and target node to egress node
4. Remaining node capacity
5. How many flows have already dropped there
6. Node occupancy
"""
available_sf = self.simulator.params.network.node[node_id]['available_sf']
demand, place = Placement.calculate_demand(flow, flow.current_sf, available_sf, self.simulator.params.sf_list)
can_be_processed = 1 if self.simulator.params.network.nodes[node_id]['cap'] > demand else 0
closeness = self.apsp_length[flow.current_node_id][node_id]
compound_path_length = (
self.apsp_length[flow.current_node_id][node_id] + self.apsp_length[node_id][flow.egress_node_id])
remaining_cap = self.simulator.params.network.nodes[node_id]['remaining_cap']
node_mortality = self.node_mortality[node_id]
return [node_id, can_be_processed, closeness, compound_path_length, remaining_cap, node_mortality,
self.occupancy(node_id)]
def path_stats(self, node_a_id, node_b_id, flow):
"""
Returns path stats for score calculation as list
Index:
1. path length
2. Avg remaining link capacity
3. Sum of unavailable links
4. Path_occupancy
"""
sum_unavailable_links = 0
sum_remaining_cap = 0
shortest_path = self.apsp[node_a_id][node_b_id]
path_length = self.apsp_length[node_a_id][node_b_id]
path_occupancy = sum(map(self.occupancy, shortest_path))
for i in range(len(shortest_path) - 1):
i_1 = shortest_path[i]
i_2 = shortest_path[i + 1]
cap = self.simulator.params.network[i_1][i_2]['cap']
remaining_cap = self.simulator.params.network[i_1][i_2]['remaining_cap']
sum_remaining_cap += remaining_cap
sum_unavailable_links += 1 if (remaining_cap < flow.dr) else 0
return [(node_a_id, node_b_id), path_length, sum_remaining_cap, sum_unavailable_links, path_occupancy]
def try_set_new_path(self, flow):
try:
self.set_new_path(flow)
except nx.NetworkXNoPath:
flow['state'] = 'drop'
flow['path'] = []
def set_new_path(self, flow):
"""
Calculate and set shortest path to the target node defined by target_node_id, taking blocked links into account.
"""
for link in flow['blocked_links']:
self.network_copy.remove_edge(link[0], link[1])
try:
shortest_path = nx.shortest_path(self.network_copy, flow.current_node_id, flow['target_node_id'], weight='delay')
# Remove first node, as it is corresponds to the current node
shortest_path.pop(0)
flow['path'] = shortest_path
except nx.NetworkXNoPath:
raise
finally:
for link in flow['blocked_links']:
self.network_copy.add_edge(link[0], link[1], **link.attributes)
def forward_flow(self, flow, state):
"""
        This function handles the actions necessary to forward a flow from the associated node. A call to this
        function requires the flow to have a precomputed path. If the flow can be forwarded along the precomputed
        path, the flow_forwarding_rules for the associated node will be set. If the flow cannot be forwarded due to
        missing link resources, all incident links are checked and every unsuitable link is added to the flow's
        blocked link list. Subsequently, a new path is calculated.
"""
node_id = flow.current_node_id
assert len(flow['path']) > 0
next_neighbor_id = flow['path'].pop(0)
edge = self.simulator.params.network[node_id][next_neighbor_id]
# Can forward?
if edge['remaining_cap'] >= flow.dr:
# yes => set forwarding rule
state.flow_forwarding_rules[node_id][flow.flow_id] = next_neighbor_id
else:
# no => adapt path
# remove all incident links which cannot be crossed
for incident_edge in self.simulator.params.network.edges(node_id, data=True):
if (incident_edge[2]['remaining_cap'] - flow.dr) < 0:
link = Link(incident_edge[0], incident_edge[1], **incident_edge[2])
if link not in flow['blocked_links']:
flow['blocked_links'].append(link)
try:
# Try to find new path once
self.set_new_path(flow)
assert len(flow['path']) > 0
next_neighbor_id = flow['path'].pop(0)
# Set forwarding rule
state.flow_forwarding_rules[node_id][flow.flow_id] = next_neighbor_id
except nx.NetworkXNoPath:
flow['state'] = 'drop'
flow['path'] = []
flow['blocked_links'] = []
def post_forwarding(self, node_id, flow):
"""
Callback
"""
self.occupancy_list[node_id].remove((flow.flow_id, flow.dr))
def depart_flow(self, flow):
"""
Callback
"""
self.occupancy_list[flow.current_node_id].remove((flow.flow_id, flow.dr))
def drop_flow(self, flow):
"""
Callback
"""
self.occupancy_list[flow.current_node_id].remove((flow.flow_id, flow.dr))
def periodic_remove(self):
"""
<Callback>
"""
state = self.simulator.get_state()
for node_id, node_data in state.network['nodes'].items():
for sf, sf_data in node_data['available_sf'].items():
if (sf_data['load'] == 0) and ((state.simulation_time - sf_data['last_requested']) > self.vnf_timeout):
state.placement[node_id].remove(sf)
self.simulator.apply(state.derive_action())
def periodic_measurement(self):
"""
<Callback>
"""
self.simulator.write_state()
if __name__ == "__main__":
# for testing and debugging
# Simple test params
network = 'abilene_11.graphml'
args = {
'network': f'../../../params/networks/{network}',
'service_functions': '../../../params/services/3sfcs.yaml',
'config': '../../../params/config/simple_config.yaml',
'seed': 9999,
'output_path': f'spr2-out/{network}'
}
# Setup logging to screen
logging.basicConfig(level=logging.INFO)
logging.getLogger('coordsim').setLevel(logging.INFO)
logging.getLogger('coordsim.reader').setLevel(logging.WARNING)
simulator = Simulator(test_mode=True)
# Setup algorithm
algo = SPR2Algo(simulator)
algo.init(os.path.abspath(args['network']), os.path.abspath(args['service_functions']),
os.path.abspath(args['config']), args['seed'], args['output_path'])
# Execute orchestrated simulation
algo.run()
| [
"[email protected]"
] | |
6569fbcc6eb836ab5ada7f0d7b0beac36b3a8ac8 | 0f47b8b3775e1730f92141128491b0bbfe3d89e0 | OOP/SOLID/examples/open_closed/after/character.py | d9d293c50eead1ee7650593e0d7a14a6d3fdb875 | [] | no_license | hongmin0907/CS | 1d75c38da98c6174ea19de163c850d0f3bac22e3 | 697e8e1a5bde56a7588381a12f74bbb0e3aee3e8 | refs/heads/master | 2021-01-06T20:36:33.718087 | 2019-07-15T00:20:09 | 2019-07-15T00:20:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,170 | py | # Program against the abstraction type
# CLOSED FOR MODIFICATION
# Extending the kinds of attack does not change the character's attack code.
from abc import ABCMeta, abstractmethod
from attack_kind import (AttackKindFactory, FireAttackKind, IceAttackKind,
StoneAttackKind, KungfuAttackKind)
class Character(metaclass=ABCMeta):
def __init__(self, name, hp, power):
self.name=name
self.hp=hp
self.power=power
@abstractmethod
def attack(self, other, kind):
pass
@abstractmethod
def get_damage(self, power, attack_kind):
pass
def __str__(self):
return f'{self.name} : {self.hp}'
class Player(Character):
def __init__(self, name='player', hp=100, power=10, *attack_kinds):
super().__init__(name, hp, power)
self.skills=[]
for attack_kind in attack_kinds:
self.skills.append(attack_kind)
def attack(self, other, a_kind):
for attack_kind in self.skills:
if a_kind==attack_kind.get_kind():
other.get_damage(self.power, a_kind)
attack_kind.attack()
def get_damage(self, power, a_kind):
for attack_kind in self.skills:
if attack_kind.get_kind()==a_kind:
self.hp-=(power//2)
return
self.hp-=power
class Monster(Character):
@classmethod
def get_monster_kind(cls):
return cls.__name__.replace('Monster', '')
def __init__(self, name='Monster', hp=50, power=5):
super().__init__(name, hp, power)
self.name=self.get_monster_kind()+name
self.attack_kind=AttackKindFactory(self.get_monster_kind())
def attack(self, other, a_kind):
if self.attack_kind.get_kind()==a_kind:
other.get_damage(self.power, a_kind)
self.attack_kind.attack()
def get_damage(self, power, a_kind):
if a_kind==self.attack_kind.get_kind():
self.hp+=power
else:
self.hp-=power
def get_attack_kind(self):
return self.attack_kind.get_kind()
@abstractmethod
def generate_gold(self):
pass
# Only two kinds of monster existed early in the game's development
class FireMonster(Monster):
def generate_gold(self):
return 10
class IceMonster(Monster):
def __init__(self):
super().__init__()
self.hp=100
def generate_gold(self):
return 20
# Monsters added as the game grew in scale
class StoneMonster(Monster):
def generate_gold(self):
return 0
class KungfuMonster(Monster):
def generate_gold(self):
return 1000
if __name__=="__main__":
fm=FireMonster()
im=IceMonster()
sm=StoneMonster()
kfm=KungfuMonster()
monsters=[]
monsters.extend((fm, im, sm, kfm))
player=Player('john', 120, 20, IceAttackKind(), FireAttackKind())
print(player)
for mon in monsters:
player.attack(mon, 'Fire')
for mon in monsters:
print(mon)
for mon in monsters:
print(mon.get_attack_kind())
mon.attack(player, mon.get_attack_kind())
print(player)
| [
"[email protected]"
] | |
3a2d8d4fd3ae54ef5535a568c0501b0c2090940f | e61e664d95af3b93150cda5b92695be6551d2a7c | /vega/quota/latency.py | c637d96fd9df153845b45061d928eece3556b401 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | huawei-noah/vega | 44aaf8bb28b45f707ed6cd4e871ba70fc0c04846 | 12e37a1991eb6771a2999fe0a46ddda920c47948 | refs/heads/master | 2023-09-01T20:16:28.746745 | 2023-02-15T09:36:59 | 2023-02-15T09:36:59 | 273,667,533 | 850 | 184 | NOASSERTION | 2023-02-15T09:37:01 | 2020-06-20T08:20:06 | Python | UTF-8 | Python | false | false | 1,693 | py | # -*- coding:utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flops and Parameters Filter."""
import logging
import vega
from vega.metrics import calc_forward_latency_on_host
from vega.model_zoo import ModelZoo
from .quota_item_base import QuotaItemBase
class LatencyVerification(QuotaItemBase):
"""Latency Filter class."""
def __init__(self, latency_range):
self.latency_range = latency_range
def verify_on_host(self, model_desc):
"""Filter function of latency."""
model = ModelZoo.get_model(model_desc)
count_input = self.get_input_data()
trainer = vega.get_trainer(model_desc=model_desc)
sess_config = trainer._init_session_config() if vega.is_tf_backend() else None
latency = calc_forward_latency_on_host(model, count_input, sess_config)
logging.info(f"Sampled model's latency: {latency}ms")
if latency < self.latency_range[0] or latency > self.latency_range[1]:
logging.info(f"The latency ({latency}) is out of range. Skip this network.")
return False
else:
return True
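# Usage sketch (model_desc comes from the surrounding search pipeline; the
# latency range is in milliseconds, matching the log message above):
#   ok = LatencyVerification((0, 50)).verify_on_host(model_desc)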
| [
"[email protected]"
] | |
5f80b19489ea23ec495a58932bd20dd15986b2d0 | 1a6726fb62584f7787197ff404e30b012bc54f62 | /Basic Syntax, Conditional Statements and Loops - Exercise/Maximum Multiple.py | 74e93476034afd3a986c079200ab1eddd4d51c2d | [] | no_license | zdravkob98/Fundamentals-with-Python-May-2020 | f7a69d1a534d92f3b14bc16ce5d8d9b8611d97dd | 74e69a486e582c397cdc2f98b3dffe655110d38a | refs/heads/main | 2022-12-30T21:43:57.682790 | 2020-10-06T17:05:32 | 2020-10-06T17:05:32 | 301,797,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | n1 = int(input())
n2 = int(input())
for n in range(n2, n1 - 1, -1):
    # assumed task (the stored file ends mid-loop): print the largest
    # multiple of n1 that is at most n2
    if n % n1 == 0:
        print(n)
        break | [
"[email protected]"
] | |
2f30b44e7c114f7510f263f5587bfd1d560d6815 | 1e5f6ac1590fe64e2d5a2d8b036c0948847f668d | /codes/Module_2/lecture_7/lecture_7_16.py | f535e827c618a0f328550cad12cb40cbcb93bd19 | [] | no_license | Gedanke/Reptile_study_notes | 54a4f48820586b1784c139716c719cc9d614c91b | a9705ebc3a6f95160ad9571d48675bc59876bd32 | refs/heads/master | 2022-07-12T23:43:24.452049 | 2021-08-09T12:54:18 | 2021-08-09T12:54:18 | 247,996,275 | 5 | 1 | null | 2022-06-26T00:21:48 | 2020-03-17T14:50:42 | HTML | UTF-8 | Python | false | false | 535 | py | # -*- coding: utf-8 -*-
import requests
headers = {
'Accept-Encoding': 'gzip, deflate, sdch',
'Accept-Language': 'en-US,en;q=0.8',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.72 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Connection': 'keep-alive',
}
requests.get("http://httpbin.org/cookies/set/number/123456789", headers=headers)
r = requests.get("http://httpbin.org/cookies")
print(r.text)
| [
"[email protected]"
] | |
0121fb0c0c35e0b76606d6c0541c3178447f1eed | d7faf47825b6f8e5abf9a9587f1e7248c0eed1e2 | /rllib/tests/test_gpus.py | 8a1f24311a4d3ac2d53928ee1f8f93bb19544e3e | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ggdupont/ray | 7d7c7f39a8f99a09199fab60897da9e48b8e2645 | 15391026c19f1cbbb8d412e46b01f7998e42f2b9 | refs/heads/master | 2023-03-12T06:30:11.428319 | 2021-12-07T05:34:27 | 2021-12-07T05:34:27 | 165,058,028 | 0 | 0 | Apache-2.0 | 2023-03-04T08:56:50 | 2019-01-10T12:41:09 | Python | UTF-8 | Python | false | false | 4,631 | py | import unittest
import ray
from ray.rllib.agents.pg import PGTrainer, DEFAULT_CONFIG
from ray.rllib.utils.framework import try_import_torch
from ray.rllib.utils.test_utils import framework_iterator
from ray import tune
torch, _ = try_import_torch()
class TestGPUs(unittest.TestCase):
def test_gpus_in_non_local_mode(self):
# Non-local mode.
ray.init()
actual_gpus = torch.cuda.device_count()
print(f"Actual GPUs found (by torch): {actual_gpus}")
config = DEFAULT_CONFIG.copy()
config["num_workers"] = 2
config["env"] = "CartPole-v0"
# Expect errors when we run a config w/ num_gpus>0 w/o a GPU
# and _fake_gpus=False.
for num_gpus in [0, 0.1, 1, actual_gpus + 4]:
# Only allow possible num_gpus_per_worker (so test would not
# block infinitely due to a down worker).
per_worker = [0] if actual_gpus == 0 or actual_gpus < num_gpus \
else [0, 0.5, 1]
for num_gpus_per_worker in per_worker:
for fake_gpus in [False] + ([] if num_gpus == 0 else [True]):
config["num_gpus"] = num_gpus
config["num_gpus_per_worker"] = num_gpus_per_worker
config["_fake_gpus"] = fake_gpus
print(f"\n------------\nnum_gpus={num_gpus} "
f"num_gpus_per_worker={num_gpus_per_worker} "
f"_fake_gpus={fake_gpus}")
frameworks = ("tf", "torch") if num_gpus > 1 else \
("tf2", "tf", "torch")
for _ in framework_iterator(config, frameworks=frameworks):
# Expect that trainer creation causes a num_gpu error.
if actual_gpus < num_gpus + 2 * num_gpus_per_worker \
and not fake_gpus:
# "Direct" RLlib (create Trainer on the driver).
# Cannot run through ray.tune.run() as it would
# simply wait infinitely for the resources to
# become available.
print("direct RLlib")
self.assertRaisesRegex(
RuntimeError,
"Found 0 GPUs on your machine",
lambda: PGTrainer(config, env="CartPole-v0"),
)
# If actual_gpus >= num_gpus or faked,
# expect no error.
else:
print("direct RLlib")
trainer = PGTrainer(config, env="CartPole-v0")
trainer.stop()
# Cannot run through ray.tune.run() w/ fake GPUs
# as it would simply wait infinitely for the
# resources to become available (even though, we
# wouldn't really need them).
if num_gpus == 0:
print("via ray.tune.run()")
tune.run(
"PG",
config=config,
stop={"training_iteration": 0})
ray.shutdown()
def test_gpus_in_local_mode(self):
# Local mode.
ray.init(local_mode=True)
actual_gpus_available = torch.cuda.device_count()
config = DEFAULT_CONFIG.copy()
config["num_workers"] = 2
config["env"] = "CartPole-v0"
# Expect no errors in local mode.
for num_gpus in [0, 0.1, 1, actual_gpus_available + 4]:
print(f"num_gpus={num_gpus}")
for fake_gpus in [False, True]:
print(f"_fake_gpus={fake_gpus}")
config["num_gpus"] = num_gpus
config["_fake_gpus"] = fake_gpus
frameworks = ("tf", "torch") if num_gpus > 1 else \
("tf2", "tf", "torch")
for _ in framework_iterator(config, frameworks=frameworks):
print("direct RLlib")
trainer = PGTrainer(config, env="CartPole-v0")
trainer.stop()
print("via ray.tune.run()")
tune.run(
"PG", config=config, stop={"training_iteration": 0})
ray.shutdown()
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| [
"[email protected]"
] | |
c15d9a7f00b2603e79791d3b4d43209b20ff32db | cb305a20202cd381af979702950311a1b92319f2 | /Flask/Project/setup.py | 2bcadb4c2cff7569f2e7e0c66fa9475e9af3c831 | [] | no_license | ShanjinurIslam/The-Stack | 93a9bafb7355c471e2363bacddc0cfae5c5ae1c1 | 2d31ae8cf37dd9aceef06e067756e716a225f23b | refs/heads/master | 2022-12-08T03:35:01.397484 | 2020-08-26T20:21:13 | 2020-08-26T20:21:13 | 287,058,289 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | from setuptools import find_packages, setup
setup(
name='flaskr',
version='1.0.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=['flask','flask_wtf'],
)
| [
"[email protected]"
] | |
99a11a262c595cf5dddef4b3dd21d0cd093918a2 | d7ce3194a69ad53a6873981ae7f68b89eb7c3900 | /FogLayer/CloudConnector/gunicorn_config.py | e5897c809fee99eb5e4658dfad22228861137b27 | [] | no_license | Melissari1997/eHealth-SDCC | 42d419b981974b241f7fc063b19346bc0bf189ad | 48b6457e54e022e856b0e5bd26e3d0dafdb6c388 | refs/heads/master | 2023-01-04T08:36:05.697749 | 2020-11-07T21:07:41 | 2020-11-07T21:07:41 | 308,496,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60 | py | bind = "0.0.0.0:6000"
workers = 2
threads = 4
timeout = 120
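# Launch sketch (the WSGI entry point name is an assumption):
#   gunicorn -c gunicorn_config.py app:app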
| [
"[email protected]"
] | |
cc45d0ee2e3786742bdb6ce3f8e243e4832d6541 | 2976433a213f354b6d387e1d957192a9871f7e40 | /JavaScript/reactjs/basic01/testcode.py | f9087ac3d31b1989bf0f6a5a225583d5f66980a2 | [] | no_license | saurabh-kumar88/Coding-Practice- | 90a6f6b8feb7a1d2316451b31c646a48dc6f9bf9 | 48f0bac728745c8978468974d55025da86f29486 | refs/heads/master | 2023-02-15T00:19:47.411973 | 2020-10-06T15:48:46 | 2020-10-06T15:48:46 | 280,220,900 | 0 | 1 | null | 2021-01-06T09:15:21 | 2020-07-16T17:51:29 | JavaScript | UTF-8 | Python | false | false | 508 | py | class Solution:
def fizzBuzz(self, n: int):
ans = []
multiple_of_3, multiple_of_5 = False, False
for count in range(1, n+1):
if count % 3 == 0 and count % 5 == 0:
ans.append("FizzBuzz")
elif count % 3 == 0:
ans.append("Fizz")
elif count % 5 == 0:
ans.append("Buzz")
else:
ans.append(str(count))
return ans
if __name__ == "__main__":
obj = Solution()
print(obj.fizzBuzz(15))
| [
"[email protected]"
] | |
856571450b135b64c9414883a6320d798601aeae | aac9fd4a281ffac37fe8b2087f720001b5bcad7a | /mnist_sync_sharding_greedy/worker.py | cb909a02fe9d1d064155aaff4dfee04ee14db282 | [
"MIT"
] | permissive | epikjjh/Distributed-Deep-Learning | 8d77875e9aa74855b29ac5bb8860b987ef798ec1 | 9762b99306771c0f7dadc58abe6bf7ebe5ed468f | refs/heads/master | 2023-08-14T18:27:14.144482 | 2021-10-19T02:13:17 | 2021-10-19T02:13:17 | 266,505,936 | 1 | 3 | MIT | 2020-10-14T22:06:16 | 2020-05-24T09:09:20 | Python | UTF-8 | Python | false | false | 5,193 | py | from model import Model
from mpi4py import MPI
from typing import List
import numpy as np
import tensorflow as tf
import time,sys
from functools import reduce
class SyncWorker(Model):
def __init__(self, batch_size, rank, num_ps, num_workers):
super().__init__()
''' Modify var_bucket & var_shape for greedy ordering '''
# Sort parameters
tmp = {i: reduce(lambda x, y: x*y, self.var_shape[i].as_list()) for i in range(self.var_size)}
tmp = sorted(tmp, key=tmp.get)
# Reorder parameters
self.greedy_order = []
i = 0
j = len(tmp) - 1
while i < j:
self.greedy_order.append(tmp[i])
self.greedy_order.append(tmp[j])
i += 1
j -= 1
# Add mid value if the number of parameters is odd
if len(tmp) % 2:
self.greedy_order.append(tmp[i])
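        # e.g. six tensors with sizes s0 <= ... <= s5 end up ordered
        # [s0, s5, s1, s4, s2, s3], pairing the smallest with the largest.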
# Modify var_bucket
with tf.compat.v1.variable_scope("mnist", reuse=tf.compat.v1.AUTO_REUSE):
self.var_bucket = [tf.compat.v1.get_variable("v{}".format(i), shape=self.var_shape[i], dtype=tf.float32) for i in self.greedy_order]
# Modify var_shape
self.var_shape = [self.var_shape[i] for i in self.greedy_order]
# Set rank of worker
# rank: number of parameter servers ~ number of parameter servers + number of workers - 1
self.rank = rank
# Set number of parameter servers & workers
self.num_workers = num_workers
self.num_ps = num_ps
self.avg_var_size = self.var_size // self.num_ps
self.local_var_size = self.avg_var_size + self.var_size % self.num_ps
self.batch_size = batch_size
self.grad_buckets = [tf.compat.v1.placeholder(shape=self.var_shape[i], dtype=tf.float32) for i in range(self.var_size)]
self.senders = [tf.py_function(func=self.wrap_send(i), inp=[self.grad_buckets[i]], Tout=[]) for i in range(self.var_size)]
def wrap_send(self, num):
def send(grad):
# Send data to parameter server
ind = num // self.avg_var_size
if num >= self.var_size - self.local_var_size:
ind = self.num_ps-1
comm.Send([grad, MPI.FLOAT], dest=ind, tag=num-(ind*self.avg_var_size))
return None
return send
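    # Shard layout example (illustrative): with var_size = 10 and num_ps = 3,
    # avg_var_size = 3 and local_var_size = 4, so tensors 0-2 go to ps 0,
    # 3-5 to ps 1, and 6-9 (the larger remainder shard) to ps 2 — the
    # num_ps-1 override keeps e.g. tensor 9 (9 // 3 == 3) on the last server.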
def work(self, cnt):
x_batch = self.x_train[self.batch_size*cnt:self.batch_size*(cnt+1)]
y_batch = self.y_train[self.batch_size*cnt:self.batch_size*(cnt+1)]
ret, = self.sess.run([self.grads], feed_dict={self.x: x_batch, self.y_: y_batch, self.keep_prob: 0.5})
grads = [grad for grad, var in ret] # gradient tuple
# Send gradients to each parameter server
for i in range(self.var_size):
self.sess.run([self.senders[i]], feed_dict={self.grad_buckets[i]: grads[self.greedy_order[i]]})
if __name__ == "__main__":
epoch = 1
batch_size = 100
comm = MPI.COMM_WORLD
# Set rank of worker
# rank: number of parameter servers ~ number of parameter servers + number of workers - 1
rank = comm.Get_rank()
# Set number of parameter servers & workers
num_workers = int(sys.argv[2])
num_ps = comm.Get_size() - num_workers
    start = time.perf_counter()  # time.clock() was removed in Python 3.8
worker = SyncWorker(batch_size, rank, num_ps, num_workers)
# Send parameters to all parameter servers
if worker.rank == worker.num_ps:
data = {"size": worker.var_size, "shape": worker.var_shape, "total_batch": worker.x_train.shape[0]}
for i in range(worker.num_ps):
comm.send(data, dest=i, tag=0)
# For broadcasting
bucket = [np.empty(worker.var_shape[i], dtype=np.float32) for i in range(worker.var_size)]
ph_bucket = [tf.compat.v1.placeholder(shape=worker.var_shape[i], dtype=tf.float32) for i in range(worker.var_size)]
bucket_assign = [tf.compat.v1.assign(worker.var_bucket[i], ph_bucket[i]) for i in range(worker.var_size)]
for step in range(epoch):
batch_num = int(worker.x_train.shape[0]/batch_size)
for batch_cnt in range(batch_num):
# Calculate gradients then send them to parameter server
worker.work(batch_cnt)
# Receive data from parameter server
for i in range(worker.var_size):
ind = i // worker.avg_var_size
if i >= worker.var_size - worker.local_var_size:
ind = worker.num_ps-1
comm.Recv([bucket[i], MPI.FLOAT], source=ind, tag=i-(ind*worker.avg_var_size))
# Assign broadcasted values
worker.sess.run(bucket_assign, feed_dict={ph_bucket[i]:bucket[i] for i in range(worker.var_size)})
if batch_cnt % 10 == 0:
print("Worker{} epoch: {} batch: {} accuracy: {}".format(rank,step,batch_cnt,worker.sess.run(worker.accuracy, feed_dict={worker.x: worker.x_test, worker.y_: worker.y_test, worker.keep_prob: 1.0})))
    end = time.perf_counter()
print("Worker{} final accuracy: {}".format(rank,worker.sess.run(worker.accuracy, feed_dict={worker.x: worker.x_test, worker.y_: worker.y_test, worker.keep_prob: 1.0})))
print("Time: {}".format(end-start))
| [
"[email protected]"
] | |
e31ffe1f95b4ccb8bfc800dd1d762b33eea9a203 | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractInacloudspaceWordpressCom.py | d57d99f589b5ed6165f014a0a818e2dcb1ef10e2 | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | refs/heads/master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 658 | py | def extractInacloudspaceWordpressCom(item):
'''
Parser for 'inacloudspace.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Drunken Exquisiteness', 'Drunken Exquisiteness', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | [
"[email protected]"
] | |
e6368bdc60f7acd094e96b47d3e1dccfe59f0286 | 3e0a2a0e489f41a5b6b8afb1c09227ae2b4a5c92 | /picarx.py | 01beed36417eb096f35725c41f2d1aaa437169f5 | [] | no_license | mlowell28/RobotSystems | 0d82f2a9509dd0842be4c71a66182c90478092e7 | 8407c91044c1db002c8ddd097730d07e8892b96e | refs/heads/main | 2023-05-26T05:48:53.198947 | 2021-06-09T21:18:48 | 2021-06-09T21:18:48 | 353,469,095 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,354 | py | from ezblock import *
from ezblock import __reset_mcu__
import time
import atexit
__reset_mcu__()
time.sleep(0.01)
PERIOD = 4095
PRESCALER = 10
TIMEOUT = 0.02
dir_servo_pin = Servo(PWM('P2'))
camera_servo_pin1 = Servo(PWM('P0'))
camera_servo_pin2 = Servo(PWM('P1'))
left_rear_pwm_pin = PWM("P13")
right_rear_pwm_pin = PWM("P12")
left_rear_dir_pin = Pin("D4")
right_rear_dir_pin = Pin("D5")
S0 = ADC('A0')
S1 = ADC('A1')
S2 = ADC('A2')
Servo_dir_flag = 1
dir_cal_value = 0
cam_cal_value_1 = 0
cam_cal_value_2 = 0
motor_direction_pins = [left_rear_dir_pin, right_rear_dir_pin]
motor_speed_pins = [left_rear_pwm_pin, right_rear_pwm_pin]
cali_dir_value = [1, -1]
cali_speed_value = [0, 0]
# Initialize the PWM pins
for pin in motor_speed_pins:
pin.period(PERIOD)
pin.prescaler(PRESCALER)
def set_motor_speed(motor, speed):
global cali_speed_value,cali_dir_value
motor -= 1
if speed >= 0:
direction = 1 * cali_dir_value[motor]
elif speed < 0:
direction = -1 * cali_dir_value[motor]
speed = abs(speed)
if speed != 0:
speed = int(speed /2 ) + 50
speed = speed - cali_speed_value[motor]
if direction < 0:
motor_direction_pins[motor].high()
motor_speed_pins[motor].pulse_width_percent(speed)
else:
motor_direction_pins[motor].low()
motor_speed_pins[motor].pulse_width_percent(speed)
def motor_speed_calibration(value):
    global cali_speed_value
    # scalar trim value: set_motor_speed subtracts cali_speed_value[motor],
    # so a positive value slows motor 1 and a negative value slows motor 2
    if value < 0:
        cali_speed_value[0] = 0
        cali_speed_value[1] = abs(value)
    else:
        cali_speed_value[0] = abs(value)
        cali_speed_value[1] = 0
def motor_direction_calibration(motor, value):
# 0: positive direction
# 1:negative direction
global cali_dir_value
motor -= 1
if value == 1:
cali_dir_value[motor] = -1*cali_dir_value[motor]
def dir_servo_angle_calibration(value):
global dir_cal_value
dir_cal_value = value
set_dir_servo_angle(dir_cal_value)
# dir_servo_pin.angle(dir_cal_value)
def set_dir_servo_angle(value):
global dir_cal_value
dir_servo_pin.angle(value+dir_cal_value)
def camera_servo1_angle_calibration(value):
global cam_cal_value_1
cam_cal_value_1 = value
set_camera_servo1_angle(cam_cal_value_1)
# camera_servo_pin1.angle(cam_cal_value)
def camera_servo2_angle_calibration(value):
global cam_cal_value_2
cam_cal_value_2 = value
set_camera_servo2_angle(cam_cal_value_2)
# camera_servo_pin2.angle(cam_cal_value)
def set_camera_servo1_angle(value):
global cam_cal_value_1
camera_servo_pin1.angle(-1 *(value+cam_cal_value_1))
def set_camera_servo2_angle(value):
global cam_cal_value_2
camera_servo_pin2.angle(-1 * (value+cam_cal_value_2))
def get_adc_value():
adc_value_list = []
adc_value_list.append(S0.read())
adc_value_list.append(S1.read())
adc_value_list.append(S2.read())
return adc_value_list
def set_power(speed):
set_motor_speed(1, speed)
set_motor_speed(2, speed)
def backward(speed):
set_motor_speed(1, speed)
set_motor_speed(2, speed)
def forward(speed):
set_motor_speed(1, -1*speed)
set_motor_speed(2, -1*speed)
def stop():
set_motor_speed(1, 0)
set_motor_speed(2, 0)
def Get_distance():
timeout=0.01
trig = Pin('D8')
echo = Pin('D9')
trig.low()
time.sleep(0.01)
trig.high()
time.sleep(0.000015)
trig.low()
pulse_end = 0
pulse_start = 0
timeout_start = time.time()
while echo.value()==0:
pulse_start = time.time()
if pulse_start - timeout_start > timeout:
return -1
while echo.value()==1:
pulse_end = time.time()
if pulse_end - timeout_start > timeout:
return -2
during = pulse_end - pulse_start
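    # the echo covers the round trip: distance = during * 340 m/s / 2,
    # scaled by 100 to centimetres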
cm = round(during * 340 / 2 * 100, 2)
#print(cm)
return cm
def test():
# set_dir_servo_angle(0)
#time.sleep(1)
forward(50)
time.sleep(1)
# set_dir_servo_angle(0)
# time.sleep(1)
# set_motor_speed(1, 1)
# set_motor_speed(2, 1)
# camera_servo_pin.angle(0)
if __name__ == "__main__":
atexit.register(stop)
__reset_mcu__()
time.sleep(0.01)
dir_servo_angle_calibration(-10)
time.sleep(.1)
while 1:
test()
time.sleep(1)
| [
"[email protected]"
] | |
4516a4a31e687c163b02622a904cae6e349a07f4 | bf9ae1e4269952622c7f03dc86c418d21eb20ec7 | /PythonCode-FatherandSon/示例代码/TIO_CH22_2.py | 1956584d2e89471f7f30eefe14a58c4e5d5668e8 | [] | no_license | ChuixinZeng/PythonStudyCode | 5692ca7cf5fe9b9ca24e9f54f6594f3a79b0ffb5 | 2986c83c804da51ef386ca419d0c4ebcf194cf8f | refs/heads/master | 2021-01-21T16:09:58.622069 | 2019-12-01T14:30:36 | 2019-12-01T14:30:36 | 91,876,874 | 4 | 4 | null | null | null | null | UTF-8 | Python | false | false | 599 | py | # TIO_CH22_2.py
# Copyright Warren & Carter Sande, 2013
# Released under MIT license http://www.opensource.org/licenses/mit-license.php
# Version $version ----------------------------
# Answer to Try It Out, Question 2, Chapter 22
# Save some data to a text file
name = raw_input("Enter your name: ")
age = raw_input("Enter your age: ")
color = raw_input("Enter your favorite color: ")
food = raw_input("Enter your favorite food: ")
my_data = open("my_data_file.txt", 'w')
my_data.write(name + "\n")
my_data.write(age + "\n")
my_data.write(color + "\n")
my_data.write(food)
my_data.close()
| [
"[email protected]"
] | |
9b8eaf92a3384ae848cae589c602cbf9bb952432 | ff93e108a358a40d71b426bb9615587dfcab4d03 | /Python_Basic/5_Dictionaries/basics_of_dict.py | 4b5cd56fb88e023dbfd19e8c705493c7c71ddf15 | [] | no_license | soumya9988/Python_Machine_Learning_Basics | 074ff0e8e55fd925ca50e0f9b56dba76fc93d187 | 3711bc8e618123420985d01304e13051d9fb13e0 | refs/heads/master | 2020-03-31T14:31:49.217429 | 2019-11-16T21:55:54 | 2019-11-16T21:55:54 | 152,298,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | spam = {'Alice' : 30,
'planets' : ['mars', 'venus', 'earth', 'pluto'],
'pi' : 3.14,
1: 13}
# Key, values and items in dictionary
print(spam.keys())
print(spam.values())
print(spam.items())
# setdefault method in dict
spam.setdefault('colour', 'black')
print(spam)
spam.setdefault('colour', 'pink')
print(spam)
# get() method in dict with default value
print(spam.get('Alice', 50))
print(spam.get('Alan', 50))
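# get() only reads and never modifies spam; setdefault() inserts the default
# when the key is absent, which is why 'colour' stayed 'black' above.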
| [
"[email protected]"
] | |
bb36fa74e3222d89bf01c2cafbdbe15c907ad403 | 30227ff573bcec32644fca1cca42ef4cdd612c3e | /leetcode/linkedList/problems/tests/test_list_deep_copy.py | ebe297cf39216dcc76a910b90e47673193f9a26c | [] | no_license | saurabh-pandey/AlgoAndDS | bc55864422c93e6c93b8432e483394f286ce8ef2 | dad11dedea9ceb4904d6c2dea801ce0172abfc81 | refs/heads/master | 2023-07-01T09:12:57.951949 | 2023-06-15T12:16:36 | 2023-06-15T12:16:36 | 88,239,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,691 | py | import pytest
import problems.list_deep_copy as prob
def toList(head):
output = []
currNode = head
while currNode is not None:
nodeList = [currNode.val]
if currNode.random is not None:
nodeList.append(currNode.random.val)
else:
nodeList.append(None)
output.append(nodeList)
currNode = currNode.next
return output
def createRandomList(input):
head = None
currNode = None
nodes = []
for nodeList in input:
newNode = prob.Node(nodeList[0])
nodes.append(newNode)
if head is None:
head = newNode
currNode = head
else:
currNode.next = newNode
currNode = currNode.next
for i in range(len(input)):
nodeList = input[i]
currNode = nodes[i]
if nodeList[1] is not None:
randomId = nodeList[1]
if randomId < len(nodes):
randomNode = nodes[randomId]
currNode.random = randomNode
return head
class TestListDeepCopy:
def test_example1(self):
input = [[7,None],[13,0],[11,4],[10,2],[1,0]]
head = createRandomList(input)
copiedList = prob.copyRandomList(head)
assert toList(copiedList) == toList(head)
def test_example2(self):
input = [[1,1],[2,1]]
head = createRandomList(input)
copiedList = prob.copyRandomList(head)
assert toList(copiedList) == toList(head)
def test_example3(self):
input = [[3,None],[3,0],[3,None]]
head = createRandomList(input)
copiedList = prob.copyRandomList(head)
assert toList(copiedList) == toList(head)
def test_example4(self):
input = []
head = createRandomList(input)
copiedList = prob.copyRandomList(head)
assert toList(copiedList) == toList(head) | [
"[email protected]"
] | |
643dfe06feab3a458e55f0b9b5cf060e9f8d5409 | 75f5767b35095d0afcc616925bf6768ec32cb79f | /old/src/coc.py | 017875a415bb4f9f88eb95fdeb801f756b6fa62e | [] | no_license | ai-se/cocreport | ca1832d013c45fd908d92de650ac7bc3b5a3d47a | 102b9240fdd640ee55564a7d44504b0f29f22add | refs/heads/master | 2020-04-06T09:47:50.280628 | 2016-11-26T18:29:59 | 2016-11-26T18:29:59 | 30,427,607 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,322 | py |
_ = None; Coc2tunings = [[
# vlow low nom high vhigh xhigh
# scale factors:
'Flex', 5.07, 4.05, 3.04, 2.03, 1.01, _],[
'Pmat', 7.80, 6.24, 4.68, 3.12, 1.56, _],[
'Prec', 6.20, 4.96, 3.72, 2.48, 1.24, _],[
'Resl', 7.07, 5.65, 4.24, 2.83, 1.41, _],[
'Team', 5.48, 4.38, 3.29, 2.19, 1.01, _],[
# effort multipliers:
'acap', 1.42, 1.19, 1.00, 0.85, 0.71, _],[
'aexp', 1.22, 1.10, 1.00, 0.88, 0.81, _],[
'cplx', 0.73, 0.87, 1.00, 1.17, 1.34, 1.74],[
'data', _, 0.90, 1.00, 1.14, 1.28, _],[
'docu', 0.81, 0.91, 1.00, 1.11, 1.23, _],[
'ltex', 1.20, 1.09, 1.00, 0.91, 0.84, _],[
'pcap', 1.34, 1.15, 1.00, 0.88, 0.76, _],[
'pcon', 1.29, 1.12, 1.00, 0.90, 0.81, _],[
'plex', 1.19, 1.09, 1.00, 0.91, 0.85, _],[
'pvol', _, 0.87, 1.00, 1.15, 1.30, _],[
'rely', 0.82, 0.92, 1.00, 1.10, 1.26, _],[
'ruse', _, 0.95, 1.00, 1.07, 1.15, 1.24],[
'sced', 1.43, 1.14, 1.00, 1.00, 1.00, _],[
'site', 1.22, 1.09, 1.00, 0.93, 0.86, 0.80],[
'stor', _, _, 1.00, 1.05, 1.17, 1.46],[
'time', _, _, 1.00, 1.11, 1.29, 1.63],[
'tool', 1.17, 1.09, 1.00, 0.90, 0.78, _]]
def COCOMO2(project, a = 2.94, b = 0.91, # defaults
tunes= Coc2tunings):# defaults
  sfs, ems, kloc = 0, 1, 22  # ems accumulates a product, so it starts at 1; 22 is the kloc index
scaleFactors, effortMultipliers = 5, 17
for i in range(scaleFactors):
sfs += tunes[i][project[i]]
for i in range(effortMultipliers):
j = i + scaleFactors
ems *= tunes[j][project[j]]
return a * ems * project[kloc] ** (b + 0.01*sfs)
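# Illustrative call (a sketch, not in the original file): columns 1..6 of each
# Coc2tunings row hold the vlow..xhigh values, so an all-nominal project uses
# column 3 everywhere and appends its size in KLOC as field 22:
#   nominal = [3] * 22 + [100]
#   effort = COCOMO2(nominal)   # staff-months under the default a, b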
def COCONUT(training, # list of projects
a=10, b=1, # initial (a,b) guess
deltaA = 10, # range of "a" guesses
deltaB = 0.5, # range of "b" guesses
depth = 10, # max recursive calls
constricting=0.66):# next time,guess less
if depth > 0:
useful,a1,b1= GUESSES(training,a,b,deltaA,deltaB)
if useful: # only continue if something useful
return COCONUT(training,
a1, b1, # our new next guess
deltaA * constricting,
deltaB * constricting,
depth - 1)
return a,b
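# Typical use (sketch): a, b = COCONUT(training_projects), where each project
# record must also expose its actual effort via effort(project), as in ASSESS.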
def GUESSES(training, a,b, deltaA, deltaB,
repeats=20): # number of guesses
useful, a1,b1,least,n = False, a,b, 10**32, 0
while n < repeats:
n += 1
aGuess = a1 - deltaA + 2 * deltaA * rand()
bGuess = b1 - deltaB + 2 * deltaB * rand()
error = ASSESS(training, aGuess, bGuess)
if error < least: # found a new best guess
useful,a1,b1,least = True,aGuess,bGuess,error
return useful,a1,b1
def ASSESS(training, aGuess, bGuess):
error = 0.0
for project in training: # find error on training
predicted = COCOMO2(project, aGuess, bGuess)
actual = effort(project)
error += abs(predicted - actual) / actual
return error / len(training) # mean training error
def RIG():  # pseudocode sketch: COC81, NASA83, LEARNERS, learn, guess, effort and rank are defined elsewhere
DATA = { COC81, NASA83, COC05, NASA10 }
for data in DATA: # e.g. data = COC81
mres= {}
for learner in LEARNERS: # e.g. learner = COCONUT
for n in range(10): #10 times repeat
                for project in data: # e.g. one project (leave-one-out over this dataset)
training = data - project # leave-one-out
model = learn(training)
estimate = guess(model, project)
actual = effort(project)
mre = abs(actual - estimate)/actual
mres[learner][n] = mre
        print(rank(mres)) # some statistical tests
def demo():
most , least, mid = {},{},{}
for i,x in enumerate(Coc2tunings):
ranges = x[1:]
hi, lo = -1, 10**32
jhi, jlo= -1, 10
for j,y in enumerate(ranges):
k = j+1
if y == _:
continue
if y > hi:
jhi,hi = k,y
if y < lo:
jlo,lo = k,y
most[i] = jhi
least[i] = jlo
mid[i] = 4
for k in range(10,1000,10):
least[22] = most[22] = mid[22] = k
        print(k, COCOMO2(least), COCOMO2(mid), COCOMO2(most))
demo()
| [
"[email protected]"
] | |
08b9326e06cca6119034079ff245832c668e5a0b | 3b84c4b7b16ccfd0154f8dcb75ddbbb6636373be | /google-cloud-sdk/lib/googlecloudsdk/surface/compute/networks/subnets/describe.py | aa960587e544f5f40969d3afadff5c479fd18533 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | twistedpair/google-cloud-sdk | 37f04872cf1ab9c9ce5ec692d2201a93679827e3 | 1f9b424c40a87b46656fc9f5e2e9c81895c7e614 | refs/heads/master | 2023-08-18T18:42:59.622485 | 2023-08-15T00:00:00 | 2023-08-15T12:14:05 | 116,506,777 | 58 | 24 | null | 2022-02-14T22:01:53 | 2018-01-06T18:40:35 | Python | UTF-8 | Python | false | false | 639 | py | # Copyright 2015 Google Inc. All Rights Reserved.
"""Command for describing subnetworks."""
from googlecloudsdk.api_lib.compute import base_classes
class Describe(base_classes.RegionalDescriber):
"""Describe a Google Compute Engine subnetwork.
*{command}* displays all data associated with a Google Compute
Engine subnetwork.
"""
@staticmethod
def Args(parser):
base_classes.RegionalDescriber.Args(parser, 'compute.subnetworks')
base_classes.AddFieldsFlag(parser, 'subnetworks')
@property
def service(self):
return self.compute.subnetworks
@property
def resource_type(self):
return 'subnetworks'
| [
"[email protected]"
] | |
d45044c57759e27116e80ecbd70cd22d4cb3dac8 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03418/s290294208.py | 0dc31ee21ead32a26ad160f8881c6dff33c39726 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | n, k = map(int, input().split())
ans = n**2
for b in range(1, n+1):
if b > k:
ans -= (n//b)*k+min([n%b, max([k-1, 0])])
else:
ans -= n
print(ans) | [
"[email protected]"
] | |
9b27a14f61da3ca6e8ef0f45c05e2a1affff2547 | 6ac2c27121d965babbb4bcbc7c479c26bf60bdf5 | /pymatex/search/IndexCreatorVisitor.py | 4fe3d2d775c13e181728520af7fba07bb55e3a94 | [
"MIT"
] | permissive | Gawaboumga/PyMatex | 5a2e18c3e17d3b76e814492f7e2ca63a57d720e9 | 3ccc0aa23211a064aa31a9b509b108cd606a4992 | refs/heads/master | 2020-03-28T01:40:32.341723 | 2018-12-20T13:49:12 | 2018-12-20T13:49:12 | 147,521,693 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,268 | py | from pymatex.listener import MatexASTVisitor
from pymatex.node import *
class IndexCreatorVisitor(MatexASTVisitor.MatexASTVisitor):
def __init__(self, data: dict, pk: int):
self.data = data
self.pk = pk
self.nodes_seen = {}
self.bound_variables = set()
def get_number_of_nodes_of_different_nodes(self):
return len(self.nodes_seen)
def visit_addition(self, addition_node: Addition):
depth_lhs = addition_node.lhs.accept(self)
depth_rhs = addition_node.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.ADDITION)
return node_depth
def visit_constant(self, constant_node: Constant):
node_depth = 0
self.add(node_depth, NodeType.CONSTANT, constant_node.value)
return node_depth
def visit_division(self, division_node: Division):
depth_lhs = division_node.lhs.accept(self)
depth_rhs = division_node.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.DIVISION)
return node_depth
def visit_exponentiation(self, exponentiation_node: Exponentiation):
depth_expr = exponentiation_node.lhs.accept(self)
depth_exponent = exponentiation_node.rhs.accept(self)
node_depth = max(depth_expr, depth_exponent) + 1
self.add(node_depth, NodeType.EXPONENTIATION)
return node_depth
def visit_fraction(self, fraction_node: Fraction):
if fraction_node.variable:
fraction_node.variable.accept(self)
fraction_node.start_range.accept(self)
if fraction_node.end_range:
fraction_node.end_range.accept(self)
if fraction_node.variable:
self.add_bound_variable(fraction_node.variable)
depth_expression = fraction_node.expression.accept(self)
if fraction_node.variable:
self.remove_bound_variable(fraction_node.variable)
node_depth = depth_expression + 1
self.add(node_depth, NodeType.FRACTION)
return node_depth
def visit_function(self, function_node: Function):
first_argument = function_node.argument(0)
depth = first_argument.accept(self)
for i in range(1, function_node.number_of_arguments()):
depth = min(depth, function_node.argument(i).accept(self))
node_depth = depth + 1
self.add(node_depth, NodeType.FUNCTION)
return node_depth
def visit_indexed_variable(self, indexed_variable_node: IndexedVariable):
depth = indexed_variable_node.index.accept(self)
node_depth = depth + 1
self.add(node_depth, NodeType.INDEXEDVARIABLE, indexed_variable_node.variable)
return node_depth
def visit_integral(self, integral_node: Integral):
integral_node.variable.accept(self)
integral_node.start_range.accept(self)
integral_node.end_range.accept(self)
self.add_bound_variable(integral_node.variable)
depth_expression = integral_node.expression.accept(self)
self.remove_bound_variable(integral_node.variable)
node_depth = depth_expression + 1
self.add(node_depth, NodeType.SUMMATION)
return node_depth
def visit_multiplication(self, multiplication_node: Multiplication):
depth_lhs = multiplication_node.lhs.accept(self)
depth_rhs = multiplication_node.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.MULTIPLICATION)
return node_depth
def visit_negate(self, negate_node: Negate):
depth = negate_node.node.accept(self)
self.add(depth + 1, NodeType.NEGATE)
return depth
def visit_product(self, product_node: Product):
if product_node.variable:
product_node.variable.accept(self)
product_node.start_range.accept(self)
if product_node.end_range:
product_node.end_range.accept(self)
if product_node.variable:
self.add_bound_variable(product_node.variable)
depth_expression = product_node.expression.accept(self)
if product_node.variable:
self.remove_bound_variable(product_node.variable)
node_depth = depth_expression + 1
self.add(node_depth, NodeType.PRODUCT)
return node_depth
def visit_set(self, set_node: Set):
depth_lhs = set_node.lhs.accept(self)
depth_rhs = set_node.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.SET)
return node_depth
def visit_set_difference(self, set_difference: SetDifference):
depth_lhs = set_difference.lhs.accept(self)
depth_rhs = set_difference.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.SET_DIFFERENCE)
return node_depth
def visit_subtraction(self, subtraction_node: Subtraction):
depth_lhs = subtraction_node.lhs.accept(self)
depth_rhs = subtraction_node.rhs.accept(self)
node_depth = max(depth_lhs, depth_rhs) + 1
self.add(node_depth, NodeType.SUBTRACTION)
return node_depth
def visit_summation(self, summation_node: Summation):
if summation_node.variable:
summation_node.variable.accept(self)
summation_node.start_range.accept(self)
if summation_node.end_range:
summation_node.end_range.accept(self)
if summation_node.variable:
self.add_bound_variable(summation_node.variable)
depth_expression = summation_node.expression.accept(self)
if summation_node.variable:
self.remove_bound_variable(summation_node.variable)
node_depth = depth_expression + 1
self.add(node_depth, NodeType.SUMMATION)
return node_depth
def visit_variable(self, variable_node: Variable):
node_depth = 0
if str(variable_node.variable) in self.bound_variables:
self.add(node_depth, NodeType.BOUNDVARIABLE, variable_node.variable)
else:
self.add(node_depth, NodeType.VARIABLE, variable_node.variable)
return node_depth
def add(self, node_depth: int, node_type: NodeType, external_data=None):
nodes = self.data.get(node_depth, dict())
if external_data is None:
objects = nodes.get(node_type, set())
objects.add(self.pk)
else:
objects = nodes.get(node_type, dict())
associated = objects.get(external_data, set())
associated.add(self.pk)
objects[external_data] = associated
nodes[node_type] = objects
self.data[node_depth] = nodes
        nodes_depth = self.nodes_seen.get(node_depth, set())
        # .get() with a default never returns None, and set.add is idempotent,
        # so no extra membership check is needed.
        nodes_depth.add(node_type)
        self.nodes_seen[node_depth] = nodes_depth
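        # Resulting index layout (as built by the writes above):
        #   self.data[depth][node_type] -> {pk, ...}                   when no
        #   external data is attached, or
        #   self.data[depth][node_type] -> {external_data: {pk, ...}}  with it.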
def add_bound_variable(self, variable: Variable):
self.bound_variables.add(str(variable))
def remove_bound_variable(self, variable: Variable):
self.bound_variables.remove(str(variable))
| [
"[email protected]"
] | |
dfc118a4b19025facca9b1d3907f74a48c6699b3 | 3528abad46b15133b2108c237f926a1ab252cbd5 | /Core/ableton/v2/control_surface/components/session.py | d7ca9873cdb41c85dc8ae4b7b0604d5b3b8008f1 | [] | no_license | scottmudge/MPK261_Ableton | 20f08234f4eab5ba44fde6e5e745752deb968df2 | c2e316b8347367bd157276f143b9f1a9bc2fe92c | refs/heads/master | 2020-03-20T10:56:32.421561 | 2018-06-14T19:12:47 | 2018-06-14T19:12:47 | 137,389,086 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,296 | py | # Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/ableton/v2/control_surface/components/session.py
# Compiled at: 2018-05-12 02:03:19
from __future__ import absolute_import, print_function, unicode_literals
import Live
from itertools import count
from ...base import EventObject, in_range, product, listens, listens_group
from ..compound_component import CompoundComponent
from .scene import SceneComponent
class SessionComponent(CompoundComponent):
u"""
Class encompassing several scenes to cover a defined section of
Live's session. It handles starting and playing clips.
"""
_session_component_ends_initialisation = True
scene_component_type = SceneComponent
def __init__(self, session_ring=None, auto_name=False, *a, **k):
super(SessionComponent, self).__init__(*a, **k)
assert session_ring is not None
self._session_ring = session_ring
self.__on_offsets_changed.subject = self._session_ring
self._stop_all_button = None
self._stop_track_clip_buttons = None
self._stop_clip_triggered_value = 'Session.StopClipTriggered'
self._stop_clip_value = 'Session.StopClip'
self._track_slots = self.register_disconnectable(EventObject())
self._selected_scene = self.register_component(self._create_scene())
self._scenes = self.register_components(*[ self._create_scene() for _ in xrange(self._session_ring.num_scenes) ])
if self._session_component_ends_initialisation:
self._end_initialisation()
if auto_name:
self._auto_name()
self.__on_track_list_changed.subject = self.song
self.__on_scene_list_changed.subject = self.song
self.__on_selected_scene_changed.subject = self.song.view
return
def _end_initialisation(self):
self.__on_selected_scene_changed()
self._reassign_scenes_and_tracks()
def _create_scene(self):
return self.scene_component_type(session_ring=self._session_ring)
def scene(self, index):
assert in_range(index, 0, len(self._scenes))
return self._scenes[index]
def selected_scene(self):
return self._selected_scene
def _auto_name(self):
self.name = 'Session_Control'
self.selected_scene().name = 'Selected_Scene'
for track_index in xrange(self._session_ring.num_tracks):
clip_slot = self.selected_scene().clip_slot(track_index)
clip_slot.name = 'Selected_Scene_Clip_Slot_%d' % track_index
for scene_index in xrange(self._session_ring.num_scenes):
scene = self.scene(scene_index)
scene.name = 'Scene_%d' % scene_index
for track_index in xrange(self._session_ring.num_tracks):
clip_slot = scene.clip_slot(track_index)
clip_slot.name = '%d_Clip_Slot_%d' % (track_index, scene_index)
def set_stop_all_clips_button(self, button):
self._stop_all_button = button
self.__on_stop_all_value.subject = button
self._update_stop_all_clips_button()
def set_stop_track_clip_buttons(self, buttons):
self._stop_track_clip_buttons = buttons
self.__on_stop_track_value.replace_subjects(buttons or [])
self._update_stop_track_clip_buttons()
def set_clip_launch_buttons(self, buttons):
assert not buttons or buttons.width() == self._session_ring.num_tracks and buttons.height() == self._session_ring.num_scenes
if buttons:
for button, (x, y) in buttons.iterbuttons():
scene = self.scene(y)
slot = scene.clip_slot(x)
slot.set_launch_button(button)
else:
for x, y in product(xrange(self._session_ring.num_tracks), xrange(self._session_ring.num_scenes)):
scene = self.scene(y)
slot = scene.clip_slot(x)
slot.set_launch_button(None)
return
def set_scene_launch_buttons(self, buttons):
assert not buttons or buttons.width() == self._session_ring.num_scenes and buttons.height() == 1
if buttons:
for button, (x, _) in buttons.iterbuttons():
scene = self.scene(x)
scene.set_launch_button(button)
else:
for x in xrange(self._session_ring.num_scenes):
scene = self.scene(x)
scene.set_launch_button(None)
return
@listens('offset')
def __on_offsets_changed(self, *a):
if self.is_enabled():
self._reassign_scenes_and_tracks()
def _reassign_scenes_and_tracks(self):
self._reassign_tracks()
self._reassign_scenes()
def set_rgb_mode(self, color_palette, color_table, clip_slots_only=False):
u"""
Put the session into rgb mode by providing a color table and a color palette.
color_palette is a dictionary, mapping custom Live colors to MIDI ids. This can be
used to map a color directly to a CC value.
The color_table is a list of tuples, where the first element is a MIDI CC and the
        second is the RGB color it represents. The table will be used to find the nearest
matching color for a custom color. The table is used if there is no entry in the
palette.
"""
for y in xrange(self._session_ring.num_scenes):
scene = self.scene(y)
if not clip_slots_only:
scene.set_color_palette(color_palette)
scene.set_color_table(color_table)
for x in xrange(self._session_ring.num_tracks):
slot = scene.clip_slot(x)
slot.set_clip_palette(color_palette)
slot.set_clip_rgb_table(color_table)
def update(self):
super(SessionComponent, self).update()
if self._allow_updates:
if self.is_enabled():
self._update_stop_track_clip_buttons()
self._update_stop_all_clips_button()
self._reassign_scenes_and_tracks()
else:
self._update_requests += 1
def _update_stop_track_clip_buttons(self):
if self.is_enabled():
for index in xrange(self._session_ring.num_tracks):
self._update_stop_clips_led(index)
@listens('scenes')
def __on_scene_list_changed(self):
self._reassign_scenes()
@listens('visible_tracks')
def __on_track_list_changed(self):
self._reassign_tracks()
@listens('selected_scene')
def __on_selected_scene_changed(self):
if self._selected_scene != None:
self._selected_scene.set_scene(self.song.view.selected_scene)
return
def _update_stop_all_clips_button(self):
if self.is_enabled():
button = self._stop_all_button
if button:
button.set_light(button.is_pressed())
def _reassign_scenes(self):
scenes = self.song.scenes
for index, scene in enumerate(self._scenes):
scene_index = self._session_ring.scene_offset + index
if len(scenes) > scene_index:
scene.set_scene(scenes[scene_index])
scene.set_track_offset(self._session_ring.track_offset)
else:
self._scenes[index].set_scene(None)
if self._selected_scene != None:
self._selected_scene.set_track_offset(self._session_ring.track_offset)
return
def _reassign_tracks(self):
tracks_to_use = self._session_ring.tracks_to_use()
tracks = map(lambda t: t if isinstance(t, Live.Track.Track) else None, tracks_to_use)
self.__on_fired_slot_index_changed.replace_subjects(tracks, count())
self.__on_playing_slot_index_changed.replace_subjects(tracks, count())
self._update_stop_all_clips_button()
self._update_stop_track_clip_buttons()
@listens('value')
def __on_stop_all_value(self, value):
self._stop_all_value(value)
def _stop_all_value(self, value):
if self.is_enabled():
            if value != 0 or not self._stop_all_button.is_momentary():
self.song.stop_all_clips()
self._update_stop_all_clips_button()
@listens_group('value')
def __on_stop_track_value(self, value, button):
if self.is_enabled():
            if value != 0 or not button.is_momentary():
tracks = self._session_ring.tracks_to_use()
track_index = list(self._stop_track_clip_buttons).index(button) + self._session_ring.track_offset
if in_range(track_index, 0, len(tracks)) and tracks[track_index] in self.song.tracks:
tracks[track_index].stop_all_clips()
@listens_group('fired_slot_index')
def __on_fired_slot_index_changed(self, track_index):
button_index = track_index - self._session_ring.track_offset
self._update_stop_clips_led(button_index)
@listens_group('playing_slot_index')
def __on_playing_slot_index_changed(self, track_index):
button_index = track_index - self._session_ring.track_offset
self._update_stop_clips_led(button_index)
def _update_stop_clips_led(self, index):
tracks_to_use = self._session_ring.tracks_to_use()
track_index = index + self._session_ring.track_offset
if self.is_enabled() and self._stop_track_clip_buttons != None and index < len(self._stop_track_clip_buttons):
button = self._stop_track_clip_buttons[index]
if button != None:
value_to_send = None
if track_index < len(tracks_to_use) and tracks_to_use[track_index].clip_slots:
track = tracks_to_use[track_index]
if track.fired_slot_index == -2:
value_to_send = self._stop_clip_triggered_value
elif track.playing_slot_index >= 0:
value_to_send = self._stop_clip_value
if value_to_send == None:
button.turn_off()
elif in_range(value_to_send, 0, 128):
button.send_value(value_to_send)
else:
button.set_light(value_to_send)
return
| [
"[email protected]"
] | |
0f08f84f3b0afc3d967ffe5f50622a28dc592c42 | 59bd9c968a3a31a73d17f252fe716a3eacdf7f4f | /portfolio/Python/scrapy/hifix/__init__.py | cd69d839f9ef03f32871fe5b415125bd62ae134e | [
"Apache-2.0"
] | permissive | 0--key/lib | 113ff1e9cf75e446fa50eb065bc3bc36c090d636 | a619938ea523e96ab9e676ace51f5a129e6612e6 | refs/heads/master | 2023-06-23T22:17:54.244257 | 2023-06-21T17:42:57 | 2023-06-21T17:42:57 | 23,730,551 | 3 | 5 | null | 2016-03-22T08:19:30 | 2014-09-06T08:46:41 | Python | UTF-8 | Python | false | false | 23 | py | ACCOUNT_NAME = 'Hifix'
| [
"[email protected]"
] | |
276069bda00ba209a3738e7f975ad78e5243e7ac | a6155458f58f2e40e2583557cf807eda52a0013b | /catalog/database_helpers.py | e58588c048dacef8e37b8ccd0c9d2a8d74ce96d9 | [] | no_license | georgeplusplus-ZZ/udacity-project-2 | ab6c80052cc601508743fd5003ae5d09103d8fbb | 5442f1f99808af2f8663d59fdbd02be7dd7e425a | refs/heads/master | 2021-10-26T02:47:28.841918 | 2019-04-10T01:52:40 | 2019-04-10T01:52:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 803 | py | #George Haralampopoulos 2019
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from catalog import app
from catalog.database_setup import Base
import requests
def connect_to_database():
"""Connects to the database and returns an sqlalchemy session object."""
engine = create_engine('sqlite:///nycattractions.db')
Base.metadata.bind = engine
db_session = sessionmaker(bind=engine)
session = db_session()
return session
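# Typical call site (the Attraction model is hypothetical):
#   session = connect_to_database()
#   attractions = session.query(Attraction).all()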
def token_still_valid(access_token):
gapi_request = "https://www.googleapis.com/oauth2/v1/tokeninfo?access_token="
gapi_request+=access_token
resp = requests.get(gapi_request)
if(resp.status_code == 200):
resp_json = resp.json()
if resp_json.get("expires_in") > 0:
return True
return False | [
"[email protected]"
] | |
5ad8c85c4220faba9ed2da5a89e7b73fe36a248d | e4ab984c6d27167849f6c6e2d8ced3c0ee167c7c | /Edabit/Combinations.py | 3a5bf494a7486fc769a4c9d00ba532a776266274 | [] | no_license | ravalrupalj/BrainTeasers | b3bc2a528edf05ef20291367f538cf214c832bf9 | c3a48453dda29fe016ff89f21f8ee8d0970a3cf3 | refs/heads/master | 2023-02-10T02:09:59.443901 | 2021-01-06T02:03:34 | 2021-01-06T02:03:34 | 255,720,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 599 | py | #Combinations
#Create a function that takes a variable number of groups of items, and returns the number of ways the items can be arranged, with one item from each group. Order does not matter.
def combinations(*items):
    multi = 1
    for n in items:
        if n == 0:  # an empty group adds no choices, so skip it
            continue
        multi *= n
return multi
print(combinations(6, 7, 0))
#42
print(combinations(2, 3, 4, 5, 6, 7, 8, 9, 10))
#3628800
print(combinations(2, 3) )
#➞ 6
print(combinations(3, 7, 4) )
#➞ 84
print(combinations(2, 3, 4, 5))
#➞ 120
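# Equivalent sketch with reduce (an empty group multiplies by 1, matching
# the skip above):
#   from functools import reduce
#   def combinations(*items):
#       return reduce(lambda acc, n: acc * (n or 1), items, 1)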
| [
"[email protected]"
] | |
f0bb0595cc4ae45f13b3ffda4adab054d0aab904 | 71efd37d485c43f5872bf35a3fde45ba7aa7d91e | /flask_server_side/app.py | ba35e9257a2ee933810273b47cc01df0f388470f | [] | no_license | jreiher2003/push-notifications | e6f90cb056aad726a6f5049139b36a6dd5368aff | d59ae39929ad0d6fce71ae2ca5b255d940530d62 | refs/heads/master | 2021-01-11T12:03:42.209063 | 2016-12-15T14:16:00 | 2016-12-15T14:16:00 | 76,565,602 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,085 | py | from flask import Flask, render_template, request, redirect, url_for, Response, session
from flask_sse import sse
import redis
import datetime
app = Flask(__name__)
# app.config["REDIS_URL"] = "redis://localhost"
# app.register_blueprint(sse, url_prefix='/stream')
app.secret_key = 'asdf'
red = redis.StrictRedis()
# @app.route('/')
# def index():
# return render_template("index.html")
def event_stream():
pubsub = red.pubsub()
pubsub.subscribe('chat')
for message in pubsub.listen():
print message
yield 'data: %s\n\n' % message['data']
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
session['user'] = request.form['user']
return redirect('/')
return '<form action="" method="post">user: <input name="user">'
@app.route("/logout")
def logout():
session.pop('user')
return redirect('/')
@app.route('/')
def home():
if 'user' not in session:
return redirect('/login')
    # render_template takes template context as keyword arguments only
    return render_template('index2.html', user=session['user'])
@app.route('/post', methods=['POST'])
def post():
message = request.form['message']
user = session.get('user', 'anonymous')
now = datetime.datetime.now().replace(microsecond=0).time()
red.publish('chat', u'[%s] %s: %s' % (now.isoformat(), user, message))
return "print message"
@app.route('/stream')
def stream():
return Response(event_stream(), mimetype="text/event-stream")
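# Quick manual test (assumes a local redis-server; values are illustrative):
#   curl -N http://localhost:5020/stream              # hold the SSE stream open
#   curl -d 'message=hi' http://localhost:5020/post   # publish through redis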
# @app.route('/new')
# def new():
# return render_template("message.html")
# @app.route('/send', methods=['POST'])
# def send():
# data = {"message": request.form.get('message')}
# sse.publish(type="testevent", data=data, channel='test')
# return redirect(url_for('new'))
# @app.route('/hello')
# def publish_hello():
# data = {"message": "Hello!"}
# sse.publish(data=data, type='greeting', channel='test2')
# return "Message sent!"
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5020, debug=True, threaded=True) | [
"[email protected]"
] | |
b8f3957b6f14d803419ff4d6519073e3f1c398a8 | 489da428bc0e1ab8f5117c0f8ba5ddb7aff05360 | /scripts/motors1.py | 7d42b31c04843bce8fcf8b7e1775bd5f39123592 | [
"BSD-3-Clause"
] | permissive | norihisayamada/pimouse_ros | 4f77e769b7ac9cbfc4af6e703764af1d2df56b30 | 3b07880a6ceb584d92cf640c1a38864130d44189 | refs/heads/master | 2020-04-17T03:03:49.424738 | 2019-02-23T11:05:38 | 2019-02-23T11:05:38 | 166,164,916 | 1 | 0 | BSD-3-Clause | 2019-01-17T05:18:41 | 2019-01-17T05:18:41 | null | UTF-8 | Python | false | false | 2,166 | py | #!/usr/bin/env python
#encoding: utf8
import sys, rospy, math
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
class Motor():
def __init__(self):
if not self.set_power(True): sys.exit(1)
rospy.on_shutdown(self.set_power)
self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
self.last_time = rospy.Time.now()
self.using_cmd_vel = False
def set_power(self,onoff=False):
en = "/dev/rtmotoren0"
try:
with open(en,'w') as f:
f.write("1\n" if onoff else "0\n")
self.is_on = onoff
return True
except:
rospy.logerr("cannot write to " + en)
return False
def set_raw_freq(self,left_hz,right_hz):
if not self.is_on:
rospy.logerr("not enpowered")
return
try:
with open("/dev/rtmotor_raw_l0",'w') as lf,\
open("/dev/rtmotor_raw_r0",'w') as rf:
lf.write(str(int(round(left_hz))) + "\n")
rf.write(str(int(round(right_hz))) + "\n")
except:
rospy.logerr("cannot write to rtmotor_raw_*")
def callback_raw_freq(self,message):
self.set_raw_freq(message.left_hz,message.right_hz)
def callback_cmd_vel(self,message):
forward_hz = 80000.0*message.linear.x/(9*math.pi)
rot_hz = 400.0*message.angular.z/math.pi
self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
self.using_cmd_vel = True
self.last_time = rospy.Time.now()
if __name__ == '__main__':
rospy.init_node('motors')
m = Motor()
rate = rospy.Rate(10)
while not rospy.is_shutdown():
if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
m.set_raw_freq(0,0)
m.using_cmd_vel = False
rate.sleep()
# Copyright 2016 Ryuichi Ueda
# Released under the BSD License.
# To make line numbers be identical with the book, this statement is written here. Don't move it to the header.
| [
"[email protected]"
] | |
de1581f90bcd424674cf7ab97354de05f7ccfff9 | b69e78b6757d7e9ca90272391116fa8c197d9d53 | /testEfficientDockSize.py | 57604164ef8abdcf8b573be965f813d415cdcd17 | [] | no_license | wangyingtaodeepin/autotest-dde-dock | d1cd3146c42d026e9a2f70205a72c65fb1927c7d | 89e37500f6ba994df482599a0eeb3f1c175d51de | refs/heads/master | 2021-01-09T20:39:48.989833 | 2016-08-10T01:33:48 | 2016-08-10T01:33:48 | 64,373,794 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,697 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from lib import utils
from lib import runner
result = True
class MyTestResult(runner.MyTextTestResult):
def addError(self, test, err):
super(MyTestResult, self).addError(test, err)
global result
result = result and False
def addFailure(self, test, err):
super(MyTestResult, self).addFailure(test, err)
global result
result = result and False
class EfficientDockSize(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.caseid = '68490'
cls.casename = "all-2493:高效模式大图标显示"
cls.ddedockobject = utils.getDdeDockObject()
cls.defaultdisplaymode = utils.getDdeDockDisplayMode()
cls.defaultposition = utils.getDdeDockPosition()
if utils.dock.displaymode_efficient != cls.defaultdisplaymode:
utils.setDdeDockDisplayMode(utils.dock.displaymode_efficient)
if utils.dock.position_bottom != cls.defaultposition:
utils.setDdeDockPosition(utils.dock.position_bottom)
@classmethod
def tearDownClass(cls):
global result
utils.commitresult(cls.caseid, result)
if utils.getDdeDockDisplayMode() != cls.defaultdisplaymode:
utils.setDdeDockDisplayMode(cls.defaultdisplaymode)
if utils.getDdeDockPosition() != cls.defaultposition:
utils.setDdeDockPosition(cls.defaultposition)
def setUp(self):
pass
def tearDown(self):
pass
def testIconSize(self):
launcher = self.ddedockobject.child("Launcher")
dbus_iconsize = utils.getDdeDockIconSize()
displaymode = utils.getDdeDockDisplayMode()
calculate_iconsize_y = 0
calculate_iconsize_x = 0
if utils.dock.displaymode_fashion == displaymode:
calculate_iconsize_y = int(dbus_iconsize * 1.5)
calculate_iconsize_x = int(calculate_iconsize_y * 1.1)
elif utils.dock.displaymode_efficient == displaymode:
calculate_iconsize_y = int(dbus_iconsize * 1.2)
calculate_iconsize_x = int(calculate_iconsize_y * 1.4)
self.assertEquals((calculate_iconsize_x, calculate_iconsize_y),
launcher.size)
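        # Worked example (icon size is hypothetical): with dbus_iconsize == 48
        # in efficient mode, y = int(48 * 1.2) = 57 and x = int(57 * 1.4) = 79,
        # so the launcher item is expected to measure 79x57 pixels.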
def testChangeIconSizeToLarge(self):
utils.m.click(int(utils.resolution.width/2), utils.resolution.height, 2)
utils.dockmenu.findMainWindow()
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.enter_key)
dbus_iconsize = utils.getDdeDockIconSize()
self.assertTrue(dbus_iconsize == utils.dock.iconsize_large)
def testChangeIconSizeToMedium(self):
utils.m.click(int(utils.resolution.width/2), utils.resolution.height, 2)
utils.dockmenu.findMainWindow()
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.enter_key)
dbus_iconsize = utils.getDdeDockIconSize()
self.assertTrue(dbus_iconsize == utils.dock.iconsize_medium)
def testChangeIconSizeToSmall(self):
utils.m.click(int(utils.resolution.width/2), utils.resolution.height, 2)
utils.dockmenu.findMainWindow()
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.left_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.down_key)
utils.keySingle(utils.k.enter_key)
dbus_iconsize = utils.getDdeDockIconSize()
self.assertTrue(dbus_iconsize == utils.dock.iconsize_small)
def suite():
suite = unittest.TestSuite()
suite.addTest(EfficientDockSize('testIconSize'))
suite.addTest(EfficientDockSize('testChangeIconSizeToLarge'))
suite.addTest(EfficientDockSize('testIconSize'))
suite.addTest(EfficientDockSize('testChangeIconSizeToMedium'))
suite.addTest(EfficientDockSize('testIconSize'))
suite.addTest(EfficientDockSize('testChangeIconSizeToSmall'))
suite.addTest(EfficientDockSize('testIconSize'))
suite.addTest(EfficientDockSize('testChangeIconSizeToMedium'))
suite.addTest(EfficientDockSize('testIconSize'))
return suite
if __name__ == "__main__":
unittest.TextTestRunner(resultclass=MyTestResult).run(suite())
| [
"[email protected]"
] | |
145aa2bb74e0496699c2e6db24bb5117d315f1d1 | 6817c0ca3f3a0340d4741e8850477c4bcc26300a | /tests/models/convnext/test_modeling_tf_convnext.py | 72981c09d65e450e19b526ad4bf14126f7766ef1 | [
"Apache-2.0"
] | permissive | cedrickchee/transformers-llama | 4df9d829e9e07cd7f813db6f3fbb1bc892cc443a | 98467b6d9276ed857238173776ec39d05f14346c | refs/heads/llama_push | 2023-05-23T12:37:44.608815 | 2023-03-17T10:17:14 | 2023-03-17T10:17:14 | 609,750,737 | 27 | 4 | Apache-2.0 | 2023-03-17T10:17:16 | 2023-03-05T05:39:49 | Python | UTF-8 | Python | false | false | 11,809 | py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Testing suite for the TensorFlow ConvNext model. """
import inspect
import unittest
from typing import List, Tuple
from transformers import ConvNextConfig
from transformers.testing_utils import require_tf, require_vision, slow
from transformers.utils import cached_property, is_tf_available, is_vision_available
from ...test_configuration_common import ConfigTester
from ...test_modeling_tf_common import TFModelTesterMixin, floats_tensor, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin
if is_tf_available():
import tensorflow as tf
from transformers import TFConvNextForImageClassification, TFConvNextModel
if is_vision_available():
from PIL import Image
from transformers import ConvNextFeatureExtractor
class TFConvNextModelTester:
def __init__(
self,
parent,
batch_size=13,
image_size=32,
num_channels=3,
num_stages=4,
hidden_sizes=[10, 20, 30, 40],
depths=[2, 2, 3, 2],
is_training=True,
use_labels=True,
intermediate_size=37,
hidden_act="gelu",
type_sequence_label_size=10,
initializer_range=0.02,
num_labels=3,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.image_size = image_size
self.num_channels = num_channels
self.num_stages = num_stages
self.hidden_sizes = hidden_sizes
self.depths = depths
self.is_training = is_training
self.use_labels = use_labels
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.type_sequence_label_size = type_sequence_label_size
self.initializer_range = initializer_range
self.scope = scope
def prepare_config_and_inputs(self):
pixel_values = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size])
labels = None
if self.use_labels:
labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
config = self.get_config()
return config, pixel_values, labels
def get_config(self):
return ConvNextConfig(
num_channels=self.num_channels,
hidden_sizes=self.hidden_sizes,
depths=self.depths,
num_stages=self.num_stages,
hidden_act=self.hidden_act,
is_decoder=False,
initializer_range=self.initializer_range,
)
def create_and_check_model(self, config, pixel_values, labels):
model = TFConvNextModel(config=config)
result = model(pixel_values, training=False)
# expected last hidden states: B, C, H // 32, W // 32
self.parent.assertEqual(
result.last_hidden_state.shape,
(self.batch_size, self.hidden_sizes[-1], self.image_size // 32, self.image_size // 32),
)
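        # With the tester defaults above (batch_size=13, hidden_sizes[-1]=40,
        # image_size=32), the expected shape works out to (13, 40, 1, 1).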
def create_and_check_for_image_classification(self, config, pixel_values, labels):
config.num_labels = self.type_sequence_label_size
model = TFConvNextForImageClassification(config)
result = model(pixel_values, labels=labels, training=False)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.type_sequence_label_size))
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, pixel_values, labels = config_and_inputs
inputs_dict = {"pixel_values": pixel_values}
return config, inputs_dict
@require_tf
class TFConvNextModelTest(TFModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
"""
Here we also overwrite some of the tests of test_modeling_common.py, as ConvNext does not use input_ids, inputs_embeds,
attention_mask and seq_length.
"""
all_model_classes = (TFConvNextModel, TFConvNextForImageClassification) if is_tf_available() else ()
pipeline_model_mapping = (
{"feature-extraction": TFConvNextModel, "image-classification": TFConvNextForImageClassification}
if is_tf_available()
else {}
)
test_pruning = False
test_onnx = False
test_resize_embeddings = False
test_head_masking = False
has_attentions = False
def setUp(self):
self.model_tester = TFConvNextModelTester(self)
self.config_tester = ConfigTester(
self,
config_class=ConvNextConfig,
has_text_modality=False,
hidden_size=37,
)
@unittest.skip(reason="ConvNext does not use inputs_embeds")
def test_inputs_embeds(self):
pass
@unittest.skipIf(
not is_tf_available() or len(tf.config.list_physical_devices("GPU")) == 0,
reason="TF does not support backprop for grouped convolutions on CPU.",
)
def test_keras_fit(self):
super().test_keras_fit()
@unittest.skip(reason="ConvNext does not support input and output embeddings")
def test_model_common_attributes(self):
pass
def test_forward_signature(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
model = model_class(config)
signature = inspect.signature(model.call)
# signature.parameters is an OrderedDict => so arg_names order is deterministic
arg_names = [*signature.parameters.keys()]
expected_arg_names = ["pixel_values"]
self.assertListEqual(arg_names[:1], expected_arg_names)
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
@unittest.skipIf(
not is_tf_available() or len(tf.config.list_physical_devices("GPU")) == 0,
reason="TF does not support backprop for grouped convolutions on CPU.",
)
def test_dataset_conversion(self):
super().test_dataset_conversion()
def test_hidden_states_output(self):
def check_hidden_states_output(inputs_dict, config, model_class):
model = model_class(config)
outputs = model(**self._prepare_for_class(inputs_dict, model_class))
hidden_states = outputs.encoder_hidden_states if config.is_encoder_decoder else outputs.hidden_states
expected_num_stages = self.model_tester.num_stages
self.assertEqual(len(hidden_states), expected_num_stages + 1)
# ConvNext's feature maps are of shape (batch_size, num_channels, height, width)
self.assertListEqual(
list(hidden_states[0].shape[-2:]),
[self.model_tester.image_size // 4, self.model_tester.image_size // 4],
)
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
inputs_dict["output_hidden_states"] = True
check_hidden_states_output(inputs_dict, config, model_class)
# check that output_hidden_states also work using config
del inputs_dict["output_hidden_states"]
config.output_hidden_states = True
check_hidden_states_output(inputs_dict, config, model_class)
# Since ConvNext does not have any attention we need to rewrite this test.
def test_model_outputs_equivalence(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
def check_equivalence(model, tuple_inputs, dict_inputs, additional_kwargs={}):
tuple_output = model(tuple_inputs, return_dict=False, **additional_kwargs)
dict_output = model(dict_inputs, return_dict=True, **additional_kwargs).to_tuple()
def recursive_check(tuple_object, dict_object):
if isinstance(tuple_object, (List, Tuple)):
for tuple_iterable_value, dict_iterable_value in zip(tuple_object, dict_object):
recursive_check(tuple_iterable_value, dict_iterable_value)
elif tuple_object is None:
return
else:
self.assertTrue(
all(tf.equal(tuple_object, dict_object)),
msg=(
"Tuple and dict output are not equal. Difference:"
f" {tf.math.reduce_max(tf.abs(tuple_object - dict_object))}"
),
)
recursive_check(tuple_output, dict_output)
for model_class in self.all_model_classes:
model = model_class(config)
tuple_inputs = self._prepare_for_class(inputs_dict, model_class)
dict_inputs = self._prepare_for_class(inputs_dict, model_class)
check_equivalence(model, tuple_inputs, dict_inputs)
tuple_inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
dict_inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
check_equivalence(model, tuple_inputs, dict_inputs)
tuple_inputs = self._prepare_for_class(inputs_dict, model_class)
dict_inputs = self._prepare_for_class(inputs_dict, model_class)
check_equivalence(model, tuple_inputs, dict_inputs, {"output_hidden_states": True})
tuple_inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
dict_inputs = self._prepare_for_class(inputs_dict, model_class, return_labels=True)
check_equivalence(model, tuple_inputs, dict_inputs, {"output_hidden_states": True})
def test_for_image_classification(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_for_image_classification(*config_and_inputs)
@slow
def test_model_from_pretrained(self):
model = TFConvNextModel.from_pretrained("facebook/convnext-tiny-224")
self.assertIsNotNone(model)
# We will verify our results on an image of cute cats
def prepare_img():
image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
return image
@require_tf
@require_vision
class TFConvNextModelIntegrationTest(unittest.TestCase):
@cached_property
def default_feature_extractor(self):
return (
ConvNextFeatureExtractor.from_pretrained("facebook/convnext-tiny-224") if is_vision_available() else None
)
@slow
def test_inference_image_classification_head(self):
model = TFConvNextForImageClassification.from_pretrained("facebook/convnext-tiny-224")
feature_extractor = self.default_feature_extractor
image = prepare_img()
inputs = feature_extractor(images=image, return_tensors="tf")
# forward pass
outputs = model(**inputs)
# verify the logits
expected_shape = tf.TensorShape((1, 1000))
self.assertEqual(outputs.logits.shape, expected_shape)
expected_slice = tf.constant([-0.0260, -0.4739, 0.1911])
tf.debugging.assert_near(outputs.logits[0, :3], expected_slice, atol=1e-4)
| [
"[email protected]"
] | |
fa065210ccebf15da4cef79217b04ce380761e8e | d9eafd325ab775b7b32af2dd0b63afc7310be53d | /pfwra/home/migrations/0007_auto_20210326_0755.py | 3d4f72d76270133753f0df224c1d54935fa4def2 | [
"MIT"
] | permissive | johnkellehernz/pfwra | 54b0db7debaed629d6003e0826a15bde2fd4a197 | 5b8c718bb2f1aaa34e9a718e07baf270294f7ba6 | refs/heads/main | 2023-05-01T14:39:42.419993 | 2021-05-13T11:00:07 | 2021-05-13T11:00:07 | 353,514,688 | 0 | 0 | MIT | 2021-03-31T23:15:32 | 2021-03-31T23:15:31 | null | UTF-8 | Python | false | false | 2,085 | py | # Generated by Django 3.0.11 on 2021-03-26 07:55
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('home', '0006_auto_20210324_2004'),
]
operations = [
migrations.AlterField(
model_name='homepage',
name='featured',
field=wagtail.core.fields.StreamField([('cards', wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock(required=False)), ('header', wagtail.core.blocks.CharBlock(label='Header text')), ('text', wagtail.core.blocks.TextBlock(help_text='Write an introduction for the card', required=False)), ('link', wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(label='Link label', required=False)), ('page', wagtail.core.blocks.PageChooserBlock(help_text='Choose a page to link to', label='Page', required=False)), ('external_url', wagtail.core.blocks.URLBlock(help_text='Or choose an external URL to link to', label='External URL', required=False))], help_text='Link URL and link text (button)', required=False))]))], blank=True, help_text='Featured cards'),
),
migrations.AlterField(
model_name='homepage',
name='quotations',
field=wagtail.core.fields.StreamField([('quotes', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(label='Quote title', required=False)), ('text', wagtail.core.blocks.TextBlock(label='Body of quote')), ('author', wagtail.core.blocks.CharBlock(label='Quote title', required=False)), ('link', wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(label='Link label', required=False)), ('page', wagtail.core.blocks.PageChooserBlock(help_text='Choose a page to link to', label='Page', required=False)), ('external_url', wagtail.core.blocks.URLBlock(help_text='Or choose an external URL to link to', label='External URL', required=False))], required=False))]))], blank=True, help_text='Featured quotes'),
),
]
| [
"[email protected]"
] | |
2e052f979575d022eae6b685e56b2e6187c3b127 | e11e0d06e6a55c1e84c9d0cb885651cf2035e476 | /ddco_code/lear.py | 90d635b116057181ccc75c82b42031a380d46d6d | [
"MIT"
] | permissive | DanielTakeshi/debridement-code | ae5a6413e58e9bae352f0adeae1d09185937dbed | a889dcc6e1c96ac0466afa9e4f7e76015dc3c958 | refs/heads/master | 2023-01-09T09:05:37.165092 | 2023-01-02T18:58:16 | 2023-01-02T18:58:16 | 96,563,293 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | import pickle
import numpy as np
from sklearn.ensemble import RandomForestRegressor
data = []
f = open('data.p', 'rb')  # pickled data must be read in binary mode
while True:
try:
d = pickle.load(f)
data.append(d)
except EOFError:
        break
f.close()
X = np.zeros((len(data),2))
Y = np.zeros((len(data),2))
for i,d in enumerate(data):
Y[i,0] = np.ravel(d['pos'][0])[0]
Y[i,1] = np.ravel(d['pos'][1])[0]
X[i,0] = d['estimate'][0]
X[i,1] = d['estimate'][1]
print(X, Y)
regx = RandomForestRegressor(n_estimators=3)
regx.fit(X,Y[:,0])
regy = RandomForestRegressor()
regy.fit(X,Y[:,1])
#Yp = reg.predict(X)
pickle.dump((regx,regy), open('model-sep.p','wb'))
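# To reuse the calibration later (sketch; `est` is a hypothetical Nx2 array
# of estimates):
#   with open('model-sep.p', 'rb') as f:
#       regx, regy = pickle.load(f)
#   x_pred, y_pred = regx.predict(est), regy.predict(est)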
import matplotlib.pyplot as plt
plt.scatter(Y[:,0], Y[:,1], c='r')
print(X, Y)
plt.scatter(regx.predict(X), regy.predict(X),c='b', marker='x')
plt.show()
| [
"[email protected]"
] | |
b1b625c333c9755c0f379779cf9d9b2613b21940 | f22ca9aecda111a019502b462ce6772cb22d9425 | /test/test_model_response_cart_script_list.py | 529c99308a7ddcc364fd9e5a1f1cfcabd2bb5062 | [] | no_license | sivanv-unbxd/a2c-sdk-pim | cac05bc6335ddc3c4121d43e2dc476a6fec14965 | 51a07a0b7f90d74569ad14b47b174da7ac1fc374 | refs/heads/main | 2023-05-29T05:45:32.279821 | 2021-06-09T03:52:11 | 2021-06-09T03:52:11 | 375,218,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 982 | py | # coding: utf-8
"""
Swagger API2Cart
API2Cart # noqa: E501
OpenAPI spec version: 1.1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.model_response_cart_script_list import ModelResponseCartScriptList # noqa: E501
from swagger_client.rest import ApiException
class TestModelResponseCartScriptList(unittest.TestCase):
"""ModelResponseCartScriptList unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testModelResponseCartScriptList(self):
"""Test ModelResponseCartScriptList"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.model_response_cart_script_list.ModelResponseCartScriptList() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
802549a81d933b4c840beebfe9acc73fcbda6d31 | 6b9084d234c87d7597f97ec95808e13f599bf9a1 | /Dataset/Base/Video/Filter/func.py | b127cbe1276665f6b16fba571f79183c2de87d26 | [] | no_license | LitingLin/ubiquitous-happiness | 4b46234ce0cb29c4d27b00ec5a60d3eeb52c26fc | aae2d764e136ca4a36c054212b361dd7e8b22cba | refs/heads/main | 2023-07-13T19:51:32.227633 | 2021-08-03T16:02:03 | 2021-08-03T16:02:03 | 316,664,903 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,014 | py | from Dataset.Filter.DataCleaning.ObjectCategory import DataCleaning_ObjectCategory
from Dataset.Filter.Selector import Selector
from Dataset.Filter.SortBySequenceFrameSize import SortBySequenceFrameSize
from Dataset.Filter.DataCleaning.Integrity import DataCleaning_Integrity
from Dataset.Filter.DataCleaning.BoundingBox import DataCleaning_BoundingBox
from Dataset.Filter.DataCleaning.AnnotationStandard import DataCleaning_AnnotationStandard
from .tweak_tool import VideoDatasetTweakTool
__all__ = ['apply_filters_on_video_dataset_']
def apply_filters_on_video_dataset_(dataset: dict, filters: list):
if len(filters) == 0:
return dataset
if 'filters' not in dataset:
dataset['filters'] = []
filters_backup = dataset['filters']
dataset_tweak_tool = VideoDatasetTweakTool(dataset)
for filter_ in filters:
if isinstance(filter_, Selector):
dataset_tweak_tool.apply_index_filter(filter_(len(dataset['sequences'])))
elif isinstance(filter_, DataCleaning_BoundingBox):
if filter_.fit_in_image_size:
dataset_tweak_tool.bounding_box_fit_in_image_size()
if filter_.update_validity:
dataset_tweak_tool.bounding_box_update_validity()
if filter_.remove_invalid_objects:
dataset_tweak_tool.bounding_box_remove_non_validity_objects()
if filter_.remove_empty_objects:
dataset_tweak_tool.bounding_box_remove_empty_annotation_objects()
elif isinstance(filter_, DataCleaning_Integrity):
if filter_.remove_zero_annotation_objects:
dataset_tweak_tool.remove_zero_annotation_objects()
if filter_.remove_zero_annotation_video_head_tail:
dataset_tweak_tool.remove_empty_annotation_head_tail()
if filter_.remove_invalid_image:
dataset_tweak_tool.remove_invalid_image()
elif isinstance(filter_, DataCleaning_ObjectCategory):
if filter_.category_ids_to_remove is not None:
dataset_tweak_tool.remove_category_ids(filter_.category_ids_to_remove)
if filter_.make_category_id_sequential:
dataset_tweak_tool.make_category_id_sequential()
elif isinstance(filter_, SortBySequenceFrameSize):
dataset_tweak_tool.sort_by_sequence_size(filter_.descending)
elif isinstance(filter_, DataCleaning_AnnotationStandard):
dataset_tweak_tool.annotation_standard_conversion(filter_.bounding_box_format,
filter_.pixel_coordinate_system,
filter_.bounding_box_coordinate_system,
filter_.pixel_definition)
else:
raise RuntimeError(f"{type(filter_)} not implemented for Video Dataset")
filters_backup.append(filter_.serialize())
dataset['filters'] = filters_backup
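# Usage sketch (the filter constructor arguments are hypothetical; only the
# attribute names used above are known):
#   apply_filters_on_video_dataset_(dataset, [
#       DataCleaning_Integrity(remove_invalid_image=True),
#       SortBySequenceFrameSize(descending=True),
#   ])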
| [
"[email protected]"
] | |
5468bb1dd4831420db8468b95ef6e63f16084a87 | fb8cbebdf034b2f478943752d5443afc82c6eef5 | /tuirer/venv/lib/python3.6/site-packages/pip/_internal/index.py | 3c4674e0cbdf8bdc13e73d9ac957ebca950939d4 | [] | no_license | fariasjr/CitiTuirer | f64e0ec93ef088f8140bb0961d2ad4ed3b59448a | deb3f7a9c2d45b8a7f54639037f097b99abdac11 | refs/heads/master | 2020-03-24T05:10:36.261050 | 2018-08-01T20:24:30 | 2018-08-01T20:24:30 | 142,477,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 41,196 | py | """Routines related to PyPI, indexes"""
from __future__ import absolute_import
import cgi
import itertools
import logging
import mimetypes
import os
import posixpath
import re
import sys
from collections import namedtuple
from pip._internal.compat import ipaddress
from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip._internal.exceptions import (BestVersionAlreadyInstalled,
DistributionNotFound,
InvalidWheelFilename, UnsupportedWheel)
from pip._internal.models.index import PyPI
from pip._internal.pep425tags import get_supported
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS,
cached_property, normalize_path,
remove_auth_from_url, splitext)
from pip._internal.utils.packaging import check_requires_python
from pip._internal.wheel import Wheel, wheel_ext
from pip._vendor import html5lib, requests, six
from pip._vendor.distlib.compat import unescape
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']
SECURE_ORIGINS = [
# protocol, hostname, port
# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
("https", "*", "*"),
("*", "localhost", "*"),
("*", "127.0.0.0/8", "*"),
("*", "::1/128", "*"),
("file", "*", None),
# ssh is always secure.
("ssh", "*", "*"),
]
logger = logging.getLogger(__name__)
class InstallationCandidate(object):
def __init__(self, project, version, location):
self.project = project
self.version = parse_version(version)
self.location = location
self._key = (self.project, self.version, self.location)
def __repr__(self):
return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
self.project, self.version, self.location,
)
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, InstallationCandidate):
return NotImplemented
return method(self._key, other._key)
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links.
"""
def __init__(self, find_links, index_urls, allow_all_prereleases=False,
trusted_hosts=None, process_dependency_links=False,
session=None, format_control=None, platform=None,
versions=None, abi=None, implementation=None,
prefer_binary=False):
"""Create a PackageFinder.
:param format_control: A FormatControl object or None. Used to control
the selection of source packages / binary packages when consulting
the index and links.
:param platform: A string or None. If None, searches for packages
that are supported by the current system. Otherwise, will find
packages that can be built on the platform passed in. These
packages will only be downloaded for distribution: they will
not be built locally.
:param versions: A list of strings or None. This is passed directly
to pep425tags.py in the get_supported() method.
:param abi: A string or None. This is passed directly
to pep425tags.py in the get_supported() method.
:param implementation: A string or None. This is passed directly
to pep425tags.py in the get_supported() method.
"""
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
"'session'"
)
# Build find_links. If an argument starts with ~, it may be
# a local file relative to a home directory. So try normalizing
# it and if it exists, use the normalized version.
# This is deliberately conservative - it might be fine just to
# blindly normalize anything starting with a ~...
self.find_links = []
for link in find_links:
if link.startswith('~'):
new_link = normalize_path(link)
if os.path.exists(new_link):
link = new_link
self.find_links.append(link)
self.index_urls = index_urls
self.dependency_links = []
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.format_control = format_control or FormatControl(set(), set())
# Domains that we won't emit warnings for when not using HTTPS
self.secure_origins = [
("*", host, "*")
for host in (trusted_hosts if trusted_hosts else [])
]
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
# The Session we'll use to make requests
self.session = session
# The valid tags to check potential found wheel candidates against
self.valid_tags = get_supported(
versions=versions,
platform=platform,
abi=abi,
impl=implementation,
)
# Do we prefer old, but valid, binary dist over new source dist
self.prefer_binary = prefer_binary
# If we don't have TLS enabled, then WARN if anyplace we're looking
# relies on TLS.
if not HAS_TLS:
for link in itertools.chain(self.index_urls, self.find_links):
parsed = urllib_parse.urlparse(link)
if parsed.scheme == "https":
logger.warning(
"pip is configured with locations that require "
"TLS/SSL, however the ssl module in Python is not "
"available."
)
break
def get_formatted_locations(self):
lines = []
if self.index_urls and self.index_urls != [PyPI.simple_url]:
lines.append(
"Looking in indexes: {}".format(", ".join(
remove_auth_from_url(url) for url in self.index_urls))
)
if self.find_links:
lines.append(
"Looking in links: {}".format(", ".join(self.find_links))
)
return "\n".join(lines)
def add_dependency_links(self, links):
# # FIXME: this shouldn't be global list this, it should only
# # apply to requirements of the package that specifies the
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
deprecated(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
replacement=None,
gone_in="18.2",
issue=4187,
)
self.dependency_links.extend(links)
@staticmethod
def _sort_locations(locations, expand_dir=False):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files,urls)
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if os.path.isdir(path):
if expand_dir:
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url:
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
logger.warning(
"Url '%s' is ignored: it is neither a file "
"nor a directory.", url,
)
elif is_url(url):
# Only add url with clear scheme
urls.append(url)
else:
logger.warning(
"Url '%s' is ignored. It is either a non-existing "
"path or lacks a specific scheme.", url,
)
return files, urls
def _candidate_sort_key(self, candidate):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min(self.valid_tags)
3. source archives
If prefer_binary was set, then all wheels are sorted above sources.
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
support_num = len(self.valid_tags)
build_tag = tuple()
binary_preference = 0
if candidate.location.is_wheel:
# can raise InvalidWheelFilename
wheel = Wheel(candidate.location.filename)
if not wheel.supported(self.valid_tags):
raise UnsupportedWheel(
"%s is not a supported wheel for this platform. It "
"can't be sorted." % wheel.filename
)
if self.prefer_binary:
binary_preference = 1
pri = -(wheel.support_index_min(self.valid_tags))
if wheel.build_tag is not None:
match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
build_tag_groups = match.groups()
build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
else: # sdist
pri = -(support_num)
return (binary_preference, candidate.version, build_tag, pri)
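        # Illustration (values are hypothetical): with prefer_binary set, a
        # wheel keys as (1, <1.0>, (2, ''), -3) while a newer sdist keys as
        # (0, <1.1>, (), -support_num); the wheel still sorts higher because
        # the binary-preference flag is compared before the version.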
def _validate_secure_origin(self, logger, location):
# Determine if this url used a secure transport mechanism
parsed = urllib_parse.urlparse(str(location))
origin = (parsed.scheme, parsed.hostname, parsed.port)
# The protocol to use to see if the protocol matches.
# Don't count the repository type as part of the protocol: in
# cases such as "git+ssh", only use "ssh". (I.e., Only verify against
# the last scheme.)
protocol = origin[0].rsplit('+', 1)[-1]
# Determine if our origin is a secure origin by looking through our
# hardcoded list of secure origins, as well as any additional ones
# configured on this PackageFinder instance.
for secure_origin in (SECURE_ORIGINS + self.secure_origins):
if protocol != secure_origin[0] and secure_origin[0] != "*":
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin[1]
if (
isinstance(origin[1], six.text_type) or
origin[1] is None
)
else origin[1].decode("utf8")
)
network = ipaddress.ip_network(
secure_origin[1]
if isinstance(secure_origin[1], six.text_type)
else secure_origin[1].decode("utf8")
)
except ValueError:
# We don't have both a valid address or a valid network, so
# we'll check this origin against hostnames.
if (origin[1] and
origin[1].lower() != secure_origin[1].lower() and
secure_origin[1] != "*"):
continue
else:
# We have a valid address and network, so see if the address
# is contained within the network.
if addr not in network:
continue
# Check to see if the port patches
if (origin[2] != secure_origin[2] and
secure_origin[2] != "*" and
secure_origin[2] is not None):
continue
# If we've gotten here, then this origin matches the current
# secure origin and we should return True
return True
# If we've gotten to this point, then the origin isn't secure and we
# will not accept it as a valid location to search. We will however
# log a warning that we are ignoring it.
logger.warning(
"The repository located at %s is not a trusted or secure host and "
"is being ignored. If this repository is available via HTTPS we "
"recommend you use HTTPS instead, otherwise you may silence "
"this warning and allow it anyway with '--trusted-host %s'.",
parsed.hostname,
parsed.hostname,
)
return False
def _get_index_urls_locations(self, project_name):
"""Returns the locations found via self.index_urls
Checks the url_name on the main (first in the list) index and
        uses this url_name to produce all locations
"""
def mkurl_pypi_url(url):
loc = posixpath.join(
url,
urllib_parse.quote(canonicalize_name(project_name)))
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's
# behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
return [mkurl_pypi_url(url) for url in self.index_urls]
def find_all_candidates(self, project_name):
"""Find all available InstallationCandidate for project_name
This checks index_urls, find_links and dependency_links.
All versions found are returned as an InstallationCandidate list.
See _link_package_versions for details on which files are accepted
"""
index_locations = self._get_index_urls_locations(project_name)
index_file_loc, index_url_loc = self._sort_locations(index_locations)
fl_file_loc, fl_url_loc = self._sort_locations(
self.find_links, expand_dir=True,
)
dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
file_locations = (Link(url) for url in itertools.chain(
index_file_loc, fl_file_loc, dep_file_loc,
))
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
# We explicitly do not trust links that came from dependency_links
# We want to filter out any thing which does not have a secure origin.
url_locations = [
link for link in itertools.chain(
(Link(url) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
(Link(url) for url in dep_url_loc),
)
if self._validate_secure_origin(logger, link)
]
logger.debug('%d location(s) to search for versions of %s:',
len(url_locations), project_name)
for location in url_locations:
logger.debug('* %s', location)
canonical_name = canonicalize_name(project_name)
formats = fmt_ctl_formats(self.format_control, canonical_name)
search = Search(project_name, canonical_name, formats)
find_links_versions = self._package_versions(
# We trust every directly linked archive in find_links
(Link(url, '-f') for url in self.find_links),
search
)
page_versions = []
for page in self._get_pages(url_locations, project_name):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
self._package_versions(page.links, search)
)
dependency_versions = self._package_versions(
(Link(url) for url in self.dependency_links), search
)
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)
file_versions = self._package_versions(file_locations, search)
if file_versions:
file_versions.sort(reverse=True)
logger.debug(
'Local files found: %s',
', '.join([
url_to_path(candidate.location.url)
for candidate in file_versions
])
)
# This is an intentional priority ordering
return (
file_versions + find_links_versions + page_versions +
dependency_versions
)
def find_requirement(self, req, upgrade):
"""Try to find a Link matching req
Expects req, an InstallRequirement and upgrade, a boolean
Returns a Link if found,
Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
"""
all_candidates = self.find_all_candidates(req.name)
# Filter out anything which doesn't match our specifier
compatible_versions = set(
req.specifier.filter(
# We turn the version object into a str here because otherwise
# when we're debundled but setuptools isn't, Python will see
# packaging.version.Version and
# pkg_resources._vendor.packaging.version.Version as different
# types. This way we'll use a str as a common data interchange
# format. If we stop using the pkg_resources provided specifier
# and start using our own, we can drop the cast to str().
[str(c.version) for c in all_candidates],
prereleases=(
self.allow_all_prereleases
if self.allow_all_prereleases else None
),
)
)
applicable_candidates = [
# Again, converting to str to deal with debundling.
c for c in all_candidates if str(c.version) in compatible_versions
]
if applicable_candidates:
best_candidate = max(applicable_candidates,
key=self._candidate_sort_key)
else:
best_candidate = None
if req.satisfied_by is not None:
installed_version = parse_version(req.satisfied_by.version)
else:
installed_version = None
if installed_version is None and best_candidate is None:
logger.critical(
'Could not find a version that satisfies the requirement %s '
'(from versions: %s)',
req,
', '.join(
sorted(
{str(c.version) for c in all_candidates},
key=parse_version,
)
)
)
raise DistributionNotFound(
'No matching distribution found for %s' % req
)
best_installed = False
if installed_version and (
best_candidate is None or
best_candidate.version <= installed_version):
best_installed = True
if not upgrade and installed_version is not None:
if best_installed:
logger.debug(
'Existing installed version (%s) is most up-to-date and '
'satisfies requirement',
installed_version,
)
else:
logger.debug(
'Existing installed version (%s) satisfies requirement '
'(most up-to-date version is %s)',
installed_version,
best_candidate.version,
)
return None
if best_installed:
            # We have an existing version, and it's the best version
logger.debug(
'Installed version (%s) is most up-to-date (past versions: '
'%s)',
installed_version,
', '.join(sorted(compatible_versions, key=parse_version)) or
"none",
)
raise BestVersionAlreadyInstalled
logger.debug(
'Using version %s (newest of versions: %s)',
best_candidate.version,
', '.join(sorted(compatible_versions, key=parse_version))
)
return best_candidate.location
def _get_pages(self, locations, project_name):
"""
        Yields HTMLPage objects from the given locations, skipping
locations that have errors.
"""
seen = set()
for location in locations:
if location in seen:
continue
seen.add(location)
page = self._get_page(location)
if page is None:
continue
yield page
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
def _package_versions(self, links, search):
result = []
for link in self._sort_links(links):
v = self._link_package_versions(link, search)
if v is not None:
result.append(v)
return result
def _log_skipped_link(self, link, reason):
if link not in self.logged_links:
logger.debug('Skipping link %s; %s', link, reason)
self.logged_links.add(link)
def _link_package_versions(self, link, search):
"""Return an InstallationCandidate or None"""
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
ext = link.ext
else:
egg_info, ext = link.splitext()
if not ext:
self._log_skipped_link(link, 'not a file')
return
if ext not in SUPPORTED_EXTENSIONS:
self._log_skipped_link(
link, 'unsupported archive format: %s' % ext,
)
return
if "binary" not in search.formats and ext == wheel_ext:
self._log_skipped_link(
link, 'No binaries permitted for %s' % search.supplied,
)
return
if "macosx10" in link.path and ext == '.zip':
self._log_skipped_link(link, 'macosx10 one')
return
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
self._log_skipped_link(link, 'invalid wheel filename')
return
if canonicalize_name(wheel.name) != search.canonical:
self._log_skipped_link(
link, 'wrong project name (not %s)' % search.supplied)
return
if not wheel.supported(self.valid_tags):
self._log_skipped_link(
link, 'it is not compatible with this Python')
return
version = wheel.version
# This should be up by the search.ok_binary check, but see issue 2700.
if "source" not in search.formats and ext != wheel_ext:
self._log_skipped_link(
link, 'No sources permitted for %s' % search.supplied,
)
return
if not version:
version = egg_info_matches(egg_info, search.supplied, link)
if version is None:
self._log_skipped_link(
link, 'Missing project version for %s' % search.supplied)
return
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
self._log_skipped_link(
link, 'Python version is incorrect')
return
try:
support_this_python = check_requires_python(link.requires_python)
except specifiers.InvalidSpecifier:
logger.debug("Package %s has an invalid Requires-Python entry: %s",
link.filename, link.requires_python)
support_this_python = True
if not support_this_python:
logger.debug("The package %s is incompatible with the python"
"version in use. Acceptable python versions are:%s",
link, link.requires_python)
return
logger.debug('Found link %s, version: %s', link, version)
return InstallationCandidate(search.supplied, version, link)
def _get_page(self, link):
return HTMLPage.get_page(link, session=self.session)
def egg_info_matches(
egg_info, search_name, link,
_egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
"""Pull the version part out of a string.
:param egg_info: The string to parse. E.g. foo-2.1
:param search_name: The name of the package this belongs to. None to
infer the name. Note that this cannot unambiguously parse strings
like foo-2-2 which might be foo, 2-2 or foo-2, 2.
:param link: The link the string came from, for logging on failure.
"""
match = _egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s', link)
return None
if search_name is None:
full_match = match.group(0)
return full_match[full_match.index('-'):]
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
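# Illustrative sketch (not part of the original module): a quick check of how
# egg_info_matches() splits a name/version string. The helper below is
# hypothetical and is never called by pip itself.
def _egg_info_matches_demo():
    # 'pip-1.3.1' with search_name 'pip' yields the version part '1.3.1';
    # link is only used for logging on failure, so None is fine here.
    assert egg_info_matches('pip-1.3.1', 'pip', None) == '1.3.1'
    # with search_name=None the leading dash is kept, per the docstring
    assert egg_info_matches('pip-1.3.1', None, None) == '-1.3.1'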
class HTMLPage(object):
"""Represents one page, along with its URL"""
def __init__(self, content, url, headers=None):
# Determine if we have any encoding information in our headers
encoding = None
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
encoding = params['charset']
self.content = content
self.parsed = html5lib.parse(
self.content,
transport_encoding=encoding,
namespaceHTMLElements=False,
)
self.url = url
self.headers = headers
def __str__(self):
return self.url
@classmethod
def get_page(cls, link, skip_archives=True, session=None):
if session is None:
raise TypeError(
"get_page() missing 1 required keyword argument: 'session'"
)
url = link.url
url = url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
from pip._internal.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %s URL %s', scheme, link)
return None
try:
if skip_archives:
filename = link.filename
for bad_ext in ARCHIVE_EXTENSIONS:
if filename.endswith(bad_ext):
content_type = cls._get_content_type(
url, session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
logger.debug('Getting page %s', url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = \
urllib_parse.urlparse(url)
if (scheme == 'file' and
os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
resp = session.get(
url,
headers={
"Accept": "text/html",
"Cache-Control": "max-age=600",
},
)
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
            # requirement of a url. Unless we issue a HEAD request on every
# url we cannot know ahead of time for sure if something is HTML
# or not. However we can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
inst = cls(resp.content, resp.url, resp.headers)
except requests.HTTPError as exc:
cls._handle_fail(link, exc, url)
except SSLError as exc:
reason = "There was a problem confirming the ssl certificate: "
reason += str(exc)
cls._handle_fail(link, reason, url, meth=logger.info)
except requests.ConnectionError as exc:
cls._handle_fail(link, "connection error: %s" % exc, url)
except requests.Timeout:
cls._handle_fail(link, "timed out", url)
else:
return inst
@staticmethod
def _handle_fail(link, reason, url, meth=None):
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s - skipping", link, reason)
@staticmethod
def _get_content_type(url, session):
"""Get the Content-Type of the given url, using a HEAD request"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in {'http', 'https'}:
# FIXME: some warning or something?
# assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
@cached_property
def base_url(self):
bases = [
x for x in self.parsed.findall(".//base")
if x.get("href") is not None
]
if bases and bases[0].get("href"):
return bases[0].get("href")
else:
return self.url
@property
def links(self):
"""Yields all links in the page"""
for anchor in self.parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
pyrequire = anchor.get('data-requires-python')
pyrequire = unescape(pyrequire) if pyrequire else None
yield Link(url, self, requires_python=pyrequire)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return self._clean_re.sub(
lambda match: '%%%2x' % ord(match.group(0)), url)
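# Illustrative sketch (not part of the original module): the same regex as
# HTMLPage._clean_re, shown standalone so the percent-encoding is easy to see.
# The helper is hypothetical and never called by pip itself.
def _clean_link_demo():
    import re
    clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
    cleaned = clean_re.sub(
        lambda match: '%%%2x' % ord(match.group(0)), 'http://host/my file')
    # the space becomes %20 while '%', ':' and '/' are left untouched
    assert cleaned == 'http://host/my%20file'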
class Link(object):
def __init__(self, url, comes_from=None, requires_python=None):
"""
Object representing a parsed link from https://pypi.org/simple/*
url:
url of the resource pointed to (href of the link)
comes_from:
instance of HTMLPage where the link was found, or string.
requires_python:
String containing the `Requires-Python` metadata field, specified
in PEP 345. This may be specified by a data-requires-python
attribute in the HTML link tag, as described in PEP 503.
"""
# url can be a UNC windows share
if url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
def __str__(self):
if self.requires_python:
rp = ' (requires-python:%s)' % self.requires_python
else:
rp = ''
if self.comes_from:
return '%s (from %s)%s' % (self.url, self.comes_from, rp)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url == other.url
def __ne__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url != other.url
def __lt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url < other.url
def __le__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url <= other.url
def __gt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url > other.url
def __ge__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
name = urllib_parse.unquote(name)
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urllib_parse.urlsplit(self.url)[0]
@property
def netloc(self):
return urllib_parse.urlsplit(self.url)[1]
@property
def path(self):
return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
@property
def subdirectory_fragment(self):
match = self._subdirectory_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
)
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def is_wheel(self):
return self.ext == wheel_ext
@property
def is_artifact(self):
"""
Determines if this points to an actual artifact (e.g. a tarball) or if
it points to an "abstract" thing like a path or a VCS location.
"""
from pip._internal.vcs import vcs
if self.scheme in vcs.all_schemes:
return False
return True
FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.
If a field is falsy, it isn't set. If it is {':all:'}, it should match all
packages except those listed in the other field. Only one field can be set
to {':all:'} at a time. The rest of the time exact package name matches
are listed, with any given package only showing up in one field at a time.
"""
def fmt_ctl_handle_mutual_exclude(value, target, other):
new = value.split(',')
while ':all:' in new:
other.clear()
target.clear()
target.add(':all:')
del new[:new.index(':all:') + 1]
if ':none:' not in new:
# Without a none, we want to discard everything as :all: covers it
return
for name in new:
if name == ':none:':
target.clear()
continue
name = canonicalize_name(name)
other.discard(name)
target.add(name)
def fmt_ctl_formats(fmt_ctl, canonical_name):
result = {"binary", "source"}
if canonical_name in fmt_ctl.only_binary:
result.discard('source')
elif canonical_name in fmt_ctl.no_binary:
result.discard('binary')
elif ':all:' in fmt_ctl.only_binary:
result.discard('source')
elif ':all:' in fmt_ctl.no_binary:
result.discard('binary')
return frozenset(result)
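# Illustrative sketch (not part of the original module): with ':all:' in
# no_binary and nothing in only_binary, every project is restricted to
# source distributions unless listed as an explicit exception.
def _fmt_ctl_formats_demo():
    fmt_ctl = FormatControl(no_binary={':all:'}, only_binary=set())
    assert fmt_ctl_formats(fmt_ctl, 'pip') == frozenset({'source'})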
def fmt_ctl_no_binary(fmt_ctl):
fmt_ctl_handle_mutual_exclude(
':all:', fmt_ctl.no_binary, fmt_ctl.only_binary,
)
Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.
:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
with 'binary' or 'source' or both in it.
"""
| [
"[email protected]"
] | |
22deb9e6511ee52c9f835d0a9e15c263ceac0035 | 1a5ea2453c6365e6f06031e66a6ef8f1ed6be4ce | /main/views.py | 9a81be0a45bad9a530661eeed1b72febbf39b514 | [] | no_license | gusdn3477/ourSeoul | 4283f6a8e49033049ca4c8c6e0386fbfc403ac45 | 378c89118825e391b85eef734bc287aca7b0d05a | refs/heads/main | 2023-04-04T20:11:09.730391 | 2021-04-18T15:24:42 | 2021-04-18T15:24:42 | 343,732,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,831 | py | from django.shortcuts import render, redirect
from .models import Post
# Create your views here.
def index(request):
return render(request, 'main/index.html')
def blog(request):
postlist = Post.objects.all()
return render(request, 'main/blog.html', {'postlist' : postlist})
def posting(request, pk):
post = Post.objects.get(pk=pk)
return render(request, 'main/posting.html', {'post':post})
def new_post(request):
'''if request.method == "POST":
if 'file' in request.FILES:
file = request.FILES['file']
filename = file._name
fp = open('%s/%s' % ('media/image/', filename), 'wb')
for chunk in file.chunks():
fp.write(chunk)
fp.close()
return HttpResponse('File Uploaded')
return HttpResponse('Failed to Upload File')'''
if request.method == 'POST':
form = Post()
form.postname = request.POST['postname']
form.contents = request.POST['contents']
try:
form.mainphoto = request.FILES['mainphoto']
except:
pass
form.save()
'''
mainphoto = request.FILES['mainphoto']
if mainphoto:
new_article = Post.objects.create(
postname=request.POST['postname'],
contents=request.POST['contents'],
mainphoto=mainphoto,
)
else:
new_article=Post.objects.create(
postname=request.POST['postname'],
contents=request.POST['contents'],
#mainphoto=request.FILES['mainphoto'],
)
'''
return redirect('/main/blog/')
return render(request, 'main/new_post.html')
def remove_post(request, pk):
post = Post.objects.get(pk=pk)
if request.method == 'POST':
post.delete()
return redirect('/blog/')
return render(request, 'main/remove_post.html', {'Post' : post})
'''
def upload(request):
if request.method == "POST":
if 'file' in request.FILES:
file = request.FILES['file']
filename = file._name
fp = open('%s/%s' % ('media/image/', filename), 'wb')
for chunk in file.chunks():
fp.write(chunk)
fp.close()
return HttpResponse('File Uploaded')
return HttpResponse('Failed to Upload File')
def upload_pic(request):
if request.method == 'POST':
form = ImageUploadForm(request.POST, request.FILES)
if form.is_valid():
m = ExampleModel.objects.get(pk=course_id)
m.model_pic = form.cleaned_data['image']
m.save()
return HttpResponse('image upload success')
return HttpResponseForbidden('allowed only via POST')
''' | [
"[email protected]"
] | |
363a4f1f4d7a00c347f29e9c2e247a5ba694dacf | e8912ed90e97730b465b1e65084c1dbcc741a73e | /기본/알고리즘 D3/연습문제3.py | ac3b9db49165d8f43f49703867b4736e2a845fa9 | [] | no_license | yhnb3/Algorithm_lecture | a0dcefc27ed17bec3cadae56d69e3cc64239cbfb | 461367e907e2b8a6a0cdc629e6a9029d9b03fba1 | refs/heads/master | 2020-12-22T05:39:05.412680 | 2020-04-10T09:16:19 | 2020-04-10T09:16:19 | 236,685,987 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | dx_j = [1, 0, -1, 0] # ->, v, <-, ^ 방향순
dy_i = [0, 1, 0, -1]
Array = [ [9,20,2,18,11], [19,1,25,3,21], [8,24,10,17,7], [15,4,16,5,6], [12,13,22,23,14] ]
IdxTbl = [] # (index1, index2) - key not included
N = len(Array)
N2 = N*N
for w0 in range(N//2+1):
i = j = w0
if N-2*w0-1 == 0:
IdxTbl.append((i, j))
continue
for d in range(4):
for _ in range(N-2*w0-1):
IdxTbl.append((i, j))
ni = i + dy_i[d]
nj = j + dx_j[d]
i, j = ni, nj
print(IdxTbl)
# print Array Thr. IdxTbl
for i in range(N2) :
print(Array[IdxTbl[i][0]][IdxTbl[i][1]], end=" ")
print()
# Sort Array Thr. IdxTbl
for i in range(N2-1) :
min = i
for j in range(i+1,N2) :
        if Array[IdxTbl[min][0]][IdxTbl[min][1]] > Array[IdxTbl[j][0]][IdxTbl[j][1]] : min = j # compare by value (key)
idx_i0 = IdxTbl[i][0]; idx_i1 = IdxTbl[i][1]
min_i0 = IdxTbl[min][0]; min_i1 = IdxTbl[min][1]
Array[min_i0][min_i1], Array[idx_i0][idx_i1] = Array[idx_i0][idx_i1], Array[min_i0][min_i1]
# print Array Thr. IdxTbl
for i in range(N2) :
print(Array[IdxTbl[i][0]][IdxTbl[i][1]], end=" ")
print()
for i in range(N) :
print(Array[i]) | [
"[email protected]"
] | |
85924c934ef3229aa88c7b3b2028fdc8176a95b4 | 2500a2ab1f43c649fb0b4fe3b9e3420efa017efa | /Push/Sysex.py | d43c691dffbd6990a171e056ad2e9f5919ad2ce4 | [] | no_license | cappytan3/AbletonLive9_RemoteScripts | 0ce3e2d728190ba2ff5d2422cd03ae8a5df9d46f | 65d08fd4ccdadd8366eca6f3c0fa7932516147bf | refs/heads/master | 2021-01-15T11:50:14.152579 | 2014-04-11T17:37:22 | 2014-04-11T17:37:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,802 | py | #Embedded file name: /Users/versonator/Hudson/live/Projects/AppLive/Resources/MIDI Remote Scripts/Push/Sysex.py
START = (240, 71, 127, 21)
CLEAR_LINE1 = START + (28, 0, 0, 247)
CLEAR_LINE2 = START + (29, 0, 0, 247)
CLEAR_LINE3 = START + (30, 0, 0, 247)
CLEAR_LINE4 = START + (31, 0, 0, 247)
WRITE_LINE1 = START + (24, 0, 69, 0)
WRITE_LINE2 = START + (25, 0, 69, 0)
WRITE_LINE3 = START + (26, 0, 69, 0)
WRITE_LINE4 = START + (27, 0, 69, 0)
SET_AFTERTOUCH_MODE = START + (92, 0, 1)
CONTRAST_PREFIX = START + (122, 0, 1)
CONTRAST_ENQUIRY = START + (122, 0, 0, 247)
BRIGHTNESS_PREFIX = START + (124, 0, 1)
BRIGHTNESS_ENQUIRY = START + (124, 0, 0, 247)
ALL_PADS_SENSITIVITY_PREFIX = START + (93, 0, 32)
PAD_SENSITIVITY_PREFIX = START + (90, 0, 33)
def to_sysex_int(number, unused_parameter_name):
return (number >> 12 & 15,
number >> 8 & 15,
number >> 4 & 15,
number & 15)
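# Illustrative note (not part of the original file): to_sysex_int() packs a
# 16-bit value into four 4-bit nibbles, most significant nibble first, which
# is how numeric parameters are laid out inside the sysex payloads above.
# The helper below is hypothetical and never called by the script itself.
def _to_sysex_int_demo():
    # 1000 == 0x03E8, so the nibbles are (0, 3, 14, 8)
    assert to_sysex_int(1000, 'demo') == (0, 3, 14, 8)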
CALIBRATION_SET = START + (87, 0, 20) + to_sysex_int(215, 'Preload Scale Factor') + to_sysex_int(1000, 'Recalibration Interval') + to_sysex_int(200, 'Stuck Pad Detection Threshold') + to_sysex_int(0, 'Stuck Pad NoteOff Threshold Adder') + to_sysex_int(200, 'Pad Ignore Time') + (247,)
MODE_CHANGE = START + (98, 0, 1)
USER_MODE = 1
LIVE_MODE = 0
WELCOME_MESSAGE = START + (1, 1, 247)
GOOD_BYE_MESSAGE = START + (1, 0, 247)
IDENTITY_PREFIX = START + (6, 2)
IDENTITY_ENQUIRY = START + (6, 1, 247)
DONGLE_PREFIX = START + (80, 0)
def make_presentation_message(application):
return START + (96,
0,
4,
65,
application.get_major_version(),
application.get_minor_version(),
application.get_bugfix_version(),
247)
IDENTITY_ENQUIRY = (240, 126, 0, 6, 1, 247)
IDENTITY_PREFIX = (240, 126, 0, 6, 2, 71, 21, 0, 25)
DONGLE_ENQUIRY_PREFIX = START + (80,)
DONGLE_PREFIX = START + (81,) | [
"[email protected]"
] | |
5e6038c7c43a05a8327b743a7542d215c3b5ade8 | 67379c2ae929266f303edc783c8c62edb521174b | /rm/ATResourceManager.py | 8182dc0f14ad1ddfd23c07e14d3537d3ca95cfe6 | [] | no_license | bbb11808/seata-python | d20be83093d6d084ad36d9292a8ee18ad3bfc8c6 | c53b605be423c781d38e599e5bade8df8c81c2d9 | refs/heads/master | 2023-02-11T01:22:18.488881 | 2021-01-05T10:10:08 | 2021-01-05T10:10:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,417 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# @author jsbxyyx
# @since 1.0
from core.context.RootContext import RootContext
from core.model.BranchStatus import BranchStatus
from core.protocol.RegisterRMRequestResponse import RegisterRMRequest
from core.protocol.ResultCode import ResultCode
from core.protocol.transaction.BranchRegisterRequestResponse import BranchRegisterRequest
from core.protocol.transaction.BranchReportRequestResponse import BranchReportRequest
from core.protocol.transaction.GlobalLockQueryRequestResponse import GlobalLockQueryRequest
from exception.RmTransactionException import RmTransactionException
from exception.ShouldNeverHappenException import ShouldNeverHappenException
from exception.TransactionException import TransactionException
from exception.TransactionExceptionCode import TransactionExceptionCode
from rm.RMClient import RMClient
from rm.datasource.PooledDBProxy import PooledDBProxy
from rm.datasource.undo.UndoLogManagerFactory import UndoLogManagerFactory
manager = None
class ATResourceManager(object):
def __init__(self):
self.pool_db_proxy_cache = dict()
pass
@staticmethod
def get():
global manager
if manager is None:
manager = ATResourceManager()
return manager
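    # Illustrative note (not in the original): get() lazily builds a
    # process-wide singleton so that every registered PooledDBProxy shares
    # one resource cache and one RM channel.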
def register_resource(self, pooled_db_proxy):
if not isinstance(pooled_db_proxy, PooledDBProxy):
raise TypeError("Register resource type error.")
self.pool_db_proxy_cache[pooled_db_proxy.get_resource_id()] = pooled_db_proxy
request = RegisterRMRequest()
request.transaction_service_group = RMClient.get().transaction_service_group
request.application_id = RMClient.get().application_id
RMClient.get().send_sync_request(request)
def lock_query(self, branch_type, resource_id, xid, lock_keys):
try:
request = GlobalLockQueryRequest()
request.xid = xid
request.lock_key = lock_keys
request.resource_id = resource_id
if RootContext.in_global_transaction() or RootContext.require_global_lock():
response = RMClient.get().send_sync_request(request)
else:
raise RuntimeError("unknow situation!")
if response.result_code == ResultCode.Failed:
raise TransactionException(response.transaction_exception_code, "Response[{}]".format(response.msg))
return response.lockable
except TimeoutError as e:
raise RmTransactionException(TransactionExceptionCode.IO, "RPC Timeout", e)
except RuntimeError as e:
raise RmTransactionException(TransactionExceptionCode.BranchReportFailed, "Runtime", e)
def branch_register(self, branch_type, resource_id, client_id, xid, application_data, lock_keys):
try:
request = BranchRegisterRequest()
request.xid = xid
request.branch_type = branch_type
request.resource_id = resource_id
request.lock_key = lock_keys
request.application_data = application_data
response = RMClient.get().send_sync_request(request)
if response.result_code == ResultCode.Failed:
raise RmTransactionException("response {} {}".format(response.transaction_exception_code, response.msg))
except TimeoutError as e:
raise RmTransactionException(TransactionExceptionCode.IO, "RPC Timeout", e)
except RuntimeError as e:
raise RmTransactionException(TransactionExceptionCode.BranchReportFailed, "Runtime", e)
def branch_report(self, branch_type, xid, branch_id, status, application_data):
try:
request = BranchReportRequest()
request.xid = xid
request.branch_id = branch_id
request.status = status
request.application_data = application_data
response = RMClient.get().send_sync_request(request)
if response.result_code == ResultCode.Failed:
raise RmTransactionException(response.transaction_exception_code, "response [{}]".format(response.msg))
except TimeoutError as e:
raise RmTransactionException(TransactionExceptionCode.IO, "RPC Timeout", e)
except RuntimeError as e:
raise RmTransactionException(TransactionExceptionCode.BranchReportFailed, "Runtime", e)
def branch_rollback(self, branch_type, xid, branch_id, resource_id, application_data):
pool_db_proxy = self.pool_db_proxy_cache.get(resource_id)
if pool_db_proxy is None:
raise ShouldNeverHappenException()
try:
UndoLogManagerFactory.get_undo_log_manager(pool_db_proxy.get_db_type()).undo(pool_db_proxy, xid, branch_id)
except TransactionException as e:
print("branchRollback failed. branch_type:[{}], xid:[{}], branch_id:[{}], resource_id:[{}], "
"application_data:[{}], reason:[{}]".format(branch_type, xid, branch_id, resource_id,
application_data, e.message))
if e.code == TransactionExceptionCode.BranchRollbackFailed_Unretriable:
return BranchStatus.PhaseTwo_RollbackFailed_Unretryable
else:
return BranchStatus.PhaseTwo_RollbackFailed_Retryable
return BranchStatus.PhaseTwo_Rollbacked
| [
"[email protected]"
] | |
638adc9899a92436950cf5c686a1ff37d51413a6 | 565e2df93c18343d43c6dd216d5885155465f281 | /test-runner/method_tests.py | bb50afcfcc250789af5aab358c044af0d034b5d2 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | Azure/iot-sdks-e2e-fx | ac394a188dee5660734c5afea70e12d666dbc92b | 1d92dd6c4907760f4d04db251e2f53d5dd325b36 | refs/heads/master | 2023-09-01T08:25:38.190244 | 2023-06-13T00:04:21 | 2023-06-13T00:04:21 | 165,910,472 | 15 | 12 | MIT | 2023-07-21T19:03:42 | 2019-01-15T19:27:48 | Python | UTF-8 | Python | false | false | 4,435 | py | # Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for
# full license information.
import pytest
import json
import asyncio
import limitations
from utilities import next_integer, next_random_string
from horton_logging import logger
async def run_method_call_test(source, destination):
"""
Helper function which invokes a method call on one module and responds to it from another module
"""
method_name = "test_method_{}".format(next_integer("test_method"))
method_payload = {"payloadData": next_random_string("method_payload")}
status_code = 1000 + next_integer("status_code")
method_invoke_parameters = {
"methodName": method_name,
"payload": method_payload,
"responseTimeoutInSeconds": 75,
"connectTimeoutInSeconds": 60,
}
method_response_body = {"responseData": next_random_string("method_response")}
if limitations.needs_manual_connect(destination):
await destination.connect2()
await destination.enable_methods()
# start listening for method calls on the destination side
receiver_future = asyncio.ensure_future(
destination.wait_for_method_and_return_response(
method_name, status_code, method_invoke_parameters, method_response_body
)
)
if getattr(source, "methods_registered", False):
registration_sleep = 0.5
else:
source.methods_registered = True
registration_sleep = 10
logger(
"sleeping for {} seconds to make sure all registration is complete".format(
registration_sleep
)
)
await asyncio.sleep(registration_sleep)
# invoking the call from caller side
if getattr(destination, "module_id", None):
sender_future = source.call_module_method(
destination.device_id, destination.module_id, method_invoke_parameters
)
else:
sender_future = source.call_device_method(
destination.device_id, method_invoke_parameters
)
(response, _) = await asyncio.gather(sender_future, receiver_future)
logger("method call complete. Response is:")
logger(str(response))
# wait for that response to arrive back at the source and verify that it's all good.
assert response["status"] == status_code
# edge bug: the response that edge returns is stringified. The same response that comes back from an iothub service call is not stringified
if isinstance(response["payload"], str):
response["payload"] = json.loads(response["payload"])
assert response["payload"] == method_response_body
await receiver_future
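# Illustrative note (not in the original helper): the responder future is
# started before the method is invoked so the call cannot race ahead of the
# destination's handler registration; asyncio.gather then awaits the
# invocation and the response together.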
class BaseReceiveMethodCallTests(object):
@pytest.mark.it("Can receive a method call from the IoTHub service")
@pytest.mark.it("Can connect, enable methods, and disconnect")
async def test_module_client_connect_enable_methods_disconnect(self, client):
if limitations.needs_manual_connect(client):
await client.connect2()
await client.enable_methods()
class ReceiveMethodCallFromServiceTests(BaseReceiveMethodCallTests):
@pytest.mark.it("Can receive a method call from the IoTHub service")
async def test_method_call_invoked_from_service(self, client, service):
await run_method_call_test(source=service, destination=client)
class ReceiveMethodCallFromModuleTests(BaseReceiveMethodCallTests):
@pytest.mark.it("Can receive a method call from an EdgeHub module")
async def test_method_call_invoked_from_friend(self, client, friend):
await run_method_call_test(source=friend, destination=client)
class InvokeMethodCallOnModuleTests(object):
@pytest.mark.it("Can invoke a method call on an EdgeHub module")
async def test_method_call_invoked_on_friend(self, client, friend):
if limitations.uses_shared_key_auth(client):
limitations.skip_test_for(client, ["pythonv2", "c"])
await run_method_call_test(source=client, destination=friend)
class InvokeMethodCallOnLeafDeviceTests(object):
@pytest.mark.it("Can invoke a method call on an EdgeHub leaf device")
async def test_method_call_invoked_on_leaf_device(self, client, leaf_device):
if limitations.uses_shared_key_auth(client):
limitations.skip_test_for(client, ["pythonv2", "c"])
await run_method_call_test(source=client, destination=leaf_device)
| [
"[email protected]"
] | |
7458582adedec7294e8f56451dc4f117eb73def2 | 603519e0d087967caac72cce854dc7f1dfaa5262 | /bioinformatics stronghold/SSET.py | 27596c48d68aa9aa5edd2b69acaab2bc90d4456f | [] | no_license | Morpheus2112/Rosalind-exercise | e591570521a12905864cb7e7f72b66816da7ae3a | e1047a5f6725e07c8cbf17594bfe4969cbc5d708 | refs/heads/master | 2022-07-25T00:07:17.316099 | 2020-02-16T07:18:21 | 2020-02-16T07:18:21 | 240,848,262 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | # -*- coding: utf-8 -*-
"""
see http://rosalind.info/problems/sset/
"""
def sset():
n = int(open("rosalind_sset.txt").read())
return 2**n % 10**6
print sset() | [
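# Illustrative sketch (not in the original): an n-element set has 2**n
# subsets, so for n = 3 the answer is 8. The hypothetical helper below
# mirrors sset() without the file I/O.
def _sset_demo(n):
    return 2 ** n % 10 ** 6
# e.g. _sset_demo(3) == 8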
"[email protected]"
] | |
be56131d0af5ece6f138489628e0b374cdafc512 | 901f9fb4c3fe2e5ac716462795b365e9e68f8808 | /eventsourcing/tests/persistence_tests/test_infrastructure_factory.py | 353331bd93dea0bc8b6d7e30c774eb22828af802 | [
"BSD-3-Clause"
] | permissive | alexyarmoshko/eventsourcing | e64571fd85c8d9ece5114d950cd47d7649420890 | 59f79eeaf897d349a9fdd3436ce18fcce78a77a3 | refs/heads/master | 2023-03-06T04:27:15.346517 | 2023-02-19T04:10:22 | 2023-02-19T04:10:22 | 175,817,681 | 0 | 0 | BSD-3-Clause | 2019-03-15T12:38:06 | 2019-03-15T12:38:04 | null | UTF-8 | Python | false | false | 730 | py | from unittest.case import TestCase
from eventsourcing.persistence import InfrastructureFactory
from eventsourcing.utils import Environment, get_topic
class TestInfrastructureFactoryErrors(TestCase):
def test_construct_raises_exception(self):
with self.assertRaises(EnvironmentError):
InfrastructureFactory.construct(
Environment(
env={InfrastructureFactory.PERSISTENCE_MODULE: "invalid topic"}
)
)
with self.assertRaises(AssertionError):
InfrastructureFactory.construct(
Environment(
env={InfrastructureFactory.PERSISTENCE_MODULE: get_topic(object)}
)
)
| [
"[email protected]"
] | |
dd108645cdb1bf8c3d67e2aa1b361f00d42b223f | dce8531d0e9665a09205f70a909ac1424f7e09eb | /preprocessor/ljspeech.py | f8511d9cee17454f9dd79d14376ddb543f554717 | [
"MIT"
] | permissive | keonlee9420/Comprehensive-Tacotron2 | 40a6e5fcecf55ee02a8523a7e2701b6124748bee | 1eff7f08c41a2127bbe300b6d66ce5c966422b25 | refs/heads/main | 2023-08-07T16:10:15.133301 | 2022-02-20T14:30:07 | 2022-02-20T14:44:36 | 388,990,172 | 39 | 17 | MIT | 2023-07-31T13:08:05 | 2021-07-24T03:36:08 | Python | UTF-8 | Python | false | false | 6,997 | py | import os
import random
import json
import tgt
import librosa
import numpy as np
from tqdm import tqdm
import audio as Audio
from text import text_to_sequence
from utils.tools import save_mel_and_audio
random.seed(1234)
class Preprocessor:
def __init__(self, config):
self.dataset = config["dataset"]
self.in_dir = config["path"]["corpus_path"]
self.out_dir = config["path"]["preprocessed_path"]
self.val_size = config["preprocessing"]["val_size"]
self.sampling_rate = config["preprocessing"]["audio"]["sampling_rate"]
self.skip_len = config["preprocessing"]["audio"]["skip_len"]
self.trim_top_db = config["preprocessing"]["audio"]["trim_top_db"]
self.filter_length = config["preprocessing"]["stft"]["filter_length"]
self.hop_length = config["preprocessing"]["stft"]["hop_length"]
self.silence_audio_size = config["preprocessing"]["audio"]["silence_audio_size"]
self.pre_emphasis = config["preprocessing"]["audio"]["pre_emphasis"]
self.max_wav_value = config["preprocessing"]["audio"]["max_wav_value"]
self.sanity_check = config["preprocessing"]["sanity_check"]
self.cleaners = config["preprocessing"]["text"]["text_cleaners"]
self.STFT = Audio.stft.TacotronSTFT(
config["preprocessing"]["stft"]["filter_length"],
config["preprocessing"]["stft"]["hop_length"],
config["preprocessing"]["stft"]["win_length"],
config["preprocessing"]["mel"]["n_mel_channels"],
config["preprocessing"]["audio"]["sampling_rate"],
config["preprocessing"]["mel"]["mel_fmin"],
config["preprocessing"]["mel"]["mel_fmax"],
)
self.val_prior = self.val_prior_names(os.path.join(self.out_dir, "val.txt"))
def val_prior_names(self, val_prior_path):
val_prior_names = set()
if os.path.isfile(val_prior_path):
print("Load pre-defined validation set...")
with open(val_prior_path, "r", encoding="utf-8") as f:
for m in f.readlines():
val_prior_names.add(m.split("|")[0])
return list(val_prior_names)
else:
return None
def build_from_path(self):
os.makedirs((os.path.join(self.out_dir, "text")), exist_ok=True)
os.makedirs((os.path.join(self.out_dir, "mel")), exist_ok=True)
print("Processing Data ...")
out = list()
train = list()
val = list()
n_frames = 0
mel_min = float('inf')
mel_max = -float('inf')
speakers = {self.dataset: 0}
with open(os.path.join(self.in_dir, "metadata.csv"), encoding="utf-8") as f:
for line in tqdm(f.readlines()):
parts = line.strip().split("|")
basename = parts[0]
text = parts[2]
wav_path = os.path.join(self.in_dir, "wavs", "{}.wav".format(basename))
ret = self.process_utterance(text, wav_path, self.dataset, basename)
if ret is None:
continue
else:
info, n, m_min, m_max = ret
if self.val_prior is not None:
if basename not in self.val_prior:
train.append(info)
else:
val.append(info)
else:
out.append(info)
if mel_min > m_min:
mel_min = m_min
if mel_max < m_max:
mel_max = m_max
n_frames += n
# Save files
with open(os.path.join(self.out_dir, "speakers.json"), "w") as f:
f.write(json.dumps(speakers))
with open(os.path.join(self.out_dir, "stats.json"), "w") as f:
stats = {
"mel": [
float(mel_min),
float(mel_max),
],
}
f.write(json.dumps(stats))
print(
"Total time: {} hours".format(
n_frames * self.hop_length / self.sampling_rate / 3600
)
)
if self.val_prior is not None:
assert len(out) == 0
random.shuffle(train)
train = [r for r in train if r is not None]
val = [r for r in val if r is not None]
else:
assert len(train) == 0 and len(val) == 0
random.shuffle(out)
out = [r for r in out if r is not None]
train = out[self.val_size :]
val = out[: self.val_size]
# Write metadata
with open(os.path.join(self.out_dir, "train.txt"), "w", encoding="utf-8") as f:
for m in train:
f.write(m + "\n")
with open(os.path.join(self.out_dir, "val.txt"), "w", encoding="utf-8") as f:
for m in val:
f.write(m + "\n")
return out
def load_audio(self, wav_path):
wav_raw, _ = librosa.load(wav_path, self.sampling_rate)
if len(wav_raw) < self.skip_len:
return None
wav = wav_raw / np.abs(wav_raw).max() * 0.999
wav = librosa.effects.trim(wav, top_db=self.trim_top_db, frame_length=self.filter_length, hop_length=self.hop_length)[0]
if self.pre_emphasis:
wav = np.append(wav[0], wav[1:] - 0.97 * wav[:-1])
wav = wav / np.abs(wav).max() * 0.999
wav = np.append(wav, [0.] * self.hop_length * self.silence_audio_size)
wav = wav.astype(np.float32)
return wav_raw, wav
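    # Illustrative note (not in the original): load_audio() trims leading and
    # trailing silence, optionally applies pre-emphasis
    # y[n] = x[n] - 0.97 * x[n-1] to boost high frequencies before the STFT,
    # and appends hop_length * silence_audio_size samples of silence as an
    # explicit end-of-utterance region.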
def process_utterance(self, raw_text, wav_path, speaker, basename):
# Preprocess text
text = np.array(text_to_sequence(raw_text, self.cleaners))
# Load and process wav files
        audio = self.load_audio(wav_path)
        if audio is None:
            # load_audio() returns None for clips shorter than skip_len
            return None
        wav_raw, wav = audio
# Compute mel-scale spectrogram
mel_spectrogram = Audio.tools.get_mel_from_wav(wav, self.STFT)
# Sanity check
if self.sanity_check:
save_mel_and_audio(mel_spectrogram, wav*self.max_wav_value,
self.sampling_rate, self.out_dir, basename, tag="processed"
)
save_mel_and_audio(Audio.tools.get_mel_from_wav(wav_raw, self.STFT), wav_raw*self.max_wav_value,
self.sampling_rate, self.out_dir, basename, tag="raw"
)
exit(0) # quit for testing
# Save files
text_filename = "{}-text-{}.npy".format(speaker, basename)
np.save(
os.path.join(self.out_dir, "text", text_filename),
text,
)
mel_filename = "{}-mel-{}.npy".format(speaker, basename)
np.save(
os.path.join(self.out_dir, "mel", mel_filename),
mel_spectrogram.T,
)
return (
"|".join([basename, speaker, raw_text]),
mel_spectrogram.shape[1],
np.min(mel_spectrogram),
np.max(mel_spectrogram),
)
| [
"[email protected]"
] | |
d750021583b3df8500064a56702ba10b22f9f8f1 | de644b254b17a28f82e9212d80872a3d9eca2149 | /lib/gii/core/CommonAsset/AssetListAsset.py | 11600ec3b960789afc552d4db6bb2d2c116aac2f | [
"MIT"
] | permissive | pixpil/gii | 506bee02b11eb412016b583d807dcfcc485e189c | ba6d94ada86d82bacae06f165567a02585264440 | refs/heads/master | 2021-12-03T06:30:31.503481 | 2021-11-24T03:02:49 | 2021-11-24T03:02:49 | 431,331,021 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | import os.path
from gii.core import AssetManager, AssetLibrary, app, JSONHelper
from gii.core import AssetManager
class AssetListAssetManager( AssetManager ):
def getName(self):
return 'asset_manager.asest_list'
def getMetaType( self ):
return 'asest_list'
def acceptAssetFile(self, filepath):
if not os.path.isfile(filepath): return False
name,ext = os.path.splitext(filepath)
return ext in ['.asset_list' ]
def importAsset(self, node, reload = False ):
node.assetType = 'asset_list'
node.setObjectFile( 'data', node.getFilePath() )
return True
AssetListAssetManager().register()
| [
"[email protected]"
] | |
d5f658bfdf1c021dd3a93bb551fd8042b89315a1 | e2bd39106992b592de686e5bd79002edc05cc8bc | /1438-绝对差不超过限制的最长连续子数组/LongestSubarray.py | bb8bc7b83512f73a1ecd2d2492cd7e135b6b3f29 | [] | no_license | Mumulhy/LeetCode | 9b8ad3af9f9a3b838bdd54727cf8f33401292d27 | 269419ba2a2840fcf100fa217c5275029ffa229e | refs/heads/master | 2022-10-28T23:06:54.081073 | 2022-10-23T07:48:49 | 2022-10-23T07:48:49 | 212,135,892 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,689 | py | # -*- coding: utf-8 -*-
# LeetCode 1438 - Longest Continuous Subarray With Absolute Diff Less Than or Equal to Limit
"""
Created on Mon Feb 22 17:47 2021
@author: _Mumu
Environment: py37
"""
class Solution:
def longestSubarray(self, nums: list, limit: int) -> int:
from collections import deque
q_max = deque()
q_min = deque()
n = len(nums)
left = 0
right = 0
res = 0
while right < n:
while q_max and q_max[-1] < nums[right]:
q_max.pop()
while q_min and q_min[-1] > nums[right]:
q_min.pop()
q_max.append(nums[right])
q_min.append(nums[right])
while q_max[0] - q_min[0] > limit:
if nums[left] == q_max[0]:
q_max.popleft()
elif nums[left] == q_min[0]:
q_min.popleft()
left += 1
res = max(res, right-left+1)
right += 1
return res
        # Sliding-window + sorted-list (SortedList) version below, kept for reference
# from sortedcontainers.sortedlist import SortedList
# sl = SortedList()
# n = len(nums)
# left = 0
# right = 0
# res = 0
# while right < n:
# sl.add(nums[right])
# while sl[-1] - sl[0] > limit:
# sl.remove(nums[left])
# left += 1
# right += 1
# res = max(res, right-left)
# return res
        # My original attempt below; it exceeded the time limit (TLE)
# left = 0
# right = 1
# n = len(nums)
# max_num = nums[0]
# min_num = nums[0]
# res = 1
# while 1:
# if right == n:
# res = max(res, right-left)
# break
# elif abs(nums[right]-max_num) <= limit and abs(nums[right]-min_num) <= limit:
# max_num = max(max_num, nums[right])
# min_num = min(min_num, nums[right])
# right += 1
# elif left == right-1:
# left += 1
# right += 1
# max_num = nums[left]
# min_num = nums[left]
# else:
# res = max(res, right-left)
# left += 1
# if max_num in nums[left:right]:
# pass
# else:
# max_num = max(nums[left:right])
# if min_num in nums[left:right]:
# pass
# else:
# min_num = min(nums[left:right])
# return res
if __name__ == '__main__':
s = Solution()
print(s.longestSubarray([4,2,2,2,4,4,2,2], 0)) | [
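    # Illustrative check (not in the original): with nums = [8, 2, 4, 7] and
    # limit = 4 the longest valid window is [2, 4] (or [4, 7]), so the
    # expected answer is 2.
    assert s.longestSubarray([8, 2, 4, 7], 4) == 2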
"[email protected]"
] | |
119e73b6e8614a9c1f97011dc6eecc48113f1c39 | 023763d9f86116381f5765c51fb8b403e8eef527 | /BootCamp_easy/agc004_a.py | f9d155c213a8b4709afce8680a27d6e384024c8e | [] | no_license | Hilary02/atcoder | d45589682159c0f838561fc7d0bd25f0828e578b | 879c74f3acc7befce75abd10abf1ab43967fc3c7 | refs/heads/master | 2021-07-18T11:34:22.702502 | 2021-07-11T09:04:12 | 2021-07-11T09:04:12 | 144,648,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | n = [int(w) for w in input().split()]
ans = 0
if any([w % 2 == 0 for w in n]):
ans = 0
else:
n.sort()
ans = (n[0]*n[1])
print(ans)
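# Illustrative sketch (not in the original): when all sides are odd, cutting
# perpendicular to the longest side leaves a difference equal to the product
# of the two shortest sides; any even side allows a perfectly even split.
def _min_diff(a, b, c):
    s = sorted([a, b, c])
    return 0 if any(x % 2 == 0 for x in s) else s[0] * s[1]
# e.g. _min_diff(3, 5, 7) == 15 and _min_diff(2, 3, 3) == 0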
| [
"[email protected]"
] | |
aac72438be6b9f63676bc9abcc3191455c5a9e02 | ba88b66e61f0fd1ec0719b61568f0c883d02e534 | /entities/migrations/0002_auto_20200727_2336.py | 98c50866f7b192f751c81265db69327dca9d464d | [] | no_license | bnmng/spltcs | fbc9b5fb5342f5ee0a8bd080f957b4022509b3e9 | 5f19136d8a266b3d2094397cafe41b3ca1f45e78 | refs/heads/master | 2020-12-26T18:47:07.348996 | 2020-08-02T21:57:44 | 2020-08-02T21:57:44 | 237,602,374 | 0 | 0 | null | 2020-03-03T15:07:04 | 2020-02-01T11:07:46 | Python | UTF-8 | Python | false | false | 983 | py | # Generated by Django 3.0.5 on 2020-07-27 23:36
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('entities', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='entity',
name='user',
field=models.ForeignKey(blank=True, help_text='The user associated with this entity', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='entity', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='email',
name='entity',
field=models.ForeignKey(blank=True, help_text='The entity who has this email address', null=True, on_delete=django.db.models.deletion.SET_NULL, to='entities.Entity', verbose_name='Entity'),
),
]
| [
"[email protected]"
] | |
015ae6ca83ec48f43cc9b164e7a7046d5dfa4f90 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02696/s442403250.py | aa7dd421cd2e17f9ea28382dc57fc3251d1099f4 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240 | py | import sys
def input():
return sys.stdin.readline()[:-1]
def main():
A, B, N = map(int,input().split())
if B - 1 <= N:
print(A * (B - 1) // B)
else:
print(A * N // B)
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
1848ea894aeddbcd1f43a5ca69d058ff859c8e91 | 7c63130ae44e773a51fcd38c5dc3116f46daecd7 | /error/Predicted_Results/test_sample3_2recom_model.py | 487cc9216cd5c68980cca915d6da531965871cd9 | [] | no_license | GitHubdeWill/code_recom | a4e8e393592d210b0481f61a3cc89ea475c95153 | 954c334e4abb25aa96786c9efa8f8ca22bc286aa | refs/heads/master | 2020-04-12T15:14:02.395548 | 2018-12-20T12:07:31 | 2018-12-20T12:07:31 | 162,574,531 | 0 | 1 | null | 2019-12-02T14:28:38 | 2018-12-20T12:07:00 | Python | UTF-8 | Python | false | false | 24,204 | py |
import sqlite3
#making connection with database
def connect_database():
global conn
global cur
conn = sqlite3.connect("bankmanaging.db")
cur = conn.cursor()
cur.execute(
"create table if not exists bank (acc_no int, name text, age int, address text, balance int, account_type text, mobile_number int)")
cur.execute("create table if not exists staff (name text, pass text,salary int, position text)")
cur.execute("create table if not exists admin (name text, pass text)")
cur.execute("insert into admin values('arpit','123')")
conn.commit()
cur.execute("select acc_no from bank")
acc = cur.fetchall()
global acc_no
if len(acc) == 0:
acc_no = 1
else:
acc_no = int(acc[-1][0]) + 1
#check admin details in database
def check_admin(name,password):
cur.execute("select * from admin")
data=cur.fetchall()
    if data[0][0]==name and data[0][1]==password:
return True
    return False
#create employee in database
def create_employee(name,password,salary,positon):
print(password)
cur.execute("insert into staff values(?,?,?,?)",(name,password,salary,positon))
conn.commit()
#check employee details in database for employee login
def check_employee(name,password):
print(password)
print(name)
cur.execute("select name,pass from staff")
data=cur.fetchall()
print(data)
if len(data)==0:
return False
for i in range(len(data)):
if data[i][0]==name and data[i][1]==password:
return True
return False
#create customer details in database
def create_customer(name,age,address,balance,acc_type,mobile_number):
global acc_no
cur.execute("insert into bank values(?,?,?,?,?,?,?)",(acc_no,name,age,address,balance,acc_type,mobile_number))
conn.commit()
acc_no=acc_no+1
return acc_no-1
#check account in database
def check_acc_no(acc_no):
cur.execute("select acc_no from bank")
list_acc_no=cur.fetchall()
for i in range(len(list_acc_no)):
if list_acc_no[i][0]==int(acc_no):
return True
return False
#get all details of a particular customer from database
def get_details(acc_no):
cur.execute("select * from bank where acc_no=?",(acc_no))
global detail
detail = cur.fetchall()
print(detail)
if len(detail)==0:
return False
else:
return (detail[0][0],detail[0][1],detail[0][2],detail[0][3],detail[0][4],detail[0][5],detail[0][6])
#add new balance of customer in bank database
def update_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
new_bal=bal+int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
#deduct balance from customer bank database
def deduct_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
if bal<int(new_money):
return False
else:
new_bal=bal-int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
return True
#get the balance of a particular account number from the database
def check_balance(acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no))
bal=cur.fetchall()
return bal[0][0]
#update_name_in_bank_table
def update_name_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set name='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_age_in_bank_table
def update_age_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set age={} where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_address_in_bank_table
def update_address_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set address='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#list of all customers in bank
def list_all_customers():
cur.execute("select * from bank")
deatil=cur.fetchall()
return deatil
#delete account from database
def delete_acc(acc_no):
cur.execute("delete from bank where acc_no=?",(acc_no))
conn.commit()
#show employees detail from staff table
def show_employees():
cur.execute("select name, salary, position,pass from staff")
detail=cur.fetchall()
return detail
#return all money in bank
def all_money():
cur.execute("select balance from bank")
bal=cur.fetchall()
print(bal)
if len(bal)==0:
return False
else:
total=0
for i in bal:
total=total+i[0]
return total
#return a list of all employees name
def show_employees_for_update():
cur.execute("select * from staff")
detail=cur.fetchall()
return detail
#update employee name from data base
def update_employee_name(new_name,old_name):
print(new_name,old_name)
cur.execute("update staff set name='{}' where name='{}'".format(new_name,old_name))
conn.commit()
def update_employee_password(new_pass,old_name):
print(new_pass,old_name)
cur.execute("update staff set pass='{}' where name='{}'".format(new_pass,old_name))
conn.commit()
def update_employee_salary(new_salary,old_name):
print(new_salary,old_name)
cur.execute("update staff set salary={} where name='{}'".format(new_salary,old_name))
conn.commit()
def update_employee_position(new_pos,old_name):
print(new_pos,old_name)
cur.execute("update staff set position='{}' where name='{}'".format(new_pos,old_name))
conn.commit()
#get name and balance from bank of a particular account number
def get_detail(acc_no):
cur.execute("select name, balance from bank where acc_no=?",(acc_no))
details=cur.fetchall()
return details
def check_name_in_staff(name):
cur=conn.cursor()
cur.execute("select name from staff")
details=cur.fetchall()
for i in details:
if i[0]==name:
return True
return False
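# Illustrative usage sketch (not part of the original file): the helpers above
# rely on connect_database() being called first, since it creates the module
# level `conn` and `cur` globals. The function below is hypothetical and is
# never invoked.
def _demo_usage():
    connect_database()
    acc = create_customer('alice', 30, 'x street', 1000, 'saving', 9999999999)
    assert check_acc_no(acc)
    assert check_balance(acc) == 1000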
import sqlite3
#making connection with database
def connect_database():
global conn
global cur
conn = sqlite3.connect("bankmanaging.db")
cur = conn.cursor()
cur.execute(
"create table if not exists bank (acc_no int, name text, age int, address text, balance int, account_type text, mobile_number int)")
cur.execute("create table if not exists staff (name text, pass text,salary int, position text)")
cur.execute("create table if not exists admin (name text, pass text)")
cur.execute("insert into admin values('arpit','123')")
conn.commit()
cur.execute("select acc_no from bank")
acc = cur.fetchall()
global acc_no
if len(acc) == 0:
acc_no = 1
else:
acc_no = int(acc[-1][0]) + 1
#check admin details in database
def check_admin(name,password):
cur.execute("select * from admin")
data=cur.fetchall()
    if data[0][0]==name and data[0][1]==password:
return True
    return False
#create employee in database
def create_employee(name,password,salary,positon):
print(password)
cur.execute("insert into staff values(?,?,?,?)",(name,password,salary,positon))
conn.commit()
#check employee details in database for employee login
def check_employee(name,password):
print(password)
print(name)
cur.execute("select name,pass from staff")
data=cur.fetchall()
print(data)
if len(data)==0:
return False
for i in range(len(data)):
if data[i][0]==name and data[i][1]==password:
return True
return False
#create customer details in database
def create_customer(name,age,address,balance,acc_type,mobile_number):
global acc_no
cur.execute("insert into bank values(?,?,?,?,?,?,?)",(acc_no,name,age,address,balance,acc_type,mobile_number))
conn.commit()
acc_no=acc_no+1
return acc_no-1
#check account in database
def check_acc_no(acc_no):
cur.execute("select acc_no from bank")
list_acc_no=cur.fetchall()
for i in range(len(list_acc_no)):
if list_acc_no[i][0]==int(acc_no):
return True
return False
#get all details of a particular customer from database
def get_details(acc_no):
cur.execute("select * from bank where acc_no=?",(acc_no))
global detail
detail = cur.fetchall()
print(detail)
if len(detail)==0:
return False
else:
return (detail[0][0],detail[0][1],detail[0][2],detail[0][3],detail[0][4],detail[0][5],detail[0][6])
#add new balance of customer in bank database
def update_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
new_bal=bal+int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
#deduct balance from customer bank database
def deduct_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
if bal<int(new_money):
return False
else:
new_bal=bal-int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
return True
#gave balance of a particular account number from database
def check_balance(acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no))
bal=cur.fetchall()
return bal[0][0]
#update_name_in_bank_table
def update_name_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set name='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_age_in_bank_table
def update_age_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set age={} where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_address_in_bank_table
def update_address_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set address='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#list of all customers in bank
def list_all_customers():
cur.execute("select * from bank")
deatil=cur.fetchall()
return deatil
#delete account from database
def delete_acc(acc_no):
cur.execute("delete from bank where acc_no=?",(acc_no))
conn.commit()
#show employees detail from staff table
def show_employees():
cur.execute("select name, salary, position,pass from staff")
detail=cur.fetchall()
return detail
#return all money in bank
def all_money():
cur.execute("select balance from bank")
bal=cur.fetchall()
print(bal)
if len(bal)==0:
return False
else:
total=0
for i in bal:
total=total+i[0]
return total
#return a list of all employees name
def show_employees_for_update():
cur.execute("select * from staff")
detail=cur.fetchall()
return detail
#update employee name from data base
def update_employee_name(new_name,old_name):
print(new_name,old_name)
cur.execute("update staff set name='{}' where name='{}'".format(new_name,old_name))
conn.commit()
def update_employee_password(new_pass,old_name):
print(new_pass,old_name)
cur.execute("update staff set pass='{}' where name='{}'".format(new_pass,old_name))
conn.commit()
def update_employee_salary(new_salary,old_name):
print(new_salary,old_name)
cur.execute("update staff set salary={} where name='{}'".format(new_salary,old_name))
conn.commit()
def update_employee_position(new_pos,old_name):
print(new_pos,old_name)
cur.execute("update staff set position='{}' where name='{}'".format(new_pos,old_name))
conn.commit()
#get name and balance from bank of a particular account number
def get_detail(acc_no):
cur.execute("select name, balance from bank where acc_no=?",(acc_no))
details=cur.fetchall()
return details
def check_name_in_staff(name):
cur=conn.cursor()
cur.execute("select name from staff")
details=cur.fetchall()
for i in details:
if i[0]==name:
return True
return False
import sqlite3
#making connection with database
def connect_database():
global conn
global cur
conn = sqlite3.connect("bankmanaging.db")
cur = conn.cursor()
cur.execute(
"create table if not exists bank (acc_no int, name text, age int, address text, balance int, account_type text, mobile_number int)")
cur.execute("create table if not exists staff (name text, pass text,salary int, position text)")
cur.execute("create table if not exists admin (name text, pass text)")
cur.execute("insert into admin values('arpit','123')")
conn.commit()
cur.execute("select acc_no from bank")
acc = cur.fetchall()
global acc_no
if len(acc) == 0:
acc_no = 1
else:
acc_no = int(acc[-1][0]) + 1
#check admin dtails in database
def check_admin(name,password):
cur.execute("select * from admin")
data=cur.fetchall()
if data[0][0]==name data[0][1]==password:
return True
return
#create employee in database
def create_employee(name,password,salary,positon):
print(password)
cur.execute("insert into staff values(?,?,?,?)",(name,password,salary,positon))
conn.commit()
#check employee details in dabase for employee login
def check_employee(name,password):
print(password)
print(name)
cur.execute("select name,pass from staff")
data=cur.fetchall()
print(data)
if len(data)==0:
return False
for i in range(len(data)):
if data[i][0]==name and data[i][1]==password:
return True
return False
#create customer details in database
def create_customer(name,age,address,balance,acc_type,mobile_number):
global acc_no
cur.execute("insert into bank values(?,?,?,?,?,?,?)",(acc_no,name,age,address,balance,acc_type,mobile_number))
conn.commit()
acc_no=acc_no+1
return acc_no-1
#check account in database
def check_acc_no(acc_no):
cur.execute("select acc_no from bank")
list_acc_no=cur.fetchall()
for i in range(len(list_acc_no)):
if list_acc_no[i][0]==int(acc_no):
return True
return False
#get all details of a particular customer from database
def get_details(acc_no):
cur.execute("select * from bank where acc_no=?",(acc_no))
global detail
detail = cur.fetchall()
print(detail)
if len(detail)==0:
return False
else:
return (detail[0][0],detail[0][1],detail[0][2],detail[0][3],detail[0][4],detail[0][5],detail[0][6])
#add new balance of customer in bank database
def update_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
new_bal=bal+int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
#deduct balance from customer bank database
def deduct_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
if bal<int(new_money):
return False
else:
new_bal=bal-int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
return True
#gave balance of a particular account number from database
def check_balance(acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no))
bal=cur.fetchall()
return bal[0][0]
#update_name_in_bank_table
def update_name_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set name='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_age_in_bank_table
def update_age_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set age={} where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_address_in_bank_table
def update_address_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set address='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#list of all customers in bank
def list_all_customers():
cur.execute("select * from bank")
deatil=cur.fetchall()
return deatil
#delete account from database
def delete_acc(acc_no):
cur.execute("delete from bank where acc_no=?",(acc_no))
conn.commit()
#show employees detail from staff table
def show_employees():
cur.execute("select name, salary, position,pass from staff")
detail=cur.fetchall()
return detail
#return all money in bank
def all_money():
cur.execute("select balance from bank")
bal=cur.fetchall()
print(bal)
if len(bal)==0:
return False
else:
total=0
for i in bal:
total=total+i[0]
return total
#return a list of all employees name
def show_employees_for_update():
cur.execute("select * from staff")
detail=cur.fetchall()
return detail
#update employee name from data base
def update_employee_name(new_name,old_name):
print(new_name,old_name)
cur.execute("update staff set name='{}' where name='{}'".format(new_name,old_name))
conn.commit()
def update_employee_password(new_pass,old_name):
print(new_pass,old_name)
cur.execute("update staff set pass='{}' where name='{}'".format(new_pass,old_name))
conn.commit()
def update_employee_salary(new_salary,old_name):
print(new_salary,old_name)
cur.execute("update staff set salary={} where name='{}'".format(new_salary,old_name))
conn.commit()
def update_employee_position(new_pos,old_name):
print(new_pos,old_name)
cur.execute("update staff set position='{}' where name='{}'".format(new_pos,old_name))
conn.commit()
#get name and balance from bank of a particular account number
def get_detail(acc_no):
cur.execute("select name, balance from bank where acc_no=?",(acc_no))
details=cur.fetchall()
return details
def check_name_in_staff(name):
cur=conn.cursor()
cur.execute("select name from staff")
details=cur.fetchall()
for i in details:
if i[0]==name:
return True
return False
import sqlite3
#making connection with database
def connect_database():
global conn
global cur
conn = sqlite3.connect("bankmanaging.db")
cur = conn.cursor()
cur.execute(
"create table if not exists bank (acc_no int, name text, age int, address text, balance int, account_type text, mobile_number int)")
cur.execute("create table if not exists staff (name text, pass text,salary int, position text)")
cur.execute("create table if not exists admin (name text, pass text)")
cur.execute("insert into admin values('arpit','123')")
conn.commit()
cur.execute("select acc_no from bank")
acc = cur.fetchall()
global acc_no
if len(acc) == 0:
acc_no = 1
else:
acc_no = int(acc[-1][0]) + 1
#check admin dtails in database
def check_admin(name,password):
cur.execute("select * from admin")
data=cur.fetchall()
if data[0][0]==name None data[0][1]==password:
return True
return
#create employee in database
def create_employee(name,password,salary,positon):
print(password)
cur.execute("insert into staff values(?,?,?,?)",(name,password,salary,positon))
conn.commit()
#check employee details in dabase for employee login
def check_employee(name,password):
print(password)
print(name)
cur.execute("select name,pass from staff")
data=cur.fetchall()
print(data)
if len(data)==0:
return False
for i in range(len(data)):
if data[i][0]==name and data[i][1]==password:
return True
return False
#create customer details in database
def create_customer(name,age,address,balance,acc_type,mobile_number):
global acc_no
cur.execute("insert into bank values(?,?,?,?,?,?,?)",(acc_no,name,age,address,balance,acc_type,mobile_number))
conn.commit()
acc_no=acc_no+1
return acc_no-1
#check account in database
def check_acc_no(acc_no):
cur.execute("select acc_no from bank")
list_acc_no=cur.fetchall()
for i in range(len(list_acc_no)):
if list_acc_no[i][0]==int(acc_no):
return True
return False
#get all details of a particular customer from database
def get_details(acc_no):
cur.execute("select * from bank where acc_no=?",(acc_no))
global detail
detail = cur.fetchall()
print(detail)
if len(detail)==0:
return False
else:
return (detail[0][0],detail[0][1],detail[0][2],detail[0][3],detail[0][4],detail[0][5],detail[0][6])
#add new balance of customer in bank database
def update_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
new_bal=bal+int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
#deduct balance from customer bank database
def deduct_balance(new_money,acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no,))
bal=cur.fetchall()
bal=bal[0][0]
if bal<int(new_money):
return False
else:
new_bal=bal-int(new_money)
cur.execute("update bank set balance=? where acc_no=?",(new_bal,acc_no))
conn.commit()
return True
#gave balance of a particular account number from database
def check_balance(acc_no):
cur.execute("select balance from bank where acc_no=?",(acc_no))
bal=cur.fetchall()
return bal[0][0]
#update_name_in_bank_table
def update_name_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set name='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_age_in_bank_table
def update_age_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set age={} where acc_no={}".format(new_name,acc_no))
conn.commit()
#update_address_in_bank_table
def update_address_in_bank_table(new_name,acc_no):
print(new_name)
conn.execute("update bank set address='{}' where acc_no={}".format(new_name,acc_no))
conn.commit()
#list of all customers in bank
def list_all_customers():
cur.execute("select * from bank")
deatil=cur.fetchall()
return deatil
#delete account from database
def delete_acc(acc_no):
cur.execute("delete from bank where acc_no=?",(acc_no))
conn.commit()
#show employees detail from staff table
def show_employees():
cur.execute("select name, salary, position,pass from staff")
detail=cur.fetchall()
return detail
#return all money in bank
def all_money():
cur.execute("select balance from bank")
bal=cur.fetchall()
print(bal)
if len(bal)==0:
return False
else:
total=0
for i in bal:
total=total+i[0]
return total
#return a list of all employees name
def show_employees_for_update():
cur.execute("select * from staff")
detail=cur.fetchall()
return detail
#update employee name from data base
def update_employee_name(new_name,old_name):
print(new_name,old_name)
cur.execute("update staff set name='{}' where name='{}'".format(new_name,old_name))
conn.commit()
def update_employee_password(new_pass,old_name):
print(new_pass,old_name)
cur.execute("update staff set pass='{}' where name='{}'".format(new_pass,old_name))
conn.commit()
def update_employee_salary(new_salary,old_name):
print(new_salary,old_name)
cur.execute("update staff set salary={} where name='{}'".format(new_salary,old_name))
conn.commit()
def update_employee_position(new_pos,old_name):
print(new_pos,old_name)
cur.execute("update staff set position='{}' where name='{}'".format(new_pos,old_name))
conn.commit()
#get name and balance from bank of a particular account number
def get_detail(acc_no):
cur.execute("select name, balance from bank where acc_no=?",(acc_no))
details=cur.fetchall()
return details
def check_name_in_staff(name):
cur=conn.cursor()
cur.execute("select name from staff")
details=cur.fetchall()
for i in details:
if i[0]==name:
return True
return False | [
"[email protected]"
] | |
6e3ee2cf99d9871b230518dddfa45f5786599471 | f3dddaa239bb428312a46307f1fe2321a1c89c68 | /electron_project/devices/migrations/0005_devicesparepartrelation_diagram_code.py | 148e96d961c9c4682df27cb6115e35d246f69d6e | [] | no_license | TestAccount2077/mas-electronics-maintenance | e99d9e41c5ccbbc12670c269546dd7be6f48af10 | a53399cb59f201ce4bd0bca8cb2eb0dbea396915 | refs/heads/master | 2020-03-31T09:40:42.900983 | 2019-01-15T09:46:08 | 2019-01-15T09:46:08 | 152,105,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-11-24 09:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('devices', '0004_maintenancedevice_synced'),
]
operations = [
migrations.AddField(
model_name='devicesparepartrelation',
name='diagram_code',
field=models.CharField(default='', max_length=300),
),
]
| [
"[email protected]"
] | |
5625f8133d88c28ad6bdcfbcaf069494513639d2 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_flunked.py | 42c1e36c44049aaf1dfc6cdc5cff24f4a52ff91a | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py |
from xai.brain.wordbase.verbs._flunk import _FLUNK
# class header
class _FLUNKED(_FLUNK):
    def __init__(self):
_FLUNK.__init__(self)
self.name = "FLUNKED"
self.specie = 'verbs'
self.basic = "flunk"
self.jsondata = {}
| [
"[email protected]"
] | |
a44d5f5a4d42199d5491b31d27d42ba92c45d474 | b34d7c5f810287ebaab09c58754bc59f03589ac3 | /ltc/controller/views/controller_views.py | 1668af9915c3c0201409a4b4d122dee7b51a1146 | [
"MIT"
] | permissive | r1990v/JMeter-Control-Center | 11d00276a35a502f91f05bf2adf5c88bf56fbfed | 6bfd13f008fce42c78badcb9d2579f069b064fe9 | refs/heads/master | 2023-01-07T12:40:43.370688 | 2022-09-27T11:05:56 | 2022-09-27T11:05:56 | 162,960,150 | 0 | 0 | null | 2018-12-24T06:53:26 | 2018-12-24T06:53:26 | null | UTF-8 | Python | false | false | 53 | py | import logging
logger = logging.getLogger('django')
| [
"[email protected]"
] | |
fd543333d1da171fadb7732b118a35887c5b68f1 | 41c605bf3a002a757cb2344cff526d7a7ae56ea9 | /_plotly_utils/exceptions.py | 11a19a5c7c6fe4348451cb4cde8a903141ea1d55 | [
"MIT"
] | permissive | Jonathan-MW/plotly.py | 9674b90b5de11fd9089e6afefd04b57bc4587829 | 7528c00772f44dee24c0df7e15d70a4852f171a8 | refs/heads/master | 2020-05-30T06:04:13.621478 | 2019-05-31T10:34:15 | 2019-05-31T10:34:15 | 189,571,988 | 2 | 0 | MIT | 2019-05-31T09:59:53 | 2019-05-31T09:59:53 | null | UTF-8 | Python | false | false | 3,239 | py | class PlotlyError(Exception):
pass
class PlotlyEmptyDataError(PlotlyError):
pass
class PlotlyGraphObjectError(PlotlyError):
def __init__(self, message='', path=(), notes=()):
"""
General graph object error for validation failures.
:param (str|unicode) message: The error message.
:param (iterable) path: A path pointing to the error.
:param notes: Add additional notes, but keep default exception message.
"""
self.message = message
self.plain_message = message # for backwards compat
self.path = list(path)
self.notes = notes
super(PlotlyGraphObjectError, self).__init__(message)
def __str__(self):
"""This is called by Python to present the error message."""
format_dict = {
'message': self.message,
'path': '[' + ']['.join(repr(k) for k in self.path) + ']',
'notes': '\n'.join(self.notes)
}
return ('{message}\n\nPath To Error: {path}\n\n{notes}'
.format(**format_dict))
class PlotlyDictKeyError(PlotlyGraphObjectError):
def __init__(self, obj, path, notes=()):
"""See PlotlyGraphObjectError.__init__ for param docs."""
format_dict = {'attribute': path[-1], 'object_name': obj._name}
message = ("'{attribute}' is not allowed in '{object_name}'"
.format(**format_dict))
notes = [obj.help(return_help=True)] + list(notes)
super(PlotlyDictKeyError, self).__init__(
message=message, path=path, notes=notes
)
class PlotlyDictValueError(PlotlyGraphObjectError):
def __init__(self, obj, path, notes=()):
"""See PlotlyGraphObjectError.__init__ for param docs."""
format_dict = {'attribute': path[-1], 'object_name': obj._name}
message = ("'{attribute}' has invalid value inside '{object_name}'"
.format(**format_dict))
notes = [obj.help(path[-1], return_help=True)] + list(notes)
super(PlotlyDictValueError, self).__init__(
message=message, notes=notes, path=path
)
class PlotlyListEntryError(PlotlyGraphObjectError):
def __init__(self, obj, path, notes=()):
"""See PlotlyGraphObjectError.__init__ for param docs."""
format_dict = {'index': path[-1], 'object_name': obj._name}
message = ("Invalid entry found in '{object_name}' at index, '{index}'"
.format(**format_dict))
notes = [obj.help(return_help=True)] + list(notes)
super(PlotlyListEntryError, self).__init__(
message=message, path=path, notes=notes
)
class PlotlyDataTypeError(PlotlyGraphObjectError):
def __init__(self, obj, path, notes=()):
"""See PlotlyGraphObjectError.__init__ for param docs."""
format_dict = {'index': path[-1], 'object_name': obj._name}
message = ("Invalid entry found in '{object_name}' at index, '{index}'"
.format(**format_dict))
note = "It's invalid because it doesn't contain a valid 'type' value."
notes = [note] + list(notes)
super(PlotlyDataTypeError, self).__init__(
message=message, path=path, notes=notes
        )
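# --- usage sketch (illustrative addition, not part of the original module) ---
# Shows how message, path, and notes compose into the formatted error text.
if __name__ == '__main__':
    try:
        raise PlotlyGraphObjectError(
            message='invalid figure',
            path=('layout', 'xaxis'),
            notes=('check the axis configuration',),
        )
    except PlotlyGraphObjectError as error:
        print(error)  # message, then "Path To Error: ['layout']['xaxis']", then the note
| [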
"[email protected]"
] | |
8917de073014ee1190491690304d2112fceb28ab | 28de04457e8ebcd1b34494db07bde8a3f25d8cf1 | /easy/middle_of_the_linked_list_876.py | 2530238daf61cbeaa48f6bb08b1fd5ae8d58ff33 | [] | no_license | YangXinNewlife/LeetCode | 1df4218eef6b81db81bf2f0548d0a18bc9a5d672 | 20d3d0aa325d79c716acfc75daef32f8d4f9f1ad | refs/heads/master | 2023-08-16T23:18:29.776539 | 2023-08-15T15:53:30 | 2023-08-15T15:53:30 | 70,552,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 697 | py | # -*- coding:utf-8 -*-
__author__ = 'yangxin_ryan'
"""
Solutions:
The problem is straightforward: we are given a singly linked list and need to find its middle node.
For an odd-length list we return the exact middle; for an even-length list we return the second of the two middle nodes. How can we do this in a single scan?
We can use two pointers, slow and fast: the fast pointer advances two nodes per step and the slow pointer one, so when the fast pointer reaches the end, the slow pointer is at the middle.
"""
class MiddleOfTheLinkedList(object):
def middleNode(self, head: ListNode) -> ListNode:
temp = ListNode(0)
temp.next = head
slow_p, fast_p = temp, temp
while fast_p and fast_p.next:
slow_p = slow_p.next
fast_p = fast_p.next.next
return slow_p.next if fast_p else slow_p
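# --- usage sketch (illustrative addition, not part of the original solution) ---
# Builds 1 -> 2 -> 3 -> 4 -> 5 and prints the middle value; for an even-length
# list the second of the two middle nodes is returned.
if __name__ == "__main__":
    head = None
    for v in (5, 4, 3, 2, 1):
        head = ListNode(v, head)
    print(MiddleOfTheLinkedList().middleNode(head).val)  # expected output: 3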
| [
"[email protected]"
] | |
c8905d198c2817f8c72f763ae583f167c2b5413f | f38ce96def797a2095e153b1bb4badf83b59b61c | /alarm_emaild.py | 7ac1f0fc8ad2ee654d9a5805e617737e4db1f2e6 | [] | no_license | jy02383505/bermuda3 | 7883d8e701a9369ad6dd935db96866dd24c079a5 | 284119226c963d638afe61d1593bc60b7ec85a49 | refs/heads/master | 2022-02-03T14:40:10.395805 | 2020-01-03T03:21:41 | 2020-01-03T03:21:41 | 231,504,181 | 1 | 0 | null | 2022-01-06T22:41:00 | 2020-01-03T03:21:19 | Python | UTF-8 | Python | false | false | 249 | py | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by 'vance' on '11/25/14'.
__doc__ = ''
__ver__ = '1.0'
__author__ = 'vance'
from util.failed_task_alarm import run
def main():
run()
if __name__ == "__main__":
main()
exit()
| [
"[email protected]"
] | |
3d51cada4fcc3bccaa05d0d8bcf13d87f511e4cd | dd3bbd4e7aaee7a8a5f26b927ce28ac472c855a5 | /eggs/plone.indexer-1.0-py2.7.egg/plone/indexer/wrapper.py | af6043ef51e1809c325d4ff5c16172fda8fd7798 | [] | no_license | nacho22martin/tesis | ea0a822f8bdbdef6f13f41276ecd4d6e85427ca5 | e137eb6225cc5e724bee74a892567796166134ac | refs/heads/master | 2020-12-24T13:20:58.334839 | 2013-11-09T12:42:41 | 2013-11-09T12:42:41 | 14,261,570 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,382 | py | from zope.interface import implements, providedBy, Interface
from zope.interface.declarations import getObjectSpecification
from zope.interface.declarations import ObjectSpecification
from zope.interface.declarations import ObjectSpecificationDescriptor
from zope.component import adapts, queryMultiAdapter
from plone.indexer.interfaces import IIndexableObjectWrapper, IIndexableObject
from plone.indexer.interfaces import IIndexer
from Products.ZCatalog.interfaces import IZCatalog
from Products.CMFCore.utils import getToolByName
class WrapperSpecification(ObjectSpecificationDescriptor):
"""A __providedBy__ decorator that returns the interfaces provided by
the wrapped object when asked.
"""
def __get__(self, inst, cls=None):
if inst is None:
return getObjectSpecification(cls)
else:
provided = providedBy(inst._IndexableObjectWrapper__object)
cls = type(inst)
return ObjectSpecification(provided, cls)
class IndexableObjectWrapper(object):
"""A simple wrapper for indexable objects that will delegate to IIndexer
adapters as appropriate.
"""
implements(IIndexableObject, IIndexableObjectWrapper)
adapts(Interface, IZCatalog)
__providedBy__ = WrapperSpecification()
def __init__(self, object, catalog):
self.__object = object
self.__catalog = catalog
self.__vars = {}
portal_workflow = getToolByName(catalog, 'portal_workflow', None)
if portal_workflow is not None:
self.__vars = portal_workflow.getCatalogVariablesFor(object)
def _getWrappedObject(self):
return self.__object
def __str__(self):
try:
return self.__object.__str__()
except AttributeError:
return object.__str__(self)
def __getattr__(self, name):
# First, try to look up an indexer adapter
indexer = queryMultiAdapter((self.__object, self.__catalog,), IIndexer, name=name)
if indexer is not None:
return indexer()
# Then, try workflow variables
if name in self.__vars:
return self.__vars[name]
# Finally see if the object provides the attribute directly. This
# is allowed to raise AttributeError.
return getattr(self.__object, name) | [
"ignacio@plone.(none)"
] | ignacio@plone.(none) |