blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0a4c03dd05dc9d1deeec097157b5ad59aaa9a092 | 9fce5f629873ef5c43274fdae1d49a270ec78f38 | /TS_Modular/TS1.py | 68929104a538e8e2d71d0077737c303f27fa5251 | [] | no_license | makkar-nishant123/Pythonselenium | 2c9273d81915bc1f7724de93d7b87c76b5f9066b | 173f49e6522b80f13e6e406756130c0b1376a139 | refs/heads/master | 2021-06-12T09:30:24.097297 | 2021-04-22T02:48:56 | 2021-04-22T02:48:56 | 182,635,591 | 0 | 0 | null | 2021-04-22T02:52:08 | 2019-04-22T06:26:41 | Python | UTF-8 | Python | false | false | 666 | py | from Modular_helper.be_helper import validate_be
from Modular_helper.ui_helper import *
# Locate the page elements used by the modular test helpers below.
# NOTE(review): `driver` and the helper functions (login, performorder, ...)
# come from the star import of Modular_helper.ui_helper — confirm what they
# provide. All locator names are empty strings, presumably placeholders to be
# filled in with real element names before this script can run.
login_username = driver.find_element_by_name("")
login_password = driver.find_element_by_name("")
peform_order_element1 = driver.find_element_by_name("")
peform_order_element2 = driver.find_element_by_name("")
logout_button = driver.find_element_by_name("")
validate_gui_element = driver.find_element_by_name("")
validate_be_element = driver.find_element_by_name("")

# Test flow: log in, place an order, log out, then validate the GUI and the
# backend using the located elements.
login(login_username,login_password,"username","password")
performorder(peform_order_element1,peform_order_element2)
logout(logout_button)
validate_gui(validate_gui_element)
validate_be(validate_be_element)
| [
"[email protected]"
] | |
7f921075d59189eabfc07610cf12738f7c86eeb1 | 7eb8e38557941cd9d536fd6893028b9dc1342413 | /game/handler.py | b0aef27cdcccf381a2a4b3abcfadeff53fd28a8b | [] | no_license | wlaub/boardgame | 788db37d1f9a50a04fcbc3276e3f94c0536c0063 | b6bb84e9fc09c2809d468767a26b55ec33e887b4 | refs/heads/master | 2021-01-23T04:23:36.915511 | 2017-03-29T21:57:21 | 2017-03-29T21:57:21 | 86,194,131 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,236 | py | import pygame
from pygame.locals import *
import ui
import sys, time, math
class Handler():
    """
    Primary handler class for all the stuff.
    Should be subclassed for the main application

    Owns a large world surface (`screen`) and a smaller display window
    (`drawscreen`); `pos` is the camera position (world coordinates of the
    view center). `things` are drawable/interactive objects; `modals` is the
    subset treated as modal dialogs. Right mouse button pans the camera.
    """

    # Whether to draw the minimap overlay (ui.Minimap).
    usemini = True
    # Minimap edge length in pixels.
    msize = 200

    def __init__(self, size):
        """Initialise pygame, create the world surface of `size` and a
        1280x720 display window, and set up UI/camera state.

        Args:
            size: (width, height) of the world surface in pixels.
        """
        pygame.init()
        pygame.font.init()
        self.font = pygame.font.SysFont(pygame.font.get_default_font(), 18)
        self.size = size
        self.center = (size[0]/2, size[1]/2)
        # Window (display) size; the world surface can be larger.
        self.wsize = (1280,720)
        self.update_locs()
        self.screen = pygame.Surface(self.size)
        self.drawscreen = pygame.display.set_mode(self.wsize)
        # Camera position in world coordinates.
        self.pos = (640,360)
        self.things = []
        self.modals = []
        self.done = False
        self.bgcolor = (255,255,255)
        # Last mouse position in window coordinates.
        self.mpos = (0,0)
        self.update_areas()
        # Right-button panning state: flag plus the anchor mouse position.
        self.panning = False
        self.poff = (0,0)
        self.minimap = ui.Minimap(self
            , (self.wsize[0]-self.msize, self.wsize[1]-self.msize)
            , (self.msize, self.msize) )

    def render(self):
        """
        Render the display area to the display screen
        """
        # Top-left corner of the visible window in world coordinates,
        # clamped so it never goes negative.
        ul= ( max(0,int(self.pos[0]-self.wsize[0]/2.))
            , max(0,int(self.pos[1]-self.wsize[1]/2.))
            )
        self.drawscreen.fill(self.bgcolor)
        self.drawscreen.blit(self.screen, (0,0), area = pygame.Rect(ul,self.wsize))
        # UI overlays are drawn directly on the window, not the world surface.
        for t in self.things:
            t.draw_ui(self.drawscreen)
        if self.usemini:
            self.minimap.draw(self.screen, self.drawscreen)

    def update_locs(self):
        """
        For updating relative locations like wcenter
        """
        self.wcenter = (self.wsize[0]/2, self.wsize[1]/2)

    def update_areas(self):
        # Shift the camera by the (negative) overshoot of the view's
        # top-left corner past the world origin.
        # NOTE(review): only called once from __init__; confirm the intended
        # clamping behaviour — `pan` does its own clamping.
        ul= ( min(0, self.pos[0]-self.wsize[0]/2.)
            , min(0, self.pos[1]-self.wsize[1]/2.)
            )
        self.pos = (self.pos[0] + ul[0], self.pos[1] + ul[1])

    def get_mouse(self):
        # Mouse position in world coordinates (set by translate_mouse).
        return self.relmpos

    def translate_mouse(self, pos):
        # Convert window coordinates to world coordinates via the camera.
        val = [int(pos[i]+self.pos[i]-self.wsize[i]/2.) for i in range(2)]
        self.relmpos = tuple(val)

    def pan(self, rel):
        # Move the camera opposite to the mouse delta, clamped so the view
        # cannot leave the world on the top/left sides.
        self.pos = (max(self.wsize[0]/2,self.pos[0] - rel[0]), max(self.wsize[1]/2,self.pos[1] - rel[1]))
#        self.update_areas()

    def event(self, event):
        """Handle one pygame event. Always returns False, i.e. the event is
        never reported as consumed to the caller."""
        if event.type == pygame.QUIT:
            # NOTE(review): `exit()` is the site.exit helper; sys.exit() is
            # the conventional choice here.
            exit()
        elif event.type == pygame.VIDEORESIZE:
            self.wsize = event.size
            self.update_locs()
            # NOTE(review): the display surface is not recreated on resize;
            # presumably pygame.display.set_mode should be called again
            # with the new size — confirm.
        elif event.type == pygame.MOUSEBUTTONDOWN:
            if event.button == 3:
                # Right button: begin panning from the current mouse position.
                self.panning =True
                self.poff = self.mpos
        elif event.type == pygame.MOUSEBUTTONUP:
            self.panning = False
        return False

    def draw_pre(self):
        # Hook for subclasses: drawing before `things` are drawn.
        pass

    def draw_post(self):
        # Hook for subclasses: drawing after `things` are drawn.
        pass

    def add_modal(self, m):
        # A modal is tracked both as a drawable thing and as a modal.
        self.things.append(m)
        self.modals.append(m)

    def update(self):
        # Drop modals that have been closed since the last frame.
        # NOTE(review): removing from self.modals while iterating it can skip
        # the element after a removed one; consider iterating a copy.
        for m in self.modals:
            if m.closed:
                self.things.remove(m)
                self.modals.remove(m)
        # While panning, move the camera by the mouse delta since last frame.
        if self.panning:
            self.pan((self.mpos[0] - self.poff[0], self.mpos[1]-self.poff[1]))
            self.poff = self.mpos
        pass

    def run(self):
        """Main loop: poll events, update all things, draw, repeat until
        `self.done` is set."""
        while not self.done:
            time.sleep(.01667)  # ~60 frames per second
            self.mpos = pygame.mouse.get_pos()
            self.translate_mouse(self.mpos)
            for event in pygame.event.get():
                caught= False
                # Offer the event to things in reverse order (topmost first);
                # the first thing that returns truthy consumes it.
                for t in self.things[::-1]:
                    if t.event(event):
                        caught=True
                        break
                if not caught:
                    if self.usemini:
                        self.minimap.event(event)
                    self.event(event)
            for t in self.things:
                t.update()
            self.update()
            # Draw in sorted order (painter's algorithm keyed on t.sort()).
            self.things.sort(key=lambda x: x.sort())
            self.screen.fill(self.bgcolor)
            self.draw_pre()
            for t in self.things:
                t.draw(self.screen)
            if self.usemini:
                self.minimap.update()
            self.draw_post()
            self.render()
            pygame.display.flip()
| [
"[email protected]"
] | |
99d4816939c2101502a3b5846c0f9d0cf12b593f | 600c2df560bad7b1a0b4ede78887be563b68df6e | /plt_traning_loss.py | 113fe002b9973e1f117a77a675dc84d11db24a5d | [] | no_license | Amitabha2018/AdaSGD-D | 2c2dfc3e66f0f11266d45a10d5a01ff79f56d898 | a55cc677ae9604578034d432ce31c47234b17521 | refs/heads/master | 2023-03-26T14:19:30.961643 | 2021-03-20T00:55:01 | 2021-03-20T00:55:01 | 349,451,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 778 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'IT小叮当'
__time__ = '2021-03-09 21:37'
import pandas as pd
import matplotlib.pyplot as plt
# Enable CJK glyphs in plot text (SimHei supports Chinese characters).
from pylab import *
mpl.rcParams['font.sans-serif'] = ['SimHei']

# Load the recorded training metrics from the experiment spreadsheet.
df = pd.read_excel("G:\\AdaSGD-D\\moment.xlsx")
#print(df)

# Training-loss curve for the SGD-with-momentum run.
plt.plot(df["Train_Loss"],label='SGD-M',linewidth=2,c='b',ls='-',marker='o')
# plt.plot(df["blue1"],df["blue2"],label='较小学习率',linewidth=3,color='b',ls='-.')
# plt.plot(df["red1"],df["red2"],label='适当学习率',linewidth=3,color='r')
# plt.plot(df["black1"],df["black2"],label='较大学习率',linewidth=3,color='k',ls=':')
plt.xlabel("Epoch")  # fixed typo: label previously read "Epochx"
# plt.ylabel('损失函数值')
#plt.xlim(0, 20)
plt.xticks(range(0,21,3))
plt.legend()
plt.grid()
plt.show()
| [
"[email protected]"
] | |
class Engine:
    """Generic engine; base type for the concrete drive systems."""
    pass


class Liangqu(Engine):
    """Two-wheel-drive engine."""

    def __init__(self):
        print("两驱引擎")


class Siqu(Engine):
    """Four-wheel-drive engine."""

    def __init__(self):
        print("四驱引擎")


# ===================
class Car:
    """A car *has an* engine: composition expressed via a class attribute."""

    car_eng = Engine  # the engine type this car is built with


class LiangCar(Car):
    """Two-wheel-drive car: owns a two-wheel-drive engine."""

    car_eng = Liangqu

    def __init__(self):
        # Resolve car_eng on the class and instantiate the matching engine.
        engine_cls = self.car_eng
        self.power = engine_cls()


class SiCar(Car):
    """Four-wheel-drive car: owns a four-wheel-drive engine."""

    car_eng = Siqu

    def __init__(self):
        self.power = self.car_eng()


l = LiangCar()
s = SiCar()
print(type(l.power))
print(type(s.power))
| [
"[email protected]"
] | |
'''Ask the user for two integers and count how many even numbers lie
between the two entered values (inclusive), printing each even number.'''

# Read both bounds from the user.
n1=int (input('Ingresa primer numero: '))
n2=int (input('Ingresa segundo numero: '))

# Accept the bounds in either order.
inferior, superior = min(n1, n2), max(n1, n2)
# First even number at or after the lower bound; stepping by 2 keeps every
# generated value even (the original started at n1 regardless of parity and
# never actually counted anything — `x==+1` was a no-op comparison).
inicio = inferior if inferior % 2 == 0 else inferior + 1

pares = 0
for x in range(inicio, superior + 1, 2):
    print(x)
    pares += 1

# Report how many even numbers were found, as required by the exercise.
print('Cantidad de pares:', pares)
| [
"[email protected]"
] | |
1856d5be861e0964e28aa4f9ce2a648c81ba0a4b | 1da4af09bc143314696f273e99310a7b3fb1df0f | /seleniumwire/proxy/modifier.py | 1ff879f83143cfbcc0204cccfffc7fb3cc657462 | [
"MIT"
] | permissive | fossilet/selenium-wire | 1fbf97d354f7d4fabf5c440c176ee526de5b4ca9 | 62771b1d1c78c60f3b78caacbbf2bb37622ee965 | refs/heads/master | 2020-07-04T00:59:43.875095 | 2020-02-18T01:51:39 | 2020-02-18T01:51:39 | 202,104,673 | 0 | 0 | MIT | 2019-08-13T08:57:56 | 2019-08-13T08:57:53 | null | UTF-8 | Python | false | false | 4,525 | py | import re
import threading
from urllib.parse import urlsplit
class RequestModifier:
    """This class is responsible for modifying the URL and headers
    of a request.

    Instances of this class are designed to be stateful and threadsafe.
    """

    def __init__(self):
        """Initialise a new RequestModifier."""
        self._lock = threading.Lock()
        self._headers = {}
        self._rewrite_rules = []

    @property
    def headers(self):
        """The headers that should be used to override the request headers.

        The value of the headers should be a dictionary. Where a header in
        the dictionary exists in the request, the dictionary value will
        overwrite the one in the request. Where a header in the dictionary
        does not exist in the request, it will be added to the request as a
        new header. To filter out a header from the request, set that header
        in the dictionary with a value of None. Header names are case insensitive.
        """
        with self._lock:
            return dict(self._headers)

    @headers.setter
    def headers(self, headers):
        """Sets the headers to override request headers.

        Args:
            headers: The dictionary of headers to set.
        """
        with self._lock:
            # Store a copy so later external mutation of the caller's dict
            # cannot bypass the lock (the getter also hands out a copy).
            self._headers = dict(headers)

    @headers.deleter
    def headers(self):
        """Clears the headers being used to override request headers.

        After this is called, request headers will pass through unmodified.
        """
        with self._lock:
            self._headers.clear()

    @property
    def rewrite_rules(self):
        """The rules used to rewrite request URLs.

        The value of the rewrite rules should be a list of sublists (or tuples)
        with each sublist containing the pattern and replacement.

        For example:
            rewrite_rules = [
                ('pattern', 'replacement'),
                ('pattern', 'replacement'),
            ]
        """
        with self._lock:
            return [(pat.pattern, repl) for pat, repl in self._rewrite_rules]

    @rewrite_rules.setter
    def rewrite_rules(self, rewrite_rules):
        """Sets the rewrite rules used to modify request URLs.

        Args:
            rewrite_rules: The list of rewrite rules, which should
                be a list of sublists, with each sublist having two
                elements - the pattern and replacement.
        """
        compiled = []
        for pattern, replacement in rewrite_rules:
            compiled.append((re.compile(pattern), replacement))
        with self._lock:
            self._rewrite_rules = compiled

    @rewrite_rules.deleter
    def rewrite_rules(self):
        """Clears the rewrite rules being used to modify request URLs.

        After this is called, request URLs will no longer be modified.
        """
        with self._lock:
            self._rewrite_rules.clear()

    def modify(self, request):
        """Performs modifications to the request.

        Args:
            request: The request (a BaseHTTPHandler instance) to modify.
        """
        self._modify_headers(request)
        self._rewrite_url(request)

    def _modify_headers(self, request):
        with self._lock:
            # Map lowercased override names to (original-case name, value).
            headers_lc = {h.lower(): (h, v) for h, v in self._headers.items()}

        # Remove/replace any header that already exists in the request
        for header in list(request.headers):
            try:
                value = headers_lc.pop(header.lower())[1]
            except KeyError:
                pass
            else:
                del request.headers[header]
                # A None override filters the header out entirely.
                if value is not None:
                    request.headers[header] = value

        # Add new headers to the request that don't already exist
        for header, value in headers_lc.values():
            request.headers[header] = value

    def _rewrite_url(self, request):
        with self._lock:
            rewrite_rules = self._rewrite_rules[:]

        original_netloc = urlsplit(request.path).netloc

        # Apply only the first matching rule.
        for pattern, replacement in rewrite_rules:
            modified, count = pattern.subn(replacement, request.path)

            if count > 0:
                request.path = modified
                break

        modified_netloc = urlsplit(request.path).netloc

        if original_netloc != modified_netloc:
            # Modify the Host header if it exists
            if 'Host' in request.headers:
                request.headers['Host'] = modified_netloc
| [
"[email protected]"
] | |
c3166149b10a5574db2f8695947dc6c8ee7ad116 | 7651bb30a4b065b0580e2fbe595d8ec99831734b | /local/django.wsgi | 843aa0823bed7969c5348b3d2c9ff6349ed49e03 | [] | no_license | pgmdti/pgmcdl | 78e1316762bae703ab7cd9815aacb7e7b1403777 | ead4691b437f296effdfe39b689c0bb00e22ac8f | refs/heads/master | 2022-11-10T00:03:39.518228 | 2020-06-26T01:40:51 | 2020-06-26T01:40:51 | 275,050,844 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | wsgi | /home/francisco/PycharmProjects/pgmcdl/django.wsgi | [
"[email protected]"
] | |
e6ef2ff3aef2eb28e1e428b09f1b275b3f65b883 | 676b765f96cad25393d3cd1243d9b6bcafdd0b62 | /funciones_camara.py | 5a7da5cb7292c1bc253b5b1c76cfb6475bbd5e0f | [] | no_license | santosg572/Camara_USB | 32e2fe6fcda3b0e67e7b6dff53a185b949882213 | 2e235a2a986326bbae9e8649671b41ba0487e458 | refs/heads/main | 2023-04-23T00:43:48.699211 | 2021-05-06T18:22:01 | 2021-05-06T18:22:01 | 365,000,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | import numpy as np
import cv2
import time
def CapturaImg(file=''):
    """Grab a frame from the default webcam (device 0) and save it as
    ``<file>.jpg`` at maximum JPEG quality.

    Args:
        file: path prefix for the output image; '.jpg' is appended.
    """
    cap = cv2.VideoCapture(0)

    def DespliegaImagen():
        # Read frames until the loop breaks; the success flag `ret` is ignored.
        while(True):
            ret, frame = cap.read()
            # cv2.imshow('frame',frame)
            # NOTE(review): cv2.waitKey(1) returns -1 (truthy) when no key is
            # pressed, so this condition is normally always true and the loop
            # exits after the first frame — confirm a single-frame capture is
            # the intended behaviour.
            if cv2.waitKey(1):
                break
        return frame

    fm = DespliegaImagen()
    # NOTE(review): the frame is already captured at this point, so this
    # 10-second pause does not affect the saved image — confirm it is needed.
    time.sleep(10)
    cv2.imwrite(file+'.jpg', fm, [cv2.IMWRITE_JPEG_QUALITY, 100])
    # Release the camera and close any OpenCV windows.
    cap.release()
    cv2.destroyAllWindows()
| [
"[email protected]"
] | |
3bb30c6db1a0356f3af2b6f8d2c7b95adbca7a3e | 02fea9b0494d3f0ee498ae107e27c0d69fe411bc | /PySpark/ngt_utils.py | dc0f33ea86b0f3d0e4e18abd776fd5ae385c767f | [] | no_license | antrad1978/bigdata | 3af09c40e13c7463ec2ff907e998672bf6966753 | bd1b0c78decabc7031d3bf2888adbf310c082979 | refs/heads/master | 2020-04-05T14:25:04.746939 | 2020-02-24T10:25:24 | 2020-02-24T10:25:24 | 156,928,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | import pyspark.sql.functions
def get_sum_grouped_by_date(sales: pyspark.sql.DataFrame, gbdestination: str, total_destination: str,
                            source_date_field: str, format: str, agg_field: str) -> pyspark.sql.DataFrame:
    """Sum ``agg_field`` per year extracted from a timestamp column.

    Note: the annotations previously referenced ``pyspark.sql.functions.DataFrame``;
    the DataFrame class actually lives in ``pyspark.sql``.

    Args:
        sales: input DataFrame.
        gbdestination: name of the derived group-by column (the year).
        total_destination: name for the aggregated sum column.
        source_date_field: column holding the date/timestamp string.
        format: datetime pattern used to parse ``source_date_field``.
        agg_field: numeric column to sum.

    Returns:
        DataFrame with one row per year, sorted by ``gbdestination``.
    """
    res = sales.withColumn(gbdestination,
                           pyspark.sql.functions.year(pyspark.sql.functions.to_timestamp(source_date_field, format)))
    res = res.groupBy(gbdestination).agg({agg_field: 'sum'})
    # Spark names the aggregate column 'sum(<field>)'; rename it as requested.
    res = res.withColumnRenamed('sum(' + agg_field + ')', total_destination).sort(gbdestination)
    return res
| [
"[email protected]"
] | |
03159897d72bde6296f55d1e3c46df799c87511e | c4381edcf681a0d6f9fa734096daeb2490330ce6 | /ITGK Øvinger/Øving 6/Teoridelen på eksamen.py | 90cf04e392230595ea8db68e23dad38a4f6e49c5 | [] | no_license | sirirusten/Python-kode | 9f9e3e18b28915a227e43eec24bc6829c0fcbb8f | 969532edee5876696e72dab8f841c087059cd815 | refs/heads/main | 2023-08-23T11:57:50.990160 | 2021-10-08T21:04:13 | 2021-10-08T21:04:13 | 415,123,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,061 | py | #!/usr/bin/env python
# coding: utf-8
# <nav class="navbar navbar-default">
# <div class="container-fluid">
# <div class="navbar-header">
# <a class="navbar-brand" href="_Oving6.ipynb">Øving 6</a>
# </div>
# <ul class="nav navbar-nav">
# <li ><a href="Generelt%20om%20lister.ipynb">Generelt om lister</a></li>
# <li ><a href="Lett%20og%20blandet.ipynb">Lett og blandet</a></li>
# <li ><a href="Kodeforstaelse.ipynb">Kodeforståelse</a></li>
# <li ><a href="Vektorer.ipynb">Vektorer</a></li>
# <li ><a href="Lister%20og%20lokker.ipynb">Lister og løkker</a></li>
# <li class="active"><a href="Teoridelen%20paa%20eksamen.ipynb">Teoridelen på eksamen</a></li>
# <li><a href="Gangetabell%20og%20lister.ipynb">Gangetabell og lister</a></li>
# <li ><a href="Lotto.ipynb">Lotto</a></li>
# <li ><a href="Tannfeen.ipynb">Tannfeen</a></li>
# <li><a href="Chattebot.ipynb">Chattebot</a></li>
# <li ><a href="Matriseaddisjon.ipynb">Matriseaddisjon</a></li>
# <li ><a href="Intro%20til%20numpy-arrays.ipynb">Intro til numpy-arrays</a></li>
# </ul>
# </div>
# </nav>
#
# # Teoridelen på eksamen
#
# **Læringsmål:**
#
# * Lister
# * funksjoner
#
# **Starting Out with Python:**
#
# * Kap. 7
#
# I denne oppgaven skal du sammenligne to lister og studere hvor like de er hverandre.
#
# 25% av eksamen er flervalgsoppgaver, og her skal vi anta at det alltid vil være 20 oppgaver. Riktige svar for oppgavene er som følger:
# Riktige svar|- |- |-
# ---|---|---|---
# 1.A|6.A|11.D|16.A
# 2.C |7.B| 12.A| 17.B
# 3.B| 8.A |13.C |18.A
# 4.D |9.C| 14.C| 19.C
# 5.A |10.A| 15.B| 20.D
#
# **a)**
# Lag en liste *fasit* som inneholder de korrekte svarene.
#
# ***Skriv svaret ditt i boksen under.***
# In[8]:
# Answer key for the 20 multiple-choice exam questions (index 0 = question 1).
fasit=['A', 'C', 'B', 'D', 'A', 'A', 'B', 'A', 'C', 'A', 'D', 'A', 'C', 'C', 'B', 'A', 'B', 'A', 'C', 'D']
# **b)**
# Lag en funksjon `sjekk_svar` som tar inn studentens_svar som argument. studentens_svar er en liste som inneholder en students svar på oppgavene. Ved å sammenligne studenten sine svar med de riktige svarene, skal funksjonen returnere hvor mange prosent av oppgavene studenten klarte.
#
# Eksempel på kjøring:
#
# ```python
# print(sjekk_svar(['A', 'C', 'B', 'D', 'A', 'A', 'B', 'A', 'C', 'A', 'D', 'A', 'C', 'C', 'B', 'A', 'B', 'A', 'A', 'C'])) # Gir 90% riktig
# print(sjekk_svar(fasit)) # Gir 100% riktig
# ```
#
# ***Skriv svaret ditt i boksen under.***
# In[9]:
def sjekk_svar(svar):
    """Compare a student's answers against the global `fasit` answer key.

    Args:
        svar: list of answer letters, one per question.

    Returns:
        The score as a string of the form 'NN% riktig'.
    """
    riktige = sum(1 for gitt, riktig in zip(svar, fasit) if gitt == riktig)
    prosent = 100 * riktige // len(fasit)
    # Return (rather than print) the score so `print(sjekk_svar(...))`
    # shows e.g. '90% riktig' instead of an extra 'None' line, matching the
    # example runs documented above.
    return f'{prosent}% riktig'


print(sjekk_svar(['A', 'C', 'B', 'D', 'A', 'A', 'B', 'A', 'C', 'A', 'D', 'A', 'C', 'C', 'B', 'A', 'B', 'A', 'A', 'C'])) # Gir 90% riktig
print(sjekk_svar(fasit)) # Gir 100% riktig

# Har du gjort det på riktig måte skal koden under gi 20% riktig. Trykk på blokka under og trykk `ctrl + enter` for å kjøre den.

# In[10]:

print(sjekk_svar(['B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B']))
| [
"[email protected]"
] | |
d482bf2007e6ca4bc5811018ed948bdb73595fb1 | 9ac4fa2714f735e83a5527ad5961cf2219c2bc89 | /python prog vit ques/mongo/insert.py | 2e32144973dd47cdb21628eadc4c585059dca2b0 | [] | no_license | jyotsnatiwary/Data-structures-algo | 40705928e4332af166c9a27b238cffb4a6bdffd2 | 6051c51638328db932e35d644fa6964d7c5ec509 | refs/heads/master | 2023-03-09T01:49:32.622607 | 2021-02-15T19:58:57 | 2021-02-15T19:58:57 | 294,473,802 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | import pymongo
# Connect to a MongoDB server on the local machine (default port 27017).
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
# Databases and collections are created lazily on first write.
mydb = myclient["mydatabase"]
mycol = mydb["customers"]
# Document to store.
mydict = { "name": "John", "address": "Highway 37" }
# Insert one document; the returned result carries the generated _id.
x = mycol.insert_one(mydict)
| [
"[email protected]"
] | |
47cf272440f88b1aa5b80e249d9103aa94560525 | a2c7064e18bc358ee1fe85ed4058cd3054515eaa | /sfepy/fem/extmods/setup.py | 1f43f58ae18fc236122554fb40e8877e9ab18b32 | [
"BSD-3-Clause"
] | permissive | akshaydolas09/sfepy | b775b9d701bec84377eccd0a8714cf2489f0cf8b | 6518a7b9970fe2d92c0f5675ee4e85d3b29794f9 | refs/heads/master | 2021-01-24T20:02:53.634703 | 2013-12-14T23:15:59 | 2013-12-14T23:15:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,034 | py | #!/usr/bin/env python
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for sfepy's extension modules.

    Compiles the shared `sfepy_common` C library plus the Cython/C extension
    modules, some of which link against that common library.
    """
    import os.path as op
    from numpy.distutils.misc_util import Configuration

    from sfepy import Config

    site_config = Config()
    os_flag = {'posix' : 0, 'windows' : 1}

    auto_dir = op.dirname(__file__)
    auto_name = op.split(auto_dir)[-1]
    config = Configuration(auto_name, parent_package, top_path)

    # Preprocessor defines shared by the library and all extensions.
    defines = [('__SDIR__', "'\"%s\"'" % auto_dir),
               ('SFEPY_PLATFORM', os_flag[site_config.system()])]
    if '-DDEBUG_FMF' in site_config.debug_flags():
        defines.append(('DEBUG_FMF', None))
    if '-DDEBUG_MESH' in site_config.debug_flags():
        defines.append(('DEBUG_MESH', None))

    common_src = ['fmfield.c', 'refmaps.c', 'geommech.c', 'common_python.c']
    config.add_library('sfepy_common',
                       sources=common_src,
                       extra_compiler_args=site_config.compile_flags(),
                       extra_link_args=site_config.link_flags(),
                       include_dirs=[auto_dir, site_config.python_include()],
                       macros=defines)

    # (module name, sources, links against sfepy_common?)
    ext_specs = [
        ('_fmfield', ['_fmfield.pyx'], True),
        ('mappings', ['mappings.pyx'], True),
        ('assemble', ['assemble.pyx'], False),
        ('bases', ['bases.pyx', 'lagrange.c'], True),
        ('cmesh', ['cmesh.pyx', 'geomtrans.c', 'mesh.c', 'meshutils.c',
                   'sort.c', 'common_python.c'], False),
        ('lobatto_bases', ['lobatto_bases.pyx', 'lobatto.c', 'lobatto1d.c'],
         True),
    ]
    for name, src, uses_common in ext_specs:
        kwargs = {}
        if uses_common:
            # These modules call into the common C library.
            kwargs.update(libraries=['sfepy_common'], depends=common_src)
        config.add_extension(name,
                             sources=src,
                             extra_compile_args=site_config.compile_flags(),
                             extra_link_args=site_config.link_flags(),
                             include_dirs=[auto_dir],
                             define_macros=defines,
                             **kwargs)

    # Include *.pxd files in distribution tarball and install them along
    # with the extension modules.
    pxd_files = ['mappings.pxd', 'types.pxd', '_fmfield.pxd']
    config.add_data_files(('', pxd_files))

    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
| [
"[email protected]"
] | |
e3d86e847164add00cad6f8328582c87bf288fdc | f0d1bfb45ff5fad468270b586f4cebc3aa73da10 | /1-Docker/Typing_practice_app/Symbiosis/urls.py | efceb0bf0b42d7c92e9f26ccdd7776802bc3aebb | [] | no_license | D-GopalKrishna/Django-with-Docker | 28a87a94fe224af9a1d9e3d1ef5df8c48ace6622 | f3241ce01e3c6dc30be4f565037723f710619aa8 | refs/heads/master | 2023-03-01T21:29:58.106486 | 2021-01-29T09:27:34 | 2021-01-29T09:27:34 | 331,938,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | from django.urls import path
from . import views
urlpatterns = [
    # Root URL: render the typing-practice page.
    path('', views.rendertypingPage, name="rendertypingPage"),
]
| [
"[email protected]"
] | |
# Test your code little by little.
# Do not test everything only at the end, as that makes errors harder to find.
# When testing your solution, do not limit yourself to the example case.

# Map each valid banknote value to the animal printed on it.
# (The original chain used `if (x == 5 or 2 or 10 ...)`, which is always
# true because the bare integers are truthy; a dict lookup avoids that bug
# and the long elif ladder.)
ANIMAIS = {
    2: "Tartaruga",
    5: "Garca",
    10: "Arara",
    20: "Mico-leao-dourado",
    50: "Onca-pintada",
    100: "Garoupa",
}

cedula = int(input("valor da cedula: "))
print("Entrada:", cedula)
# Any value that is not a banknote falls back to "Invalido".
print("Animal:", ANIMAIS.get(cedula, "Invalido"))
| [
"[email protected]"
] | |
ef3c22bf6d7baa651ec5bd61199cd7a54b6ae4f4 | 13ea98f80a13da42edacf845a35c00d401e5465a | /magnetic_matter~.py | 3e5246a2a6e36707c33da062c3df033b986f2fe3 | [] | no_license | ikursakov/PhysPython | 5b02d76be0cc5756fd30a1c36a6cc5fbe3897ce2 | da5dbd9ddd12429b55edba7e4593e1ce205f1830 | refs/heads/master | 2021-07-18T14:29:01.296885 | 2021-01-25T00:12:36 | 2021-01-25T00:12:36 | 235,593,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,317 | py | # -*- coding: utf-8 -*-
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.6.0
# kernelspec:
# display_name: Python 3 (Spyder)
# language: python3
# name: python3
# ---
# # Магнитное поле в веществе
# Определить поля вектора магнитной индукции $\vec{B}$ и вектора напряжённости магнитного поля $\vec{H}$ внутри и вне цилиндра длиной $l$ и радиусом $a$ с замороженной намагниченностью $\vec{M}$, параллельной оси цилиндра.
#
# 
# Намагниченность цилиндра по-определению равна
# $$\vec{M} = \frac{\vec{p_m}}{V}$$
# где $\vec{p_m}$ — вектор магнитного момента, $V$ — объём цилиндра.
#
# В рамках модели Ампера намагниченность постоянного магнита эквивалентна создаваемой электромагнитом, по боковой поверхности которого по кругу течёт ток намагничивания $I_m$, поэтому магнитный момент цилиндра
# $$\vec{p_m} = I_m S \vec{n}, \;V = S\,l,$$
# где $S$ — площадь плоскости элементарного контура, т.е. торца цилиндра.
#
# Отсюда ток намагничивания: $$I_m = l\,M.$$
# Напряжённость магнитного поля $d\vec{H}$ в точке на расстоянии $\vec{r^\prime}$ от элемента тока намагничивания $\vec{dI_m} = dI_m\,d\vec{l}$ с координатами $(x_0,y_0,z_0)$ по закону Био - Савара - Лапласа определяется как
# $$d\vec{H} = \frac{dI_m \,\left[ d\vec{l} \times \vec{r^\prime} \right] }{4\pi \left| \vec{r^\prime} \right|^3},$$
# где элемент тока
# $$dI_m = I_m\frac{dz_0}{l} = M\,dz_0,$$
# вектор-элемент контура
# $$d\vec{l} = \left\{ -a\,d\varphi\,\sin\varphi, \;a\,d\varphi\,\cos\varphi, \;0 \right\},$$
# вектор, направленный от элемента тока в точку наблюдения,
# $$\vec{r^\prime} = \left\{ x - x_0, y - y_0, z - z_0 \right\} = \left\{ x - a\,\cos\varphi, \;y - a\,\sin\varphi, \;z - z_0\right\},$$
# а их векторное произведение
# $$\left[ d\vec{l} \times \vec{r^\prime} \right] = \hat{\vec{q}}\,a\,d\varphi,\; \hat{\vec{q}} = \left\{ \cos\varphi\,(z-z_0), \;\sin\varphi\,(z-z_0), \;-\sin\varphi(y-a\sin\varphi)-\cos\varphi(x-a\cos\varphi)\right\}.$$
# Здесь учтено, что ось магнитного цилиндра совпадает с осью $z$.
# Тогда напряжённость магнитного поля от всего цилиндрического магнита рассчитывается как
# $$\vec{H}(x,y,z) = \iiint d\vec{H} = \frac{M\,a}{4\pi} \int_0^l dz_0 \int_0^{2\pi} \frac{ \hat{\vec{q}} d\varphi}{ \left| \vec{r^\prime} \right|^3 }.$$
# Наконец, вектор магнитной индукции $\vec{B}$ внутри цилиндрического магнита по-определению равен
# $$\vec{B} = \mu_0 \left( \vec{H} + \vec{M} \right),$$
# а вне магнита
# $$ \vec{B} = \mu_0 \vec{H}.$$
# ## Постановка вычислительной задачи
# Начнём с импорта необходимых библиотек: `NumPy` для работы с массивами, `SciPy` для численного интегрирования, `tqdm` для отображения индикатора выполнения (прогресс-бара).
# +
import numpy as np
import scipy.integrate
from tqdm import tqdm
# -
# Зададим конкретные численные значения для констант, входящих в задачу: намагниченность $M$, длина и радиус магнита $l, a$.
# +
M = 20000        # magnetization, A/m
l = 0.02         # magnet length, m
a = 0.005        # magnet radius, m
mu0 = 1.2566e-6  # vacuum permeability, H/m
# -

# Определим функции, возвращающие компоненты вектора напряжённости магнитного поля $\vec{H}$. При этом воспользуемся методом `scipy.integrate.dblquad()` (double quadrature) для численного расчёта двойного определённого интеграла. Следует обратить внимание, что в `dblquad()` предполагается, что интегрируемая функция $f$ от двух переменных принимает их в порядке $f(\xi_2, \xi_1)$.

# +
def dH_xyz(x, y, z):
    """Integrands for the three components of the magnetic field strength.

    Args:
        x, y, z (float): coordinates of the observation point.

    Returns:
        tuple of functions: integrands (phi, z0) -> dHx, dHy, dHz.
    """
    def r_cubed(z0, phi):
        # |r'|^3 for the vector from the current element to (x, y, z).
        r = np.sqrt( (x - a*np.cos(phi))**2 + (y - a*np.sin(phi))**2 + (z - z0)**2 )
        return r*r*r

    # NB: dblquad expects f(inner, outer), hence the (phi, z0) argument order.
    def integrand_x(phi, z0):
        return np.cos(phi)*(z - z0) / r_cubed(z0, phi)

    def integrand_y(phi, z0):
        return np.sin(phi)*(z - z0) / r_cubed(z0, phi)

    def integrand_z(phi, z0):
        return -( np.sin(phi)*(y - a*np.sin(phi)) + np.cos(phi)*(x - a*np.cos(phi)) ) / r_cubed(z0, phi)

    return integrand_x, integrand_y, integrand_z
def getH_xyz(XYZ):
    """Magnetic field strength H at every grid node.

    Args:
        XYZ (4D ndarray): observation point coordinates, shape (nx, ny, nz, 3).

    Returns:
        ndarray: components Hx, Hy, Hz per node, same shape as `XYZ`.
    """
    nx, ny, nz, nf = XYZ.shape
    progressbar = tqdm(total=nx*ny*nz*nf, ncols=80)

    result = np.empty_like( XYZ )
    # Same traversal order as nested k/j/i loops: i varies fastest.
    for k, j, i in np.ndindex(nz, ny, nx):
        x, y, z = XYZ[i,j,k,:]
        integrands = dH_xyz(x, y, z)  # functions of (phi, z0) at this point
        for comp in range(nf):
            value, _abserr = scipy.integrate.dblquad(
                integrands[comp],   # integrand, takes arguments as (phi, z0)
                0,                  # lower limit for the outer variable z0
                l,                  # upper limit for the outer variable z0
                lambda z0: 0,       # lower limit for the inner variable phi
                lambda z0: 2*np.pi  # upper limit for the inner variable phi
            )
            result[i,j,k, comp] = value
            progressbar.update(1)
    progressbar.close()

    return result * M*a/4/np.pi
# -
# Определим функцию для расчёта поля вектора магнитной индукции $\vec{B}(x,y,z)$ на основе предварительно вычисленного поля напряжённости
def getB_xyz(XYZ, H_xyz):
    """Magnetic flux density B at every grid node.

    Args:
        XYZ (4D ndarray): observation point coordinates.
        H_xyz (4D ndarray): precomputed magnetic field strength components.

    Returns:
        ndarray: components Bx, By, Bz per node, same shape as `XYZ`.
    """
    magnetization = np.array( [0,0,M] )  # frozen magnetization vector

    def inside_magnet(x, y, z):
        # Inside the cylinder of radius `a` spanning 0 <= z <= l?
        return x**2 + y**2 <= a**2 and 0 <= z <= l

    nx, ny, nz, nf = XYZ.shape
    flux = np.empty_like( XYZ )
    for k, j, i in np.ndindex(nz, ny, nx):
        x, y, z = XYZ[i,j,k,:]
        field = H_xyz[i,j,k]
        # B / mu0 = H + M inside the magnet, H outside.
        flux[i,j,k] = field + magnetization if inside_magnet(x, y, z) else field
    return mu0 * flux
# ### Расчётная сетка
#
# Очевидно задача имеет осевую симметрию, поэтому, не нарушая общности, можно проводить расчёты в плоскости $(x,z)$ при $y=0$.
#
# Определим формально трёхмерную, но фактически двухмерную, декартову сетку, состоящую из равномерно распределённых узлов. В качестве расчётной области выберем прямоугольник со сторонами $x,y,z \in \left[-0.03,0.03\right], 0, \left[0,0.05\right]$. Границы расчетной области обозначим переменными `{x,y,z}_start` и `{x,y,z}_end`, а число узлов вдоль каждого направления — `grid_N{x,y,z}`.
#
# Сетка создаётся с помощью специального объекта `numpy.mgrid[start:end:step,...]`, который генерирует координаты узлов с равномерным распределением от `start` до `end` с шагом `step`. Причём eсли `step = N*1j` (мнимая комплексная величина), то он определяется числом точек разбиения `N`.
#
# Координаты сеточных узлов хранятся в 4х-мерном массиве `grid_XYZ` размерностью `(Nx, Ny, Nz, 3)`, где `N*` — число узлов вдоль каждого направления, 3 — число значений в каждом узле (координаты $x$, $y$ и $z$).
# +
# Bounds of the computational domain
x_start, x_end = -0.02, 0.02
y_start, y_end = 0.00, 0.00
z_start, z_end = -0.01, 0.04

# Number of grid nodes along each direction
grid_Nx, grid_Ny, grid_Nz = 21, 1, 7


def make_grid():
    """Uniform Cartesian grid as a 4D array of node coordinates.

    Returns:
        ndarray of shape (grid_Nx, grid_Ny, grid_Nz, 3): per-node (x, y, z).
    """
    axes = (np.linspace(x_start, x_end, grid_Nx),
            np.linspace(y_start, y_end, grid_Ny),
            np.linspace(z_start, z_end, grid_Nz))
    # 'ij' keeps the (x, y, z) axis order instead of meshgrid's default 'xy'.
    mesh = np.meshgrid(*axes, indexing='ij')
    # Bundle into one 4D array of coordinate triples [..., (x, y, z)].
    return np.stack(mesh, axis=3)


grid_XYZ = make_grid()
print( 'Размерность сетки: ', grid_XYZ.shape )
# -
# ### Расчёт магнитного поля
#
# Выполним расчёт поля напряжённости $\vec{H}(x,y,z)$ в узлах сетки. Вычисления не оптимизированы и в зависимости от мелкости разбиения могут продолжаться заметное время.
# +
# Long-running numerical integration (getH_xyz is defined earlier in the notebook).
Hxyz = getH_xyz(grid_XYZ)
# Magnitude of the magnetic field strength vector at every grid node.
absH = np.sqrt( Hxyz[:,:,:, 0]**2 + Hxyz[:,:,:, 1]**2 + Hxyz[:,:,:, 2]**2 )
# -
# На основе поля напряжённости вычислим поле магнитной индукции $\vec{B}(x,y,z)$ в узлах сетки:
# +
# Flux density derived from the H field (adds magnetization inside the magnet).
Bxyz = getB_xyz(grid_XYZ, Hxyz)
# Magnitude of the magnetic flux density vector at every grid node.
absB = np.sqrt( Bxyz[:,:,:, 0]**2 + Bxyz[:,:,:, 1]**2 + Bxyz[:,:,:, 2]**2 )
# -
# ## Визуализация
#
# Как обычно для построения графиков воспользуемся библиотекой `MatplotLib`. Дополнительно потребуется `matplotlib.patches` для рисования фигуры магнита поверх графика.
# +
# отображение графиков внутри блокнота (вызывать ПЕРЕД импортом библиотеки)
%matplotlib inline
import matplotlib.pyplot as plt
# Рисование поверх графиков
import matplotlib.patches as pltpatches
# -
# Для визуализации полученного решения построим поле абсолютного значения $\left|\vec{H}\right|$ с помощью `matplotlib.contourf()`, а также векторное поле $\vec{H}$ в плоскости $(x,z)$ с помощью `matplotlib.streamplot()`. При этом на контурном графике используем увеличенное число цветовых уровней. Для наглядности отобразим поверх графика изображение контуров магнита с помощью `matplotlib.patches.Rectangle`.
# +
# 2D fields in the (x, z) plane (y index 0).
# NB: transposed so array axes match `contourf`'s expectation (rows = z, cols = x).
X, Z = grid_XYZ[:,0,:, 0].T, grid_XYZ[:,0,:, 2].T
Hx, Hz = Hxyz[:,0,:, 0].T, Hxyz[:,0,:, 2].T
aH = absH[:,0,:].T
size = 6
# Keep the figure aspect ratio equal to the domain's z/x extent.
plt.figure(figsize=(size, (z_end-z_start)/(x_end-x_start)*size))
plt.xlabel('x, м')
plt.ylabel('z, м')
# Filled contour plot of |H| with 30 colour levels.
figC = plt.contourf(X, Z, aH, 30)
cbar = plt.colorbar(figC)
cbar.ax.set_ylabel('H, A/м')
# Streamlines of the H vector field.
figV = plt.streamplot(X,Z, Hx,Hz, color='k')
# Magnet outline (radius a, length l — module-level globals).
plt.gca().add_patch(pltpatches.Rectangle((-a,0), 2*a, l, edgecolor='r', facecolor='none'))
plt.show()
# -
# Аналогичные графики построим для поля магнитной индукции $\vec{B}(x,y,z)$.
# +
# 2D fields in the (x, z) plane (y index 0).
# NB: transposed so array axes match `contourf`'s expectation (rows = z, cols = x).
X, Z = grid_XYZ[:,0,:, 0].T, grid_XYZ[:,0,:, 2].T
Bx, Bz = Bxyz[:,0,:, 0].T, Bxyz[:,0,:, 2].T
aB = absB[:,0,:].T
size = 6
# Keep the figure aspect ratio equal to the domain's z/x extent.
plt.figure(figsize=(size, (z_end-z_start)/(x_end-x_start)*size))
plt.xlabel('x, м')
plt.ylabel('z, м')
# Filled contour plot of |B| with 30 colour levels.
figC = plt.contourf(X, Z, aB, 30)
cbar = plt.colorbar(figC)
cbar.ax.set_ylabel('B, Т')
# Streamlines of the B vector field.
figV = plt.streamplot(X,Z, Bx,Bz, color='k')
# Magnet outline (radius a, length l — module-level globals).
plt.gca().add_patch(pltpatches.Rectangle((-a,0), 2*a, l, edgecolor='r', facecolor='none'))
plt.show()
# -
| [
"[email protected]"
] | |
98e1f4b684ce01a53c3137b621a085607febcb4c | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /ec2_read_2/scheduled-instance-availability_list.py | 223f1e5aac895b58ce0cbfd9584db5de193fc0ff | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 618 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
# Make the repository root (one level up) importable so `common` resolves.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import execute_two_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-scheduled-instance-availability.html
if __name__ == '__main__':
    """
    """
    # Help text shown to the user listing the two required parameters.
    parameter_display_string = """
    # first-slot-start-time-range :
    # recurrence :
    """
    # Prompt for the two parameter values, then run
    # `aws ec2 describe-scheduled-instance-availability` with them.
    execute_two_parameter("ec2", "describe-scheduled-instance-availability", "first-slot-start-time-range", "recurrence", parameter_display_string)
"[email protected]"
] | |
ff231ff6ef6cc7841fb8b8ec677794c305ffdc53 | 03efe8d9df9fe4c6849bc77c99900159ca2a15ec | /views/views.py | af7cb1c20807bc7511fd89b526785f03d6421008 | [] | no_license | Farighno/Bot-n-Emergencia | fdc15583a5a31efd43b7fd33a2bca6d0e2179674 | 5a5fdec74289afc6bd59b23b0ab9610c536f48ec | refs/heads/master | 2021-01-12T17:53:46.000708 | 2016-10-24T18:49:18 | 2016-10-24T18:49:18 | 71,296,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,446 | py | from BotPan import app
from flask import render_template, redirect
from api.apis import mandaMensaje, registro, ingreso, telefonosUrgencias
from forms.forms import identificacion, regTelefonosUrgencias
@app.route("/")
def intro():
return render_template("index.html")
@app.route("/reg", methods=('GET','POST'))
def registrarse():
miForm = identificacion()
if miForm.validate_on_submit():
registro(miForm.nombres.data, miForm.identificadores.data)
return redirect ("/tels")
else:
return render_template("register.html", form = miForm)
@app.route("/ingreso", methods=('GET','POST'))
def ingresar():
miForm = identificacion()
if miForm.validate_on_submit():
global nom, ident, users
nom=(miForm.nombres.data)
ident=(miForm.identificadores.data)
users=ingreso(nom,ident)
return redirect ("/main")
else:
return render_template("login.html", form = miForm)
@app.route("/tels", methods=('GET','POST'))
def telef():
miForm = regTelefonosUrgencias()
if miForm.validate_on_submit():
global users
telefonosUrgencias(users, miForm.data)
return redirect ("/main")
else:
return render_template("telefonos.html", form = miForm)
@app.route("/main")
def main():
return render_template("switch.html")
@app.route("/alerta")
def alerta():
global users
mandaMensaje(users)
return redirect ("/main")
| [
"[email protected]"
] | |
2fe2da55b13f9076998a27c8b7fd9e3e739cb822 | e901838dd4fc60221eb65cd7e34495c04a6c03fc | /02-numpy/page96.py | f113853e24e60d0c7dcc9140bed86f19423d20fd | [] | no_license | j415/data_analysis | 7fdd7f27005ff2d9940170a1ac702c10d36c33b4 | bfccb8a0fb27702a623c2cec2c13bca84f2b2d48 | refs/heads/master | 2020-03-28T23:42:00.390912 | 2018-09-29T08:50:59 | 2018-09-29T08:50:59 | 149,304,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | import numpy as np
# Seed the RNG so the example output is reproducible.
np.random.seed(10)
# Draw a 3x4 array of random integers in [0, 20).
t = np.random.randint(0, 20, (3, 4))
print(t)
"[email protected]"
] | |
c0e5f4bf9977533db466b1310984df01269e7eec | cf6ebecf4fb96537519977b992e71b71cbcda9ff | /python/importing/dqmclassic_importer.py | 799a1b5b88551b84270600dac5ee4a2653626fe4 | [] | no_license | srimanob/dqmgui | 594753893a4c855b3c526280e32d0c3f6c6c0fbc | a805e1e7247154c72f3417cdb8773d892179d06f | refs/heads/master | 2023-04-30T10:27:24.300255 | 2021-04-02T17:06:21 | 2021-04-02T17:06:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,277 | py | import re
from ioservice import IOService
from data_types import MEInfo, ScalarValue, EfficiencyFlag, QTest
from nanoroot.tfile import TFile
from reading.reading import DQMCLASSICReader
class DQMCLASSICImporter:
    """Lists monitor elements (MEs) stored in legacy TDirectory-format DQM ROOT files."""

    # Don't import these (known obsolete/broken stuff)
    # The notorious SiStrip bad component workflow creates a varying number of MEs
    # for each run. We just hardcode-ban them here to help the deduplication
    __BLACKLIST = re.compile(b'BadModuleList')

    # Shared I/O service used to open local/remote files into a buffer.
    ioservice = IOService()

    @classmethod
    async def get_me_lists(cls, filename, dataset, run, lumi):
        """
        Returns a list which contains dicts. Keys of the dicts are (run, lumi)
        tuples and values are lists of tuples (me_path, me_info). Full structure:
        [(run, lumi):[(me_path, me_info)]]
        me_path is normalized and represented as a binary string.
        We can return multiple (run, lumi) pairs because some file formats might
        contain multiple runs/lumis in one file.
        me_path, me_info will be saved as separate blobs in the DB.
        """
        # TDirectory files are read in full, so no block cache / XRootD needed.
        buffer = await cls.ioservice.open_url(filename, blockcache=False, xrootd=False)
        tfile = TFile().load(buffer)
        result = cls.list_mes(tfile, run)
        # A TDirectory file holds a single run; lumi is always reported as 0.
        return { (run, 0): result }

    @classmethod
    def list_mes(cls, tfile, run):
        """Walk the whole file and build [(me_path, MEInfo)] for every ME found."""
        result = []
        fulllist = tfile.fulllist()
        for path, name, class_name, offset in fulllist:
            if cls.__BLACKLIST.search(path):
                continue
            if class_name == b'TObjString':
                # Scalars, efficiency flags and QTest results are encoded in
                # specially formatted TObjString names; decode the kind first.
                parsed = DQMCLASSICReader.parse_string_entry(name)
                if isinstance(parsed, EfficiencyFlag):
                    item = (path + parsed.name + b'\0e=1', MEInfo(b'Flag'))
                elif isinstance(parsed, ScalarValue):
                    if parsed.type == b'i':
                        item = (path + parsed.name, MEInfo(b'Int', value = int(parsed.value.decode("ascii"))))
                    elif parsed.type == b'f':
                        item = (path + parsed.name, MEInfo(b'Float', value = float(parsed.value.decode("ascii"))))
                    elif parsed.type == b's':
                        item = (path + parsed.name, MEInfo(b'XMLString', offset))
                    else:
                        # An unknown Scalar type, skip it
                        continue
                else:
                    # QTest. Only save mename and qtestname, values need to be fetched later.
                    # Separate QTest name with \0 to prevent collisions with ME names.
                    item = (path + parsed.name + b'\0.' + parsed.qtestname,
                            MEInfo(b'QTest', offset, qteststatus=int(parsed.status.decode("ascii"))))
            else:
                # Histogram-like object: record its class and file offset for lazy reads.
                item = (path + name, MEInfo(class_name, offset))
            # Append an item to a final result list
            result.append(item)
        return result

    @classmethod
    def parse_filename(cls, full_path):
        """Splits full path to a TDirectory ROOT file and returns run number and dataset."""
        name = full_path.split('/')[-1]
        # Assumes the canonical 'DQM_V####_R#########__<parts>.root' naming:
        # chars [11:20] are the zero-padded 9-digit run number — confirm for other layouts.
        run = name[11:20].lstrip('0')
        # Dataset parts are '__'-joined in the name; '.root' (5 chars) is stripped.
        dataset = '/'.join(name[20:-5].split('__'))
        return run, dataset
| [
"[email protected]"
] | |
7a79bbf9912312d72c4878a1041c7ee4dce6dcac | fcb8b4bcb56d91cecd380a7134b089316d4280f4 | /algorithm_trees.py | a170d61b69dbbe3525702d8ee8681ee2338882d2 | [] | no_license | DebW99/tile_based_DNA_assembly_trees | fe09c48dd28a94258fc2352b2312d03db9d97d94 | 21f8f2f7cdcfe3b7c1a281ed2779cfe5fddb1906 | refs/heads/main | 2023-04-28T11:26:00.605555 | 2021-05-12T13:22:35 | 2021-05-12T13:22:35 | 366,721,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,268 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# =============================================================================
# Created By : Debbie Wagenaar
# Created Date: April 2021
# =============================================================================
"""
This code determines the minimum amount of tiles and bond-edge types needed in
scenario 2 and/or 3 for tile-based DNA-assembly and draws the trees coming out
of the algorithm. The algorithms programmed are described in "Minimal tile and
bond-edge types for self-assembling DNA graphs" by J. Ellis-Monaghan et. al.
To use this code you need to create a graph G (see example below)
G = nx.Graph()
G.add_nodes_from([1,2,3,4,5,6,7,8,9,10])
G.add_edges_from([(1,2),(2,3),(2,5),(5,6),(3,4),(3,7),(7,8),(8,9),(8,10)])
and next call the desired algorithm (scenario 2 or 3)
"""
# =============================================================================
# Imports
# =============================================================================
import matplotlib.pyplot as plt
import networkx as nx
from networkx.drawing.nx_pydot import graphviz_layout
#Make an LSO of a graph G
def LSO(G):
    """Construct the least-subtree orientation (LSO) of the tree G.

    Each edge is cut in turn and oriented from the endpoint lying in the
    larger resulting component towards the endpoint in the smaller one, so
    every arc points "down" into its lesser subtree. The orientation is
    drawn with graphviz and saved to ``LSO.png``.

    Args:
        G: an undirected networkx tree (temporarily mutated, then restored).

    Returns:
        networkx.DiGraph: the oriented copy of G.
    """
    oriented = nx.DiGraph()
    oriented.add_nodes_from(list(G.nodes))
    for u, v in list(G.edges()):
        # Cut the edge and compare the sizes of the two components it joined.
        G.remove_edge(u, v)
        size_u = len(nx.node_connected_component(G, u))
        size_v = len(nx.node_connected_component(G, v))
        # Orient from the larger side towards the smaller (ties go u -> v).
        if size_v > size_u:
            oriented.add_edge(v, u)
        else:
            oriented.add_edge(u, v)
        # Restore G to its original state.
        G.add_edge(u, v)
    # Draw the orientation with graphviz's hierarchical layout.
    pos = graphviz_layout(oriented, prog="dot")
    nx.draw(oriented, pos, with_labels=True)
    plt.savefig("LSO.png")  # save as png
    plt.show()  # display
    return oriented
#Scenario 2
def alg2(G):
    """Scenario 2: count minimal bond-edge and tile types for tree G.

    Labels every edge of the LSO of G with the size of its lesser subtree,
    and collects one tile per distinct sorted lesser-subtree size sequence.
    Saves/shows the labelled orientation as ``scenario2.png``.

    Args:
        G: an undirected networkx tree.

    Returns:
        tuple: (number of bond-edge types B2, number of tile types T2).
    """
    #Make an LSO of graph G
    T = LSO(G)
    #Find the root (the unique vertex with in-degree 0 in the LSO)
    root = 0
    for n,d in T.in_degree():
        if d == 0:
            root = n
    #Traverse through the tree from the leaves to the root
    T_list = list(nx.dfs_postorder_nodes(T, source = root))
    labels = []  # distinct edge labels (subtree sizes) seen so far
    tiles = []   # distinct tile types seen so far
    #Label all the edges
    for x in T_list:
        if x != root:
            k = len(list(nx.dfs_tree(T, x))) #Determine size of subtrees
            T.edges[list(T.predecessors(x))[0], x]["labels"] = k #Label the edge with size of subtree
            if k not in labels: #Add label to list of labels for total
                labels.append(k)
        tile = []
        if list(T.successors(x)) == []: #Add first empty tile for leaves
            if tile not in tiles:
                tiles.append([])
        else:
            #Determine lesser-subtree sequence (sizes of all subtrees below x, sorted)
            for i in list(nx.dfs_preorder_nodes(T, source = x)):
                tile.append(len(list(nx.dfs_tree(T, i))))
            tile.sort()
            if tile not in tiles: #Add new tiles for total
                tiles.append(tile)
    #Draw LSO with bond-edge types
    pos = graphviz_layout(T, prog="dot")
    nx.draw(T, pos, with_labels=True)
    nx.draw_networkx_edge_labels(T, pos, edge_labels=nx.get_edge_attributes(T,'labels'), font_color='green')
    plt.savefig("scenario2.png")
    plt.show()
    return(len(labels), len(tiles))
#scenario 3
def alg3(G):
    """Scenario 3: count minimal bond-edge and tile types for tree G.

    Leaf edges of the LSO get label 1; moving up level by level, an edge gets
    a fresh label only when the subtree below it is not isomorphic to any
    subtree already labelled. Saves/shows the result as ``scenario3.png``.

    Args:
        G: an undirected networkx tree.

    Returns:
        tuple: (number of bond-edge types B3, number of tile types T3 = B3 + 1).
    """
    #Make an LSO of graph G
    T = LSO(G)
    #Determine the height
    h = nx.dag_longest_path_length(T)
    #Find the root (the unique vertex with in-degree 0)
    root = 0
    for n,d in T.in_degree():
        if d == 0:
            root = n
    subgraphs = []  # one representative subtree per label issued so far
    #Find the leaves and labelling those edges with 1
    for x in T.nodes():
        if T.out_degree(x) == 0 and T.in_degree(x) == 1:
            T.edges[list(T.predecessors(x))[0], x]["labels"] = 1
    j = 1
    for i in range(h-1, 0, -1): #Iterate over the levels from bottom to top
        for l in list(nx.descendants_at_distance(T,root,i)): #Iterate over vertices per level
            if subgraphs == []:
                #First internal subtree encountered always gets a fresh label
                if T.edges[list(T.predecessors(l))[0], l] == {}:  # edge still unlabelled
                    j = j + 1
                    T.edges[list(T.predecessors(l))[0], l]["labels"] = j #Label the edges
                    subgraphs.append(nx.dfs_tree(T,l)) #Add current subtree to list
            else:
                #New label only if this subtree is not isomorphic to any seen one
                if not any([nx.is_isomorphic(nx.dfs_tree(T,l), sub) for sub in subgraphs]) and T.edges[list(T.predecessors(l))[0], l] == {}:
                    j = j + 1
                    T.edges[list(T.predecessors(l))[0], l]["labels"] = j
                    subgraphs.append(nx.dfs_tree(T,l)) #Add current subtree to list
    #Draw LSO with bond-edge types
    pos = graphviz_layout(T, prog="dot")
    nx.draw(T, pos, with_labels=True)
    nx.draw_networkx_edge_labels(T, pos, edge_labels=nx.get_edge_attributes(T,'labels'), font_color='green')
    plt.savefig("scenario3.png")
    plt.show()
    return(j, j+1)
#Make a graph
# Example: a 9-vertex tree; run both scenarios on it and show the results.
G = nx.Graph()
G.add_nodes_from([1,2,3,4,5,6,7,8,9])
G.add_edges_from([(1,3),(2,3),(3,4),(4,5),(5,6),(6,7),(7,8),(6,9)])
nx.draw(G, with_labels=True)
plt.savefig("G.png") # save as png
plt.show()
# Scenario 2: minimum number of bond-edge types (B2) and tile types (T2).
b2,t2 = alg2(G)
print("B2 =", b2, "and T2 =", t2)
# Scenario 3: minimum number of bond-edge types (B3) and tile types (T3).
b3,t3 = alg3(G)
print("B3 =", b3, "and T3 =", t3)
| [
"[email protected]"
] | |
5fabb56daf386a587981ca756429c23831ecd423 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_to_M_Gk3_no_pad/pyr_Tcrop256_pad60_jit15/pyr_3s/L7/step09_3side_L7.py | c950ee398333287fa3c141e7a3f687dc98eb8ccd | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59,545 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
from tkinter import S  # NOTE(review): unused import — looks accidental; confirm before removing
code_exe_path = os.path.realpath(__file__)                           ### absolute path of the currently running script
code_exe_path_element = code_exe_path.split("\\")                    ### split the path to locate the kong_model2 directory
kong_layer = code_exe_path_element.index("kong_model2")              ### depth of kong_model2 within the path
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])  ### directory of kong_model2 (the project root)
import sys                                                           ### add kong_model2 to sys.path so project modules resolve
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print("   code_exe_path:", code_exe_path)
# print("   code_exe_path_element:", code_exe_path_element)
# print("   kong_layer:", kong_layer)
# print("   kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_I_to_M import I_to_M
from step08_b_use_G_generate_0_util import Tight_crop
from step09_c_train_step import Train_step_I_to_M
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME

# Generator op: tight crop with 60px padding, resized to 256x256, no jitter at generation time.
use_what_gen_op     = I_to_M( Tight_crop(pad_size=60, resize=(256, 256), jit_scale= 0) )
# Train step: same crop but with 15px jitter for augmentation during training.
use_what_train_step = Train_step_I_to_M( Tight_crop(pad_size=60, resize=(256, 256), jit_scale=15) )

import time
start_time = time.time()  # wall-clock start — presumably used for timing further down; confirm
###############################################################################################################################################################################################
###############################################################################################################################################################################################
########################################################### Block1
### Block1
#########################################################################################
#3
pyramid_1side_1__2side_1__3side_1 = [3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3]
#6
pyramid_1side_2__2side_1__3side_1 = [3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3]
pyramid_1side_2__2side_2__3side_1 = [3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3]
pyramid_1side_2__2side_2__3side_2 = [3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3]
#10
pyramid_1side_3__2side_1__3side_1 = [3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3]
pyramid_1side_3__2side_2__3side_1 = [3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3]
pyramid_1side_3__2side_2__3side_2 = [3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3]
pyramid_1side_3__2side_3__3side_1 = [3, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3]
pyramid_1side_3__2side_3__3side_2 = [3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3]
pyramid_1side_3__2side_3__3side_3 = [3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3]
#15
pyramid_1side_4__2side_1__3side_1 = [3, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 3]
pyramid_1side_4__2side_2__3side_1 = [3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 3]
pyramid_1side_4__2side_2__3side_2 = [3, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 3]
pyramid_1side_4__2side_3__3side_1 = [3, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 3]
pyramid_1side_4__2side_3__3side_2 = [3, 3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 3]
pyramid_1side_4__2side_3__3side_3 = [3, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 3]
pyramid_1side_4__2side_4__3side_1 = [3, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 3]
pyramid_1side_4__2side_4__3side_2 = [3, 3, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3, 3]
pyramid_1side_4__2side_4__3side_3 = [3, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 3]
pyramid_1side_4__2side_4__3side_4 = [3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3]
#21
pyramid_1side_5__2side_1__3side_1 = [3, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 3]
pyramid_1side_5__2side_2__3side_1 = [3, 2, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 2, 3]
pyramid_1side_5__2side_2__3side_2 = [3, 3, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 3, 3]
pyramid_1side_5__2side_3__3side_1 = [3, 2, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3]
pyramid_1side_5__2side_3__3side_2 = [3, 3, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 3, 3]
pyramid_1side_5__2side_3__3side_3 = [3, 3, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 3, 3]
pyramid_1side_5__2side_4__3side_1 = [3, 2, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 3]
pyramid_1side_5__2side_4__3side_2 = [3, 3, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 3, 3]
pyramid_1side_5__2side_4__3side_3 = [3, 3, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 3, 3]
pyramid_1side_5__2side_4__3side_4 = [3, 3, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 3, 3]
pyramid_1side_5__2side_5__3side_1 = [3, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 3]
pyramid_1side_5__2side_5__3side_2 = [3, 3, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 3, 3]
pyramid_1side_5__2side_5__3side_3 = [3, 3, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 3, 3]
pyramid_1side_5__2side_5__3side_4 = [3, 3, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 3, 3]
pyramid_1side_5__2side_5__3side_5 = [3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3]
#28
pyramid_1side_6__2side_1__3side_1 = [3, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 3]
pyramid_1side_6__2side_2__3side_1 = [3, 2, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2, 3]
pyramid_1side_6__2side_2__3side_2 = [3, 3, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 3, 3]
pyramid_1side_6__2side_3__3side_1 = [3, 2, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 2, 3]
pyramid_1side_6__2side_3__3side_2 = [3, 3, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 3, 3]
pyramid_1side_6__2side_3__3side_3 = [3, 3, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 3, 3]
pyramid_1side_6__2side_4__3side_1 = [3, 2, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 2, 3]
pyramid_1side_6__2side_4__3side_2 = [3, 3, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 3, 3]
pyramid_1side_6__2side_4__3side_3 = [3, 3, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 3, 3]
pyramid_1side_6__2side_4__3side_4 = [3, 3, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 3, 3]
pyramid_1side_6__2side_5__3side_1 = [3, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 2, 3]
pyramid_1side_6__2side_5__3side_2 = [3, 3, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 3, 3]
pyramid_1side_6__2side_5__3side_3 = [3, 3, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 3, 3]
pyramid_1side_6__2side_5__3side_4 = [3, 3, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 3, 3]
pyramid_1side_6__2side_5__3side_5 = [3, 3, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 3, 3]
pyramid_1side_6__2side_6__3side_1 = [3, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 3]
pyramid_1side_6__2side_6__3side_2 = [3, 3, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 3, 3]
pyramid_1side_6__2side_6__3side_3 = [3, 3, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 3, 3]
pyramid_1side_6__2side_6__3side_4 = [3, 3, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 3, 3]
pyramid_1side_6__2side_6__3side_5 = [3, 3, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 3, 3]
pyramid_1side_6__2side_6__3side_6 = [3, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 3, 3]
#36
pyramid_1side_7__2side_1__3side_1 = [3, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 3]
pyramid_1side_7__2side_2__3side_1 = [3, 2, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 2, 3]
pyramid_1side_7__2side_2__3side_2 = [3, 3, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 3, 3]
def _pyramid_profile(side1, side2, side3):
    """Build one symmetric 15-slot conv-block-count profile.

    Each slot's value is decided by its distance from the nearer end of
    the list: 3 while the distance is below ``side3``, then 2 below
    ``side2``, then 1 below ``side1``, and 0 otherwise.  This reproduces
    the hand-written pyramid tables value-for-value.
    """
    profile = []
    for slot in range(15):
        edge_dist = min(slot, 14 - slot)
        if edge_dist < side3:
            profile.append(3)
        elif edge_dist < side2:
            profile.append(2)
        elif edge_dist < side1:
            profile.append(1)
        else:
            profile.append(0)
    return profile

# 1side_7 series (continued): 2side 3..7, 3side 1..2side
pyramid_1side_7__2side_3__3side_1 = _pyramid_profile(7, 3, 1)
pyramid_1side_7__2side_3__3side_2 = _pyramid_profile(7, 3, 2)
pyramid_1side_7__2side_3__3side_3 = _pyramid_profile(7, 3, 3)
pyramid_1side_7__2side_4__3side_1 = _pyramid_profile(7, 4, 1)
pyramid_1side_7__2side_4__3side_2 = _pyramid_profile(7, 4, 2)
pyramid_1side_7__2side_4__3side_3 = _pyramid_profile(7, 4, 3)
pyramid_1side_7__2side_4__3side_4 = _pyramid_profile(7, 4, 4)
pyramid_1side_7__2side_5__3side_1 = _pyramid_profile(7, 5, 1)
pyramid_1side_7__2side_5__3side_2 = _pyramid_profile(7, 5, 2)
pyramid_1side_7__2side_5__3side_3 = _pyramid_profile(7, 5, 3)
pyramid_1side_7__2side_5__3side_4 = _pyramid_profile(7, 5, 4)
pyramid_1side_7__2side_5__3side_5 = _pyramid_profile(7, 5, 5)
pyramid_1side_7__2side_6__3side_1 = _pyramid_profile(7, 6, 1)
pyramid_1side_7__2side_6__3side_2 = _pyramid_profile(7, 6, 2)
pyramid_1side_7__2side_6__3side_3 = _pyramid_profile(7, 6, 3)
pyramid_1side_7__2side_6__3side_4 = _pyramid_profile(7, 6, 4)
pyramid_1side_7__2side_6__3side_5 = _pyramid_profile(7, 6, 5)
pyramid_1side_7__2side_6__3side_6 = _pyramid_profile(7, 6, 6)
pyramid_1side_7__2side_7__3side_1 = _pyramid_profile(7, 7, 1)
pyramid_1side_7__2side_7__3side_2 = _pyramid_profile(7, 7, 2)
pyramid_1side_7__2side_7__3side_3 = _pyramid_profile(7, 7, 3)
pyramid_1side_7__2side_7__3side_4 = _pyramid_profile(7, 7, 4)
pyramid_1side_7__2side_7__3side_5 = _pyramid_profile(7, 7, 5)
pyramid_1side_7__2side_7__3side_6 = _pyramid_profile(7, 7, 6)
pyramid_1side_7__2side_7__3side_7 = _pyramid_profile(7, 7, 7)
#45
# 1side_8 series: 2side 1..8, 3side 1..2side (36 variants)
pyramid_1side_8__2side_1__3side_1 = _pyramid_profile(8, 1, 1)
pyramid_1side_8__2side_2__3side_1 = _pyramid_profile(8, 2, 1)
pyramid_1side_8__2side_2__3side_2 = _pyramid_profile(8, 2, 2)
pyramid_1side_8__2side_3__3side_1 = _pyramid_profile(8, 3, 1)
pyramid_1side_8__2side_3__3side_2 = _pyramid_profile(8, 3, 2)
pyramid_1side_8__2side_3__3side_3 = _pyramid_profile(8, 3, 3)
pyramid_1side_8__2side_4__3side_1 = _pyramid_profile(8, 4, 1)
pyramid_1side_8__2side_4__3side_2 = _pyramid_profile(8, 4, 2)
pyramid_1side_8__2side_4__3side_3 = _pyramid_profile(8, 4, 3)
pyramid_1side_8__2side_4__3side_4 = _pyramid_profile(8, 4, 4)
pyramid_1side_8__2side_5__3side_1 = _pyramid_profile(8, 5, 1)
pyramid_1side_8__2side_5__3side_2 = _pyramid_profile(8, 5, 2)
pyramid_1side_8__2side_5__3side_3 = _pyramid_profile(8, 5, 3)
pyramid_1side_8__2side_5__3side_4 = _pyramid_profile(8, 5, 4)
pyramid_1side_8__2side_5__3side_5 = _pyramid_profile(8, 5, 5)
pyramid_1side_8__2side_6__3side_1 = _pyramid_profile(8, 6, 1)
pyramid_1side_8__2side_6__3side_2 = _pyramid_profile(8, 6, 2)
pyramid_1side_8__2side_6__3side_3 = _pyramid_profile(8, 6, 3)
pyramid_1side_8__2side_6__3side_4 = _pyramid_profile(8, 6, 4)
pyramid_1side_8__2side_6__3side_5 = _pyramid_profile(8, 6, 5)
pyramid_1side_8__2side_6__3side_6 = _pyramid_profile(8, 6, 6)
pyramid_1side_8__2side_7__3side_1 = _pyramid_profile(8, 7, 1)
pyramid_1side_8__2side_7__3side_2 = _pyramid_profile(8, 7, 2)
pyramid_1side_8__2side_7__3side_3 = _pyramid_profile(8, 7, 3)
pyramid_1side_8__2side_7__3side_4 = _pyramid_profile(8, 7, 4)
pyramid_1side_8__2side_7__3side_5 = _pyramid_profile(8, 7, 5)
pyramid_1side_8__2side_7__3side_6 = _pyramid_profile(8, 7, 6)
pyramid_1side_8__2side_7__3side_7 = _pyramid_profile(8, 7, 7)
pyramid_1side_8__2side_8__3side_1 = _pyramid_profile(8, 8, 1)
pyramid_1side_8__2side_8__3side_2 = _pyramid_profile(8, 8, 2)
pyramid_1side_8__2side_8__3side_3 = _pyramid_profile(8, 8, 3)
pyramid_1side_8__2side_8__3side_4 = _pyramid_profile(8, 8, 4)
pyramid_1side_8__2side_8__3side_5 = _pyramid_profile(8, 8, 5)
pyramid_1side_8__2side_8__3side_6 = _pyramid_profile(8, 8, 6)
pyramid_1side_8__2side_8__3side_7 = _pyramid_profile(8, 8, 7)
pyramid_1side_8__2side_8__3side_8 = _pyramid_profile(8, 8, 8)
#########################################################################################
def _ch032_unet(conv_block_num):
    """Shared ch032 flow_unet2 builder.

    Every variant below uses identical hyper-parameters (hid_ch=32,
    depth_level=7, valid padding, sigmoid output, ch_upper_bound=2**14)
    and differs only in the per-level conv-block-count profile passed as
    ``conv_block_num``.
    """
    return (KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True,
                       kernel_size=3, padding="valid",
                       hid_ch=32, depth_level=7, out_ch=1,
                       unet_acti="sigmoid",
                       conv_block_num=conv_block_num,
                       ch_upper_bound=2 ** 14)
            .set_gen_op(use_what_gen_op)
            .set_train_step(use_what_train_step))

ch032_pyramid_1side_1__2side_1__3side_1 = _ch032_unet(pyramid_1side_1__2side_1__3side_1)
ch032_pyramid_1side_2__2side_1__3side_1 = _ch032_unet(pyramid_1side_2__2side_1__3side_1)
ch032_pyramid_1side_2__2side_2__3side_1 = _ch032_unet(pyramid_1side_2__2side_2__3side_1)
ch032_pyramid_1side_2__2side_2__3side_2 = _ch032_unet(pyramid_1side_2__2side_2__3side_2)
ch032_pyramid_1side_3__2side_1__3side_1 = _ch032_unet(pyramid_1side_3__2side_1__3side_1)
ch032_pyramid_1side_3__2side_2__3side_1 = _ch032_unet(pyramid_1side_3__2side_2__3side_1)
ch032_pyramid_1side_3__2side_2__3side_2 = _ch032_unet(pyramid_1side_3__2side_2__3side_2)
ch032_pyramid_1side_3__2side_3__3side_1 = _ch032_unet(pyramid_1side_3__2side_3__3side_1)
ch032_pyramid_1side_3__2side_3__3side_2 = _ch032_unet(pyramid_1side_3__2side_3__3side_2)
ch032_pyramid_1side_3__2side_3__3side_3 = _ch032_unet(pyramid_1side_3__2side_3__3side_3)
ch032_pyramid_1side_4__2side_1__3side_1 = _ch032_unet(pyramid_1side_4__2side_1__3side_1)
ch032_pyramid_1side_4__2side_2__3side_1 = _ch032_unet(pyramid_1side_4__2side_2__3side_1)
ch032_pyramid_1side_4__2side_2__3side_2 = _ch032_unet(pyramid_1side_4__2side_2__3side_2)
ch032_pyramid_1side_4__2side_3__3side_1 = _ch032_unet(pyramid_1side_4__2side_3__3side_1)
ch032_pyramid_1side_4__2side_3__3side_2 = _ch032_unet(pyramid_1side_4__2side_3__3side_2)
ch032_pyramid_1side_4__2side_3__3side_3 = _ch032_unet(pyramid_1side_4__2side_3__3side_3)
ch032_pyramid_1side_4__2side_4__3side_1 = _ch032_unet(pyramid_1side_4__2side_4__3side_1)
ch032_pyramid_1side_4__2side_4__3side_2 = _ch032_unet(pyramid_1side_4__2side_4__3side_2)
ch032_pyramid_1side_4__2side_4__3side_3 = _ch032_unet(pyramid_1side_4__2side_4__3side_3)
ch032_pyramid_1side_4__2side_4__3side_4 = _ch032_unet(pyramid_1side_4__2side_4__3side_4)
ch032_pyramid_1side_5__2side_1__3side_1 = _ch032_unet(pyramid_1side_5__2side_1__3side_1)
ch032_pyramid_1side_5__2side_2__3side_1 = _ch032_unet(pyramid_1side_5__2side_2__3side_1)
ch032_pyramid_1side_5__2side_2__3side_2 = _ch032_unet(pyramid_1side_5__2side_2__3side_2)
ch032_pyramid_1side_5__2side_3__3side_1 = _ch032_unet(pyramid_1side_5__2side_3__3side_1)
ch032_pyramid_1side_5__2side_3__3side_2 = _ch032_unet(pyramid_1side_5__2side_3__3side_2)
ch032_pyramid_1side_5__2side_3__3side_3 = _ch032_unet(pyramid_1side_5__2side_3__3side_3)
ch032_pyramid_1side_5__2side_4__3side_1 = _ch032_unet(pyramid_1side_5__2side_4__3side_1)
ch032_pyramid_1side_5__2side_4__3side_2 = _ch032_unet(pyramid_1side_5__2side_4__3side_2)
ch032_pyramid_1side_5__2side_4__3side_3 = _ch032_unet(pyramid_1side_5__2side_4__3side_3)
ch032_pyramid_1side_5__2side_4__3side_4 = _ch032_unet(pyramid_1side_5__2side_4__3side_4)
ch032_pyramid_1side_5__2side_5__3side_1 = _ch032_unet(pyramid_1side_5__2side_5__3side_1)
ch032_pyramid_1side_5__2side_5__3side_2 = _ch032_unet(pyramid_1side_5__2side_5__3side_2)
ch032_pyramid_1side_5__2side_5__3side_3 = _ch032_unet(pyramid_1side_5__2side_5__3side_3)
ch032_pyramid_1side_5__2side_5__3side_4 = _ch032_unet(pyramid_1side_5__2side_5__3side_4)
ch032_pyramid_1side_5__2side_5__3side_5 = _ch032_unet(pyramid_1side_5__2side_5__3side_5)
ch032_pyramid_1side_6__2side_1__3side_1 = _ch032_unet(pyramid_1side_6__2side_1__3side_1)
ch032_pyramid_1side_6__2side_2__3side_1 = _ch032_unet(pyramid_1side_6__2side_2__3side_1)
ch032_pyramid_1side_6__2side_2__3side_2 = _ch032_unet(pyramid_1side_6__2side_2__3side_2)
ch032_pyramid_1side_6__2side_3__3side_1 = _ch032_unet(pyramid_1side_6__2side_3__3side_1)
ch032_pyramid_1side_6__2side_3__3side_2 = _ch032_unet(pyramid_1side_6__2side_3__3side_2)
ch032_pyramid_1side_6__2side_3__3side_3 = _ch032_unet(pyramid_1side_6__2side_3__3side_3)
ch032_pyramid_1side_6__2side_4__3side_1 = _ch032_unet(pyramid_1side_6__2side_4__3side_1)
ch032_pyramid_1side_6__2side_4__3side_2 = _ch032_unet(pyramid_1side_6__2side_4__3side_2)
ch032_pyramid_1side_6__2side_4__3side_3 = _ch032_unet(pyramid_1side_6__2side_4__3side_3)
ch032_pyramid_1side_6__2side_4__3side_4 = _ch032_unet(pyramid_1side_6__2side_4__3side_4)
ch032_pyramid_1side_6__2side_5__3side_1 = _ch032_unet(pyramid_1side_6__2side_5__3side_1)
ch032_pyramid_1side_6__2side_5__3side_2 = _ch032_unet(pyramid_1side_6__2side_5__3side_2)
ch032_pyramid_1side_6__2side_5__3side_3 = _ch032_unet(pyramid_1side_6__2side_5__3side_3)
ch032_pyramid_1side_6__2side_5__3side_4 = _ch032_unet(pyramid_1side_6__2side_5__3side_4)
ch032_pyramid_1side_6__2side_5__3side_5 = _ch032_unet(pyramid_1side_6__2side_5__3side_5)
ch032_pyramid_1side_6__2side_6__3side_1 = _ch032_unet(pyramid_1side_6__2side_6__3side_1)
ch032_pyramid_1side_6__2side_6__3side_2 = _ch032_unet(pyramid_1side_6__2side_6__3side_2)
ch032_pyramid_1side_6__2side_6__3side_3 = _ch032_unet(pyramid_1side_6__2side_6__3side_3)
ch032_pyramid_1side_6__2side_6__3side_4 = _ch032_unet(pyramid_1side_6__2side_6__3side_4)
ch032_pyramid_1side_6__2side_6__3side_5 = _ch032_unet(pyramid_1side_6__2side_6__3side_5)
ch032_pyramid_1side_6__2side_6__3side_6 = _ch032_unet(pyramid_1side_6__2side_6__3side_6)
ch032_pyramid_1side_7__2side_1__3side_1 = _ch032_unet(pyramid_1side_7__2side_1__3side_1)
ch032_pyramid_1side_7__2side_2__3side_1 = _ch032_unet(pyramid_1side_7__2side_2__3side_1)
ch032_pyramid_1side_7__2side_2__3side_2 = _ch032_unet(pyramid_1side_7__2side_2__3side_2)
ch032_pyramid_1side_7__2side_3__3side_1 = _ch032_unet(pyramid_1side_7__2side_3__3side_1)
ch032_pyramid_1side_7__2side_3__3side_2 = _ch032_unet(pyramid_1side_7__2side_3__3side_2)
ch032_pyramid_1side_7__2side_3__3side_3 = _ch032_unet(pyramid_1side_7__2side_3__3side_3)
ch032_pyramid_1side_7__2side_4__3side_1 = _ch032_unet(pyramid_1side_7__2side_4__3side_1)
ch032_pyramid_1side_7__2side_4__3side_2 = _ch032_unet(pyramid_1side_7__2side_4__3side_2)
ch032_pyramid_1side_7__2side_4__3side_3 = _ch032_unet(pyramid_1side_7__2side_4__3side_3)
ch032_pyramid_1side_7__2side_4__3side_4 = _ch032_unet(pyramid_1side_7__2side_4__3side_4)
ch032_pyramid_1side_7__2side_5__3side_1 = _ch032_unet(pyramid_1side_7__2side_5__3side_1)
ch032_pyramid_1side_7__2side_5__3side_2 = _ch032_unet(pyramid_1side_7__2side_5__3side_2)
ch032_pyramid_1side_7__2side_5__3side_3 = _ch032_unet(pyramid_1side_7__2side_5__3side_3)
ch032_pyramid_1side_7__2side_5__3side_4 = _ch032_unet(pyramid_1side_7__2side_5__3side_4)
ch032_pyramid_1side_7__2side_5__3side_5 = _ch032_unet(pyramid_1side_7__2side_5__3side_5)
ch032_pyramid_1side_7__2side_6__3side_1 = _ch032_unet(pyramid_1side_7__2side_6__3side_1)
ch032_pyramid_1side_7__2side_6__3side_2 = _ch032_unet(pyramid_1side_7__2side_6__3side_2)
ch032_pyramid_1side_7__2side_6__3side_3 = _ch032_unet(pyramid_1side_7__2side_6__3side_3)
ch032_pyramid_1side_7__2side_6__3side_4 = _ch032_unet(pyramid_1side_7__2side_6__3side_4)
ch032_pyramid_1side_7__2side_6__3side_5 = _ch032_unet(pyramid_1side_7__2side_6__3side_5)
ch032_pyramid_1side_7__2side_6__3side_6 = _ch032_unet(pyramid_1side_7__2side_6__3side_6)
ch032_pyramid_1side_7__2side_7__3side_1 = _ch032_unet(pyramid_1side_7__2side_7__3side_1)
ch032_pyramid_1side_7__2side_7__3side_2 = _ch032_unet(pyramid_1side_7__2side_7__3side_2)
ch032_pyramid_1side_7__2side_7__3side_3 = _ch032_unet(pyramid_1side_7__2side_7__3side_3)
ch032_pyramid_1side_7__2side_7__3side_4 = _ch032_unet(pyramid_1side_7__2side_7__3side_4)
ch032_pyramid_1side_7__2side_7__3side_5 = _ch032_unet(pyramid_1side_7__2side_7__3side_5)
ch032_pyramid_1side_7__2side_7__3side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_7__2side_7__3side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_7, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_1__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_1__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_2__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_2__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_3__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_3__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_3__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_4__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_4__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_4__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_4__3side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_5__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_5__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_5__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_5__3side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_5__3side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_6__3side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_7__3side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_1, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
ch032_pyramid_1side_8__2side_8__3side_8 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8, ch_upper_bound= 2 ** 14).set_gen_op( use_what_gen_op ).set_train_step( use_what_train_step )
#########################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_1side_4__2side_3__3side_2
use_model = use_model.build()
result = use_model.generator(data)
print(result.shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
print(use_model.model_describe)
| [
"[email protected]"
] | |
aba0d1be5693801aa0ce6f9cf16d37d6dbefeb14 | fe5fe77738bb861bb5572982c10febb1c0c63d2d | /flask_fake_server.py | c86247837ccb0c7b81b2d52c51b9b8ec089dbdba | [] | no_license | saraakuroda/web-workshop | e1fb5040b85953a3d11e9bfd1dafc979b81c4a9f | 8f9cc75a65173889a2b3f1f5cbbe8d64556c36ec | refs/heads/master | 2022-11-19T19:46:12.550356 | 2020-07-01T03:46:34 | 2020-07-01T03:46:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,443 | py | from flask import *
app = Flask(__name__)
@app.route("/list") # ,methods=["GET", "POST"])
def new():
return str({'data': [{'id': 15, 'user': 'n', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}, {'id': 14, 'user': 'g', 'kami': 'aa',
'naka': 'bb', 'shimo': 'cc'}, {'id': 13, 'user': 'g', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}, {'id': 12, 'user':
'e', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}, {'id': 11, 'user': 'r', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'},
{'id': 10, 'user': 'D', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}, {'id': 9, 'user': 'J', 'kami': 'aa', 'naka': 'bb',
'shimo': 'cc'}, {'id': 8, 'user': 'B', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}, {'id': 7, 'user': 'G', 'kami': 'aa',
'naka': 'bb', 'shimo': 'cc'}, {'id': 6, 'user': 'X', 'kami': 'aa', 'naka': 'bb', 'shimo': 'cc'}]})
if __name__ == "__main__":
app.run(port=8888)
| [
"[email protected]"
] | |
6825210ae768c3ece86e560e983d86f51e1ada00 | a5e5c215daff8c21698c6ec0abc5589804812c2d | /cm_plugin_cart_api_exchange/cart_api_operations.py | b1d6bb3dea49898cae00003ba0d5e5e9afb0d7ef | [] | no_license | arielmorelli/cm_plugin_cart_api_exchange | e019d62b7e1946b40847873911040cd6fcf682ef | 75906704786c057a253ed59db403c2a5db2238e3 | refs/heads/main | 2023-02-23T01:35:58.631681 | 2021-01-29T19:26:42 | 2021-01-29T19:26:42 | 333,059,630 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,108 | py | import requests
import logging
from requests.auth import HTTPBasicAuth
def chunk_dict(target, size):
values = list(target.values())
for n in range(0, len(values), size):
yield values[n:size + n]
class ExchangeApi(object):
CREATE_CART_URI = "https://market.feedbooks.com/carts"
def __init__(self, user, password):
self.user = user
self.password = password
def create_cart(self, cart_name):
request_body = {
"name": cart_name,
}
try:
response = self._make_get_request(self.CREATE_CART_URI, request_body)
except:
raise
try:
return response.headers["Location"]
except:
raise Exception("Cannot create cart")
@classmethod
def _items_to_api_request_entry(cls, work):
return {
"id": work.get("identifier", ""),
"quantity": work.get("copies", 0),
}
def send_items(self, url, items, cart_name, chunk_size=1000):
total = 0
total_with_error = 0
current_chunk = 0
for chunk in chunk_dict(items, chunk_size):
current_chunk += 1
total += len(chunk)
request_body_as_dict = {
"name": cart_name,
"total": {
"items": len(chunk),
"copies": sum([item.get("copies", 0) for item in chunk]),
},
# "values": {
# "USD": sum([w.get("price", 0) for w in chunk]),
# },
"items": [self._items_to_api_request_entry(item)
for item in chunk],
}
try:
response = self._make_patch_request(url, request_body_as_dict)
except Exception as err:
total_with_error += len(chunk)
logging.warning("Error sending items. Chunk %d. %s", current_chunk, err)
continue
if response.status_code < 300:
if "items" in response.json():
entries = response.json()
for entry in entries["items"]:
if "error" in entries["items"] and entries["items"]["error"]:
total_with_error += 1
else:
total_with_error += len(chunk)
logging.error("Cannot send values. %d. %s", response.status_code, response.content)
logging.info("Sent %d. %d with error.", total, total_with_error)
def _make_get_request(self, url, data):
headers = {
"Content-Type": "application/json",
}
return requests.post(
url, json=data, headers=headers,
auth=HTTPBasicAuth(self.user, self.password)
)
def _make_patch_request(self, url, data):
headers = {
"Content-Type": "application/vnd.demarque.market.cart+json",
}
return requests.patch(
url, json=data, headers=headers,
auth=HTTPBasicAuth(self.user, self.password)
)
| [
"[email protected]"
] | |
088b98b7b149fa57eb4da266f239468dfffc5dcf | 42ad8af851f911a12ea831d752897ecba597abcc | /wargame/wsgi.py | 7b2c2991bee61567f727276c9968c9a70b91e229 | [] | no_license | wojtekzozlak/wargame | 75d2afebbaaa19a8d57f24bfd778f4f1cb39de76 | d61b8c60be70f5fc1c514e8b08e61a2cd5691061 | refs/heads/master | 2020-12-20T12:42:00.472359 | 2016-08-04T20:43:58 | 2016-08-04T20:43:58 | 53,892,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
WSGI config for wargame project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wargame.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
f115cc4f7a505488250757f330d9420cbdc28a8c | abf44d209786884a7e51a5c8fce090f30430c193 | /判断一个字符串是否为数字.py | bd49d600137ec6e6f8993d9c05d31f8ec40e427d | [] | no_license | Bonnie-Bonnie-li/python | 27714d9bb5a4a241cefaa66225d08f2f14761360 | e6162432a849137e23bac0b78c44137ac4a8baa9 | refs/heads/master | 2022-03-14T00:14:39.269640 | 2019-10-31T07:48:51 | 2019-10-31T07:48:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | def is_number():
a=input()
try:
float(a)
print(True)
except ValueError:
print(False)
if __name__ == '__main__':
is_number() | [
"[email protected]"
] | |
22d5fc4243eec9d12069733703960af6a6d1fc2b | 8defde7b3ee16a7ef8d24d5ed9cb33803137776d | /blockparser/base58.py | 9ce735ec4bba8da67024d4a87e7ee0e7ec05c59d | [
"MIT"
] | permissive | baobunuo/crypto-deanonymization | afddc71230eaf097c0d4e3bea4aeee02c3134073 | 54e7b0e2d8d5fb290fc829583ed60f6036ae428a | refs/heads/master | 2021-12-23T05:04:06.470460 | 2017-10-23T20:21:18 | 2017-10-23T20:21:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | """
Filename: base58.py
Purpose: encode/decode base58 in the same way that Bitcoin does
Author: Gavin Andresen (github.com/gavinandresen/bitcointools)
License: MIT
"""
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58."""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += ord(c) << (8 * i) # 2x speedup vs. exponentiation
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0':
nPad += 1
else:
break
return (__b58chars[0] * nPad) + result
def b58decode(v, length):
""" decode v into a string of len bytes
"""
long_value = 0L
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base ** i)
result = ''
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]:
nPad += 1
else:
break
result = chr(0) * nPad + result
if length is not None and len(result) != length:
return None
return result
try:
import hashlib
hashlib.new('ripemd160')
have_crypto = True
except ImportError:
have_crypto = False
def hash_160(public_key):
if not have_crypto:
return ''
h1 = hashlib.sha256(public_key).digest()
r160 = hashlib.new('ripemd160')
r160.update(h1)
h2 = r160.digest()
return h2
def public_key_to_bc_address(public_key, version="\x00"):
if not have_crypto or public_key is None:
return ''
h160 = hash_160(public_key)
return hash_160_to_bc_address(h160, version=version)
def hash_160_to_bc_address(h160, version="\x00"):
if not have_crypto:
return ''
vh160 = version + h160
h3 = hashlib.sha256(hashlib.sha256(vh160).digest()).digest()
addr = vh160 + h3[0:4]
return b58encode(addr)
def bc_address_to_hash_160(addr):
bytes = b58decode(addr, 25)
return bytes[1:21]
if __name__ == '__main__':
x = '005cc87f4a3fdfe3a2346b6953267ca867282630d3f9b78e64'.decode('hex_codec')
encoded = b58encode(x)
print encoded, '19TbMSWwHvnxAKy12iNm3KdbGfzfaMFViT'
print b58decode(encoded, len(x)).encode('hex_codec'), x.encode('hex_codec')
print hash_160_to_bc_address('991fd9bf50ad8d6a551efceb5c228d21bcc0fe92'.decode('hex_codec'))
| [
"[email protected]"
] | |
d32d0455d55966975e881038ac6adb3879e7e60e | 466ed5560fc7ecc07b427f9d5cbdcc04b5b57eab | /json_module/confirmation.py | f2df2f7f071c14d1e6941e5b837224abac366e6c | [] | no_license | IcySakura/hadoop_food_ordering_system | e244e85e0f0e80c216e940dca321891257d0cf47 | 5c17ef5935038a9fcc4abb717119c077abb09fb9 | refs/heads/main | 2023-03-22T17:51:48.736227 | 2021-03-17T21:44:12 | 2021-03-17T21:44:12 | 335,792,467 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,408 | py | from constants import ORDER_ID_KEY, CUSTOMER_ID_KEY, RESTAUARNT_ID_KEY, ESTIMATED_FINISH_KEY
from info import info
from menu_item import menu_item
#TODO: add type checking, conversion to correct type
class confirmation:
def __init__(self,confirmation_dict=dict()):
self.order_id = ""
self.customer_id = ""
self.restaurant_id = ""
self.estimated_time = ""
if len(confirmation_dict) != 0:
self.load_confirmation_from_dict(confirmation_dict)
def load_confirmation_from_dict(self, confirmation_dict):
if ORDER_ID_KEY in confirmation_dict:
self.order_id = confirmation_dict[ORDER_ID_KEY]
if CUSTOMER_ID_KEY in confirmation_dict:
self.customer_id = confirmation_dict[CUSTOMER_ID_KEY]
if RESTAUARNT_ID_KEY in confirmation_dict:
self.restaurant_id = confirmation_dict[RESTAUARNT_ID_KEY]
if ESTIMATED_FINISH_KEY in confirmation_dict:
self.estimated_time = confirmation_dict[ESTIMATED_FINISH_KEY]
def convert_to_dict(self):
to_return = dict()
to_return[ORDER_ID_KEY] = self.order_id
to_return[CUSTOMER_ID_KEY] = self.customer_id
to_return[RESTAUARNT_ID_KEY] = self.restaurant_id
to_return[ESTIMATED_FINISH_KEY] = self.estimated_time
return to_return
| [
"[email protected]"
] | |
f7899f842aa5257da695fbd13711e0dd38238e10 | 79094076e9e489abbd30b99a75d16f42f32a1a5c | /lesson9/lr.py | 6d5dde297e6028e90f1413f61506d6a38bbb972e | [] | no_license | greenmapc/machine_learning_course | 220d75cdcc93dd1942fb3923e43a6a472cf98612 | 9d4d7d77401b936911b117d08756b8daab17eeef | refs/heads/master | 2023-02-01T16:58:39.264797 | 2020-12-16T22:23:03 | 2020-12-16T22:23:03 | 295,010,867 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,839 | py | import numpy as np
import plotly.graph_objects as go
from sklearn.linear_model import LogisticRegression
n = 100
cluster_1_x = []
cluster_1_y = []
cluster_1_z = []
cluster_2_x = []
cluster_2_y = []
cluster_2_z = []
# генерируем точки случайно
def generate_points():
x = np.random.randint(0, 100, n)
y = np.random.randint(0, 100, n)
z = np.random.randint(0, 100, n)
points = []
for i in range(100):
points.append((x[i], y[i], z[i]))
return points
# генерируем тестовую плоскость, которая разобьет точки по кластерам (фиктивная кластеризация)
# Ax + By + Cz + d = 0
# d = 0
def generate_test_plane():
a = np.random.random() * 0.5
b = np.random.random() * 0.5
c = - np.random.random() * 0.9
return a, b, c
# на основе случайной плоскости определяем точки в кластеры
def assign_points_to_cluster(points, a, b, c):
clusters = []
for i in range(100):
x_i = points[i][0]
y_i = points[i][1]
z_i = points[i][2]
if a * x_i + b * y_i + c * z_i < 0:
clusters.append(0)
cluster_1_x.append(x_i)
cluster_1_y.append(y_i)
cluster_1_z.append(z_i)
else:
cluster_2_x.append(x_i)
cluster_2_y.append(y_i)
cluster_2_z.append(z_i)
clusters.append(1)
return clusters
# поиск весов с помощью логистической регрессии
def find_weights_for_dividing_plane(points, clusters):
lr = LogisticRegression()
model = lr.fit(points, clusters)
return model
# поиск точек разделяющей плоскости с помощью найденных весов для построения
def find_points_for_dividing_plane(model):
plane_z = np.ones((100, 100))
for i in range(0, 100):
for j in range(0, 100):
plane_z[i][j] = (-model.coef_[0][0] * i - model.coef_[0][1] * j - model.intercept_) / model.coef_[0][2]
return plane_z
# визуализация плоскости и точек
def draw(plane_z):
fig = go.Figure(data=[
go.Scatter3d(x=cluster_1_x, y=cluster_1_y, z=cluster_1_z, mode="markers", name="-1"),
go.Scatter3d(x=cluster_2_x, y=cluster_2_y, z=cluster_2_z, mode="markers", name="1"),
go.Surface(z=plane_z)
])
fig.show()
def logistic_regression_with_dividing_plane():
points = generate_points()
a, b, c = generate_test_plane()
clusters = assign_points_to_cluster(points, a, b, c)
model = find_weights_for_dividing_plane(points, clusters)
plane_z = find_points_for_dividing_plane(model)
draw(plane_z)
logistic_regression_with_dividing_plane()
| [
"[email protected]"
] | |
fdab3b2f1ec2ce6d9e4cecce45be8359f9a73ce4 | bf2eab60a0e6e905c63766620731e8ac68f834a7 | /opencv/blur_demo.py | 2ff13c2eb9730ad7a90c49c50238c04ababda0ab | [] | no_license | Xenos24R/vscode | cd95b85b589af83e6cd41207438e4dc17ecfb7a0 | dc6aca51a6d8c332fba2acd8b7eb5f09b06d9a54 | refs/heads/master | 2021-05-21T08:05:06.472335 | 2020-06-20T05:06:33 | 2020-06-20T05:06:33 | 252,612,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,741 | py | import cv2.cv2 as cv
import numpy as np
def blur_demo(image):#均值模糊,去噪
dst = cv.blur(image,(1,20))#第二个参数是卷积范围
cv.imshow("image",dst)
def middle_blue_demo(image):#中值模糊,去除椒盐噪声
dst = cv.medianBlur(image,5)
cv.imshow("image",dst)
def custom_blur_demo(image):#自定义滤波,模板总和为1做增强,模板总和为0做边缘梯度
#kernel = np.ones([5,5],np.float32)/25#均值滤波
kernel = np.array([[0,-1,0],[-1,5,-1],[0,-1,0]],np.float32)/9#锐化算子
dst = cv.filter2D(image,-1,kernel)
cv.imshow("image",dst)
src = cv.imread("C:/Users/32936/Desktop/2/lena.png")
custom_blur_demo(src)
cv.waitKey(0)
"""
均值滤波
cv2.blur(src, ksize[], dst[], anchor[], borderType)
src:输入的图像
ksize[]:模板大小
anchor[]:锚点,处理的像素位于模板的什么位置
borderType:用于推断图像外部像素的某种边界模式
->dst[]:与src大小相同的输出结果
中值滤波
cv2.medianBlur(src, ksize[], dst)
src:输入的图像
ksize[]:模板大小
->dst[]:与src大小相同的输出结果
卷积运算函数
cv2.filter2D(src, ddepth, kernel[], dst[], anchor[], delta[], borderType)
src:输入的图像
ddepth:目标图像深度,当ddepth输入值为-1时,目标图像和原图像深度保持一致。图像深度是指存储每个像素所用的位数,也用于量度图像的色彩分辨率
kernel:卷积核
anchor[]:锚点,处理的像素位于模板的什么位置
delta[]:在储存目标图像前可选的添加到像素的值,默认值为0
borderType:用于推断图像外部像素的某种边界模式
->dst[]:与src大小相同的输出结果
""" | [
"[email protected]"
] | |
37fdd0d6b3eafd9a93257c5ec2279c25b4408568 | c974438dc90164c27669ced877ccd2ddf64e6e82 | /plugins/base.py | 1b6b0d3fb9aadc3a11fa4327098c99e781b65826 | [] | no_license | numkem/pycollect | bf07eef7a4ff8bb7a7ee33cdba40b7f693f38106 | 9fbcba80afbc4058a174deb9035a23601268c4b1 | refs/heads/master | 2021-01-10T20:19:05.324641 | 2015-12-28T02:50:57 | 2015-12-28T02:50:57 | 38,643,246 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 221 | py | from lib.command import Command
import sys
class BaseCommand(Command):
    """Built-in command set: maps both 'exit' and 'quit' to sys.exit."""

    def __init__(self, *args, **kwargs):
        # NOTE: mirrors the original — Command.__init__ is deliberately not invoked.
        for alias in ('exit', 'quit'):
            self.register_command(alias, sys.exit)
main_class = BaseCommand | [
"[email protected]"
] | |
a2bd706091303ba3fede7a4be4de5334638fca56 | 82d6803a32622ec56b27f246c7a7d53cc25b74d6 | /regex2.py | 24d73a3696599952862e48d3b8659f6ebffa0a14 | [] | no_license | andrzejStempien/CompletedScriptsFromAutomateBoringStuffWithPython | c635296759442ee68e17aac2e560a7ede5b36571 | 38cb91639e9d30de4f4b18a46922e50bf59f7bc9 | refs/heads/master | 2020-04-05T08:38:45.037127 | 2020-03-04T09:09:19 | 2020-03-04T09:09:19 | 156,722,850 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | batRegex = re.compile(r'Bat(wo)?man')
mo1 = batRegex.search('The Adventures of Batman')
mo1.group()
// 'Batman'
mo2 = batRegex.search('The Adventures of Batwoman')
mo2.group()
// 'Batwoman'
// optional matching with question mark
| [
"[email protected]"
] | |
5478cb2e613d6a8e1ebd600288759cef5bb1afb2 | b70f7c563109580201f77b939483f7b8c59fc9f4 | /Intervals/SIVIA.py | a72307e2610f8925d85b23fa0b8ad673aef223be | [] | no_license | thiagoliveira/Gascogne | 68fb0e0c65bcaca07a2eda2474bdc86e529df8cb | e43cd1374b6ed44167a642bbfd214b38892c28db | refs/heads/master | 2021-01-24T04:19:07.733807 | 2016-02-19T07:24:11 | 2016-02-19T07:24:11 | 52,081,797 | 0 | 0 | null | 2016-02-19T10:53:40 | 2016-02-19T10:53:39 | null | UTF-8 | Python | false | false | 1,154 | py | """ Simple SIVIA
"""
from enum import Enum
class IBOOL(Enum):
    """Integer result codes for interval set-membership tests.

    SIVIA() below constructs these via ``IBOOL(testR(...))``, so the numeric
    values must stay in sync with what pyIbex's testR returns.
    """
    IN = 0      # box entirely inside the solution set
    OUT = 1     # box entirely outside
    MAYBE = 2   # box possibly intersects the set
    UNK = 3     # undetermined
    EMPTY = 4   # empty box
    UNK2 = 5    # undetermined (second variant)
import pyIbex
from pyIbex import IntervalVector, LargestFirst, testR
from collections import deque
from vibes import vibes
def SIVIA(X0, data, eps):
    """Set Inversion Via Interval Analysis over the initial box X0.

    Breadth-first bisection: each box is classified with testR(); classified
    boxes are drawn via vibes (red = IN, blue = OUT, orange = MAYBE, yellow =
    too small to decide), undetermined boxes larger than eps are bisected.
    ``data`` is unpacked as (data[0], float(data[1]), data[2]) for testR —
    exact semantics depend on the caller; TODO confirm.
    """
    stack = deque([IntervalVector(X0)])
    lf = LargestFirst(eps/2.0)
    k = 0          # number of boxes processed (diagnostic only; never reported)
    rbox = []      # unused — kept for the commented-out drawBoxesUnion below
    while len(stack) > 0:
        X = stack.popleft()
        k = k+1
        t = IBOOL(testR(X,data[0],float(data[1]),data[2]))
        if (t == IBOOL.IN):
            vibes.drawBox(X[0][0],X[0][1], X[1][0], X[1][1], '[r]' )
            #rbox.append((X[0][0],X[0][1], X[1][0], X[1][1]))
        elif (t == IBOOL.OUT):
            # r = 1;
            vibes.drawBox(X[0][0],X[0][1], X[1][0], X[1][1], '[b]' )
        elif (t == IBOOL.MAYBE):
            # r = 1;
            vibes.drawBox(X[0][0],X[0][1], X[1][0], X[1][1], '[orange]' )
        # NOTE(review): this condition is always true when reached (IN/OUT/MAYBE
        # are handled above), so it behaves as a plain `else`.
        elif (t != IBOOL.OUT and t != IBOOL.MAYBE):
            if (X.max_diam() > eps):
                (X1, X2) = lf.bisect(X)
                stack.append(X1)
                stack.append(X2)
            else :
                vibes.drawBox(X[0][0],X[0][1], X[1][0], X[1][1], '[y]' )
    #vibes.drawBoxesUnion(rbox,'[r]')
    vibes.axisEqual()
| [
"[email protected]"
] | |
a76fbeb11b842fae93134861888f350e5b1bedd6 | a1a4a31934ff0e56380535783cd4a5f42783e62e | /geodata/tests/__init__.py | bccab5f24a225381f36589e4140ff56fcf2163da | [] | no_license | pablotcarreira/GeoData | 830a9d359e6ecaf24c3d906f858403ee1417f41a | 58fa657fc9bc5b568b912219ca4970048397f21d | refs/heads/master | 2021-08-29T13:54:43.334353 | 2021-08-10T19:27:50 | 2021-08-10T19:27:50 | 84,600,317 | 2 | 0 | null | 2021-04-20T17:26:53 | 2017-03-10T21:00:00 | Python | UTF-8 | Python | false | false | 27 | py | # Pablo Carreira - 22/06/18 | [
"[email protected]"
] | |
c67ed12480fa56e0b55a51a7c1650c332d839165 | 040123f6873fe3e69a6e630a1a6c42cab6cb1f5b | /rest_api.py | c0fc0de7db22eff36c6b5b7d81baeeecb531be5c | [] | no_license | npalermo10/experiments_rest_api | 6e975800d3010f4b0e9b63761b1eef4ab33893b3 | 82df3c457b8c6a1a4b754b329687e09f3b019660 | refs/heads/master | 2021-01-16T21:33:54.552000 | 2016-08-27T22:38:23 | 2016-08-27T22:38:23 | 44,119,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,020 | py | from flask import Flask, request
import flask_sqlalchemy
import flask_restless
def run_api(database_name):
    """Create a Flask app backed by sqlite ``<database_name>.db`` and expose a
    REST API for the Experiments table, then serve it on 0.0.0.0:5000.

    Blocks forever in app.run(); does not return.
    """
    app = Flask(__name__)
    app.config['DEBUG'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{}.db'.format(database_name)
    db = flask_sqlalchemy.SQLAlchemy(app)
    # NOTE(review): drop_all() runs before the Experiments model is declared,
    # so its table is not yet in the metadata and is likely NOT dropped —
    # confirm whether a wipe-on-start was intended.
    db.drop_all()
    # the rest api will show up under localhost:5000/api
    # the class names below are used as the request api directory. For example: if the class is Circ(db.Model) you can request the database at localhost:5000/api/circ
    class Experiments(db.Model):
        id = db.Column(db.Integer, primary_key=True)
        exp_name = db.Column(db.Unicode)
        box = db.Column(db.Unicode)
        datetime = db.Column(db.Unicode)
        correct = db.Column(db.Boolean)
        testing = db.Column(db.Boolean)
    db.create_all()
    # Expose GET/POST at /api/experiments with pagination disabled.
    manager = flask_restless.APIManager(app, flask_sqlalchemy_db=db)
    manager.create_api(Experiments, methods= ['GET', 'POST'], results_per_page=None)
    app.run(host = '0.0.0.0')
| [
"[email protected]"
] | |
30aad7d8cf00122c84821f2562d2c9f239523d57 | 1bdb8ed9cb4522c2621d5b1f9a6c80ebac631c2a | /movies/urls.py | 833d8d6227fb791749a4152d197b0a7dd71e7114 | [
"MIT"
] | permissive | kumaranubhav1654/Django-MOVIE-Ticket-booking | 5f9b740c592f868015f8ea9a762d6c60e455402f | 218411f057cb46c1687e1fbd9c9d0a12c3897dd3 | refs/heads/master | 2022-12-11T12:16:33.243737 | 2020-08-30T18:45:35 | 2020-08-30T18:45:35 | 291,526,588 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | """movies URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# Route table: the site root is delegated to the tickets app's URLconf;
# /admin/ serves the Django admin site.
urlpatterns = [
    path('',include('tickets.urls')),
    path('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
1d9f383f868286802d1e94ff5a19b2fc82bf0a2e | 3540b090312fdec1233605457924f9c174aa30cd | /chatbot_website/chatbot_interface/apps.py | 31985bce98d8a798ca46d25bc36c4fabe9e07137 | [
"Apache-2.0"
] | permissive | Conchylicultor/DeepQA | f74afbfbbe1a91a8df88a3317070ddea4f6ec413 | 886ec77ac49ff70be1b6d4de30fffe6a96480a4f | refs/heads/master | 2023-09-02T00:15:32.483337 | 2022-12-30T14:42:36 | 2022-12-30T14:42:36 | 62,824,998 | 3,186 | 1,315 | Apache-2.0 | 2019-07-21T15:57:45 | 2016-07-07T17:17:20 | Python | UTF-8 | Python | false | false | 108 | py | from django.apps import AppConfig
class ChatbotInterfaceConfig(AppConfig):
    """Django application configuration for the 'chatbot_interface' app."""
    name = 'chatbot_interface'
| [
"[email protected]"
] | |
1bef64bd8a37bd50cecf76e9e0840390f06f0cc4 | 4a0de55146538657e49e7889265d32a2f7600a04 | /ali/2019_templete_query.py | 6d08ad673bd68f5e6f846c45146e836ffd92369e | [] | no_license | listenviolet/NLP | 39491d8e97525b6ca57592a3165d6d6e92eeb269 | d36d139e5915f6b8aac9611e1e7357830f7fb5a5 | refs/heads/master | 2020-04-23T10:13:23.542104 | 2019-04-12T07:13:08 | 2019-04-12T07:13:08 | 159,591,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,760 | py | 1. 问答题
在基于自然语言的人机交互系统中,通常会定义一些语义模板来训练NLU(自然语言理解)模型,比如下面的模板可以支持用户通过语音控制机器播放音乐:
1.放几首@{singer}的歌
2.播放一首@{singer}的歌
3.来一曲@{singer}的歌曲
4.来首@{singer}的音乐
5.来个@{singer}的流行音乐
其中"@{singer}"是一个参数,代表歌手,比如第一个模板可以匹配这样的用户query:“放几首刘德华的歌“。可以看到,同样是放歌,有很多种不同但相似的说法,但把他们一条一条单独列出来,编辑的成本会比较高,而且会漏掉一些说法,不严谨。实际上,上面的5个模板,可以用下面的语义模板表达式来表示:
<[播]放|来>[一|几]<首|曲|个>@{singer}的<歌[曲]|[流行]音乐>
其中包含中括号(“[]”)、尖括号(“<>”)和竖线(“|”)三种元素:
1.中括号代表其中的内容是可选的,比如“歌[曲]”,能匹配“歌”和“歌曲”;
2.尖括号代表其中的内容是必选的,比如“<播>放”,能匹配“播放”;
3.括号是可以嵌套的;
4.竖线代表“或”的关系,即竖线分隔的内容是可替换的,比如“<播放|来首>歌曲”,能匹配“播放歌曲”和“来首歌曲”,再如“[播放|来首]歌曲”,能匹配”播放歌曲“,”来首歌曲“和”歌曲“(因为中括号里面是可选的,所以可以匹配”歌曲“);
5.竖线在其所属的括号内,优先级大于括号中的其他括号,比如” <[播]放|来>首歌曲”,能匹配“播放首歌曲“,“放首歌曲“和”来首歌曲“;
6.竖线可以脱离括号独立存在,比如”在哪里|哪里有”,可以匹配“在哪里”和“哪里有”;
那么,给一个上述的语义模板表达式和用户的query,你能判断用户的query是否能匹配这个表达式吗?
编译器版本: gcc 4.8.4
请使用标准输入输出(stdin,stdout) ;请把所有程序写在一个文件里,勿使用已禁用图形、文件、网络、系统相关的头文件和操作,如sys/stat.h , unistd.h , curl/curl.h , process.h
时间限制: 3S (C/C++以外的语言为: 5 S) 内存限制: 128M (C/C++以外的语言为: 640 M)
输入:
输入数据包含两行,
第一行,上述格式的语义模板表达式
第二行,用户的自然语言指令(即:用户query)
输出:
当前query是否匹配当前语义模板表达式。匹配,则输出1,否则输出0.
输入范例:
<[播]放|来>[一|几]<首|曲|个>@{singer}的<歌[曲]|[流行]音乐>
来几首@{singer}的流行歌曲
输出范例:
0 | [
"[email protected]"
] | |
3b925241ca7074ca16dc93aaa04a946f3ca24f6b | eadc41731120d5d6044cd120644bd6dda1387792 | /service.py | 7cec07443b34210f51da51c84629c553ab21a244 | [] | no_license | chetan30597/cloud-project | d977fb7b215cea985daa60b6e5ad3053bc0e6169 | c0c1ad23fcf330d28d9236f96dcb5c53c0c26c72 | refs/heads/master | 2020-07-03T12:44:51.358902 | 2019-08-12T10:30:08 | 2019-08-12T10:30:08 | 201,908,377 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 882 | py | #!/usr/bin/python2
# Python 2 CGI script: reads the 'dn' form field and emits an HTML link to the
# matching cloud-service page, using this host's first IP address in the URL.
import cgi,commands
print "content-type:text/html"
print ""
# First IP reported by `hostname -I`, used to build absolute links below.
b=(commands.getoutput('hostname -I')).split(' ')
x=cgi.FieldStorage()
c_service=x.getvalue('dn')
if c_service == 'saas' :
    print "<a href='http://{}/saas.html'>".format(b[0])
    print "<center>"
    print "<b><i>ACCESS SAAS CLOUD</b></i>"
    print "</center>"
    print "</a>"
elif c_service == 'staas' :
    print "<a href='http://{}/staas.html'>".format(b[0])
    print "<center>"
    print "<b><i>ACCESS StAAS CLOUD</b></i>"
    print "</center>"
    print "</a>"
# NOTE(review): this branch tests 'pass' but links to paas.html — possibly a
# typo for 'paas'; depends on the HTML form's option value, confirm there.
elif c_service == 'pass' :
    print "<a href='http://{}/paas.html'>".format(b[0])
    print "<center>"
    print "<b><i>ACCESS PAAS CLOUD</b></i>"
    print "</center>"
    print "</a>"
elif c_service == 'iaas' :
    print "<a href='http://{}/iaas.html'>".format(b[0])
    print "<center>"
    print "<b><i>ACCESS IAAS CLOUD</b></i>"
    print "</center>"
    print "</a>"
| [
"[email protected]"
] | |
601220c66edc366564c071b4cf9be99e4f2021ef | fb2ba196c33fa647909914533559485b716f24b6 | /py/PartitionArrayIntoThreePartsWithEqualSum.py | 28bd6333ae38559bcb0afe5ec54354c8db2cb23f | [] | no_license | ZihengZZH/LeetCode | 7d62ab7211004c0ee5fe49ef3adeb72c698da44f | 96e2faaa8c18636c173883cca55b2c228c81477a | refs/heads/master | 2021-06-04T05:31:03.306503 | 2020-03-07T03:07:28 | 2020-03-07T03:07:28 | 108,906,661 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py | '''
Given an array A of integers, return true if and only if
we can partition the array into three non-empty parts with equal sums.
Formally, we can partition the array if we can find indexes i+1 < j
with (A[0] + A[1] + ... + A[i] == A[i+1] + A[i+2] + ... + A[j-1] == A[j] + A[j-1] + ... + A[A.length - 1])
Example 1:
Input: [0,2,1,-6,6,-7,9,1,2,0,1]
Output: true
Explanation: 0 + 2 + 1 = -6 + 6 - 7 + 9 + 1 = 2 + 0 + 1
Example 2:
Input: [0,2,1,-6,6,7,9,-1,2,0,1]
Output: false
Example 3:
Input: [3,3,6,5,-2,2,5,1,-9,4]
Output: true
Explanation: 3 + 3 = 6 = 5 - 2 + 2 + 5 + 1 - 9 + 4
Note:
3 <= A.length <= 50000
-10000 <= A[i] <= 10000
'''
class Solution:
def canThreePartsEqualSum(self, A):
total = sum(A)
if total % 3 != 0:
return False
part = total // 3
l, r = 0, len(A) - 1
lsum = rsum = 0
# fix the left pivot
while lsum != part and l < len(A):
lsum, l = lsum + A[l], l + 1
# to find right pivot
while rsum != part and r > l:
rsum, r = rsum + A[r], r - 1
# if while ends idealy, l & r are one step ahead respectively
return rsum == part and l <= r
if __name__ == "__main__":
solu = Solution()
input_1 = [0,2,1,-6,6,-7,9,1,2,0,1]
input_2 = [0,2,1,-6,6,7,9,-1,2,0,1]
input_3 = [3,3,6,5,-2,2,5,1,-9,4]
print(input_1, "%d" % solu.canThreePartsEqualSum(input_1))
print(input_2, "%d" % solu.canThreePartsEqualSum(input_2))
print(input_3, "%d" % solu.canThreePartsEqualSum(input_3)) | [
"[email protected]"
] | |
78de47db0f28969bb2785394b90a579faf6c8da7 | 66333a96a8806181228a860085ee5d01888c7b2f | /text_recognition/easyocr/config/model.py | f8241aa784211bef68ec824aa83bcfebc7f97e2a | [
"Apache-2.0"
] | permissive | axinc-ai/ailia-models | 7c5c15d813a58fcae678a8107ebc9c39ee9912f2 | da1c277b602606586cd83943ef6b23eb705ec604 | refs/heads/master | 2023-08-31T07:43:39.848448 | 2023-08-29T05:06:27 | 2023-08-29T05:06:27 | 206,917,330 | 1,554 | 286 | null | 2023-09-14T19:56:14 | 2019-09-07T04:50:59 | Python | UTF-8 | Python | false | false | 31,420 | py |
separator_list = {
'th': ['\xa2', '\xa3'],
'en': ['\xa4', '\xa5']
}
separator_char = []
for lang, sep in separator_list.items():
separator_char += sep
recognition_models = {
'zh_sim_g2': {
'characters': " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~。〈〉《》「」『』一丁七万丈三上下不与丐丑专且丕世丘丙业丛东丝丞丢两严丧个丫中丰串临丸丹为主丽举乃久么义之乌乍乎乏乐乒乓乔乖乘乙乜九乞也习乡书乩买乱乳乾了予争事二亍于亏云互亓五井亘亚些亟亡亢交亥亦产亨亩享京亭亮亲亳亵人亿什仁仂仃仄仅仆仇仉今介仍从仑仓仔仕他仗付仙仞仟仡代令以仨仪仫们仰仲仳仵件价任份仿企伉伊伍伎伏伐休众优伙会伛伞伟传伢伤伦伧伪伫伯估伴伶伸伺似伽佃但位低住佐佑体何佗佘余佚佛作佝佞佟你佣佤佥佩佬佯佰佳佴佶佻佼佾使侃侄侈侉例侍侏侑侔侗供依侠侣侥侦侧侨侩侪侬侮侯侵便促俄俅俊俎俏俐俑俗俘俚俜保俞俟信俣俦俨俩俪俭修俯俱俳俸俺俾倌倍倏倒倔倘候倚倜借倡倥倦倨倩倪倬倭债值倾偃假偈偌偎偏偕做停健偬偶偷偻偾偿傀傅傈傍傣傥傧储傩催傲傻像僖僚僦僧僬僭僮僳僵僻儆儇儋儒儡儿兀允元兄充兆先光克免兑兔兕兖党兜兢入全八公六兮兰共关兴兵其具典兹养兼兽冀冁内冈冉册再冒冕冗写军农冠冢冤冥冬冯冰冱冲决况冶冷冻冼冽净凄准凇凉凋凌减凑凛凝几凡凤凫凭凯凰凳凶凸凹出击凼函凿刀刁刃分切刈刊刍刎刑划刖列刘则刚创初删判刨利别刭刮到刳制刷券刹刺刻刽刿剀剁剂剃削剌前剐剑剔剖剜剞剡剥剧剩剪副割剽剿劁劂劈劐劓力劝办功加务劢劣动助努劫劬劭励劲劳劾势勃勇勉勋勐勒勖勘募勤勰勺勾勿匀包匆匈匍匏匐匕化北匙匝匠匡匣匦匪匮匹区医匾匿十千卅升午卉半华协卑卒卓单卖南博卜卞卟占卡卢卣卤卦卧卫卮卯印危即却卵卷卸卺卿厂厄厅历厉压厌厍厕厘厚厝原厢厣厥厦厨厩厮去县叁参又叉及友双反发叔取受变叙叛叟叠口古句另叨叩只叫召叭叮可台叱史右叵叶号司叹叻叼叽吁吃各吆合吉吊同名后吏吐向吒吓吕吗君吝吞吟吠吡吣否吧吨吩含听吭吮启吱吲吴吵吸吹吻吼吾呀呃呆呈告呋呐呓呔呕呖呗员呙呛呜呢呤呦周呱呲味呵呶呷呸呻呼命咀咂咄咆咋和咎咏咐咒咔咕咖咙咚咛咝咣咤咦咧咨咩咪咫咬咭咯咱咳咴咸咻咽咿哀品哂哄哆哇哈哉哌响哎哏哐哑哓哔哕哗哙哚哝哞哟哥哦哧哨哩哪哭哮哲哳哺哼哽哿唁唆唇唉唏唐唑唔唛唠唢唣唤唧唪唬售唯唱唳唷唼唾唿啁啃啄商啉啊啐啕啖啜啡啤啥啦啧啪啬啭啮啵啶啷啸啻啼啾喀喁喂喃善喇喈喉喊喋喏喑喔喘喙喜喝喟喧喱喳喵喷喹喻喽喾嗄嗅嗉嗌嗍嗑嗒嗓嗔嗖嗜嗝嗟嗡嗣嗤嗥嗦嗨嗪嗫嗬嗯嗲嗳嗵嗷嗽嗾嘀嘁嘈嘉嘌嘎嘏嘘嘛嘞嘟嘣嘤嘧嘬嘭嘱嘲嘴嘶嘹嘻嘿噌噍噎噔噗噙噜噢噤器噩噪噫噬噱噶噻噼嚅嚆嚎嚏嚓嚣嚯嚷嚼囊囔囚四囝回囟因囡团囤囫园困囱围囵囹固国图囿圃圄圆圈圉圊圜土圣在圩圪圬圭圮圯地圳圹场圻圾址坂均坊坌坍坎坏坐坑块坚坛坜坝坞坟坠坡坤坦坨坩坪坫坭坯坳坷坻坼垂垃垄垅垆型垌垒垓垛垠垡垢垣垤垦垧垩垫垭垮垲垸埂埃埋城埏埒埔埕埘埙埚埝域埠埤埭埯埴埸培基埽堂堆堇堋堍堑堕堙堞堠堡堤堪堰堵塄塌塍塑塔塘塞塥填塬塾墀墁境墅墉墒墓墙增墟墨墩墼壁壅壑壕壤士壬壮声壳壶壹处备复夏夔夕外夙多夜够夤夥大天太夫夭央夯失头夷夸夹夺夼奁奂奄奇奈奉奋奎奏契奔奕奖套奘奚奠奢奥女奴奶奸她好妁如妃妄妆妇妈妊妍妒妓妖妗妙妞妣妤妥妨妩妪妫妮妯妲妹妻妾姆姊始姐姑姒姓委姗姘姚姜姝姣姥姨姬姹姻姿威娃娄娅娆娇娈娉娌娑娓娘娜娟娠娣娥娩娱娲娴娶娼婀婆婉婊婕婚婢婧婪婴婵婶婷婺婿媒媚媛媪媲媳媵媸媾嫁嫂嫉嫌嫒嫔嫖嫘嫜嫠嫡嫣嫦嫩嫫嫱嬉嬖嬗嬴嬷孀子孑孓孔孕字存孙孚孛孜孝孟孢季孤孥学孩孪孬孰孱孳孵孺孽宁它宄宅宇守安宋完宏宓宕宗官宙定宛宜宝实宠审客宣室宥宦宪宫宰害宴宵家宸容宽宾宿寂寄寅密寇富寐寒寓寝寞察寡寤寥寨寮寰寸对寺寻导寿封射将尉尊小少尔尕尖尘尚尜尝尤尥尧尬就尴尸尹尺尻尼尽尾尿局屁层居屈屉届屋屎屏屐屑展屙属屠屡屣履屦屯山屹屺屿岁岂岈岌岍岐岑岔岖岗岘岙岚岛岢岣岩岫岬岭岱岳岵岷岸岿峁峄峋峒峙峡峤峥峦峨峪峭峰峻崂崃崆崇崎崔崖崛崞崤崦崧崩崭崮崴崽嵇嵊嵋嵌嵘嵛嵝嵩嵫嵬嵯嵴嶂嶙嶝嶷巅巍川州巡巢工左巧巨巩巫差巯己已巳巴巷巽巾币市布帅帆师希帏帐帑帔帕帖帘帙帚帛帜帝带帧席帮帱帷常帻帼帽幂幄幅幌幔幕幛幞幡幢干平年并幸幺幻幼幽广庀庄庆庇床庋序庐庑库应底庖店庙庚府庞废庠庥度座庭庳庵庶康庸庹庾廉廊廒廓廖廛廨廪延廷建廿开弁异弃弄弈弊弋式弑弓引弗弘弛弟张弥弦弧弩弭弯弱弹强弼彀归当录彖彗彘彝形彤彦彩彪彬彭彰影彳彷役彻彼往征徂径待徇很徉徊律後徐徒徕得徘徙徜御徨循徭微徵德徼徽心必忆忉忌忍忏忐忑忒忖志忘忙忝忠忡忤忧忪快忭忮忱念忸忻忽忾忿怀态怂怃怄怅怆怊怍怎怏怒怔怕怖怙怛怜思怠怡急怦性怨怩怪怫怯怵总怼怿恁恂恃恋恍恐恒恕恙恚恝恢恣恤恧恨恩恪恫恬恭息恰恳恶恸恹恺恻恼恽恿悃悄悉悌悍悒悔悖悚悛悝悟悠患悦您悫悬悭悯悱悲悴悸悻悼情惆惊惋惑惕惘惚惜惝惟惠惦惧惨惩惫惬惭惮惯惰想惴惶惹惺愀愁愆愈愉愍愎意愕愚感愠愣愤愦愧愫愿慈慊慌慎慑慕慝慢慧慨慰慵慷憋憎憔憝憧憨憩憬憷憾懂懈懊懋懑懒懦懵懿戆戈
戊戋戌戍戎戏成我戒戕或戗战戚戛戟戡戢戥截戬戮戳戴户戽戾房所扁扃扇扈扉手扌才扎扑扒打扔托扛扣扦执扩扪扫扬扭扮扯扰扳扶批扼找承技抄抉把抑抒抓投抖抗折抚抛抟抠抡抢护报抨披抬抱抵抹抻押抽抿拂拄担拆拇拈拉拊拌拍拎拐拒拓拔拖拗拘拙拚招拜拟拢拣拥拦拧拨择括拭拮拯拱拳拴拶拷拼拽拾拿持挂指挈按挎挑挖挚挛挝挞挟挠挡挢挣挤挥挨挪挫振挲挹挺挽捂捃捅捆捉捋捌捍捎捏捐捕捞损捡换捣捧捩捭据捱捶捷捺捻掀掂掇授掉掊掌掎掏掐排掖掘掠探掣接控推掩措掬掭掮掰掳掴掷掸掺掼掾揄揆揉揍揎描提插揖揞揠握揣揩揪揭援揶揸揽揿搀搁搂搅搋搌搏搐搓搔搛搜搞搠搡搦搪搬搭搴携搽摁摄摅摆摇摈摊摒摔摘摞摧摩摭摸摹摺撂撄撅撇撑撒撕撖撙撞撤撩撬播撮撰撵撷撸撺撼擀擂擅操擎擐擒擘擞擢擤擦攀攉攒攘攥攫攮支收攸改攻放政故效敉敌敏救敕敖教敛敝敞敢散敦敫敬数敲整敷文斋斌斐斑斓斗料斛斜斟斡斤斥斧斩斫断斯新方於施旁旃旄旅旆旋旌旎族旒旖旗无既日旦旧旨早旬旭旮旯旰旱时旷旺昀昂昃昆昊昌明昏易昔昕昙昝星映春昧昨昭是昱昴昵昶昼显晁晃晋晌晏晒晓晔晕晖晗晚晟晡晤晦晨普景晰晴晶晷智晾暂暄暇暌暑暖暗暝暧暨暮暴暹暾曙曛曜曝曦曩曰曲曳更曷曹曼曾替最月有朊朋服朐朔朕朗望朝期朦木未末本札术朱朴朵机朽杀杂权杆杈杉杌李杏材村杓杖杜杞束杠条来杨杪杭杯杰杲杳杵杷杼松板极构枇枉枋析枕林枘枚果枝枞枢枣枥枧枨枪枫枭枯枰枳枵架枷枸柁柃柄柏某柑柒染柔柘柙柚柜柝柞柠柢查柩柬柯柰柱柳柴柽柿栀栅标栈栉栊栋栌栎栏树栓栖栗栝校栩株栲栳样核根格栽栾桀桁桂桃桄桅框案桉桊桌桎桐桑桓桔桕桠桡桢档桤桥桦桧桨桩桫桴桶桷梁梃梅梆梏梓梗梢梦梧梨梭梯械梳梵检棂棉棋棍棒棕棘棚棠棣森棰棱棵棹棺棼椁椅椋植椎椐椒椟椠椤椭椰椴椹椽椿楂楔楗楚楝楞楠楣楦楫楮楷楸楹楼榀概榄榆榇榈榉榍榔榕榛榜榧榨榫榭榱榴榷榻槁槊槌槎槐槔槛槟槠槭槲槽槿樊樗樘樟模樨横樯樱樵樽樾橄橇橐橘橙橛橡橥橱橹橼檀檄檎檐檑檗檠檩檫檬欠次欢欣欤欧欲欷欺款歃歆歇歉歌歙止正此步武歧歪歹死歼殁殂殃殄殆殇殉殊残殍殒殓殖殚殛殡殪殳殴段殷殿毁毂毅毋母每毒毓比毕毖毗毙毛毡毪毫毯毳毵毹毽氅氆氇氍氏氐民氓气氕氖氘氙氚氛氟氡氢氤氦氧氨氩氪氮氯氰氲水永氽汀汁求汆汇汉汊汐汔汕汗汛汜汝汞江池污汤汨汩汪汰汲汴汶汹汽汾沁沂沃沅沆沈沉沌沏沐沓沔沙沛沟没沣沤沥沦沧沩沪沫沭沮沱河沸油治沼沽沾沿泄泅泉泊泌泐泓泔法泖泗泛泞泠泡波泣泥注泪泫泮泯泰泱泳泵泷泸泺泻泼泽泾洁洄洇洋洌洎洒洗洙洚洛洞津洧洪洫洮洱洲洳洵洹活洼洽派流浃浅浆浇浈浊测浍济浏浑浒浓浔浙浚浜浞浠浣浦浩浪浮浯浴海浸浼涂涅消涉涌涎涑涓涔涕涛涝涞涟涠涡涣涤润涧涨涩涪涫涮涯液涵涸涿淀淄淅淆淇淋淌淑淖淘淙淝淞淠淡淤淦淫淬淮深淳混淹添淼清渊渌渍渎渐渑渔渖渗渚渝渠渡渣渤渥温渫渭港渲渴游渺湃湄湍湎湔湖湘湛湟湫湮湾湿溃溅溆溉溏源溘溜溟溢溥溧溪溯溱溲溴溶溷溺溻溽滁滂滇滋滏滑滓滔滕滗滚滞滟滠满滢滤滥滦滨滩滴滹漂漆漉漏漓演漕漠漤漩漪漫漭漯漱漳漶漾潆潇潋潍潘潜潞潢潦潭潮潲潴潸潺潼澄澈澉澌澍澎澜澡澧澳澶澹激濂濉濑濒濞濠濡濮濯瀑瀚瀛瀣瀵瀹灌灏灞火灭灯灰灵灶灸灼灾灿炀炅炉炊炎炒炔炕炖炙炜炝炫炬炭炮炯炱炳炷炸点炻炼炽烀烁烂烃烈烊烘烙烛烟烤烦烧烨烩烫烬热烯烷烹烽焉焊焐焓焕焖焘焙焚焦焯焰焱然煅煊煌煎煜煞煤煦照煨煮煲煳煸煺煽熄熊熏熔熘熙熟熠熨熬熵熹燃燎燔燕燠燥燧燮燹爆爝爨爪爬爰爱爵父爷爸爹爻爽爿片版牌牍牒牖牙牛牝牟牡牢牦牧物牮牯牲牵特牺牾犀犁犄犊犋犍犏犒犟犬犯犰犴状犷犸犹狁狂狃狄狈狍狎狐狒狗狙狞狠狡狨狩独狭狮狯狰狱狲狳狴狷狸狺狻狼猁猃猊猎猕猖猗猛猜猝猞猡猢猥猩猪猫猬献猱猴猷猸猹猾猿獍獐獒獗獠獬獭獯獾玄率玉王玎玑玖玛玢玩玫玮环现玲玳玷玺玻珀珂珈珉珊珍珏珐珑珙珞珠珥珧珩班珲球琅理琉琏琐琚琛琢琥琦琨琪琬琮琰琳琴琵琶琼瑁瑕瑗瑙瑚瑛瑜瑞瑟瑭瑰瑶瑾璀璁璃璇璋璎璐璜璞璧璨璩瓒瓜瓞瓠瓢瓣瓤瓦瓮瓯瓴瓶瓷瓿甄甏甑甓甘甙甚甜生甥用甩甫甬甭田由甲申电男甸町画甾畀畅畈畋界畎畏畔留畚畛畜略畦番畲畴畸畹畿疃疆疋疏疑疔疖疗疙疚疝疟疠疡疣疤疥疫疬疮疯疰疱疲疳疴疵疸疹疼疽疾痂痃痄病症痈痉痊痍痒痔痕痘痛痞痢痣痤痦痧痨痪痫痰痱痴痹痼痿瘀瘁瘃瘅瘊瘌瘐瘗瘘瘙瘛瘟瘠瘢瘤瘥瘦瘩瘪瘫瘭瘰瘳瘴瘵瘸瘼瘾瘿癀癃癌癍癔癖癜癞癣癫癯癸登白百皂的皆皇皈皋皎皑皓皖皙皤皮皱皲皴皿盂盅盆盈益盍盎盏盐监盒盔盖盗盘盛盟盥目盯盱盲直相盹盼盾省眄眇眈眉看眍眙眚真眠眢眦眨眩眭眯眵眶眷眸眺眼着睁睃睇睐睑睚睛睡睢督睥睦睨睫睬睹睽睾睿瞀瞄瞅瞌瞍瞎瞑瞒瞟瞠瞢瞥瞧瞩瞪瞬瞰瞳瞵瞻瞽瞿矍矗矛矜矢矣知矧矩矫矬短矮石矶矸矽矾矿砀码砂砉砌砍砑砒研砖砗砘砚砜砝砟砣砥砧砭砰破砷砸砹砺砻砼砾础硅硇硌硎硐硒硕硖硗硝硪硫硬硭确硷硼碇碉碌碍碎碑碓碗碘碚碛碜碟碡碣碥碧碰碱碲碳碴碹碾磁磅磉磊磋磐磔磕磙磨磬磲磴磷磺礁礅礓礞礤礴示礻礼社祀祁祆祈祉祓祖祗祚祛祜祝神祟祠祢祥祧票祭祯祷祸祺禀禁禄禅禊福禚禧禳禹禺离禽禾秀私秃秆秉秋种科秒秕秘租秣秤秦秧秩秫秭积称秸移秽稀稂稆程稍税
稔稗稚稞稠稣稳稷稻稼稽稿穆穑穗穰穴究穷穸穹空穿窀突窃窄窈窍窑窒窕窖窗窘窜窝窟窠窥窦窨窬窭窳窿立竖站竞竟章竣童竦竭端竹竺竽竿笃笄笆笈笊笋笏笑笔笕笙笛笞笠笤笥符笨笪笫第笮笱笳笸笺笼笾筅筇等筋筌筏筐筑筒答策筘筚筛筝筠筢筮筱筲筵筷筹筻签简箅箍箐箔箕算箜管箢箦箧箨箩箪箫箬箭箱箴箸篁篆篇篌篑篓篙篚篝篡篥篦篪篮篱篷篼篾簇簋簌簏簖簟簦簧簪簸簿籀籁籍米籴类籼籽粉粑粒粕粗粘粜粝粞粟粤粥粪粮粱粲粳粹粼粽精糁糅糇糈糊糌糍糕糖糗糙糜糟糠糨糯系紊素索紧紫累絮絷綦綮縻繁繇纂纛纠纡红纣纤纥约级纨纩纪纫纬纭纯纰纱纲纳纵纶纷纸纹纺纽纾线绀绁绂练组绅细织终绉绊绋绌绍绎经绐绑绒结绔绕绗绘给绚绛络绝绞统绠绡绢绣绥绦继绨绩绪绫续绮绯绰绲绳维绵绶绷绸绺绻综绽绾绿缀缁缂缃缄缅缆缇缈缉缌缎缏缑缒缓缔缕编缗缘缙缚缛缜缝缟缠缡缢缣缤缥缦缧缨缩缪缫缬缭缮缯缰缱缲缳缴缵缶缸缺罂罄罅罐网罔罕罗罘罚罟罡罢罨罩罪置罱署罴罹罾羁羊羌美羔羚羝羞羟羡群羧羯羰羲羸羹羼羽羿翁翅翊翌翎翔翕翘翟翠翡翥翦翩翮翰翱翳翻翼耀老考耄者耆耋而耍耐耒耔耕耖耗耘耙耜耠耢耥耦耧耨耩耪耱耳耵耶耷耸耻耽耿聂聃聆聊聋职聍聒联聘聚聩聪聱聿肃肄肆肇肉肋肌肓肖肘肚肛肝肟肠股肢肤肥肩肪肫肭肮肯肱育肴肷肺肼肽肾肿胀胁胂胃胄胆背胍胎胖胗胙胚胛胜胝胞胡胤胥胧胨胩胪胫胬胭胯胰胱胲胳胴胶胸胺胼能脂脆脉脊脍脎脏脐脑脒脓脔脖脘脚脞脬脯脱脲脶脸脾腆腈腊腋腌腐腑腓腔腕腙腚腠腥腧腩腭腮腰腱腴腹腺腻腼腽腾腿膀膂膈膊膏膑膘膛膜膝膦膨膪膳膺膻臀臁臂臃臆臊臌臣臧自臬臭至致臻臼臾舀舁舂舄舅舆舌舍舐舒舔舛舜舞舟舡舢舣舨航舫般舰舱舳舴舵舶舷舸船舻舾艄艇艋艘艚艟艨艮良艰色艳艴艺艽艾艿节芄芈芊芋芍芎芏芑芒芗芘芙芜芝芟芡芥芦芨芩芪芫芬芭芮芯芰花芳芴芷芸芹芽芾苁苄苇苈苊苋苌苍苎苏苑苒苓苔苕苗苘苛苜苞苟苠苡苣苤若苦苫苯英苴苷苹苻茁茂范茄茅茆茈茉茌茎茏茑茔茕茗茚茛茜茧茨茫茬茭茯茱茳茴茵茶茸茹茼荀荃荆荇草荏荐荑荒荔荚荛荜荞荟荠荡荣荤荥荦荧荨荩荪荫荬荭药荷荸荻荼荽莅莆莉莎莒莓莘莛莜莞莠莨莩莪莫莰莱莲莳莴莶获莸莹莺莼莽菀菁菅菇菊菌菏菔菖菘菜菝菟菠菡菥菩菪菰菱菲菹菽萁萃萄萋萌萍萎萏萑萘萜萝萤营萦萧萨萱萸萼落葆葑著葚葛葜葡董葩葫葬葭葱葳葵葶葸葺蒂蒇蒈蒉蒋蒌蒎蒗蒙蒜蒡蒯蒲蒴蒸蒹蒺蒽蒿蓁蓄蓉蓊蓍蓐蓑蓓蓖蓝蓟蓠蓣蓥蓦蓬蓰蓼蓿蔌蔑蔓蔗蔚蔟蔡蔫蔬蔷蔸蔹蔺蔻蔼蔽蕃蕈蕉蕊蕖蕙蕞蕤蕨蕲蕴蕹蕺蕻蕾薄薅薇薏薛薜薤薨薪薮薯薰薷薹藁藉藏藐藓藕藜藤藩藻藿蘅蘑蘖蘧蘩蘸蘼虎虏虐虑虔虚虞虢虫虬虮虱虹虺虻虼虽虾虿蚀蚁蚂蚊蚋蚌蚍蚓蚕蚜蚝蚣蚤蚧蚨蚩蚬蚯蚰蚱蚴蚶蚺蛀蛄蛆蛇蛉蛊蛋蛎蛏蛐蛑蛔蛘蛙蛛蛞蛟蛤蛩蛭蛮蛰蛱蛲蛳蛴蛸蛹蛾蜀蜂蜃蜇蜈蜉蜊蜍蜒蜓蜕蜗蜘蜚蜜蜞蜡蜢蜣蜥蜩蜮蜱蜴蜷蜻蜾蜿蝇蝈蝉蝌蝎蝓蝗蝙蝠蝣蝤蝥蝮蝰蝴蝶蝻蝼蝽蝾螂螃螅螈螋融螗螟螨螫螬螭螯螳螵螺螽蟀蟆蟊蟋蟑蟒蟛蟠蟥蟪蟮蟹蟾蠃蠊蠓蠕蠖蠡蠢蠲蠹蠼血衄衅行衍衔街衙衡衢衣补表衩衫衬衮衰衲衷衽衾衿袁袂袄袅袈袋袍袒袖袜袢袤被袭袱袼裁裂装裆裉裎裒裔裕裘裙裟裢裣裤裥裨裰裱裳裴裸裹裼裾褂褊褐褒褓褙褚褛褡褥褪褫褰褴褶襁襄襞襟襦襻西要覃覆见观规觅视觇览觉觊觋觌觎觏觐觑角觖觚觜觞解觥触觫觯觳言訇訾詈詹誉誊誓謇警譬计订讣认讥讦讧讨让讪讫训议讯记讲讳讴讵讶讷许讹论讼讽设访诀证诂诃评诅识诈诉诊诋诌词诎诏译诒诓诔试诖诗诘诙诚诛诜话诞诟诠诡询诣诤该详诧诨诩诫诬语诮误诰诱诲诳说诵请诸诹诺读诼诽课诿谀谁谂调谄谅谆谇谈谊谋谌谍谎谏谐谑谒谓谔谕谖谗谙谚谛谜谝谟谠谡谢谣谤谥谦谧谨谩谪谫谬谭谮谯谰谱谲谳谴谵谶谷豁豆豇豉豌豕豚象豢豪豫豳豸豹豺貂貅貉貊貌貔貘贝贞负贡财责贤败账货质贩贪贫贬购贮贯贰贱贲贳贴贵贶贷贸费贺贻贼贽贾贿赀赁赂赃资赅赆赇赈赉赊赋赌赍赎赏赐赓赔赖赘赙赚赛赜赝赞赠赡赢赣赤赦赧赫赭走赳赴赵赶起趁趄超越趋趑趔趟趣趱足趴趵趸趺趼趾趿跃跄跆跋跌跎跏跑跖跗跚跛距跞跟跣跤跨跪跬路跳践跷跸跹跺跻跽踅踉踊踌踏踔踝踞踟踢踣踩踪踬踮踯踱踵踹踺踽蹀蹁蹂蹄蹇蹈蹉蹊蹋蹑蹒蹙蹦蹩蹬蹭蹯蹰蹲蹴蹶蹼蹿躁躅躇躏躐躔躜躞身躬躯躲躺车轧轨轩轫转轭轮软轰轱轲轳轴轵轶轷轸轺轻轼载轾轿辁辂较辄辅辆辇辈辉辊辋辍辎辏辐辑输辔辕辖辗辘辙辚辛辜辞辟辣辨辩辫辰辱边辽达迁迂迄迅过迈迎运近迓返迕还这进远违连迟迢迤迥迦迨迩迪迫迭迮述迷迸迹追退送适逃逄逅逆选逊逋逍透逐逑递途逖逗通逛逝逞速造逡逢逦逭逮逯逵逶逸逻逼逾遁遂遄遇遍遏遐遑遒道遗遘遛遢遣遥遨遭遮遴遵遽避邀邂邃邈邋邑邓邕邗邙邛邝邡邢那邦邪邬邮邯邰邱邳邴邵邶邸邹邺邻邾郁郄郅郇郊郎郏郐郑郓郗郛郜郝郡郢郦郧部郫郭郯郴郸都郾鄂鄄鄙鄞鄢鄣鄯鄱鄹酃酆酉酊酋酌配酎酏酐酒酗酚酝酞酡酢酣酤酥酩酪酬酮酯酰酱酲酴酵酶酷酸酹酽酾酿醅醇醉醋醌醍醐醑醒醚醛醢醪醭醮醯醴醵醺采釉释里重野量金釜鉴銎銮鋈錾鍪鎏鏊鏖鐾鑫钆钇针钉钊钋钌钍钎钏钐钒钓钔钕钗钙钚钛钜钝钞钟钠钡钢钣钤钥钦钧钨钩钪钫钬钭钮钯钰钱钲钳钴钵钷钹钺钻钼钽钾钿铀铁铂铃铄铅铆铈铉铊铋铌铍铎铐
铑铒铕铗铘铙铛铜铝铞铟铠铡铢铣铤铥铧铨铩铪铫铬铭铮铯铰铱铲铳铴铵银铷铸铹铺铼铽链铿销锁锂锃锄锅锆锇锈锉锊锋锌锎锏锐锑锒锓锔锕锖锗锘错锚锛锝锞锟锡锢锣锤锥锦锨锩锪锫锬锭键锯锰锱锲锴锵锶锷锸锹锺锻锾锿镀镁镂镄镅镆镇镉镊镌镍镎镏镐镑镒镓镔镖镗镘镛镜镝镞镡镢镣镤镥镦镧镨镩镪镫镬镭镯镰镱镲镳镶长门闩闪闫闭问闯闰闱闲闳间闵闶闷闸闹闺闻闼闽闾阀阁阂阃阄阅阆阈阉阊阋阌阍阎阏阐阑阒阔阕阖阗阙阚阜队阡阢阪阮阱防阳阴阵阶阻阼阽阿陀陂附际陆陇陈陉陋陌降限陔陕陛陟陡院除陧陨险陪陬陲陴陵陶陷隅隆隈隋隍随隐隔隗隘隙障隧隰隳隶隼隽难雀雁雄雅集雇雉雌雍雎雏雒雕雠雨雩雪雯雳零雷雹雾需霁霄霆震霈霉霍霎霏霓霖霜霞霪霭霰露霸霹霾青靓靖静靛非靠靡面靥革靳靴靶靼鞅鞋鞍鞑鞒鞘鞠鞣鞫鞭鞯鞲鞴韦韧韩韪韫韬韭音韵韶页顶顷顸项顺须顼顽顾顿颀颁颂颃预颅领颇颈颉颊颌颍颏颐频颓颔颖颗题颚颛颜额颞颟颠颡颢颤颥颦颧风飑飒飓飕飘飙飚飞食飧飨餍餐餮饔饕饥饧饨饩饪饫饬饭饮饯饰饱饲饴饵饶饷饺饼饽饿馀馁馄馅馆馇馈馊馋馍馏馐馑馒馓馔馕首馗馘香馥馨马驭驮驯驰驱驳驴驵驶驷驸驹驺驻驼驽驾驿骀骁骂骄骅骆骇骈骊骋验骏骐骑骒骓骖骗骘骚骛骜骝骞骟骠骡骢骣骤骥骧骨骰骶骷骸骺骼髀髁髂髅髋髌髑髓高髡髦髫髭髯髹髻鬃鬈鬏鬓鬟鬣鬯鬲鬻鬼魁魂魃魄魅魇魈魉魍魏魑魔鱼鱿鲁鲂鲅鲆鲇鲈鲋鲍鲎鲐鲑鲔鲚鲛鲜鲞鲟鲠鲡鲢鲣鲤鲥鲦鲧鲨鲩鲫鲭鲮鲰鲱鲲鲳鲴鲵鲷鲸鲺鲻鲼鲽鳃鳄鳅鳆鳇鳊鳌鳍鳎鳏鳐鳓鳔鳕鳖鳗鳘鳙鳜鳝鳞鳟鳢鸟鸠鸡鸢鸣鸥鸦鸨鸩鸪鸫鸬鸭鸯鸱鸲鸳鸵鸶鸷鸸鸹鸺鸽鸾鸿鹁鹂鹃鹄鹅鹆鹇鹈鹉鹊鹋鹌鹎鹏鹑鹕鹗鹘鹚鹛鹜鹞鹣鹤鹦鹧鹨鹩鹪鹫鹬鹭鹰鹱鹳鹿麂麇麈麋麒麓麝麟麦麸麻麽麾黄黉黍黎黏黑黔默黛黜黝黟黠黢黥黧黩黯黹黻黼黾鼋鼍鼎鼐鼓鼗鼙鼠鼢鼬鼯鼷鼹鼻鼾齐齑齿龀龃龄龅龆龇龈龉龊龋龌龙龚龛龟龠",
'symbols': " !\"#$%&'()*+,-./0123456789:;<=>?@[\\]^_`{|}~。〈〉《》「」『』"
},
'japanese_g2': {
'characters': " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~‥…※♪、。々〈〉《》「」『』【】〔〕あぃいうぇえおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろわをんァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミムメモャヤュユョヨラリルレロワヱヲンヴヶ・ー一丁七万丈三上下不与丑世丘丙丞両並中串丸丹主乃久之乗乙九也乱乳乾亀了予争事二互五井亘亜亡交亥亨享京亭亮人仁今介仏仕他付仙代令以仮仰仲件任企伊伍伎伏休会伝伯伴伶伸似伽位低住佐佑体何余作佳使例侍侑供依価便係俊俗保信俣修俵倉個倍倒倖候借値倫倭倶偉偏健側偵偽傍傑備債傷働像僧儀優允元兄充先光克免兎児党入全八公六共兵其具典兼内円再写冠冨冬冴冶冷凍凡処凪出刀刃分切刈刑列初判別利制刷券刺刻則削前剛剣剤剥副割創劇力功加助労効勅勇勉動勘務勝勢勤勧勲包化北匠匡区医十千升午半卒卓協南単博占卯印危即卵卸厚原厨厳去参又叉及友双反収取受叡口古句只叫召可台史右叶号司各合吉同名后吏吐向君吟否吸吹吾呂呉告呑周味呼命和咲哀品哉員哲唄唐唯唱商問啓善喜喬喰営嗣嘉噌器四回因団囲図固国國園土圧在圭地坂均坊坐坪垂型垢垣埋城埜域執基埼堀堂堅堤堰報場堺塔塗塚塩塵境墓増墨墳壁壇壊士壬壮声壱売壷変夏夕外多夜夢大天太夫央失夷奇奈奉奏契奥奨女奴好如妃妙妹妻姉始姓委姥姫姿威娘婆婚婦嫌嬉子孔字存孝孟季孤学孫宅宇守安完宏宗官宙定宜宝実客宣室宥宮宰害家容宿寂寄寅密富寒寛寝察寧審寸寺対寿封専射将尊尋導小少尚尭就尺尻尼尽尾尿局居屈屋展属層屯山岐岡岩岬岱岳岸峠峡峨峯峰島峻崇崎崩嵐嵩嵯嶋嶺巌川州巡巣工左巧巨差己巳巴巻巾市布帆希帝師席帯帰帳常帽幅幌幕幡幣干平年幸幹幽幾庁広庄床序底店府度座庫庭庵康庸廃廉廣延建廻弁式弐弓引弘弟弥弦弱張強弾当形彦彩彪彫彬彰影役彼往征径待律後徒従得御復微徳徹心必忌忍志応忠快念怒怜思急性恋恐恒恩恭息恵悌悟悠患悦悪悲情惇惑惟惣想意愚愛感慈態慎慣慧慶憂憲憶懐懸戎成我戒戦戯戸戻房所扇手才打払扶承技投抗折抜抱押担拓拝拡拳拾持指挙振捕捨捷掃排掘掛採探接推掻提揖揚換揮援揺損摂摘摩摺撃撫播撮操擦擬支改攻放政故敏救敗教敢散敦敬数整敵敷文斉斎斐斑斗料斜斤断斯新方於施旅旋族旗日旦旧旨早旬旭旺昂昆昇昌明易星映春昭是昼時晃晋晩普景晴晶智暁暖暗暢暦暮暴曇曙曜曲曳更書曹曽曾替最月有朋服朔朗望朝期木未末本札朱朴杉李杏材村杖杜束条来杭東杵松板析枕林枚果枝枯架柄柊柏柑染柔柚柱柳柴査柿栃栄栖栗校株核根格桂桃案桐桑桜桝桧桶梁梅梓梢梨梯械梶棄棒棚棟森椋植椎検椿楊楓楠楢業楯極楼楽榊榎榛構槌様槙槻樋標模権横樫樹樺樽橋橘機檀櫛欠次欣欧欲欽歌歓止正此武歩歯歳歴死殊残殖段殺殻殿毅母毎毒比毛氏民気水氷永汀汁求汐汗汚江池汰汲決沈沓沖沙沢河油治沼泉泊法波泣泥注泰洋洗洞津洪洲活派流浄浅浜浦浩浪浮浴海消涌涙液涼淀淑淡深淳淵混添清済渉渋渓渕渚減渡渥温測港湊湖湧湯湾湿満源準溜溝溶滅滋滑滝漁漆漏演漢漬潔潜潟潤潮潰澄澤激濃濱瀧瀬灘火灯灰災炉炎炭点為烈烏無焦然焼煙照煮熊熟熱燃燈燕燦燭爆爪父爽爾片版牛牟牧物特犬犯状狂狐狗狩独狭狼猛猪猫献猿獄獅獣玄率玉王玖玲珍珠現球理琉琢琳琴瑚瑛瑞瑠瑳璃環瓜瓦瓶甘甚生産用甫田由甲申男町画界畑畔留畜畝畠略番異畳疾病症痛療発登白百的皆皇皮皿盆益盗盛盟監盤目盲直相省眉看県眞真眠眼着督睦瞬瞳矢知矩短石砂研砲破硫硬碑碧碩確磁磐磨磯礁示礼社祇祈祉祐祖祝神祢祥票祭禁禄禅禎福禰秀私秋科秘秦秩称移稀程税稔稗稚種稲穂積穏穴究空突窓窪立竜章童竪端競竹笑笛笠符第笹筆等筋筑筒答策箇箕算管箱箸節範築篠篤篭簡簾籍米粉粒粕粗粟粥精糖糞糠糸系紀約紅紋納純紗紘紙級素紡索紫細紳紹紺終組経結絡絢給統絵絶絹継続綜維綱網綾綿緋総緑緒線締編緩練縁縄縦縫縮績繁織繰罪置羅羊美群義羽翁習翔翠翼耀老考者耐耕耳耶聖聞聡聴職肇肉肌肝股肥肩育肺背胞胡胤胸能脂脇脈脚脱脳腐腕腫腰腸腹膜膳臣臥臨自臭至致臼興舌舎舘舛舜舞舟航般船艦良色艶芋芙芝芥芦花芳芸芹芽苅苑苔苗若苦苫英茂茄茅茉茜茨茶草荒荘荷荻莉菅菊菌菓菖菜華菱萌萩萱落葉葛葦葵蒔蒲蒸蒼蓋蓑蓬蓮蔦蔭蔵蕗薄薩薫薬薮藁藍藤藻蘇蘭虎虚虫虹虻蚊蛇蛍蛭蜂蜜蝦蝶融螺蟹蟻血衆行術街衛衝衡衣表袋袖被裁裂装裏裕補裟裸製複西要覆覇見規視覚覧親観角解触言計訓託記訪設許訳訴診証評詞詠試詩詰話誉誌認誓誘語誠誤説読課調談請諏論諭諸謙講謝謹識警議譲護讃谷豆豊豚象豪貝貞負財貢貧貨販貫責貯貴買貸費賀賃資賞賢質赤赦走起超越足跡路跳踏身車軌軍軒軟転軸軽載輔輝輪輸辛辞辰農辺辻込迎近返迦迫述迷追退送逃逆
透途通速造逢連週進逸遅遊運過道達違遠遣遥適選遺遼避邑那邦邪郁郎郡部郭郵郷都配酒酔酢酸醍醐采釈里重野量金釘釜針釣鈴鉄鉛鉢鉱鉾銀銃銅銘銭鋭鋼錦録鍋鍛鍬鍵鎌鎖鎮鏡鐘鑑長門閉開閑間関閣闇闘阪防阿陀附降限院陣除陰陳陵陶陸険陽隅隆隈隊階随隔際障隠隣隼雀雁雄雅集雑雛離難雨雪雲零雷電震霊霜霞霧露青靖静非面革鞍鞠韓音響頂頃項順須預頓領頭頼題額顔顕願類風飛食飯飲飼飽飾餅養館首香馨馬駄駅駆駐駒駿騎験骨高髪鬼魁魂魅魔魚鮎鮫鮮鯉鯨鳥鳩鳳鳴鴨鴻鵜鶏鶴鷲鷹鷺鹿麓麗麦麻麿黄黒黙鼓鼠鼻齢龍*",
'symbols': " !\"#$%&'()*+,-./0123456789:;<=>?@[\\]^_`{|}~‥…※♪、。々〈〉《》「」『』【】〔〕"
},
'english_g2': {
'characters': "0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ €ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
'symbols': "0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ €"
},
'latin_g2': {
'characters': " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ªÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćČčĎďĐđĒēĖėĘęĚěĞğĨĩĪīĮįİıĶķĹĺĻļĽľŁłŃńŅņŇňŒœŔŕŘřŚśŞşŠšŤťŨũŪūŮůŲųŸŹźŻżŽžƏƠơƯưȘșȚțə̇ḌḍḶḷṀṁṂṃṄṅṆṇṬṭẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐốỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹ€",
'symbols': " !\"#$%&'()*+,-./0123456789:;<=>?@[\\]^_`{|}~ €"
},
'korean_g2': {
'characters': " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~가각간갇갈감갑값강갖같갚갛개객걀거걱건걷걸검겁것겉게겨격겪견결겹경곁계고곡곤곧골곰곱곳공과관광괜괴굉교구국군굳굴굵굶굽궁권귀규균그극근글긁금급긋긍기긴길김깅깊까깎깐깔깜깝깥깨꺼꺾껍껏껑께껴꼬꼭꼴꼼꼽꽂꽃꽉꽤꾸꿀꿈뀌끄끈끊끌끓끔끗끝끼낌나낙낚난날낡남납낫낭낮낯낱낳내냄냉냐냥너넉널넓넘넣네넥넷녀녁년념녕노녹논놀놈농높놓놔뇌뇨누눈눕뉘뉴늄느늑는늘늙능늦늬니닐님다닥닦단닫달닭닮담답닷당닿대댁댐더덕던덜덤덥덧덩덮데델도독돈돌돕동돼되된두둑둘둠둡둥뒤뒷드득든듣들듬듭듯등디딩딪따딱딴딸땀땅때땜떠떡떤떨떻떼또똑뚜뚫뚱뛰뜨뜩뜯뜰뜻띄라락란람랍랑랗래랜램랫략량러럭런럴럼럽럿렁렇레렉렌려력련렬렵령례로록론롬롭롯료루룩룹룻뤄류륙률륭르른름릇릎리릭린림립릿마막만많말맑맘맙맛망맞맡맣매맥맨맵맺머먹먼멀멈멋멍멎메멘멩며면멸명몇모목몰몸몹못몽묘무묵묶문묻물뭄뭇뭐뭣므미민믿밀밉밌및밑바박밖반받발밝밟밤밥방밭배백뱀뱃뱉버번벌범법벗베벤벼벽변별볍병볕보복볶본볼봄봇봉뵈뵙부북분불붉붐붓붕붙뷰브블비빌빗빚빛빠빨빵빼뺨뻐뻔뻗뼈뽑뿌뿐쁘쁨사삭산살삶삼상새색샌생서석섞선설섬섭섯성세센셈셋션소속손솔솜솟송솥쇄쇠쇼수숙순술숨숫숲쉬쉽슈스슨슬슴습슷승시식신싣실싫심십싱싶싸싹쌀쌍쌓써썩썰썹쎄쏘쏟쑤쓰쓸씀씌씨씩씬씹씻아악안앉않알앓암압앗앙앞애액야약얇양얗얘어억언얹얻얼엄업없엇엉엌엎에엔엘여역연열엷염엽엿영옆예옛오옥온올옮옳옷와완왕왜왠외왼요욕용우욱운울움웃웅워원월웨웬위윗유육율으윽은을음응의이익인일읽잃임입잇있잊잎자작잔잖잘잠잡장잦재쟁저적전절젊점접젓정젖제젠젯져조족존졸좀좁종좋좌죄주죽준줄줌줍중쥐즈즉즌즐즘증지직진질짐집짓징짙짚짜짝짧째쨌쩌쩍쩐쪽쫓쭈쭉찌찍찢차착찬찮찰참창찾채책챔챙처척천철첫청체쳐초촉촌총촬최추축춘출춤춥춧충취츠측츰층치칙친칠침칭카칸칼캐캠커컨컬컴컵컷켓켜코콜콤콩쾌쿠퀴크큰클큼키킬타탁탄탈탑탓탕태택탤터턱털텅테텍텔템토톤톱통퇴투툼퉁튀튜트특튼튿틀틈티틱팀팅파팎판팔패팩팬퍼퍽페펴편펼평폐포폭표푸푹풀품풍퓨프플픔피픽필핏핑하학한할함합항해핵핸햄햇행향허헌험헤헬혀현혈협형혜호혹혼홀홍화확환활황회획횟효후훈훌훔훨휘휴흉흐흑흔흘흙흡흥흩희흰히힘",
'symbols': " !\"#$%&'()*+,-./0123456789:;<=>?@[\\]^_`{|}~"
},
'thai_g1': {
'characters': ''.join(separator_char)+'!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZกขคฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรลวศษสหฬอฮฤเแโใไะาุูิีืึั่้๊๋็์ำํฺฯๆ0123456789๑๒๓๔๕๖๗๘๙',
'symbols': "0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ "
}
}
| [
"[email protected]"
] | |
699130c762ef47518063fd168d4ad07559d13f83 | 054eefaa17157b32869ea986347b3e539d2bf06b | /big_o_coding/Blue_13/Schoolwork/day_16_DISJOINT_SET_UNION.py | 8f2440568b1c442c8c0885d6458abe62531d9a84 | [] | no_license | baocogn/self-learning | f2cb2f45f05575b6d195fc3c407daf4edcfe7d0e | f50a3946966354c793cac6b28d09cb5dba2ec57a | refs/heads/master | 2021-07-12T23:32:14.728163 | 2019-02-10T14:24:46 | 2019-02-10T14:24:46 | 143,170,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | MAX = 20
parent = []
def makeSet():
    """Reset the forest: every node 0..MAX+4 becomes its own singleton set."""
    global parent
    parent = list(range(MAX + 5))
def findSet(u):
    """Return the representative (root) of the set containing ``u``.

    Adds path compression: every node visited on the way up is re-pointed
    directly at the root, flattening the tree so later lookups are near O(1)
    amortized.  The representative returned is identical to the original
    uncompressed implementation's, so union/query results are unchanged.
    """
    root = u
    while root != parent[root]:
        root = parent[root]
    # Second pass: redirect each traversed node straight to the root.
    while u != root:
        parent[u], u = root, parent[u]
    return root
def unionSet(u, v):
    """Merge the sets containing u and v: u's root becomes a child of v's root."""
    root_u = findSet(u)
    root_v = findSet(v)
    parent[root_u] = root_v
if __name__ == "__main__":
    # Q queries follow, each "u v q": q == 1 merges the sets of u and v;
    # q == 2 prints 1 if u and v share a representative, else 0.
    Q = int(input())
    makeSet()
    for i in range(Q):
        u, v, q = map(int, input().split())
        if q == 1:
            unionSet(u, v)
        if q == 2:
            parentU = findSet(u)
            parentV = findSet(v)
            if parentU == parentV:
                print(1)
            else:
                print(0)
"[email protected]"
] | |
9f2e2505e2cb58d895d7deee1a5c00d1eebb2472 | a599264b545cbe7340193a98af580b653e690db9 | /eapython/in-class-notebooks/modules/bbox.py | 929ce1fa3396b6c5548428f432ef22910d4affac | [] | no_license | earthlab-education/ea-course-notebooks | 705e426485a8a2e0e408bcef165d8bb1c9f497b7 | 1d49f1134c5379e3e0ed45d05b1d55cb426eb3aa | refs/heads/main | 2023-06-28T08:35:16.420359 | 2023-06-13T18:35:26 | 2023-06-13T18:35:26 | 444,532,413 | 6 | 15 | null | null | null | null | UTF-8 | Python | false | false | 1,015 | py | """
A Bounding Box class
copyright
authors
"""
import geopandas as gpd
from shapely.geometry import Point
class BBox:
"""
Corner coordinates of a bounding box
"""
def __init__(self, minx, miny, maxx, maxy, crs='EPSG:4326'):
if minx > 180:
raise ValueError('x values cannot exceed 180')
self.minx = minx if (minx < maxx) else maxx
self.miny = miny
self.maxx = maxx if (minx < maxx) else minx
self.maxy = maxy
self.crs = crs
@property
def min_lon(self):
return self.minx
@property
def _min_point(self):
return Point(self.minx, self.miny)
@property
def _max_point(self):
return Point(self.maxx, self.maxy)
@property
def _gdf(self):
return gpd.GeoDataFrame(
{'geometry': [self._min_point, self._max_point]},
crs=self.crs)
def to_crs(self, crs):
gdf_reproj = self._gdf.to_crs(crs)
self.minx = gdf_reproj.loc[0].geometry.x
self.miny = gdf_reproj.loc[0].geometry.y
self.maxx = gdf_reproj.loc[1].geometry.x
self.maxy = gdf_reproj.loc[1].geometry.y
| [
"[email protected]"
] | |
a5481029b8657142853a53868a1ce5c6e8e6bfa6 | 419e02cddf6364f70253956c243b828c41011fe1 | /split_to_frames.py | 194e4543a82fcb73f8df57da3401e96d6ac61f2c | [] | no_license | alloky/inteligenceSystemsProject | 2c379b15ae75e6a9522d59c72b5373d33e0c5678 | 2566c93dd395b3e8ba1ee58060baf43bb31f76e0 | refs/heads/main | 2023-04-29T04:41:52.297790 | 2021-05-10T09:56:41 | 2021-05-10T09:56:41 | 358,875,497 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,907 | py | import cv2
import json
import os
import shutil
from multiprocessing import Pool
from os import listdir
from tqdm import tqdm
def process_video(video_arg):
source_video, target_dir, fps = video_arg
vidcap = cv2.VideoCapture(source_video)
def getFrame(sec):
vidcap.set(cv2.CAP_PROP_POS_MSEC,sec*1000)
hasFrames,image = vidcap.read()
if hasFrames:
image = cv2.resize(image, (224, 224))
cv2.imwrite(target_dir + '/' + str(count) + '.jpg', image) # save frame as JPEG file
return hasFrames
sec = 0
frameRate = fps / 60
count=1
success = getFrame(sec)
while success:
count += 1
sec += frameRate
sec = round(sec, 2)
success = getFrame(sec)
def process_directory(source_dir, target_dir, json_file):
contents = json.load(open(json_file))
id_to_fps = {}
for content in contents:
video_id = content['url'][len('https://www.youtube.com/watch?v='):]
id_to_fps[video_id] = content['fps']
shutil.rmtree(target_dir, ignore_errors=True)
os.makedirs(target_dir, mode=0o777)
video_list = []
for source_video in tqdm(listdir(source_dir)):
#print("source_video", source_video)
video_id = source_video[:source_video.find('.')]
os.makedirs(target_dir + '/' + video_id, mode=0o777)
#print("VIDEO_ID", video_id)
video_list.append([
source_dir + '/' + source_video,
target_dir + '/' + video_id,
id_to_fps[video_id] if video_id in id_to_fps else 30.0
])
print(len(video_list))
with Pool(processes=10) as p:
p.map(process_video, video_list)
if __name__ == '__main__':
process_directory('train', 'train_frames', 'MSASL_train.json')
process_directory('test', 'test_frames', 'MSASL_test.json')
process_directory('val', 'val_frames', 'MSASL_val.json')
| [
"[email protected]"
] | |
df5bcabe560a6dbec253c154a164052c95265eaa | 7850831b7dcda9730cdd1ce40f873843b079c6a4 | /tests/test_camera_params.py | aade7f33df18d6ebda3c11c0ae7ec40dbbca1a9c | [] | no_license | ashar6194/human_pose_util | 2f64a3ce88995ea08a87910fd49816eec28c5b7a | bff2fcc7f92dfef14996e821a2768200c7a751dd | refs/heads/master | 2021-05-06T05:27:13.499542 | 2017-10-25T22:56:02 | 2017-10-25T22:56:02 | 115,153,665 | 0 | 0 | null | 2017-12-22T22:12:24 | 2017-12-22T22:12:24 | null | UTF-8 | Python | false | false | 2,011 | py | from __future__ import division
import unittest
import numpy as np
from human_pose_util.transforms.camera_params import \
calculate_extrinsics
from human_pose_util.transforms.camera_params import \
calculate_intrinsics_1d, calculate_intrinsics
class TestCameraIntrinsics(unittest.TestCase):
    """Tests for the camera-intrinsics recovery functions."""

    def test_1d(self):
        # Project 1000 random 1D points with known focal length / center,
        # then check both parameters are recovered exactly.
        n_points = 1000
        x3 = np.random.uniform(size=(n_points,))
        z3 = np.random.uniform(size=(n_points,)) + 10
        focal, center = 2.0, 3.4
        x2 = x3 / z3 * focal + center
        est_focal, est_center = calculate_intrinsics_1d(x3, z3, x2)
        np.testing.assert_allclose(est_focal, focal)
        np.testing.assert_allclose(est_center, center)

    def test_2d(self):
        # Same idea in 2D: pinhole-project random 3D points and recover the
        # per-axis focal lengths and principal point.
        n_points = 1000
        p3 = np.random.uniform(size=(n_points, 3))
        p3[:, 2] += 2
        focal = np.array([1.2, 1.1])
        center = np.array([200., 201.])
        p2 = p3[:, :2] / p3[:, -1:] * focal + center
        est_focal, est_center = calculate_intrinsics(p3, p2)
        np.testing.assert_allclose(est_focal, focal)
        np.testing.assert_allclose(est_center, center)
class TestCameraExtrinsics(unittest.TestCase):
    """Test calculate_extrinsics."""

    def test_camera_extrinsics(self):
        from human_pose_util.transforms.np_impl import euler_matrix_nh
        from human_pose_util.transforms.np_impl import euler_from_matrix_nh
        # Build a known similarity transform (rotation R, translation t,
        # scale k), apply it to random points, and check recovery.
        n_points = 1000
        points_a = np.random.uniform(size=(n_points, 3))
        angles = np.random.uniform(size=(3,))
        translation = np.random.uniform(-1, 1, size=(3,))
        rotation = euler_matrix_nh(angles[0], angles[1], angles[2])
        scale = 1.3
        points_b = scale * np.dot(points_a, rotation.T) + translation
        est_rotation, est_translation, est_scale = calculate_extrinsics(
            points_a, points_b)
        np.testing.assert_allclose(est_rotation, rotation)
        np.testing.assert_allclose(est_translation, translation)
        np.testing.assert_allclose(est_scale, scale)
        # The recovered rotation should round-trip back to the Euler angles.
        est_angles = euler_from_matrix_nh(est_rotation)
        np.testing.assert_allclose(est_angles, angles)
# Allow running this test module directly (python test_camera_params.py).
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
f11c14ddced2d612bfb6725e3bbc6615a7973140 | 6104023226c47f1eb10613388c8cb5bb02760ea5 | /Text Analytics Model/text_classification_keras.py | 5dcbcf11505e60135305baeae5abc3f43fae791c | [] | no_license | eddible95/Show_Me_Telegram_Bot | 8a5467265b4e54e2fdafa83663740df5f8690ec1 | 31362fc38072cf31a114b9184798b442064307e1 | refs/heads/master | 2022-04-17T19:45:30.900866 | 2020-04-14T05:15:53 | 2020-04-14T05:15:53 | 255,151,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,789 | py | from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from nltk.tokenize import RegexpTokenizer
from sklearn import preprocessing
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import utils
import pandas as pd
import numpy as np
import joblib
# tokenizer to remove unwanted elements from out data like symbols and numbers
from sklearn.utils import compute_class_weight
df = pd.read_csv('Movie_Metadata_Sentiments.csv')
# Subset only emotions required to get overall emotion detected from the text content
sub_df = df[['anger', 'joy', 'fear', 'sadness']]
# Label the movie with the highest count of emotions
df['Max'] = sub_df.idxmax(axis=1)
token = RegexpTokenizer(r'[a-zA-Z0-9]+')
cv = CountVectorizer(lowercase=True, stop_words='english', ngram_range=(1, 1), tokenizer=token.tokenize)
cv = cv.fit(df['Text_Content'])
text_counts = cv.transform(df['Text_Content'])
# Save the vectorizer
joblib.dump(cv, "vectorizer.pkl")
X_train, X_test, y_train, y_test = train_test_split(
text_counts, df['Max'], test_size=0.2, random_state=1)
# Neural Network
encoder = preprocessing.LabelEncoder()
encoder.fit(y_train)
print(encoder.classes_)
y_train = encoder.transform(y_train)
y_test = encoder.transform(y_test)
# Resolves the imbalance in dataset
class_weights = compute_class_weight('balanced', np.unique(y_train), y_train)
class_weights_dict = dict(zip(encoder.transform(list(encoder.classes_)), class_weights))
print(class_weights_dict)
num_classes = np.max(y_train) + 1
y_train = utils.to_categorical(y_train, num_classes)
y_test = utils.to_categorical(y_test, num_classes)
print(y_train)
print(y_train.shape)
batch_size = 64
epochs = 3
# Build the model
model = Sequential()
model.add(Dense(256, input_shape=(X_train.shape[1],), activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(4, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
history = model.fit(X_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_split=0.1,
class_weight=class_weights_dict)
score = model.evaluate(X_test, y_test,
batch_size=batch_size, verbose=1)
print('Test accuracy:', score[1])
# Prints the Classification Report
y_pred = model.predict(X_test)
print(classification_report(y_test.argmax(axis=1), y_pred.argmax(axis=1), target_names=encoder.classes_))
# Export the Model
model.save('Movie_Metadata_Sentiments_Weighted_Keras.h5', history)
| [
"[email protected]"
] | |
0b6d8aa8f5f4709ba022486e13dd579e4d3cb215 | 258aa96b9934da81efccc879f3e07a12014d6700 | /src/sas/sascalc/dataloader/readers/hfir1d_reader.py | 840e796ea912ca9f280a3a385b572e7d9321f0a4 | [
"BSD-3-Clause"
] | permissive | serenidpity/sasview | d56f0f281afe5dfe4f16c8e19acbd9431fa5a373 | bc04647999920281741c996da43238a1d78f7975 | refs/heads/master | 2020-12-03T08:16:06.458244 | 2017-06-19T13:42:18 | 2017-06-19T13:42:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,930 | py | """
HFIR 1D 4-column data reader
"""
#####################################################################
#This software was developed by the University of Tennessee as part of the
#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
#project funded by the US National Science Foundation.
#See the license text in license.txt
#copyright 2008, University of Tennessee
######################################################################
import numpy as np
import os
from sas.sascalc.dataloader.data_info import Data1D
# Check whether we have a converter available
has_converter = True
try:
from sas.sascalc.data_util.nxsunit import Converter
except:
has_converter = False
class Reader(object):
"""
Class to load HFIR 1D 4-column files
"""
## File type
type_name = "HFIR 1D"
## Wildcards
type = ["HFIR 1D files (*.d1d)|*.d1d"]
## List of allowed extensions
ext = ['.d1d']
def read(self, path):
"""
Load data file
:param path: file path
:return: Data1D object, or None
:raise RuntimeError: when the file can't be opened
:raise ValueError: when the length of the data vectors are inconsistent
"""
if os.path.isfile(path):
basename = os.path.basename(path)
root, extension = os.path.splitext(basename)
if extension.lower() in self.ext:
try:
input_f = open(path,'r')
except:
raise RuntimeError, "hfir1d_reader: cannot open %s" % path
buff = input_f.read()
lines = buff.split('\n')
x = np.zeros(0)
y = np.zeros(0)
dx = np.zeros(0)
dy = np.zeros(0)
output = Data1D(x, y, dx=dx, dy=dy)
self.filename = output.filename = basename
data_conv_q = None
data_conv_i = None
if has_converter == True and output.x_unit != '1/A':
data_conv_q = Converter('1/A')
# Test it
data_conv_q(1.0, output.x_unit)
if has_converter == True and output.y_unit != '1/cm':
data_conv_i = Converter('1/cm')
# Test it
data_conv_i(1.0, output.y_unit)
for line in lines:
toks = line.split()
try:
_x = float(toks[0])
_y = float(toks[1])
_dx = float(toks[3])
_dy = float(toks[2])
if data_conv_q is not None:
_x = data_conv_q(_x, units=output.x_unit)
_dx = data_conv_q(_dx, units=output.x_unit)
if data_conv_i is not None:
_y = data_conv_i(_y, units=output.y_unit)
_dy = data_conv_i(_dy, units=output.y_unit)
x = np.append(x, _x)
y = np.append(y, _y)
dx = np.append(dx, _dx)
dy = np.append(dy, _dy)
except:
# Couldn't parse this line, skip it
pass
# Sanity check
if not len(y) == len(dy):
msg = "hfir1d_reader: y and dy have different length"
raise RuntimeError, msg
if not len(x) == len(dx):
msg = "hfir1d_reader: x and dx have different length"
raise RuntimeError, msg
# If the data length is zero, consider this as
# though we were not able to read the file.
if len(x) == 0:
raise RuntimeError, "hfir1d_reader: could not load file"
output.x = x
output.y = y
output.dy = dy
output.dx = dx
if data_conv_q is not None:
output.xaxis("\\rm{Q}", output.x_unit)
else:
output.xaxis("\\rm{Q}", 'A^{-1}')
if data_conv_i is not None:
output.yaxis("\\rm{Intensity}", output.y_unit)
else:
output.yaxis("\\rm{Intensity}", "cm^{-1}")
# Store loading process information
output.meta_data['loader'] = self.type_name
return output
else:
raise RuntimeError, "%s is not a file" % path
return None
| [
"[email protected]"
] | |
7294cfe369b946422c5a8bb14eb763880391f0ab | 5ebed2d5c3dbf62ac7dda073b70d79f4e4b1d341 | /nutricionApp/migrations/0006_regla_recomendacion.py | b82362c2eb883c37090e0887cc7876aaaa2dbd0e | [] | no_license | danielm117/nutrition | 7baf8356fc849e4f8c3a2264be72257d1f29a6a4 | 7866cf199a079808cdbf122eca85771a694ea191 | refs/heads/master | 2020-04-06T07:06:04.089305 | 2016-09-22T01:06:07 | 2016-09-22T01:06:07 | 61,918,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('nutricionApp', '0005_remove_regla_recomendacion'),
]
operations = [
migrations.AddField(
model_name='regla',
name='recomendacion',
field=models.CharField(max_length=500, default='no disponible'),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
f683e873eb3af00903c9ed3512b46349b1fc7c5f | 4e66c6d90b76ea5938779d67e5c6dd8d71c5df94 | /test.py | 2663064569f457d7f0b1e404a91f592cdb461daf | [] | no_license | huuhoa143/telethon | 043510c179941a1ba45a86fa53f0d16e730f6b81 | a36a8563cb2185012519f38d86b1118031aa03ea | refs/heads/main | 2023-03-27T17:58:46.648017 | 2021-03-26T10:49:12 | 2021-03-26T10:49:12 | 350,533,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,927 | py | from telethon.sync import TelegramClient
from telethon.tl.functions.messages import GetDialogsRequest
from telethon.tl.types import InputPeerEmpty
import csv
from decouple import config
api_id = int(config('API_ID'))
api_hash = config('API_HASH')
phone = config('PHONE')
client = TelegramClient(phone, api_id, api_hash)
client.connect()
if not client.is_user_authorized():
client.send_code_request(phone)
client.sign_in(phone, input('Enter the code: '))
chats = []
last_date = None
chunk_size = 200
groups = []
result = client(GetDialogsRequest(
offset_date=last_date,
offset_id=0,
offset_peer=InputPeerEmpty(),
limit=chunk_size,
hash=0
))
chats.extend(result.chats)
for chat in chats:
try:
if chat.megagroup==True:
groups.append(chat)
except:
continue
print('Choose a group to scape members from: ')
i=0
for g in groups:
print(str(i) + '- ' + g.title)
i+=1
g_index = input('Enter a Number of Group: ')
target_group=groups[int(g_index)]
print('Fetching Members .....')
all_participants = []
all_participants = client.get_participants(target_group, aggressive=True)
print('Saving in file .....')
with open('member.csv', 'w', encoding='UTF-8') as f:
writer = csv.writer(f, delimiter=',', lineterminator='\n')
writer.writerow(['username', 'user id', 'access hash', 'name', 'group', 'group id'])
for user in all_participants:
if user.username:
username = user.username
else:
username= ''
if user.first_name:
first_name= user.first_name
else:
first_name= ''
if user.last_name:
last_name= user.last_name
else:
last_name= ''
name = (first_name + ' ' + last_name).strip()
writer.writerow([username,user.id,user.access_hash,name,target_group.title,target_group.id])
print('Members scraped successfully') | [
"[email protected]"
] | |
ed26457231e3546c2ac46685307f27b2f06b6920 | 5037e085787881f29a635f18c11daa9b7c784205 | /debolina/myproject/new/lib/python3.5/site-packages/lockout/exceptions.py | 86fcc58119e229af8671b13df329f4a8cacf2447 | [] | no_license | debolinakar13/infibeam | a29b5d0d72c4d3638e97af422818efa071ff67dd | d7a896e376ee5e32c42bc681a574fac61664e27b | refs/heads/master | 2020-05-18T20:05:37.534257 | 2019-05-02T17:22:08 | 2019-05-02T17:22:08 | 184,621,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 71 | py | """
Lockout Exceptions
"""
class LockedOut(Exception):
pass
| [
"[email protected]"
] | |
3fa75e32076d5053bcba8b18fab81659263489da | 896939d12044f8cf632827bd7151c80cb9fe8c57 | /djangoDemo/djangoDemo/urls.py | 3b2edce3620c58ee98c1aff3628c165a7031c36d | [] | no_license | king0429/clear_python | b931cac0dc5d8915fab1e928361907f0d163d9e1 | ea0e4071bf79535bd3d7f9a08825dd118b74190b | refs/heads/master | 2020-05-07T12:23:28.947782 | 2019-04-10T07:04:46 | 2019-04-10T07:04:46 | 180,502,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | """djangoDemo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
import os, sys
sys.path.append(os.path.split(os.path.realpath(__file__))[0] + '\\views')
import index
from django.contrib import admin
from django.urls import path
from django.conf.urls import url
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^$', index.good),
url(r'^hello$', index.hello1),
]
| [
"[email protected]"
] | |
e37f78288f16167c02435940f591c80a408d4600 | b6df7cda5c23cda304fcc0af1450ac3c27a224c1 | /data/codes/pdhoot_client.py | cadffd6a6d289bbc7f98ff473cbccb52757f54d8 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | vieira-rafael/py-search | 88ee167fa1949414cc4f3c98d33f8ecec1ce756d | b8c6dccc58d72af35e4d4631f21178296f610b8a | refs/heads/master | 2021-01-21T04:59:36.220510 | 2016-06-20T01:45:34 | 2016-06-20T01:45:34 | 54,433,313 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,424 | py | import socket , sys , signalfrom Node import Nodefrom threading import Threadfrom clint.textui import colored
class Client(Node):
def __init__(self , host , port , passwd): self.host = host self.port = port self.passwd = passwd + '\n' self.sock = socket.socket(socket.AF_INET , socket.SOCK_STREAM)
def start(self): self.sock.connect((self.host , self.port)) self.sendMsg(self.sock ,self.passwd) message = self.recvMsg(self.sock , '\n') if message.replace('\n' , '')!='~q': username = raw_input(colored.yellow("Enter your username: ")) Thread(target=self.sends , args = (username,)).start() Thread(target=self.recvs , args=('\n',)).start() else: sys.exit(0)
def sends(self , username): while True: msg = raw_input() # signal.signal(signal.SIGINT , signal_handler) print "\033[A \033[A" print colored.yellow("<me>") , colored.magenta(msg) if msg=='~q': msg+='\n' self.sendMsg(self.sock , msg) self.sock.shutdown(socket.SHUT_WR) break msg+='\n' msg = '<' + username + '> ' + msg self.sendMsg(self.sock , msg) def recvs(self , delimeter): while True: msg = self.recvMsg(self.sock , delimeter) if msg.replace('\n' , '')=='~q': # self.sock.shutdown(socket.SHUT_RD) self.sock.close() break msg = msg.replace('\n' , '') ind = msg.index('>') print colored.yellow(msg[0:ind+1]) , colored.red(msg[ind+1:])
def main(argv): client = Client(argv[0] , 1060 , argv[1]) client.start() | [
"[email protected]"
] | |
996a526987cf8d6eb927c2c21acaa536add44275 | a6db0ba6f4548dace1fab6a40dd8e207dc87824e | /app/modules/api.py | 94ee9fd633566464d72db3f6b86dec48d89f5428 | [] | no_license | AquaUseful/exam-countdown | 65c108f9148ddf0d9f2b76fe50e5a73e51f3df65 | 797251e2c7bc535ed9f9fcfc9e584f2fc7c5bc95 | refs/heads/master | 2023-01-02T14:35:04.818035 | 2020-10-26T22:13:13 | 2020-10-26T22:13:13 | 307,140,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,982 | py | import quart
import datetime
import os
try:
from app import config
except ImportError:
pass
blueprint = quart.Blueprint("api", __name__)
async def calc_exam_timestamp() -> float:
try:
exam_month = config.EXAM_MONTH
exam_day = config.EXAM_DAY
exam_hour = config.EXAM_HOUR
exam_min = config.EXAM_MIN
timedelta = config.TIMEDELTA
except NameError:
exam_month = int(os.environ["EXAM_MONTH"])
exam_day = int(os.environ["EXAM_DAY"])
exam_hour = int(os.environ["EXAM_HOUR"])
exam_min = int(os.environ["EXAM_MIN"])
timedelta = int(os.environ["TIMEDELTA"])
tzinfo = datetime.timezone(datetime.timedelta(hours=timedelta))
curr_datetime = datetime.datetime.now(tz=tzinfo)
curr_year = curr_datetime.year
if curr_datetime > datetime.datetime(curr_year,
exam_month,
exam_day,
exam_hour,
exam_min,
tzinfo=tzinfo):
exam_datetime = datetime.datetime(curr_year + 1,
exam_month,
exam_day,
exam_hour,
exam_min,
tzinfo=tzinfo)
else:
exam_datetime = datetime.datetime(curr_year,
exam_month,
exam_day,
exam_hour,
exam_min,
tzinfo=tzinfo)
return exam_datetime.timestamp()
@blueprint.route("/api/timestamp")
async def get_time():
timestamp = await calc_exam_timestamp()
json = {"timestamp": timestamp}
return quart.jsonify(json)
| [
"[email protected]"
] | |
01a894f509a39faaf4841d463b1bb9de17125961 | 468b1f37f8dc207a32f52002001cedeca0b87b80 | /python/ups/tarballs.py | 7f7059362e9f737f8967e851373c336d0a837e5a | [] | no_license | brettviren/python-ups-utils | a0494b4a60336a752612d3212fb1490fa89a2277 | 0014af2dc9427e59a49c5c7afb575e2905ff6c3e | refs/heads/master | 2021-01-10T21:42:39.469722 | 2017-04-27T22:15:21 | 2017-04-27T22:15:21 | 22,932,668 | 0 | 1 | null | 2015-07-31T13:28:12 | 2014-08-13T21:59:32 | Python | UTF-8 | Python | false | false | 538 | py | #!/usr/bin/env python
'''
Interact with UPS packaged tarballs.
'''
import os
import urllib
default_url_pattern = 'http://oink.fnal.gov/distro/packages/{name}/{tarball}'
# fixme move this and similar use in commands.py to some util module
def download(url, target):
if not os.path.exists(target):
urllib.urlretrieve(url, target)
def form_url(me, url_pattern = default_url_pattern):
'''
Form the URL to a package given its ManifestEntry object <me>
'''
return url_pattern.format(**me._asdict())
| [
"[email protected]"
] | |
078c6790ecd20b0d1f89e3614c50aa51102c5534 | d2c9e2c7139fab07b1ded4ee3e4f09b933b18af9 | /deep_learning/standard/runner.py | ec723ad9b1c1a7f3298d5b8f9a4dce636fb801d1 | [] | no_license | AlbertoCastelo/Neuro-Evolution-BNN | dbc3a7f64c407526ed98693487058abb1860f2ae | 70bb0db0dc72ed4b55401f08658b89ee3cd6dbc7 | refs/heads/master | 2023-08-13T17:55:02.410908 | 2020-08-30T10:53:33 | 2020-08-30T10:53:33 | 194,387,488 | 0 | 0 | null | 2021-09-08T02:29:04 | 2019-06-29T09:23:27 | Jupyter Notebook | UTF-8 | Python | false | false | 1,826 | py | from deep_learning.standard.evaluate_standard_dl import EvaluateStandardDL
from neat.evaluation.utils import get_dataset
import numpy as np
from sklearn.metrics import confusion_matrix, accuracy_score
from neat.neat_logger import logger
lr = 0.01
weight_decay = 0.0005
batch_size = 50000
class StandardDLRunner:
def __init__(self, config, n_epochs=1000):
self.config = config
self.n_epochs = n_epochs
self.evaluator = None
def run(self):
dataset = get_dataset(dataset=self.config.dataset, train_percentage=self.config.train_percentage,
random_state=self.config.dataset_random_state, noise=self.config.noise,
label_noise=self.config.label_noise)
is_cuda = False
self.evaluator = EvaluateStandardDL(dataset=dataset,
batch_size=batch_size,
lr=lr,
weight_decay=weight_decay,
n_epochs=self.n_epochs,
n_neurons_per_layer=10,
n_hidden_layers=1,
is_cuda=is_cuda)
self.evaluator.run()
# self.evaluator.save_network(network_filename)
# Show Evaluation metrics
x, y_true, y_pred = self.evaluator.evaluate()
x = x.numpy()
y_true = y_true.numpy()
y_pred = y_pred.numpy()
# plot results
y_pred = np.argmax(y_pred, 1)
logger.info('Evaluate on Validation Test')
logger.info('Confusion Matrix:')
logger.info(confusion_matrix(y_true, y_pred))
logger.info(f'Accuracy: {accuracy_score(y_true, y_pred)*100} %') | [
"[email protected]"
] | |
64b7acb284733c2e588363812f6786b308a55f9b | ac1c9fbc1f1019efb19d0a8f3a088e8889f1e83c | /out/release/pyproto/third_party/metrics_proto/cast_logs_pb2.py | 8cb4f16b6f5a7b16bba526d497e7931e11da775a | [
"BSD-3-Clause"
] | permissive | xueqiya/chromium_src | 5d20b4d3a2a0251c063a7fb9952195cda6d29e34 | d4aa7a8f0e07cfaa448fcad8c12b29242a615103 | refs/heads/main | 2022-07-30T03:15:14.818330 | 2021-01-16T16:47:22 | 2021-01-16T16:47:22 | 330,115,551 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | true | 51,589 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cast_logs.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='cast_logs.proto',
package='metrics',
syntax='proto2',
serialized_options=_b('\n\037org.chromium.components.metricsB\016CastLogsProtosH\003'),
serialized_pb=_b('\n\x0f\x63\x61st_logs.proto\x12\x07metrics\"\xa9\x1b\n\rCastLogsProto\x12?\n\x10\x63\x61st_device_info\x18\x01 \x01(\x0b\x32%.metrics.CastLogsProto.CastDeviceInfo\x12G\n\x14\x63\x61st_connection_info\x18\x02 \x03(\x0b\x32).metrics.CastLogsProto.CastConnectionInfo\x12\x39\n\ncast_event\x18\x03 \x03(\x0b\x32%.metrics.CastLogsProto.CastEventProto\x12\x1d\n\x15virtual_release_track\x18\x04 \x01(\x07\x12N\n\x18\x63\x61st_device_mutable_info\x18\x05 \x01(\x0b\x32,.metrics.CastLogsProto.CastDeviceMutableInfo\x12\x1b\n\x13receiver_metrics_id\x18\x06 \x01(\x06\x1a\x8c\x06\n\x0e\x43\x61stDeviceInfo\x12\x43\n\x04type\x18\x01 \x01(\x0e\x32\x35.metrics.CastLogsProto.CastDeviceInfo.CastProductType\x12\x19\n\x11hardware_revision\x18\x02 \x01(\t\x12\x14\n\x0cmanufacturer\x18\x03 \x01(\t\x12\r\n\x05model\x18\x04 \x01(\t\x12\x15\n\rserial_number\x18\x05 \x01(\t\x12I\n\rhardware_info\x18\x06 \x01(\x0b\x32\x32.metrics.CastLogsProto.CastDeviceInfo.HardwareInfo\x1a\xbe\x01\n\x0cHardwareInfo\x12\r\n\x05\x63olor\x18\x01 \x01(\t\x12\x0b\n\x03mic\x18\x02 \x01(\t\x12\x0e\n\x06memory\x18\x03 \x01(\t\x12\x0c\n\x04nand\x18\x04 \x01(\t\x12\x10\n\x08mfg_date\x18\x05 \x01(\t\x12\x12\n\nbuild_name\x18\x06 \x01(\t\x12\x0e\n\x06\x63onfig\x18\x07 \x01(\t\x12\x0c\n\x04\x65mmc\x18\x08 \x01(\t\x12\x0f\n\x07\x64isplay\x18\t \x01(\t\x12\x0b\n\x03\x61mp\x18\n \x01(\t\x12\x12\n\nboard_name\x18\x0b \x01(\t\"\xd1\x02\n\x0f\x43\x61stProductType\x12\x1d\n\x19\x43\x41ST_PRODUCT_TYPE_UNKNOWN\x10\x00\x12 \n\x1c\x43\x41ST_PRODUCT_TYPE_CHROMECAST\x10\x01\x12\x18\n\x14\x43\x41ST_PRODUCT_TYPE_TV\x10\x02\x12\x1b\n\x17\x43\x41ST_PRODUCT_TYPE_AUDIO\x10\x03\x12 \n\x1c\x43\x41ST_PRODUCT_TYPE_ANDROID_TV\x10\x04\x12\x1f\n\x1b\x43\x41ST_PRODUCT_TYPE_ASSISTANT\x10\x05\x12$\n CAST_PRODUCT_TYPE_ANDROID_THINGS\x10\x06\x12\x1f\n\x1b\x43\x41ST_PRODUCT_TYPE_CHROME_OS\x10\x07\x12 
\n\x1c\x43\x41ST_PRODUCT_TYPE_FUCHSIA_OS\x10\x08\x12\x1a\n\x16\x43\x41ST_PRODUCT_TYPE_LITE\x10\t\x1a\xc4\x07\n\x12\x43\x61stConnectionInfo\x12\x1f\n\x17transport_connection_id\x18\x01 \x01(\x07\x12\x1d\n\x15virtual_connection_id\x18\x02 \x01(\x07\x12I\n\x0bsender_info\x18\x03 \x01(\x0b\x32\x34.metrics.CastLogsProto.CastConnectionInfo.SenderInfo\x1a\xa2\x06\n\nSenderInfo\x12\x18\n\x10sender_device_id\x18\x01 \x01(\x06\x12N\n\x08sdk_type\x18\x02 \x01(\x0e\x32<.metrics.CastLogsProto.CastConnectionInfo.SenderInfo.SDKType\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x1e\n\x16\x63hrome_browser_version\x18\x04 \x01(\t\x12O\n\x08platform\x18\x05 \x01(\x0e\x32=.metrics.CastLogsProto.CastConnectionInfo.SenderInfo.Platform\x12\x16\n\x0esystem_version\x18\x06 \x01(\t\x12\x66\n\x19transport_connection_type\x18\x07 \x01(\x0e\x32\x43.metrics.CastLogsProto.CastConnectionInfo.SenderInfo.ConnectionType\x12\r\n\x05model\x18\x08 \x01(\t\x12 \n\x18sender_local_ip_fragment\x18\t \x01(\x05\"D\n\x07SDKType\x12\x0f\n\x0bSDK_UNKNOWN\x10\x00\x12\x0e\n\nSDK_NATIVE\x10\x01\x12\x18\n\x14SDK_CHROME_EXTENSION\x10\x02\"\xac\x01\n\x08Platform\x12\x12\n\x0ePLATFORM_OTHER\x10\x00\x12\x14\n\x10PLATFORM_ANDROID\x10\x01\x12\x10\n\x0cPLATFORM_IOS\x10\x02\x12\x14\n\x10PLATFORM_WINDOWS\x10\x03\x12\x10\n\x0cPLATFORM_OSX\x10\x04\x12\x15\n\x11PLATFORM_CHROMEOS\x10\x05\x12\x12\n\x0ePLATFORM_LINUX\x10\x06\x12\x11\n\rPLATFORM_CAST\x10\x07\"\x81\x01\n\x0e\x43onnectionType\x12\x1b\n\x17\x43ONNECTION_TYPE_UNKNOWN\x10\x00\x12\x19\n\x15\x43ONNECTION_TYPE_LOCAL\x10\x01\x12\x19\n\x15\x43ONNECTION_TYPE_RELAY\x10\x02\x12\x1c\n\x18\x43ONNECTION_TYPE_INTERNAL\x10\x03\x1a\xf6\x04\n\x0e\x43\x61stEventProto\x12\x11\n\tname_hash\x18\x01 \x01(\x06\x12\x11\n\ttime_msec\x18\x02 \x01(\x03\x12\x0e\n\x06\x61pp_id\x18\x03 \x01(\x07\x12\x15\n\rremote_app_id\x18\x13 \x01(\x07\x12\x1e\n\x16\x61pplication_session_id\x18\x04 \x01(\x06\x12\x1d\n\x15\x63\x61st_receiver_version\x18\x05 \x01(\x06\x12\x18\n\x10\x63\x61st_mpl_version\x18\t 
\x01(\x06\x12\x1f\n\x17transport_connection_id\x18\x06 \x01(\x07\x12\x1d\n\x15virtual_connection_id\x18\x07 \x01(\x07\x12\r\n\x05value\x18\x08 \x01(\x03\x12\x12\n\ngroup_uuid\x18\n \x01(\x06\x12\x18\n\x10\x63onversation_key\x18\x0b \x01(\t\x12\x12\n\nrequest_id\x18\x0c \x01(\x07\x12\x10\n\x08\x65vent_id\x18\r \x01(\t\x12\x17\n\x0f\x61ogh_request_id\x18\x10 \x01(\t\x12\x1c\n\x14\x61ogh_local_device_id\x18\x12 \x01(\x03\x12\x15\n\raogh_agent_id\x18\x15 \x01(\t\x12@\n\x08metadata\x18\x0e \x03(\x0b\x32..metrics.CastLogsProto.CastEventProto.Metadata\x12\x16\n\x0e\x66\x65\x61ture_vector\x18\x0f \x03(\x02\x12\x13\n\x0btimezone_id\x18\x11 \x01(\t\x12\x12\n\nui_version\x18\x14 \x01(\t\x12\x1c\n\x14selinux_audit_detail\x18\x16 \x01(\t\x1a,\n\x08Metadata\x12\x11\n\tname_hash\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x03\x1a\xf7\x05\n\x15\x43\x61stDeviceMutableInfo\x12Q\n\x10last_reboot_type\x18\x01 \x01(\x0e\x32\x37.metrics.CastLogsProto.CastDeviceMutableInfo.RebootType\x12\x1b\n\x13system_build_number\x18\x02 \x01(\x06\x12\x1e\n\x16\x62\x61\x63kdrop_app_device_id\x18\x03 \x01(\t\x12\x17\n\x0frelease_version\x18\x04 \x01(\x07\x12U\n\x10netif_ip_version\x18\x05 \x01(\x0e\x32;.metrics.CastLogsProto.CastDeviceMutableInfo.NetifIPVersion\x12\x1f\n\x17ip_dual_stack_supported\x18\x06 \x01(\x08\x12\x13\n\x0btimezone_id\x18\x07 \x01(\t\x12\x19\n\x11latest_ui_version\x18\x08 
\x01(\t\"\xc1\x02\n\nRebootType\x12\x17\n\x13REBOOT_TYPE_UNKNOWN\x10\x00\x12\x16\n\x12REBOOT_TYPE_FORCED\x10\x01\x12\x13\n\x0fREBOOT_TYPE_API\x10\x02\x12\x17\n\x13REBOOT_TYPE_NIGHTLY\x10\x03\x12\x13\n\x0fREBOOT_TYPE_OTA\x10\x04\x12\x18\n\x14REBOOT_TYPE_WATCHDOG\x10\x05\x12\x1f\n\x1bREBOOT_TYPE_PROCESS_MANAGER\x10\x06\x12\x1e\n\x1aREBOOT_TYPE_CRASH_UPLOADER\x10\x07\x12\x13\n\x0fREBOOT_TYPE_FDR\x10\x08\x12\x1b\n\x17REBOOT_TYPE_HW_WATCHDOG\x10\t\x12\x18\n\x14REBOOT_TYPE_SW_OTHER\x10\n\x12\x18\n\x14REBOOT_TYPE_OVERHEAT\x10\x0b\"I\n\x0eNetifIPVersion\x12\x0e\n\nIP_UNKNOWN\x10\x00\x12\t\n\x05IP_V4\x10\x01\x12\t\n\x05IP_V6\x10\x02\x12\x11\n\rIP_DUAL_STACK\x10\x03\x42\x33\n\x1forg.chromium.components.metricsB\x0e\x43\x61stLogsProtosH\x03')
)
_CASTLOGSPROTO_CASTDEVICEINFO_CASTPRODUCTTYPE = _descriptor.EnumDescriptor(
name='CastProductType',
full_name='metrics.CastLogsProto.CastDeviceInfo.CastProductType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_CHROMECAST', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_TV', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_AUDIO', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_ANDROID_TV', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_ASSISTANT', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_ANDROID_THINGS', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_CHROME_OS', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_FUCHSIA_OS', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAST_PRODUCT_TYPE_LITE', index=9, number=9,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=827,
serialized_end=1164,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTDEVICEINFO_CASTPRODUCTTYPE)
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_SDKTYPE = _descriptor.EnumDescriptor(
name='SDKType',
full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.SDKType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SDK_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SDK_NATIVE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SDK_CHROME_EXTENSION', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1756,
serialized_end=1824,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_SDKTYPE)
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_PLATFORM = _descriptor.EnumDescriptor(
name='Platform',
full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.Platform',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='PLATFORM_OTHER', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_ANDROID', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_IOS', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_WINDOWS', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_OSX', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_CHROMEOS', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_LINUX', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PLATFORM_CAST', index=7, number=7,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1827,
serialized_end=1999,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_PLATFORM)
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_CONNECTIONTYPE = _descriptor.EnumDescriptor(
name='ConnectionType',
full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.ConnectionType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CONNECTION_TYPE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONNECTION_TYPE_LOCAL', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONNECTION_TYPE_RELAY', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONNECTION_TYPE_INTERNAL', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=2002,
serialized_end=2131,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_CONNECTIONTYPE)
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_REBOOTTYPE = _descriptor.EnumDescriptor(
name='RebootType',
full_name='metrics.CastLogsProto.CastDeviceMutableInfo.RebootType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_FORCED', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_API', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_NIGHTLY', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_OTA', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_WATCHDOG', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_PROCESS_MANAGER', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_CRASH_UPLOADER', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_FDR', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_HW_WATCHDOG', index=9, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_SW_OTHER', index=10, number=10,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REBOOT_TYPE_OVERHEAT', index=11, number=11,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=3130,
serialized_end=3451,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_REBOOTTYPE)
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_NETIFIPVERSION = _descriptor.EnumDescriptor(
name='NetifIPVersion',
full_name='metrics.CastLogsProto.CastDeviceMutableInfo.NetifIPVersion',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='IP_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IP_V4', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IP_V6', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IP_DUAL_STACK', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=3453,
serialized_end=3526,
)
_sym_db.RegisterEnumDescriptor(_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_NETIFIPVERSION)
# Generated descriptor for nested message CastLogsProto.CastDeviceInfo.HardwareInfo.
# All fields are optional strings (type=9) describing the device build-out.
# protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTDEVICEINFO_HARDWAREINFO = _descriptor.Descriptor(
  name='HardwareInfo',
  full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='color', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.color', index=0,
    number=1, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='mic', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.mic', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='memory', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.memory', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='nand', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.nand', index=3,
    number=4, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='mfg_date', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.mfg_date', index=4,
    number=5, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='build_name', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.build_name', index=5,
    number=6, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='config', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.config', index=6,
    number=7, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='emmc', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.emmc', index=7,
    number=8, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='display', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.display', index=8,
    number=9, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='amp', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.amp', index=9,
    number=10, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='board_name', full_name='metrics.CastLogsProto.CastDeviceInfo.HardwareInfo.board_name', index=10,
    number=11, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=634,
  serialized_end=824,
)
# Generated descriptor for message CastLogsProto.CastDeviceInfo: static identity
# of the Cast device (type enum, revision, manufacturer, model, serial, and the
# nested HardwareInfo). protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTDEVICEINFO = _descriptor.Descriptor(
  name='CastDeviceInfo',
  full_name='metrics.CastLogsProto.CastDeviceInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='type', full_name='metrics.CastLogsProto.CastDeviceInfo.type', index=0,
    number=1, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='hardware_revision', full_name='metrics.CastLogsProto.CastDeviceInfo.hardware_revision', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='manufacturer', full_name='metrics.CastLogsProto.CastDeviceInfo.manufacturer', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='model', full_name='metrics.CastLogsProto.CastDeviceInfo.model', index=3,
    number=4, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='serial_number', full_name='metrics.CastLogsProto.CastDeviceInfo.serial_number', index=4,
    number=5, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='hardware_info', full_name='metrics.CastLogsProto.CastDeviceInfo.hardware_info', index=5,
    number=6, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_CASTLOGSPROTO_CASTDEVICEINFO_HARDWAREINFO, ],
  enum_types=[
    _CASTLOGSPROTO_CASTDEVICEINFO_CASTPRODUCTTYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=384,
  serialized_end=1164,
)
# Generated descriptor for nested message CastLogsProto.CastConnectionInfo.SenderInfo:
# identity and versions of the sender app/device on a Cast connection.
# protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO = _descriptor.Descriptor(
  name='SenderInfo',
  full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='sender_device_id', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.sender_device_id', index=0,
    number=1, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='sdk_type', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.sdk_type', index=1,
    number=2, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='version', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.version', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='chrome_browser_version', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.chrome_browser_version', index=3,
    number=4, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='platform', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.platform', index=4,
    number=5, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='system_version', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.system_version', index=5,
    number=6, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='transport_connection_type', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.transport_connection_type', index=6,
    number=7, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='model', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.model', index=7,
    number=8, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='sender_local_ip_fragment', full_name='metrics.CastLogsProto.CastConnectionInfo.SenderInfo.sender_local_ip_fragment', index=8,
    number=9, type=5, cpp_type=1, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_SDKTYPE,
    _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_PLATFORM,
    _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_CONNECTIONTYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1329,
  serialized_end=2131,
)
# Generated descriptor for message CastLogsProto.CastConnectionInfo: one Cast
# connection, keyed by transport/virtual connection ids, plus nested SenderInfo.
# protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTCONNECTIONINFO = _descriptor.Descriptor(
  name='CastConnectionInfo',
  full_name='metrics.CastLogsProto.CastConnectionInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='transport_connection_id', full_name='metrics.CastLogsProto.CastConnectionInfo.transport_connection_id', index=0,
    number=1, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='virtual_connection_id', full_name='metrics.CastLogsProto.CastConnectionInfo.virtual_connection_id', index=1,
    number=2, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='sender_info', full_name='metrics.CastLogsProto.CastConnectionInfo.sender_info', index=2,
    number=3, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1167,
  serialized_end=2131,
)
# Generated descriptor for nested message CastLogsProto.CastEventProto.Metadata:
# a hashed-name/value pair attached to an event. protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTEVENTPROTO_METADATA = _descriptor.Descriptor(
  name='Metadata',
  full_name='metrics.CastLogsProto.CastEventProto.Metadata',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='name_hash', full_name='metrics.CastLogsProto.CastEventProto.Metadata.name_hash', index=0,
    number=1, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='value', full_name='metrics.CastLogsProto.CastEventProto.Metadata.value', index=1,
    number=2, type=3, cpp_type=2, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2720,
  serialized_end=2764,
)
# Generated descriptor for message CastLogsProto.CastEventProto: a single logged
# Cast event (hashed name, timestamp, app/session/connection ids, value, and
# repeated Metadata/feature_vector payloads). protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTEVENTPROTO = _descriptor.Descriptor(
  name='CastEventProto',
  full_name='metrics.CastLogsProto.CastEventProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='name_hash', full_name='metrics.CastLogsProto.CastEventProto.name_hash', index=0,
    number=1, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='time_msec', full_name='metrics.CastLogsProto.CastEventProto.time_msec', index=1,
    number=2, type=3, cpp_type=2, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='app_id', full_name='metrics.CastLogsProto.CastEventProto.app_id', index=2,
    number=3, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='remote_app_id', full_name='metrics.CastLogsProto.CastEventProto.remote_app_id', index=3,
    number=19, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='application_session_id', full_name='metrics.CastLogsProto.CastEventProto.application_session_id', index=4,
    number=4, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='cast_receiver_version', full_name='metrics.CastLogsProto.CastEventProto.cast_receiver_version', index=5,
    number=5, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='cast_mpl_version', full_name='metrics.CastLogsProto.CastEventProto.cast_mpl_version', index=6,
    number=9, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='transport_connection_id', full_name='metrics.CastLogsProto.CastEventProto.transport_connection_id', index=7,
    number=6, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='virtual_connection_id', full_name='metrics.CastLogsProto.CastEventProto.virtual_connection_id', index=8,
    number=7, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='value', full_name='metrics.CastLogsProto.CastEventProto.value', index=9,
    number=8, type=3, cpp_type=2, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='group_uuid', full_name='metrics.CastLogsProto.CastEventProto.group_uuid', index=10,
    number=10, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='conversation_key', full_name='metrics.CastLogsProto.CastEventProto.conversation_key', index=11,
    number=11, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='request_id', full_name='metrics.CastLogsProto.CastEventProto.request_id', index=12,
    number=12, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='event_id', full_name='metrics.CastLogsProto.CastEventProto.event_id', index=13,
    number=13, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='aogh_request_id', full_name='metrics.CastLogsProto.CastEventProto.aogh_request_id', index=14,
    number=16, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='aogh_local_device_id', full_name='metrics.CastLogsProto.CastEventProto.aogh_local_device_id', index=15,
    number=18, type=3, cpp_type=2, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='aogh_agent_id', full_name='metrics.CastLogsProto.CastEventProto.aogh_agent_id', index=16,
    number=21, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='metadata', full_name='metrics.CastLogsProto.CastEventProto.metadata', index=17,
    number=14, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='feature_vector', full_name='metrics.CastLogsProto.CastEventProto.feature_vector', index=18,
    number=15, type=2, cpp_type=6, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='timezone_id', full_name='metrics.CastLogsProto.CastEventProto.timezone_id', index=19,
    number=17, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='ui_version', full_name='metrics.CastLogsProto.CastEventProto.ui_version', index=20,
    number=20, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='selinux_audit_detail', full_name='metrics.CastLogsProto.CastEventProto.selinux_audit_detail', index=21,
    number=22, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_CASTLOGSPROTO_CASTEVENTPROTO_METADATA, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2134,
  serialized_end=2764,
)
# Generated descriptor for message CastLogsProto.CastDeviceMutableInfo: device
# state that changes between logs (last reboot type, build number, IP version,
# timezone, latest UI version). protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO = _descriptor.Descriptor(
  name='CastDeviceMutableInfo',
  full_name='metrics.CastLogsProto.CastDeviceMutableInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='last_reboot_type', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.last_reboot_type', index=0,
    number=1, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='system_build_number', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.system_build_number', index=1,
    number=2, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='backdrop_app_device_id', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.backdrop_app_device_id', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='release_version', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.release_version', index=3,
    number=4, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='netif_ip_version', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.netif_ip_version', index=4,
    number=5, type=14, cpp_type=8, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='ip_dual_stack_supported', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.ip_dual_stack_supported', index=5,
    number=6, type=8, cpp_type=7, label=1,
    has_default_value=False, default_value=False,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='timezone_id', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.timezone_id', index=6,
    number=7, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='latest_ui_version', full_name='metrics.CastLogsProto.CastDeviceMutableInfo.latest_ui_version', index=7,
    number=8, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=_b("").decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_REBOOTTYPE,
    _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_NETIFIPVERSION,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2767,
  serialized_end=3526,
)
# Generated descriptor for the top-level message metrics.CastLogsProto, which
# aggregates device info, connections, events, and mutable device state.
# protoc-generated — do not hand-edit.
_CASTLOGSPROTO = _descriptor.Descriptor(
  name='CastLogsProto',
  full_name='metrics.CastLogsProto',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='cast_device_info', full_name='metrics.CastLogsProto.cast_device_info', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='cast_connection_info', full_name='metrics.CastLogsProto.cast_connection_info', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='cast_event', full_name='metrics.CastLogsProto.cast_event', index=2,
    number=3, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='virtual_release_track', full_name='metrics.CastLogsProto.virtual_release_track', index=3,
    number=4, type=7, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='cast_device_mutable_info', full_name='metrics.CastLogsProto.cast_device_mutable_info', index=4,
    number=5, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='receiver_metrics_id', full_name='metrics.CastLogsProto.receiver_metrics_id', index=5,
    number=6, type=6, cpp_type=4, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_CASTLOGSPROTO_CASTDEVICEINFO, _CASTLOGSPROTO_CASTCONNECTIONINFO, _CASTLOGSPROTO_CASTEVENTPROTO, _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=29,
  serialized_end=3526,
)
# Post-construction linking: wire up containing_type, message_type, and
# enum_type references between the descriptors defined above, then register the
# file descriptor with the symbol database. protoc-generated — do not hand-edit.
_CASTLOGSPROTO_CASTDEVICEINFO_HARDWAREINFO.containing_type = _CASTLOGSPROTO_CASTDEVICEINFO
_CASTLOGSPROTO_CASTDEVICEINFO.fields_by_name['type'].enum_type = _CASTLOGSPROTO_CASTDEVICEINFO_CASTPRODUCTTYPE
_CASTLOGSPROTO_CASTDEVICEINFO.fields_by_name['hardware_info'].message_type = _CASTLOGSPROTO_CASTDEVICEINFO_HARDWAREINFO
_CASTLOGSPROTO_CASTDEVICEINFO.containing_type = _CASTLOGSPROTO
_CASTLOGSPROTO_CASTDEVICEINFO_CASTPRODUCTTYPE.containing_type = _CASTLOGSPROTO_CASTDEVICEINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO.fields_by_name['sdk_type'].enum_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_SDKTYPE
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO.fields_by_name['platform'].enum_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_PLATFORM
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO.fields_by_name['transport_connection_type'].enum_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_CONNECTIONTYPE
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO.containing_type = _CASTLOGSPROTO_CASTCONNECTIONINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_SDKTYPE.containing_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_PLATFORM.containing_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO_CONNECTIONTYPE.containing_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO.fields_by_name['sender_info'].message_type = _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO
_CASTLOGSPROTO_CASTCONNECTIONINFO.containing_type = _CASTLOGSPROTO
_CASTLOGSPROTO_CASTEVENTPROTO_METADATA.containing_type = _CASTLOGSPROTO_CASTEVENTPROTO
_CASTLOGSPROTO_CASTEVENTPROTO.fields_by_name['metadata'].message_type = _CASTLOGSPROTO_CASTEVENTPROTO_METADATA
_CASTLOGSPROTO_CASTEVENTPROTO.containing_type = _CASTLOGSPROTO
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO.fields_by_name['last_reboot_type'].enum_type = _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_REBOOTTYPE
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO.fields_by_name['netif_ip_version'].enum_type = _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_NETIFIPVERSION
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO.containing_type = _CASTLOGSPROTO
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_REBOOTTYPE.containing_type = _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO
_CASTLOGSPROTO_CASTDEVICEMUTABLEINFO_NETIFIPVERSION.containing_type = _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO
_CASTLOGSPROTO.fields_by_name['cast_device_info'].message_type = _CASTLOGSPROTO_CASTDEVICEINFO
_CASTLOGSPROTO.fields_by_name['cast_connection_info'].message_type = _CASTLOGSPROTO_CASTCONNECTIONINFO
_CASTLOGSPROTO.fields_by_name['cast_event'].message_type = _CASTLOGSPROTO_CASTEVENTPROTO
_CASTLOGSPROTO.fields_by_name['cast_device_mutable_info'].message_type = _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO
DESCRIPTOR.message_types_by_name['CastLogsProto'] = _CASTLOGSPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Synthesize the concrete CastLogsProto message class (with nested message
# classes) from the descriptors above, register each class with the symbol
# database, and reset file-level options. protoc-generated — do not hand-edit.
CastLogsProto = _reflection.GeneratedProtocolMessageType('CastLogsProto', (_message.Message,), {
  'CastDeviceInfo' : _reflection.GeneratedProtocolMessageType('CastDeviceInfo', (_message.Message,), {
    'HardwareInfo' : _reflection.GeneratedProtocolMessageType('HardwareInfo', (_message.Message,), {
      'DESCRIPTOR' : _CASTLOGSPROTO_CASTDEVICEINFO_HARDWAREINFO,
      '__module__' : 'cast_logs_pb2'
      # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastDeviceInfo.HardwareInfo)
      })
    ,
    'DESCRIPTOR' : _CASTLOGSPROTO_CASTDEVICEINFO,
    '__module__' : 'cast_logs_pb2'
    # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastDeviceInfo)
    })
  ,
  'CastConnectionInfo' : _reflection.GeneratedProtocolMessageType('CastConnectionInfo', (_message.Message,), {
    'SenderInfo' : _reflection.GeneratedProtocolMessageType('SenderInfo', (_message.Message,), {
      'DESCRIPTOR' : _CASTLOGSPROTO_CASTCONNECTIONINFO_SENDERINFO,
      '__module__' : 'cast_logs_pb2'
      # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastConnectionInfo.SenderInfo)
      })
    ,
    'DESCRIPTOR' : _CASTLOGSPROTO_CASTCONNECTIONINFO,
    '__module__' : 'cast_logs_pb2'
    # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastConnectionInfo)
    })
  ,
  'CastEventProto' : _reflection.GeneratedProtocolMessageType('CastEventProto', (_message.Message,), {
    'Metadata' : _reflection.GeneratedProtocolMessageType('Metadata', (_message.Message,), {
      'DESCRIPTOR' : _CASTLOGSPROTO_CASTEVENTPROTO_METADATA,
      '__module__' : 'cast_logs_pb2'
      # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastEventProto.Metadata)
      })
    ,
    'DESCRIPTOR' : _CASTLOGSPROTO_CASTEVENTPROTO,
    '__module__' : 'cast_logs_pb2'
    # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastEventProto)
    })
  ,
  'CastDeviceMutableInfo' : _reflection.GeneratedProtocolMessageType('CastDeviceMutableInfo', (_message.Message,), {
    'DESCRIPTOR' : _CASTLOGSPROTO_CASTDEVICEMUTABLEINFO,
    '__module__' : 'cast_logs_pb2'
    # @@protoc_insertion_point(class_scope:metrics.CastLogsProto.CastDeviceMutableInfo)
    })
  ,
  'DESCRIPTOR' : _CASTLOGSPROTO,
  '__module__' : 'cast_logs_pb2'
  # @@protoc_insertion_point(class_scope:metrics.CastLogsProto)
  })
_sym_db.RegisterMessage(CastLogsProto)
_sym_db.RegisterMessage(CastLogsProto.CastDeviceInfo)
_sym_db.RegisterMessage(CastLogsProto.CastDeviceInfo.HardwareInfo)
_sym_db.RegisterMessage(CastLogsProto.CastConnectionInfo)
_sym_db.RegisterMessage(CastLogsProto.CastConnectionInfo.SenderInfo)
_sym_db.RegisterMessage(CastLogsProto.CastEventProto)
_sym_db.RegisterMessage(CastLogsProto.CastEventProto.Metadata)
_sym_db.RegisterMessage(CastLogsProto.CastDeviceMutableInfo)
DESCRIPTOR._options = None
| [
"[email protected]"
] | |
ee2b503fd9062a81cacf1adf5cebe5fe6a404958 | db8d95cfb73b541e07e219c20e3106eb2f792036 | /intcode/tests.py | bd6c5cb780cf76a459163756761a79f5bba79581 | [] | no_license | ey3ball/adventofcode2019 | 48845950260867e5fb56935ebc1f0eea1574b499 | 34027f98ed24a88978234a920787548cd4c6e2d5 | refs/heads/master | 2020-09-23T09:42:20.846058 | 2019-12-24T13:24:22 | 2019-12-24T13:24:22 | 225,468,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,122 | py | #!/usr/bin/env python3
from .lang import IntCodeInterpreter
def test_day2():
    """Day 2: add (1) / multiply (2) opcodes and halt (99).

    The first four programs are the worked examples from the puzzle text,
    checked against the full expected memory image after halting.  The
    final program is the complete day-2 puzzle input (noun/verb already
    patched into positions 1 and 2); its expected value at position 0 is
    the author's puzzle answer, so it doubles as a regression test.
    """
    program = "1,0,0,0,99"
    interpreter = IntCodeInterpreter(program).run()
    assert interpreter.mem == [2,0,0,0,99], "Day2 T1"
    program = "2,3,0,3,99"
    interpreter = IntCodeInterpreter(program).run()
    assert interpreter.mem == [2,3,0,6,99], "Day2 T2"
    program = "2,4,4,5,99,0"
    interpreter = IntCodeInterpreter(program).run()
    assert interpreter.mem == [2,4,4,5,99,9801], "Day2 T3"
    program = "1,1,1,4,99,5,6,0,99"
    interpreter = IntCodeInterpreter(program).run()
    assert interpreter.mem == [30,1,1,4,2,5,6,0,99], "Day2 T4"
    # Full puzzle input; only position 0 of the final memory is checked.
    program = "1,12,2,3,1,1,2,3,1,3,4,3,1,5,0,3,2,10,1,19,1,19,6,23,2,13,23,27,1,27,13,31,1,9,31,35,1,35,9,39,1,39,5,43,2,6,43,47,1,47,6,51,2,51,9,55,2,55,13,59,1,59,6,63,1,10,63,67,2,67,9,71,2,6,71,75,1,75,5,79,2,79,10,83,1,5,83,87,2,9,87,91,1,5,91,95,2,13,95,99,1,99,10,103,1,103,2,107,1,107,6,0,99,2,14,0,0"
    interpreter = IntCodeInterpreter(program).run()
    assert interpreter.mem[0] == 2842648
def test_day5_diag():
    """Day 5: run the TEST diagnostic program (full puzzle input).

    Input 1 exercises the part-one opcodes (parameter modes, add,
    multiply); input 5 exercises the part-two jump/compare opcodes.
    In both cases only the last output matters: it is the diagnostic
    code, checked against the author's puzzle answers.
    """
    program = "3,225,1,225,6,6,1100,1,238,225,104,0,1102,9,19,225,1,136,139,224,101,-17,224,224,4,224,102,8,223,223,101,6,224,224,1,223,224,223,2,218,213,224,1001,224,-4560,224,4,224,102,8,223,223,1001,224,4,224,1,223,224,223,1102,25,63,224,101,-1575,224,224,4,224,102,8,223,223,1001,224,4,224,1,223,224,223,1102,55,31,225,1101,38,15,225,1001,13,88,224,1001,224,-97,224,4,224,102,8,223,223,101,5,224,224,1,224,223,223,1002,87,88,224,101,-3344,224,224,4,224,102,8,223,223,1001,224,7,224,1,224,223,223,1102,39,10,225,1102,7,70,225,1101,19,47,224,101,-66,224,224,4,224,1002,223,8,223,1001,224,6,224,1,224,223,223,1102,49,72,225,102,77,166,224,101,-5544,224,224,4,224,102,8,223,223,1001,224,4,224,1,223,224,223,101,32,83,224,101,-87,224,224,4,224,102,8,223,223,1001,224,3,224,1,224,223,223,1101,80,5,225,1101,47,57,225,4,223,99,0,0,0,677,0,0,0,0,0,0,0,0,0,0,0,1105,0,99999,1105,227,247,1105,1,99999,1005,227,99999,1005,0,256,1105,1,99999,1106,227,99999,1106,0,265,1105,1,99999,1006,0,99999,1006,227,274,1105,1,99999,1105,1,280,1105,1,99999,1,225,225,225,1101,294,0,0,105,1,0,1105,1,99999,1106,0,300,1105,1,99999,1,225,225,225,1101,314,0,0,106,0,0,1105,1,99999,1008,677,226,224,1002,223,2,223,1005,224,329,1001,223,1,223,107,226,677,224,1002,223,2,223,1006,224,344,101,1,223,223,1007,677,677,224,1002,223,2,223,1006,224,359,1001,223,1,223,8,677,226,224,102,2,223,223,1005,224,374,101,1,223,223,108,226,677,224,102,2,223,223,1006,224,389,1001,223,1,223,1008,677,677,224,1002,223,2,223,1006,224,404,1001,223,1,223,1107,677,677,224,102,2,223,223,1005,224,419,1001,223,1,223,1008,226,226,224,102,2,223,223,1005,224,434,101,1,223,223,8,226,677,224,1002,223,2,223,1006,224,449,101,1,223,223,1007,677,226,224,102,2,223,223,1005,224,464,1001,223,1,223,107,677,677,224,1002,223,2,223,1005,224,479,1001,223,1,223,1107,226,677,224,1002,223,2,223,1005,224,494,1001,223,1,223,7,677,677,224,102,2,223,223,1006,224,509,101,1,223,223,1007,226,226,224,1002,223,2,223,1005,224,524,101,1,223,223,7,677,226,224,102,2,223,223,1005
,224,539,101,1,223,223,8,226,226,224,1002,223,2,223,1006,224,554,101,1,223,223,7,226,677,224,102,2,223,223,1005,224,569,101,1,223,223,1108,677,226,224,1002,223,2,223,1005,224,584,101,1,223,223,108,677,677,224,1002,223,2,223,1006,224,599,101,1,223,223,107,226,226,224,1002,223,2,223,1006,224,614,101,1,223,223,1108,226,226,224,1002,223,2,223,1005,224,629,1001,223,1,223,1107,677,226,224,1002,223,2,223,1005,224,644,101,1,223,223,108,226,226,224,1002,223,2,223,1005,224,659,101,1,223,223,1108,226,677,224,1002,223,2,223,1005,224,674,1001,223,1,223,4,223,99,226"
    interpreter = IntCodeInterpreter(program).run([1])
    assert interpreter.reg["out"][-1] == 13787043
    interpreter = IntCodeInterpreter(program).run([5])
    assert interpreter.reg["out"][-1] == 3892695
def test_day5_2():
    """Day 5 part two: comparison and jump opcodes.

    Each case pairs an Intcode program from the puzzle text with a list
    of (input value, expected single output) checks, run in the same
    order as the original hand-written asserts.
    """
    cases = [
        # Position mode: output 1 iff input == 8.
        ("3,9,8,9,10,9,4,9,99,-1,8", [(7, 0), (8, 1), (9, 0)]),
        # Position mode: output 1 iff input < 8.
        ("3,9,7,9,10,9,4,9,99,-1,8", [(7, 1), (8, 0), (9, 0)]),
        # Immediate mode: output 1 iff input == 8.
        ("3,3,1108,-1,8,3,4,3,99", [(7, 0), (8, 1), (9, 0)]),
        # Immediate mode: output 1 iff input < 8.
        ("3,3,1107,-1,8,3,4,3,99", [(7, 1), (8, 0), (9, 0)]),
        # Jump test, position mode: output 0 iff the input was 0.
        ("3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9", [(0, 0), (1, 1), (-1, 1)]),
        # Jump test, immediate mode: output 0 iff the input was 0.
        ("3,3,1105,-1,9,1101,0,0,12,4,12,99,1", [(0, 0), (1, 1), (-1, 1)]),
        # Larger example: 999 / 1000 / 1001 for input below / equal / above 8.
        ("3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0,36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20,1105,1,46,98,99",
         [(7, 999), (8, 1000), (9, 1001)]),
    ]
    for program, expectations in cases:
        for value, expected in expectations:
            interpreter = IntCodeInterpreter(program).run([value])
            assert interpreter.reg["out"] == [expected]
def test_day9_diag():
    """Day 9: BOOST program (full puzzle input).

    Exercises relative-base addressing and large-number support.  Input 1
    runs the self-test (a single output: the BOOST keycode); input 2 runs
    sensor-boost mode.  Expected values are the author's puzzle answers.
    """
    program = "1102,34463338,34463338,63,1007,63,34463338,63,1005,63,53,1101,0,3,1000,109,988,209,12,9,1000,209,6,209,3,203,0,1008,1000,1,63,1005,63,65,1008,1000,2,63,1005,63,902,1008,1000,0,63,1005,63,58,4,25,104,0,99,4,0,104,0,99,4,17,104,0,99,0,0,1102,32,1,1019,1101,0,500,1023,1101,0,636,1025,1102,36,1,1010,1101,0,29,1013,1102,864,1,1029,1102,21,1,1000,1102,1,507,1022,1102,1,28,1011,1102,38,1,1008,1101,0,35,1004,1101,25,0,1018,1102,24,1,1005,1102,30,1,1009,1102,1,869,1028,1101,0,37,1007,1102,1,23,1017,1102,1,20,1015,1102,1,22,1003,1101,0,39,1001,1102,1,31,1012,1101,701,0,1026,1101,0,641,1024,1101,0,34,1016,1102,1,0,1020,1102,698,1,1027,1102,33,1,1002,1102,26,1,1006,1101,0,1,1021,1101,0,27,1014,109,12,21101,40,0,0,1008,1012,40,63,1005,63,203,4,187,1105,1,207,1001,64,1,64,1002,64,2,64,109,-11,1207,7,37,63,1005,63,223,1105,1,229,4,213,1001,64,1,64,1002,64,2,64,109,14,1206,5,247,4,235,1001,64,1,64,1105,1,247,1002,64,2,64,109,-2,1207,-4,31,63,1005,63,269,4,253,1001,64,1,64,1105,1,269,1002,64,2,64,109,-6,1208,-5,35,63,1005,63,289,1001,64,1,64,1106,0,291,4,275,1002,64,2,64,109,9,21108,41,39,-1,1005,1015,311,1001,64,1,64,1105,1,313,4,297,1002,64,2,64,109,-5,2101,0,-9,63,1008,63,33,63,1005,63,339,4,319,1001,64,1,64,1106,0,339,1002,64,2,64,1205,10,351,4,343,1106,0,355,1001,64,1,64,1002,64,2,64,109,-18,2108,35,9,63,1005,63,375,1001,64,1,64,1105,1,377,4,361,1002,64,2,64,109,18,1205,9,389,1105,1,395,4,383,1001,64,1,64,1002,64,2,64,109,7,21107,42,41,-8,1005,1010,415,1001,64,1,64,1106,0,417,4,401,1002,64,2,64,109,-12,2102,1,0,63,1008,63,29,63,1005,63,437,1106,0,443,4,423,1001,64,1,64,1002,64,2,64,109,3,1208,0,30,63,1005,63,461,4,449,1105,1,465,1001,64,1,64,1002,64,2,64,109,5,1202,-5,1,63,1008,63,31,63,1005,63,489,1001,64,1,64,1106,0,491,4,471,1002,64,2,64,109,15,2105,1,-6,1001,64,1,64,1106,0,509,4,497,1002,64,2,64,109,-10,1206,2,525,1001,64,1,64,1106,0,527,4,515,1002,64,2,64,109,-18,1202,0,1,63,1008,63,39,63,1005,63,553,4,533,1001,64,1,64,1106,0,553,1002,64,2,64,109,1,2107,21,1,63,
1005,63,571,4,559,1105,1,575,1001,64,1,64,1002,64,2,64,109,7,2102,1,-8,63,1008,63,39,63,1005,63,601,4,581,1001,64,1,64,1105,1,601,1002,64,2,64,109,2,1201,-7,0,63,1008,63,35,63,1005,63,623,4,607,1106,0,627,1001,64,1,64,1002,64,2,64,109,20,2105,1,-7,4,633,1106,0,645,1001,64,1,64,1002,64,2,64,109,-16,21107,43,44,-4,1005,1011,663,4,651,1105,1,667,1001,64,1,64,1002,64,2,64,109,-11,2107,36,0,63,1005,63,687,1001,64,1,64,1106,0,689,4,673,1002,64,2,64,109,19,2106,0,4,1106,0,707,4,695,1001,64,1,64,1002,64,2,64,109,-14,21108,44,44,6,1005,1015,725,4,713,1105,1,729,1001,64,1,64,1002,64,2,64,109,1,1201,-6,0,63,1008,63,36,63,1005,63,749,1106,0,755,4,735,1001,64,1,64,1002,64,2,64,109,-1,21101,45,0,10,1008,1019,42,63,1005,63,775,1105,1,781,4,761,1001,64,1,64,1002,64,2,64,109,16,21102,46,1,-7,1008,1018,44,63,1005,63,801,1105,1,807,4,787,1001,64,1,64,1002,64,2,64,109,-3,21102,47,1,-4,1008,1018,47,63,1005,63,833,4,813,1001,64,1,64,1105,1,833,1002,64,2,64,109,-14,2108,38,0,63,1005,63,851,4,839,1105,1,855,1001,64,1,64,1002,64,2,64,109,17,2106,0,3,4,861,1106,0,873,1001,64,1,64,1002,64,2,64,109,-31,2101,0,10,63,1008,63,36,63,1005,63,897,1001,64,1,64,1106,0,899,4,879,4,64,99,21101,0,27,1,21101,0,913,0,1106,0,920,21201,1,53612,1,204,1,99,109,3,1207,-2,3,63,1005,63,962,21201,-2,-1,1,21102,940,1,0,1106,0,920,21202,1,1,-1,21201,-2,-3,1,21101,955,0,0,1106,0,920,22201,1,-1,-2,1105,1,966,21201,-2,0,-2,109,-3,2106,0,0"
    interpreter = IntCodeInterpreter(program).run([1])
    assert interpreter.reg["out"] == [2377080455]
    interpreter = IntCodeInterpreter(program).run([2])
    assert interpreter.reg["out"] == [74917]
def run_all_tests():
    """Execute every Intcode regression test in sequence."""
    # Preserve the original run order: day 2, day 5 (diag + part two), day 9.
    for case in (test_day2, test_day5_diag, test_day5_2, test_day9_diag):
        case()


if __name__ == "__main__":
    run_all_tests()
| [
"[email protected]"
] | |
0b2f2a71542b974ed43a821a461d32ea33df334d | 19decad8830dee81b7951ca7cbe40eab4bf60ff5 | /src/pipes/coreference.py | f98d21999b59ca92ebe83bb3174a6d83d0ddd378 | [] | no_license | rihardssp/annotationtranslation | 9a0423fc94842065e0636db036a9b7f78a28cec2 | 69f1200e2784a486f31c3d2fb7a5e9785c04179a | refs/heads/master | 2023-05-25T08:58:50.576454 | 2020-05-28T17:38:10 | 2020-05-28T17:38:10 | 257,121,970 | 1 | 0 | null | 2023-05-22T23:27:27 | 2020-04-19T23:16:32 | Python | UTF-8 | Python | false | false | 5,420 | py | import typing
from src.configuration import config_reader
from src.container.base import IContainer, ContainerStatistic, stat_incr
from src.mapping_defaults.coreference import ICoReferenceMapping, CoReferenceMapping
from src.pipes.base import PipeBase
from src.readers.coreference import ICoReferenceAnnotationReaderBase, CoReferenceFilesAnnotationReader
class CoReferencePipe(PipeBase):
    """Pipe that folds co-reference annotations into already-built containers.

    For every annotated sentence whose text matches an existing container,
    referring expressions belonging to the same co-reference group are
    collapsed onto a single instance (or updated with a more descriptive
    word from the group's context).

    NOTE(review): the original docstring described the initial PropBank
    pipe and appears to have been copy-pasted; the behaviour documented
    here is what the code below actually does.
    """

    def __init__(self, mapping: ICoReferenceMapping = None, annotation_reader: ICoReferenceAnnotationReaderBase = None):
        # Both collaborators are injectable for testing; defaults read the
        # configured co-reference resource folder.
        super().__init__()
        self.mapping = mapping if mapping is not None else CoReferenceMapping()
        self.annotation_reader: ICoReferenceAnnotationReaderBase = annotation_reader if annotation_reader is not None \
            else CoReferenceFilesAnnotationReader(config_reader.get_co_reference_resource_folder_path(), False)

    def _process_amr(self, container_list: typing.List[IContainer]) -> typing.List[IContainer]:
        """Merge co-referent instances inside each matching container.

        Returns the same list; containers are mutated in place and their
        statistics updated.
        """
        for sentence in self.annotation_reader.read():
            # ToDo: improve sent_id matching so that it isn't O(nm)
            # Match the annotated sentence to a container by exact text.
            potential_container = next((x for x in container_list if x.text == sentence.text), None)
            if not potential_container:
                continue

            container = potential_container
            container.set_stat(ContainerStatistic.HAS_COREFERENCE, True)
            container.set_stat(ContainerStatistic.COREFERENCE_TOTAL_COUNT, sentence.co_reference_count)
            container.set_stat(ContainerStatistic.COREFERENCE_COUNT, 0)

            for name, group in sentence.co_references.items():

                # Not all group members may be in container
                members_to_keep = []
                non_members_to_keep = []
                members_to_coreference = []

                # Separate words into groups: keep as-is, collapse via
                # co-reference, or (if absent from container) use as context.
                for group_word in group:
                    # word not in container - can't remove or co-reference it
                    if container.has_instance(group_word.id):
                        # the mapping defines which POS values are replaceable,
                        # i.e. which words should NOT be kept
                        if group_word.pos_value[0:2] not in self.mapping.get_replaceable_pos_values():
                            members_to_keep.append(group_word)
                        else:
                            members_to_coreference.append(group_word)
                    elif group_word.pos_value[0:2] not in self.mapping.get_replaceable_pos_values():
                        non_members_to_keep.append(group_word)

                # in case there is some additional context for our coreference group
                if name in sentence.additional_context_references:
                    for context_group_words in sentence.additional_context_references[name]:
                        if context_group_words.pos_value[0:2] not in self.mapping.get_replaceable_pos_values():
                            non_members_to_keep.append(context_group_words)

                is_reference_added = False
                last_member_to_keep = None
                last_member_to_coreference = None

                # join all references that we want to get rid of (collapse
                # them onto the first such member)
                if len(members_to_coreference) > 0:
                    for reference in range(1, len(members_to_coreference)):
                        container.replace_instance(members_to_coreference[reference].id, members_to_coreference[0].id)
                        container.update_stat(ContainerStatistic.SENTENCE_TOKEN_REMOVED_COUNT, stat_incr)
                        is_reference_added = True
                    last_member_to_coreference = members_to_coreference[0]

                # join all references that we want to keep (to one specific value)
                if len(members_to_keep) > 0:
                    for reference in range(1, len(members_to_keep)):
                        container.replace_instance(members_to_keep[reference].id, members_to_keep[0].id)
                        container.update_stat(ContainerStatistic.SENTENCE_TOKEN_REMOVED_COUNT, stat_incr)
                        is_reference_added = True
                    last_member_to_keep = members_to_keep[0]

                # simple scenario - replace the collapsed reference with the
                # desirable kept word, 1-1
                if last_member_to_coreference and last_member_to_keep:
                    is_reference_added = True
                    container.replace_instance(last_member_to_coreference.id, last_member_to_keep.id)
                    container.update_stat(ContainerStatistic.SENTENCE_TOKEN_REMOVED_COUNT, stat_incr)
                # otherwise update it with the value of the first desirable
                # non-member word (skipping pronouns, pos starting with 'p')
                elif last_member_to_coreference and len(non_members_to_keep) > 0 and non_members_to_keep[0].pos_value[0:1] != "p":
                    is_reference_added = True
                    # ToDo: Need to transform to base word. Perhaps querying UD somehow?
                    container.update_instance_value(last_member_to_coreference.id, non_members_to_keep[0].form)

                if is_reference_added:
                    container.update_stat(ContainerStatistic.COREFERENCE_COUNT, stat_incr)

        return container_list
| [
"[email protected]"
] | |
94341232b326a230d42f1aa2cb8148416aac46b4 | 9f6add7afbceff0b0a53b9bb0eb5ebd33b4a5284 | /school_management/school.py | f81e4443d51893329c0ecd139916d14e29fd2199 | [] | no_license | nakuldave/training_module | 99045ce6b2199a2f01de32e90f3a38fd945a0d00 | 3c47b3f86a60747a99c996f7b15941eea18824e8 | refs/heads/master | 2016-09-05T10:54:36.316518 | 2015-04-22T14:22:56 | 2015-04-22T14:22:56 | 34,394,079 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,118 | py | from openerp.osv import fields,osv
from datetime import datetime,date
from tools.translate import _
class school_school (osv.Model):
    """A school; holds its students via ``student_ids``.

    The ``code`` field is write-once: it defaults to the school name on
    creation and may never be changed afterwards.
    """
    _name = 'school.school'

    def create(self, cr, uid, vals, context=None):
        """Create a school record, defaulting ``code`` to the name.

        (The unused ``res = {}`` local from the original was removed.)
        """
        if not vals.get('code'):
            vals.update({'code': vals.get('name')})
        return super(school_school, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Update school records; raise if the caller tries to change ``code``."""
        if vals.get('code'):
            raise osv.except_osv(_('Error!'), _('You Cannot Change the Code'))
        return super(school_school, self).write(cr, uid, ids, vals, context=context)

    _columns = {
        'name': fields.char('School Name'),
        'established': fields.date('Established Date'),
        'code': fields.char('Code'),
        'contact': fields.char('Contact Number'),
        'student_ids': fields.one2many('student.student', 'school_id', 'StudentID'),
    }
class student_student (osv.Model):
    """A student with a simple draft/process/approve/cancel workflow."""
    _name = 'student.student'

    # Workflow states used by the ``state`` selection field and the
    # set_* button handlers below.
    status_selection = [('draft', 'New'),
                        ('process', 'In Progress'),
                        ('approve', 'Done'),
                        ('cancel', 'Rejected')]

    def _full_name(self, cr, uid, ids, fields, arg, context=None):
        """Functional-field getter: first name plus optional last name.

        Bug fix: the original returned from inside the loop, so only the
        first requested id was ever computed.  Every id in ``ids`` now
        gets an entry in the result dict.
        """
        res = {}
        for record in self.browse(cr, uid, ids, context=context):
            if record.lname:
                res[record.id] = record.name + ' ' + record.lname
            else:
                res[record.id] = record.name
        return res

    def get_address(self, cr, uid, ids, customer_id, context=None):
        """onchange handler: copy the selected partner's address fields."""
        res = {}
        if customer_id:
            partner_rec = self.pool.get('res.partner').browse(cr, uid, customer_id, context=context)
            res = {'value': {'street': partner_rec.street or "",
                             'street2': partner_rec.street2 or "",
                             'city': partner_rec.city or "",
                             'state_id': partner_rec.state_id and partner_rec.state_id.id or False,
                             'zip': partner_rec.zip or "",
                             'country_id': partner_rec.country_id and partner_rec.country_id.id or False,
                             }}
        return res

    def get_admissiondate(self, cr, uid, ids, dob, context=None):
        """onchange handler: admission date is the student's 5th birthday.

        Produces the same unpadded 'month/day/year' string the original
        built by hand; the leftover debug ``print`` was removed and a
        guard added so clearing the dob field no longer crashes.
        """
        if not dob:
            return {}
        d = datetime.strptime(dob, '%Y-%m-%d')
        fin = '%d/%d/%d' % (d.month, d.day, d.year + 5)
        return {'value': {'admission_date': fin}}

    # --- workflow button handlers -------------------------------------
    def set_in_process(self, cr, uid, ids, context=None):
        """Move the records to the 'In Progress' state."""
        self.write(cr, uid, ids, {'state': "process"}, context=context)
        return True

    def set_to_approve(self, cr, uid, ids, context=None):
        """Move the records to the 'Done' state."""
        self.write(cr, uid, ids, {'state': "approve"}, context=context)
        return True

    def set_to_cancel(self, cr, uid, ids, context=None):
        """Move the records to the 'Rejected' state."""
        self.write(cr, uid, ids, {'state': "cancel"}, context=context)
        return True

    def set_to_draft(self, cr, uid, ids, context=None):
        """Move the records back to the 'New' state."""
        self.write(cr, uid, ids, {'state': "draft"}, context=context)
        return True

    _columns = {
        'partner_id': fields.many2one('res.partner', "Partner"),
        'name': fields.char('Student Name'),
        'lname': fields.char('Last Name'),
        'dob': fields.date('Date of Birth'),
        'admission_date': fields.char('Admission Date'),
        'contact': fields.char('Contact Number'),
        'school_id': fields.many2one('school.school', 'School'),
        'full_name': fields.function(_full_name, type='char', string='Full Name'),
        'street': fields.char('street'),
        'customer_id': fields.many2one('res.partner', "customer"),
        'street2': fields.char('street2'),
        'city': fields.char('city'),
        'state_id': fields.many2one('res.country.state', "State"),
        'zip': fields.char('Zip'),
        'country_id': fields.many2one('res.country', "Country"),
        'state': fields.selection(status_selection, "State"),
        'student_ids': fields.one2many('result.result', 'result_id', 'Student')
    }

    _sql_constraints = [
        ('student_name_unique',
         'unique(name)',
         'Name Already Exists Please Enter a New Name'),
    ]

    def check_date(self, cr, uid, ids, context=None):
        """Constraint: date of birth may not be in the future.

        ``dob`` is an ISO 'YYYY-MM-DD' string, so plain string comparison
        against str(date.today()) orders correctly.
        """
        current_date = date.today()
        for rec in self.browse(cr, uid, ids, context=context):
            if rec.dob and rec.dob > str(current_date):
                return False
        return True

    _constraints = [(check_date, 'Enter Value is not correct!', ['dob'])]

    _defaults = {
        'state': 'draft'
    }
class result_result (osv.Model):
    """A per-subject exam result attached to a student."""
    _name = 'result.result'

    def _final_result(self, cr, uid, ids, fields, arg, context=None):
        """Functional-field getter: 'Pass' iff obtained >= passing marks."""
        res = {}
        for record in self.browse(cr, uid, ids, context=context):
            if record.obtained_marks >= record.passing_marks:
                res[record.id] = "Pass"
            else:
                res[record.id] = "Fail"
        return res

    def get_customername(self, cr, uid, ids, result_id, context=None):
        """onchange handler: pull name and customer from the chosen student.

        (The commented-out ``get_studentname`` dead code from the original
        was removed; this handler supersedes it.)
        """
        res = {}
        if result_id:
            partner_rec = self.pool.get('student.student').browse(cr, uid, result_id, context=context)
            res = {'value': {'name': partner_rec.name or '',
                             'customer_id': partner_rec.customer_id and partner_rec.customer_id.id or False
                             }}
        return res

    _columns = {
        'name': fields.char('Name'),
        'subject_name': fields.char('Subject Name'),
        'obtained_marks': fields.float('Obtained Marks'),
        'maximum_marks': fields.integer('Maximum Marks'),
        'result_id': fields.many2one('student.student', 'Student Name'),
        'passing_marks': fields.integer('Passing Marks'),
        'final_result': fields.function(_final_result, type='char', string='Final Result'),
        'student_ids': fields.many2one('result.result', 'student Result'),
        'customer_id': fields.many2one('res.partner', 'customer')
    }

    def check_marks(self, cr, uid, student_ids, context=None):
        """Constraint: obtained marks may not exceed the maximum marks."""
        for rec in self.browse(cr, uid, student_ids, context=context):
            if rec.obtained_marks > rec.maximum_marks:
                return False
        return True

    _constraints = [(check_marks, 'Enter Value is not correct!', ['obtained_marks'])]
"[email protected]"
] | |
de0f646dd751c2cc6dee5b44128b8cc319603c6a | 2080a7c9345c866972174853cbc752dd5bf302fd | /EXAMPLE1.py | 6d80075fc69b50e550c4c8427e3727358f1d0298 | [] | no_license | lollipop6prince/nfu40341107_ZhangRuiShan | 32f9deb67f67ad75366d6559b7bb1752efedb329 | 16bbb840c31ce4b4df586f5b8cc9eaaf383ed67c | refs/heads/master | 2021-06-17T14:56:10.465223 | 2017-06-08T09:48:40 | 2017-06-08T09:48:40 | 85,920,816 | 0 | 0 | null | 2017-04-06T12:24:35 | 2017-03-23T07:42:03 | Python | UTF-8 | Python | false | false | 422 | py | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt

# Plot the (unnormalised) standard Gaussian bell curve exp(-x**2 / 2)
# over [-4, 4] and shade the area under it.
#
# The original built y by evaluating a sympy ``exp`` lambda point by
# point; the vectorised numpy expression below produces the same values
# in one call, so the sympy symbolic machinery is no longer needed.

fig = plt.gcf()
fig.set_size_inches(8, 5)

x = np.linspace(-4, 4, 100)
y = np.exp(-x ** 2 / 2.0)

plt.grid(True)
plt.title('Gaussian Curve')
plt.xlabel('X')
plt.ylabel('Y')
plt.plot(x, y, color='gray')
plt.fill_between(x, y, 0, color='#c0f0c0')
plt.show()
| [
"[email protected]"
] | |
85607e941bfa1fa1142c6789f5c9b2b4c41ed1b9 | ef4abe23a7dfe97ee2b61441f62e07b40c9db313 | /usk3_protocol/usk3_protocol.py | 8aa7099fbfc51ea1f66a2f13b045bbcecc96f228 | [] | no_license | KurlesHS/USK3Protocol | 569df5e358b70892cedef23dc9f5153a3b28e1f2 | ebc8baa769d185e2ceb95c9319fcfa1d2df0187e | refs/heads/master | 2020-04-05T23:41:04.303955 | 2014-09-24T13:02:19 | 2014-09-24T13:02:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,575 | py | # coding=utf-8
from duplicity.pexpect import searcher_re
from twisted.internet.task import LoopingCall
from twisted.internet.protocol import Protocol, connectionDone
from .usk3_packet import Usk3Packet
class Usk3ProtocolSharedData(object):
    """State shared with the protocol: queued commands, commands awaiting a
    response, the live protocol instance (if any) and an optional listener
    for unsolicited incoming packets."""

    def __init__(self):
        self.command_queue = []
        self.commands_in_process = {}
        self.protocol = None
        self.incoming_packet_listener = None

    def add_command(self, packet):
        """Hand *packet* to the connected protocol, or queue it until one
        connects.  Non-Usk3Packet values are silently ignored."""
        if not isinstance(packet, Usk3Packet):
            return
        current = self.protocol
        if not isinstance(current, Usk3Protocol):
            # No live connection yet - keep the command for later delivery.
            self.command_queue.append(packet)
        else:
            current.add_command(packet)

    def inform_about_incoming_packet(self, packet):
        """Forward an unsolicited packet to the registered listener, if any."""
        listener = self.incoming_packet_listener
        if listener:
            listener(packet)
class Usk3Protocol(Protocol):
    """Twisted protocol for the USK3 framing: buffers incoming bytes,
    extracts Usk3Packet frames, matches acknowledgements (command 0) to
    pending commands and expires commands that never got a response."""

    # Seconds a sent command may wait for its response before being dropped
    # by check_commands_ttl().
    WAIT_RESPONSE_TIMEOUT = 30

    def __init__(self, usk3_protocol_shared_data, clock=None):
        """
        :type usk3_protocol_shared_data: Usk3ProtocolSharedData

        ``clock`` is injectable for tests; defaults to the global reactor.
        Registers itself as the live protocol on the shared data.
        """
        if clock is None:
            from twisted.internet import reactor
            clock = reactor
        self.shared_command_data = usk3_protocol_shared_data
        self.shared_command_data.protocol = self
        self.clock = clock
        # Periodic task that expires commands waiting too long for a reply.
        self._lc = LoopingCall(self.check_commands_ttl)
        self._lc.clock = clock
        self.received_buffer = bytearray()
        self._callback_func = None

    def _stop_timer(self):
        # Stop the TTL loop if it is running (idempotent).
        if self._lc.running:
            self._lc.stop()

    def _start_timer(self):
        # (Re)start the TTL loop with a 5 s interval; no immediate first call.
        self._stop_timer()
        self._lc.start(5., False)

    def release_resources(self):
        """Stop the TTL timer; call when the protocol is discarded."""
        self._stop_timer()

    def dataReceived(self, data):
        # Append raw bytes and try to extract complete frames.
        self.received_buffer += bytearray(data)
        self.parse_received_data()

    def parse_received_data(self):
        """Consume as many complete frames from the buffer as possible.

        Incomplete frame: keep the bytes and restart the timer.
        Corrupt frame: drop the whole buffer (resynchronise).
        Complete frame: command 0 is an acknowledgement for a pending
        command; anything else is an unsolicited packet for the listener.
        """
        if len(self.received_buffer) == 0:
            return
        packet = Usk3Packet.from_raw_data(self.received_buffer)
        if packet.state == Usk3Packet.INCOMPLETE_PACKET:
            self._start_timer()
        elif packet.state == Usk3Packet.INCORRECT_PACKET:
            #self.transport.loseConnection()
            del self.received_buffer[:]
        elif packet.state == Usk3Packet.CORRECT_PACKET:
            del self.received_buffer[:packet.length]
            if packet.command == 0:
                packet_id = packet.packet_id
                if packet_id in self.shared_command_data.commands_in_process:
                    command = self.shared_command_data.commands_in_process[packet_id]
                    if isinstance(command, Usk3Packet):
                        if command.callback_func is not None:
                            command.callback_func(packet)
                        del self.shared_command_data.commands_in_process[packet_id]
            else:
                self.shared_command_data.inform_about_incoming_packet(packet)
                # send an acknowledgement back
                # self.transport.write(str(Usk3Packet(packet.module_number, 0x00)))
            # Recurse: the buffer may still hold further complete frames.
            self.parse_received_data()

    def connectionLost(self, reason=connectionDone):
        # Detach from the shared data so queued commands wait for the next
        # connection instead of being sent into the void.
        self._stop_timer()
        self.shared_command_data.protocol = None
        self.connected = False

    def connectionMade(self):
        # Start the TTL loop and flush any commands queued while offline.
        self._start_timer()
        self._check_commands()

    def _check_commands(self):
        """Send every queued command; track those expecting a callback."""
        if self.connected:
            for command in self.shared_command_data.command_queue:
                if isinstance(command, Usk3Packet):
                    self.transport.write(str(command))
                    if command.callback_func is not None:
                        # Timestamp so check_commands_ttl can expire it.
                        command.set_current_timestamp()
                        self.shared_command_data.commands_in_process[command.packet_id] = command
            del self.shared_command_data.command_queue[:]

    def check_commands_ttl(self):
        """Drop pending commands older than WAIT_RESPONSE_TIMEOUT seconds.

        NOTE: uses dict.iteritems(), so this module is Python 2 only.
        """
        #print len(self.shared_command_data.commands_in_process)
        commands_to_delete = list()
        for command_id, command in self.shared_command_data.commands_in_process.iteritems():
            if isinstance(command, Usk3Packet):
                if command.passed_time() >= Usk3Protocol.WAIT_RESPONSE_TIMEOUT:
                    commands_to_delete.append(command_id)
        # Delete outside the iteration to avoid mutating while iterating.
        for command_id in commands_to_delete:
            try:
                del self.shared_command_data.commands_in_process[command_id]
            except KeyError:
                pass

    def add_command(self, command):
        """Queue a command and try to send it immediately if connected."""
        self.shared_command_data.command_queue.append(command)
        self._check_commands()
"[email protected]"
] | |
19b9d8423c168d1629f32779ca59512c0030b359 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertOneElementTupleToList.py | 7a88eac62fac3f8402073e1824a9900e34cc3636 | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 12 | py | (<caret>42,) | [
"[email protected]"
] | |
5355fdb733a60e0bebf7ccb2b59e0802981c723c | f78654a152d248592b1bcef22af7113002908fed | /src/estudiantes/domain/eliminar.py | 737d851ac1a5bbb15cf80a28725205eee9890efe | [] | no_license | Bralediro/Parcial_Final_Arq_Hex | 0cfa40d8ca872d339fd774f3b7aa6d302b176c13 | 71ba5df0fe5deefa0f97667b62e160799236998a | refs/heads/master | 2023-05-28T08:47:50.901863 | 2021-06-12T23:06:30 | 2021-06-12T23:06:30 | 376,397,287 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | class EliminarEstudiante():
def __init__(self, DB):
self.DB = DB
def run(self,id):
cur = self.DB.cursor()
cur.execute('DELETE FROM estudiante WHERE id = {0}'.format(id))
cur.close()
| [
"[email protected]"
] | |
ca985069bd5d0ec2cc72bd76624bc5e6bbcf14c8 | 42d42645de1dba29813c3df1f0c5c14ea3030fb1 | /doc/conf.py | f4441e77df646ee6a09f8bb413a82c44b99f0104 | [
"MIT"
] | permissive | synapticarbors/MatrixDepot.jl | f0d24698ba50e7ccfc4f11efd68487928b544c3a | 22b70365c80d83270915b78f3c7330df07cd28fa | refs/heads/master | 2021-01-18T00:01:50.710348 | 2015-01-09T08:22:00 | 2015-01-09T08:22:00 | 29,146,318 | 0 | 0 | null | 2015-01-12T17:02:19 | 2015-01-12T17:02:19 | null | UTF-8 | Python | false | false | 8,363 | py | # -*- coding: utf-8 -*-
#
# Matrix Depot documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 23 00:46:02 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Matrix Depot'
copyright = u'2014, Weijian Zhang'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {"textcolor":"black", "headingcolor":"black"}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Matrix Depot: A Test Matrix Collection"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = MatrixDepot.jl
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = "logo2.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MatrixDepotdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'MatrixDepot.tex', u'Matrix Depot Documentation',
u'Weijian Zhang', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'matrixdepot', u'Matrix Depot Documentation',
[u'Weijian Zhang'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MatrixDepot', u'Matrix Depot Documentation',
u'Weijian Zhang', 'MatrixDepot', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| [
"[email protected]"
] | |
6c439cd976d75cc06bba20b1fbb5712902ede5f7 | a4595b7d42573499f4572bdd056ea27770b041e7 | /mysite/settings.py | a4bf328e05ebdb603fc71405607bd9ec56924eb3 | [] | no_license | bobnorigun/my-first-blog | 651da180fbbeb119045d390c10d92da3ff706b11 | c60f455f8cb7d5af34bea21dcecd85c5d6096ee4 | refs/heads/master | 2021-08-19T20:26:35.862020 | 2020-04-11T09:22:22 | 2020-04-11T09:22:22 | 160,304,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,851 | py | import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before deploying anywhere public.
SECRET_KEY = 'r4f699iclp6-v%j6+*4sv@ko&8thxunr7=ovfilc($m7-lbe(o'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Hosts this site may serve; the leading dot in '.pythonanywhere.com'
# matches any subdomain of pythonanywhere.com.
ALLOWED_HOSTS = ['localhost', '127.0.0.1', '.pythonanywhere.com']
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
'polls',
'csvfile',
'caldist',
'distsum',
'memo',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'ko'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIR = {
os.path.join(BASE_DIR, 'static'),
}
# Media file 재시도
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media') | [
"[email protected]"
] | |
761a16ea5afd9ff1a5cc8d377dfb623a5be75c44 | 4f8d6d3769f26ebe15213e46460325c9165d4e17 | /src/state.py | 73e4f789c7e53a0f2a78d58dd65fff12eaec23f5 | [
"MIT"
] | permissive | KM-200/PyBitmessage | 482b8f88751b7b74724c46c0000e80a2a9ae6815 | 2b2bdce40ef530482f9b545a45e42f125844f19b | refs/heads/master | 2020-03-15T22:23:42.266320 | 2018-09-01T08:53:59 | 2018-09-01T08:53:59 | 132,372,385 | 1 | 0 | MIT | 2018-07-17T02:05:55 | 2018-05-06T20:12:17 | Python | UTF-8 | Python | false | false | 1,705 | py | import collections
neededPubkeys = {}
streamsInWhichIAmParticipating = []
sendDataQueues = [] #each sendData thread puts its queue in this list.
# For UPnP
extPort = None
# for Tor hidden service
socksIP = None
# Network protocols availability, initialised below
networkProtocolAvailability = None
appdata = '' #holds the location of the application data storage directory
shutdown = 0 #Set to 1 by the doCleanShutdown function. Used to tell the proof of work worker threads to exit.
curses = False
sqlReady = False # set to true by sqlTread when ready for processing
maximumNumberOfHalfOpenConnections = 0
invThread = None
addrThread = None
downloadThread = None
ownAddresses = {}
# If the trustedpeer option is specified in keys.dat then this will
# contain a Peer which will be connected to instead of using the
# addresses advertised by other peers. The client will only connect to
# this peer and the timing attack mitigation will be disabled in order
# to download data faster. The expected use case is where the user has
# a fast connection to a trusted server where they run a BitMessage
# daemon permanently. If they then run a second instance of the client
# on a local machine periodically when they want to check for messages
# it will sync with the network a lot faster without compromising
# security.
trustedPeer = None
discoveredPeers = {}
# tracking pending downloads globally, for stats
missingObjects = {}
Peer = collections.namedtuple('Peer', ['host', 'port'])
def resetNetworkProtocolAvailability():
    """Reset the cached reachability state of every supported transport.

    Rebuilds the module-level ``networkProtocolAvailability`` mapping with
    all three transports (IPv4, IPv6, onion) back in the unknown state
    (``None``).
    """
    global networkProtocolAvailability
    networkProtocolAvailability = dict.fromkeys(('IPv4', 'IPv6', 'onion'))
resetNetworkProtocolAvailability()
dandelion = 0
| [
"[email protected]"
] | |
68b98cb29f9ef2a047b4a3942a769644673c1e4e | 9951b1f9957031f9b5015bf48da854faba1079c0 | /slam/imaging/no_lens_light/source_inversion/mass_total__subhalo_nfw/hyper_all.py | dabec6a1c3469e5d8d2a94f16aee5a46550cb822 | [] | no_license | jonathanfrawley/autolens_workspace_test_copy | 07c069b77759afb2d836d9763846d27727eea67c | 7f7c72feedf5ad3dd7b6d65a3b6de463eaaa4fe6 | refs/heads/master | 2023-04-24T11:57:52.620442 | 2021-02-25T15:52:54 | 2021-02-25T15:52:54 | 367,427,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,238 | py | """
__SLaM (Source, Light and Mass)__
This SLaM pipeline runner loads a strong lens dataset and analyses it using a SLaM lens modeling
pipeline.
__THIS RUNNER__
Using 1 source pipeline, a mass pipeline and a subhalo pipeline this runner fits `Imaging` of a strong lens system,
where in the final phase of the pipeline:
- The lens `Galaxy`'s light is omitted from the data and model.
- The lens `Galaxy`'s total mass distribution is modeled as an `EllipticalIsothermal`.
- A dark matter subhalo within the lens galaxy is modeled as an `EllipticalNFWMCRLudlow`.
- The source galaxy is modeled as an `EllipticalSersic`.
This runner uses the SLaM pipelines:
`slam/imaging/no_lens_light/pipelines/source__mass_sie__source_parametric.py`.
`slam/imaging/no_lens_light/pipelines/source__mass_sie__source_inversion.py`.
`slam/imaging/no_lens_light/pipelines/mass__mass_power_law__source.py`.
`slam/imaging/no_lens_light/pipelines/subhalo__mass__subhalo_nfw__source.py`.
Check them out for a detailed description of the analysis!
"""
from os import path
import autofit as af
import autolens as al
import autolens.plot as aplt
dataset_name = "mass_sie__source_sersic"
pixel_scales = 0.2
dataset_path = path.join("dataset", "imaging", "no_lens_light", dataset_name)
"""
Using the dataset path, load the data (image, noise-map, PSF) as an `Imaging` object from .fits files.
"""
imaging = al.Imaging.from_fits(
image_path=path.join(dataset_path, "image.fits"),
psf_path=path.join(dataset_path, "psf.fits"),
noise_map_path=path.join(dataset_path, "noise_map.fits"),
pixel_scales=pixel_scales,
)
mask = al.Mask2D.circular(
shape_native=imaging.shape_native, pixel_scales=pixel_scales, radius=3.0
)
imaging_plotter = aplt.ImagingPlotter(
imaging=imaging, visuals_2d=aplt.Visuals2D(mask=mask)
)
imaging_plotter.subplot_imaging()
"""
__Settings__
The `SettingsPhaseImaging` describe how the model is fitted to the data in the log likelihood function.
These settings are used and described throughout the `autolens_workspace/examples/model` example scripts, with a
complete description of all settings given in `autolens_workspace/examples/model/customize/settings.py`.
The settings chosen here are applied to all phases in the pipeline.
"""
settings_masked_imaging = al.SettingsMaskedImaging(grid_class=al.Grid2D, sub_size=2)
settings = al.SettingsPhaseImaging(
settings_masked_imaging=settings_masked_imaging,
settings_lens=al.SettingsLens(stochastic_samples=1),
)
"""
__PIPELINE SETUP__
Transdimensional pipelines used the `SetupPipeline` object to customize the analysis performed by the pipeline,
for example if a shear was included in the mass model and the model used for the source galaxy.
SLaM pipelines break the analysis down into multiple pipelines which focus on modeling a specific aspect of the strong
lens, first the Source, then the (lens) Light and finally the Mass. Each of these pipelines has it own setup object
which is equivalent to the `SetupPipeline` object, customizing the analysis in that pipeline. Each pipeline therefore
has its own `SetupMass` and `SetupSourceParametric` object.
The `Setup` used in earlier pipelines determine the model used in later pipelines. For example, if the `Source`
pipeline is given a `Pixelization` and `Regularization`, than this `Inversion` will be used in the subsequent
`SLaMPipelineMass` pipeline.
The `Setup` again tags the path structure of every pipeline in a unique way, such than combinations of different
SLaM pipelines can be used to fit lenses with different models. If the earlier pipelines are identical (e.g. they use
the same `SLaMPipelineSource`. they will reuse those results before branching off to fit different models in the
`SLaMPipelineLightParametric` and / or `SLaMPipelineMass` pipelines.
"""
"""
__HYPER SETUP__
The `SetupHyper` determines which hyper-mode features are used during the model-fit and is used identically to the
hyper pipeline examples.
The `SetupHyper` object has a new input available, `hyper_fixed_after_source`, which fixes the hyper-parameters to
the values computed by the hyper-phase at the end of the Source pipeline. By fixing the hyper-parameter values in the
_SLaMPipelineLight_ and `SLaMPipelineMass` pipelines, model comparison can be performed in a consistent fashion.
"""
hyper = al.SetupHyper(
hyper_search_no_inversion=af.DynestyStatic(maxcall=1),
hyper_search_with_inversion=af.DynestyStatic(maxcall=1),
hyper_galaxies_lens=True,
hyper_galaxies_source=True,
hyper_image_sky=al.hyper_data.HyperImageSky,
hyper_background_noise=al.hyper_data.HyperBackgroundNoise,
)
"""
__SLaMPipelineSourceParametric__
The parametric source pipeline aims to initialize a robust model for the source galaxy using `LightProfile` objects.
_SLaMPipelineSourceParametric_ determines the source model used by the parametric source pipeline. A full description of all
options can be found ? and ?.
By default, this assumes an `EllipticalIsothermal` profile for the lens `Galaxy`'s mass. Our experience with lens
modeling has shown they are the simplest models that provide a good fit to the majority of strong lenses.
For this runner the `SLaMPipelineSourceParametric` customizes:
- The `MassProfile` fitted by the pipeline (and the following `SLaMPipelineSourceInversion`.
- If there is an `ExternalShear` in the mass model or not.
"""
setup_mass = al.SetupMassTotal(
mass_prior_model=al.mp.EllipticalIsothermal, with_shear=True
)
setup_source = al.SetupSourceParametric(
bulge_prior_model=al.lp.EllipticalSersic,
disk_prior_model=None,
envelope_prior_model=None,
)
pipeline_source_parametric = al.SLaMPipelineSourceParametric(
setup_mass=setup_mass, setup_source=setup_source
)
"""
__SLaMPipelineSourceInversion__
The Source inversion pipeline aims to initialize a robust model for the source galaxy using an `Inversion`.
_SLaMPipelineSourceInversion_ determines the `Inversion` used by the inversion source pipeline. A full description of all
options can be found ? and ?.
By default, this again assumes `EllipticalIsothermal` profile for the lens `Galaxy`'s mass model.
For this runner the `SLaMPipelineSourceInversion` customizes:
- The `Pixelization` used by the `Inversion` of this pipeline.
- The `Regularization` scheme used by the `Inversion` of this pipeline.
The `SLaMPipelineSourceInversion` uses the `SetupMass` of the `SLaMPipelineSourceParametric`.
The `SLaMPipelineSourceInversion` determines the source model used in the `SLaMPipelineLightParametric` and `SLaMPipelineMass` pipelines, which in this
example therefore both use an `Inversion`.
"""
setup_source = al.SetupSourceInversion(
pixelization_prior_model=al.pix.VoronoiBrightnessImage,
regularization_prior_model=al.reg.AdaptiveBrightness,
)
pipeline_source_inversion = al.SLaMPipelineSourceInversion(setup_source=setup_source)
"""
__SLaMPipelineMassTotal__
The `SLaMPipelineMassTotal` pipeline fits the model for the lens `Galaxy`'s total mass distribution.
A full description of all options can be found ? and ?.
The model used to represent the lens `Galaxy`'s mass is input into `SLaMPipelineMass` and this runner uses an
`EllipticalIsothermal` in this example.
For this runner the `SLaMPipelineMass` customizes:
- The `MassProfile` fitted by the pipeline.
- If there is an `ExternalShear` in the mass model or not.
"""
setup_mass = al.SetupMassTotal(
mass_prior_model=al.mp.EllipticalIsothermal, with_shear=True
)
pipeline_mass = al.SLaMPipelineMass(setup_mass=setup_mass)
"""
__SetupSubhalo__
The final pipeline fits the lens and source model including a `SphericalNFW` subhalo, using a grid-search of non-linear
searches.
A full description of all options can be found ? and ?.
The models used to represent the lens `Galaxy`'s mass and the source are those used in the previous pipelines.
For this runner the `SetupSubhalo` customizes:
- If the source galaxy (parametric or _Inversion) is treated as a model (all free parameters) or instance (all fixed)
during the subhalo detection grid search.
- The NxN size of the grid-search.
"""
setup_subhalo = al.SetupSubhalo(source_is_model=False, number_of_steps=2)
"""
__SLaM__
We combine all of the above `SLaM` pipelines into a `SLaM` object.
The `SLaM` object contains a number of methods used in the make_pipeline functions which are used to compose the model
based on the input values. It also handles pipeline tagging and path structure.
"""
slam = al.SLaM(
path_prefix=path.join("slam", dataset_name),
setup_hyper=hyper,
pipeline_source_parametric=pipeline_source_parametric,
pipeline_source_inversion=pipeline_source_inversion,
pipeline_mass=pipeline_mass,
setup_subhalo=setup_subhalo,
)
"""
__PIPELINE CREATION__
We import and make pipelines as per usual, albeit we'll now be doing this for multiple pipelines!
We then run each pipeline, passing the results of previous pipelines to subsequent pipelines.
"""
from slam.imaging.no_lens_light.pipelines import source__parametric
from slam.imaging.no_lens_light.pipelines import source__inversion
from slam.imaging.no_lens_light.pipelines import mass__total
from slam.imaging.no_lens_light.pipelines import subhalo
source__parametric = source__parametric.make_pipeline(slam=slam, settings=settings)
source_results = source__parametric.run(dataset=imaging, mask=mask)
source__inversion = source__inversion.make_pipeline(
slam=slam, settings=settings, source_parametric_results=source_results
)
source_results = source__inversion.run(dataset=imaging, mask=mask)
mass__total = mass__total.make_pipeline(
slam=slam, settings=settings, source_results=source_results
)
mass_results = mass__total.run(dataset=imaging, mask=mask)
subhalo = subhalo.make_pipeline_single_plane(
slam=slam, settings=settings, mass_results=mass_results, end_stochastic=True
)
subhalo.run(dataset=imaging, mask=mask)
| [
"[email protected]"
] | |
599bef3c88ebe2dc1d0f788a7ba9a671c1a147cf | 836a539f8d143fde8dd2d9e8db0187b3322ee567 | /tfne/encodings/codeepneat/codeepneat_optimizer_factory.py | fc053650159a64269314f11c3890e0105568804d | [
"Apache-2.0"
] | permissive | YangZhafou/Tensorflow-Neuroevolution | 1e2a060ea136888a43678d2fbe4792f70c0aef0d | 77a885b31d065ee0b339569347c0e7614fa5c98f | refs/heads/master | 2022-12-07T18:10:38.370395 | 2020-06-05T14:57:48 | 2020-06-05T14:57:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | from typing import Union
import tensorflow as tf
class OptimizerFactory:
""""""
def __init__(self, optimizer_parameters):
""""""
# Register parameters for optimizer
self.optimizer_parameters = optimizer_parameters
def __str__(self) -> str:
""""""
return "Optimizer: {} (Config: {})".format(self.optimizer_parameters['class_name'],
self.optimizer_parameters['config'])
def create_optimizer(self) -> tf.keras.optimizers.Optimizer:
""""""
return tf.keras.optimizers.deserialize(self.optimizer_parameters)
def duplicate(self):
""""""
return OptimizerFactory(self.optimizer_parameters)
def get_parameters(self) -> {str: Union[str, dict]}:
""""""
return self.optimizer_parameters
def get_name(self) -> str:
""""""
return self.optimizer_parameters['class_name']
| [
"[email protected]"
] | |
de279398afe0fcc3910d8628d08b1563ce65bcf8 | 08d98b08a308a053124a888830c2a3b2cfc5df67 | /RateParser/PageRateParser.py | 733405101d3effb5955d570149d7fbfb3e4b6a2c | [] | no_license | mortent/EasyShip | c9bc6304d573a8f28cebf7c35f65eadaa593b5d2 | 7f1421ceb5a8d773fe8f5f7250e88c7003536412 | refs/heads/master | 2021-01-20T15:42:15.128811 | 2013-02-01T19:14:51 | 2013-02-01T19:14:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,580 | py | import re
import Utils as Utils
def parse_page(lines, results):
    """
    Find each column and identify if it is a column of weights or rates.

    Columns are separated by blank lines.  A column whose header is "Zones"
    supplies the row-by-row weights for the rate columns that follow it;
    every other column is parsed as rates and appended to *results*.
    """
    weights = None
    column = []
    for line in lines:
        if Utils.is_only_newline(line): # No content in this line, it must separate two columns.
            # A column just ended: record it as the weight column or parse
            # it as a rate column, then start collecting the next one.
            if column and is_weight_column(column[0]):
                weights = parse_weight_column(column[1:])
            if column and not is_weight_column(column[0]):
                parse_data_rate_column(column, weights, results)
            column = []
        else:
            column.append(line)
    else:
        # for/else: runs once the loop is exhausted, flushing the final
        # column (the page does not end with a blank separator line).
        # NOTE(review): if the page *does* end with a blank line this flushes
        # an empty column and parse_data_rate_column would raise on
        # column[0] -- confirm input pages never end with a separator.
        parse_data_rate_column(column, weights, results)
def parse_data_rate_column(column, weights, results):
    """
    Parse a column of rates and connect them to the correct column.

    The column's first line holds the zone number; each following line is
    the rate for the weight at the same row of the previously parsed weight
    column.  One {"weight", "zone", "rate"} dict per row is appended to
    *results*.
    """
    # NOTE(review): assumes *weights* was populated from an earlier weight
    # column and has at least as many rows as this column -- verify input.
    zone = Utils.strip_non_digit_characters(column[0])
    for (counter, line) in enumerate(column[1:]):
        weight = weights[counter]
        rate = Utils.clean_price_string(line)
        results.append({"weight": weight, "zone": zone, "rate": rate})
def parse_weight_column(column):
    """
    Translate a column of weight labels into the row-indexed list consumed
    by parse_data_rate_column: "Letter" for letter-rate rows, otherwise the
    weight with every non-digit character stripped.
    """
    return ["Letter" if is_letter_weight(entry)
            else Utils.strip_non_digit_characters(entry)
            for entry in column]
def is_weight_column(line):
    """Return True if *line* is the "Zones" header that starts the weight column."""
    # re.match with $ also tolerates one trailing newline, which these raw
    # page lines may still carry; bool() gives callers a clean True/False
    # instead of a Match-object-or-None.
    return bool(re.match(r"^Zones$", line))
def is_letter_weight(line):
    """Return True if *line* denotes a letter-rate row (starts with "Letter")."""
    # Equivalent to the previous re.match("^Letter.*", line) check, without
    # the regex machinery, and returning a real bool instead of a Match.
    return line.startswith("Letter")
| [
"[email protected]"
] | |
0eae92a8ca0999637346d12e1ae5d80eea7b7256 | 8dcb8bf9e92fa6b0e818a78af5b641ee197e1e57 | /misc/python_decorators/simple.py | 4f948974376a785c26b12ff0de9e89e9e078a8fc | [] | no_license | deepakguptagit/practice | d7316d96d9afc15e6bedde8d54e7d7bba380f7d0 | d40d5eb5cbce6770a5ca243dee9d352cc1c37a7c | refs/heads/master | 2016-08-11T12:22:25.779056 | 2015-10-23T12:32:32 | 2015-10-23T12:32:32 | 44,811,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | def printargs(func):
def wrapper(*ar, **rgs):
print ar
ret = func(*ar, **rgs)
ret = sorted(ret)
return ret
return wrapper
@printargs
def add(a, b):
return a + b
@printargs
def hello(a , b, c):
print a, b, c
def test(y, x = None):
print x, y
hello([232,323,42,34,23], {"helo":"workd"}, "helloworst")
| [
"[email protected]"
] | |
68d0b8e93907ac1a51114f52f7c8f44df3fe37ef | 183a53ab5185c4aeca1b5ad50b748b542a695a25 | /nspmessage/models.py | 7ddaf6597780d1b826d128db3884dbaf85aca430 | [
"MIT"
] | permissive | MFOSSociety/NSP | 696bb4e6c1dcbc71b2e16cb23f2e36e35710a748 | 578349872041f7b080ee3c6346f9a82e63bd94c9 | refs/heads/master | 2023-01-07T04:47:49.414038 | 2020-06-24T21:12:38 | 2020-06-24T21:12:38 | 127,533,655 | 6 | 5 | MIT | 2022-12-26T20:15:10 | 2018-03-31T13:18:35 | Python | UTF-8 | Python | false | false | 398 | py | from django.db import models
from accounts.models import User
class Message(models.Model):
    """A short (max 140 characters) direct message from one user to another."""
    # User who wrote the message.
    sender = models.ForeignKey(User, related_name="sender", on_delete=models.CASCADE)
    # User the message was addressed to.
    receiver = models.ForeignKey(User, related_name="receiver", on_delete=models.CASCADE)
    # Message body; required (blank/null both disallowed).
    msg_content = models.CharField(max_length=140, blank=False, null=False)
    # Date (not datetime) the row was first saved; set automatically.
    created_at = models.DateField(auto_now_add=True)
| [
"[email protected]"
] | |
54b01ea95e7979928107296d665a0e0e0a33a485 | c7750528bdc5cdb9b6b2eddb8fc60915386b3dfa | /harshad_number.py | 4619d06fb7ec431d8a12e65ee1a3ec15861de336 | [] | no_license | shreshta2000/function_questions | c0723270b1770d767e806f3f41207e2b3d763ffe | 9124e4ae9d5e42c6ec7047f915eb2cf0dbb4858a | refs/heads/main | 2023-01-09T04:07:58.113514 | 2020-10-27T15:18:10 | 2020-10-27T15:18:10 | 307,741,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | def is_harshad_number(num):
i=num
while i>0:
j=0
rem=0
sum=0
while j<=i:
rem=num%10
sum=sum+rem
num=num//10
#reverse=rem*10+rem
#sum=sum+reverse
j=j+1
i=i+1
print(sum,i)
if num%sum==0:
print(i,"harshad number")
is_harshad_number(100)
| [
"[email protected]"
] | |
cfe5576a1ad5cc059f9bcb15573b75d1104a2553 | 05125b3eef5e092a9ee06a1a17ad850b0ee0e70c | /MiscInterview/unSortedArray.py | 336132647a14623ab1fdf1e202b1a19efcd08b3b | [] | no_license | ischang/Practice-CS-Problems | 6ca91296e3d924c4eec82b70f0e5440015535e80 | eec53da0b98bef1f5bbfcc8bf4546065fff797f9 | refs/heads/master | 2021-01-18T06:57:35.599630 | 2020-03-28T18:17:27 | 2020-03-28T18:17:27 | 46,593,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | def unsortedArray(arr):
dupHash = dict()
cleanList = []
for integer in arr:
if integer not in dupHash:
dupHash[integer] = integer
if integer in dupHash:
continue
for integer in dupHash:
cleanList.append(integer)
print "duplicates removed and sorted", cleanList
arr = [1,5,2,6,8,9,1,1,10,3,2,4,1,3,11,3]
print "original array", arr
unsortedArray(arr) | [
"[email protected]"
] | |
03b4a4b016aa93232c07a4a78fb4b51336fd30b1 | 8c0f3864d7ecd5d72f1af5df2a89407687928b83 | /utils/content-spider/content_spider/spiders/quinto_dia.py | 9265a31126f63d8f0b97c6b93f75698f9b62e187 | [] | no_license | jredondo/django_topic_explorer | 440f3a6cbd31f2db646838f0b1fead385664d9f8 | 8310f7540a63e743ad7399de283b3f3309e38621 | refs/heads/master | 2021-01-18T22:21:19.021224 | 2017-03-17T15:16:10 | 2017-03-17T15:16:10 | 35,054,920 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import scrapy
from utils import *
from settings import settings
class SiteSpySpider(scrapy.Spider):
    """Scrapy spider for the 'quinto_dia' site.

    The start URL and all CSS selectors are read from the shared
    ``settings`` mapping, keyed by this spider's ``name``.
    """
    name = "quinto_dia"
    start_urls = [settings[name]['url']]
    def parse(self, response):
        # Listing page: follow every article link it advertises.
        for href in response.css(settings[self.name]['links']):
            full_url = response.urljoin(href.extract())
            yield scrapy.Request(full_url, callback=self.parse_links)
    def parse_links(self, response):
        # Article page: emit one item with title, author, cleaned body, URL.
        # 'fecha' (date) is intentionally left empty by the original scraper.
        body = limpiar_quintodia(response.css(settings[self.name]['body']).extract())
        yield {
            'titulo': response.css(settings[self.name]['titulo']).extract()[0],
            'autor': response.css(settings[self.name]['autor']).extract()[-1],
            'fecha': '',
            'body': [body],
            'link': response.url,
        }
| [
"[email protected]"
] | |
4902bef02a7a707fe36124ed3842bb6d73289a70 | 0f4a0752059bc56991291337c3f8f462116e5995 | /google_ping.py | 18e5dc63de1f53456490f139015984332c5a29d7 | [] | no_license | oleh-bodnevych/itea | f067661f7d9c5ad19931540ba781d698e71154eb | 92be5c8bf87effa7c6e66da7e0aa6bcc28c6bd9a | refs/heads/main | 2023-08-20T23:53:08.896590 | 2021-10-07T10:49:14 | 2021-10-07T10:49:14 | 414,549,775 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | #! /usr/bin/python3
import os
import subprocess
hostname = "Google.com"
# Ping the host twice; a return code of 0 means it answered.
# subprocess with an argument list avoids spawning a shell, unlike the
# previous os.system("ping -c 2 " + hostname) string concatenation, which
# would be shell-injectable if hostname ever came from user input.
response = subprocess.call(["ping", "-c", "2", hostname])
#and then check
if response == 0:
    print('\n', hostname, ' available!')
else:
    print('\n', hostname, 'is down!')
print("My Script working!!!!!!")
| [
"[email protected]"
] | |
6736aa42ac8fb27417ecab59e96ef302dc25c84f | e3819b3badcdd17e1a7bb0042e4efeb0151ec39b | /facial_expression_video.py | 3f2e652aa2b3abd63d123525bd39b53b109c742c | [] | no_license | SwetaNikki/Facial-Expression-using-Keras | e6b4ddbb1300cfcdb0327d965353859b2fc32703 | 30158c50fb2dfe815c8755e9375a97691ac666bc | refs/heads/main | 2022-12-30T10:55:26.352453 | 2020-10-19T10:22:12 | 2020-10-19T10:22:12 | 305,315,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | # -*- coding: utf-8 -*-
"""Facial expression VIDEO.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1fLItY0WxsA4uaNYHX98_5SpB4TdSUkrf
"""
from keras.models import load_model
import cv2
import numpy as np
model = load_model('model-007.model')
face_clsfr=cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
source=cv2.VideoCapture(0)
labels_dict={0: 'angry', 1: 'disgust', 2: 'fear', 3: 'happy', 4: 'neutral', 5: 'sad', 6: 'surprise'}
color_dict={0:(0,255,0),1:(0,0,255),2:(255, 255, 0),3:(0, 128, 0), 4:(250, 128, 114), 5:(128, 128, 0), 6:(255,0,0)}
while(True):
ret,img=source.read()
gray=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
faces=face_clsfr.detectMultiScale(gray,1.3,5)
for x,y,w,h in faces:
face_img=gray[y:y+w,x:x+w]
resized=cv2.resize(face_img,(48,48))
normalized=resized/255.0
reshaped=np.reshape(normalized,(1,48,48,1))
result=model.predict(reshaped)
label=np.argmax(result,axis=1)[0]
cv2.rectangle(img,(x,y),(x+w,y+h),color_dict[label],2)
cv2.rectangle(img,(x,y-40),(x+w,y),color_dict[label],-1)
cv2.putText(img, labels_dict[label], (x, y-10),cv2.FONT_HERSHEY_SIMPLEX,0.8,(255,255,255),2)
cv2.imshow('FACIAL DETECTION',img)
key=cv2.waitKey(1)
#press esc to escape
if(key==27):
break
cv2.destroyAllWindows()
source.release()
| [
"[email protected]"
] | |
7f0607b397f49fb16ab6139277e00a465535b048 | 0e660c413047dd3a160feb10789e2175dab51175 | /Dame/logic.py | 0a16b3a4499d76e86df9ca762ae9364bb75c14f0 | [] | no_license | Yameni-code/Checkers | d21c67fdaba6f4de9c72b12e4904a78c7c304e16 | 77290211b596ec2bee1d5884b3378090ce23266e | refs/heads/main | 2023-07-13T01:02:27.052006 | 2021-08-22T18:53:12 | 2021-08-22T18:53:12 | 359,057,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,000 | py | from .constant import PLAYER2, PLAYER1, KING1, KING2, Y
from .table import get_all_pieces
def check_move_piece(array, n1, m1, n, m, piece, ob_move):
    # This function checks if the piece move is valid
    # for a piece move to be valid, it should be an element of the possibles moves and it should of max 2 squares for
    # (normal piece) or it should be on the diagonal for (King piece)
    """
    :param array: Possible moves
    :param n1: y-axis index new position
    :param m1: x-axis index new position
    :param n: y-axis index old position
    :param m: x-axis index old position
    :param piece: Piece or King
    :param ob_move: Obligatory move
    :return: If the giving move is valid or not
    """
    # Kings slide along a diagonal: walk outward square by square from
    # (n, m) towards (n1, m1); succeed if the target square is reached
    # first, fail as soon as a non-empty entry blocks the path.
    # NOTE(review): at i == 0 the probe square is (n, m) itself; this
    # assumes the piece's own square is never a key of *array* -- confirm.
    if piece == KING2 or piece == KING1:
        # Target lies up-left (row and column indices both decrease).
        if n - n1 > 0 and m - m1 > 0:
            for i in range(Y):
                for ele in array:
                    if ele == (n - i, m - i):
                        if (n1, m1) == ele:
                            return True
                        if array[(n - i, m - i)] != 0:
                            return False
        # Target lies up-right (row decreases, column increases).
        elif m - m1 < 0 < n - n1:
            for i in range(Y):
                for ele in array:
                    if ele == (n - i, m + i):
                        if (n1, m1) == ele:
                            return True
                        if array[(n - i, m + i)] != 0:
                            return False
        # Target lies down-right (row and column indices both increase).
        elif n - n1 < 0 and m - m1 < 0:
            for i in range(Y):
                for ele in array:
                    if ele == (n + i, m + i):
                        if (n1, m1) == ele:
                            return True
                        if array[(n + i, m + i)] != 0:
                            return False
        # Target lies down-left (row increases, column decreases).
        elif m - m1 > 0 > n - n1:
            for i in range(Y):
                for ele in array:
                    if ele == (n + i, m - i):
                        if (n1, m1) == ele:
                            return True
                        if array[(n + i, m - i)] != 0:
                            return False
    # Men move one square (normal step) or two squares (capture); when a
    # capture is obligatory, only two-square moves are accepted.
    if piece == PLAYER1 or piece == PLAYER2:
        if ob_move and abs(n - n1) != 2:
            return False
        for ele in array:
            # n - n1 == 2 capture, == 1 normal piece move
            if (n1, m1) == ele and (abs(n - n1) == 2 or abs(n - n1) == 1):
                return True
    return False
def king_turn(king):
    """Return the opposite side's king constant (KING1 <-> KING2)."""
    return KING2 if king == KING1 else KING1
def player_turn(player):
    """Return the opposite player's constant (PLAYER1 <-> PLAYER2)."""
    return PLAYER2 if player == PLAYER1 else PLAYER1
def execute_move(pos1, pos2, board, cap_piece):
    """Apply a move on the board, mutating it in place.

    :param pos1: (row, col) square the piece moves from
    :param pos2: (row, col) square the piece moves to
    :param board: 2-D board grid; modified in place
    :param cap_piece: (row, col) of the captured piece, or 0 when the move
        captures nothing
    :return: the (mutated) board and whether a capture happened
    """
    from_row, from_col = pos1
    to_row, to_col = pos2
    board[to_row][to_col] = board[from_row][from_col]
    board[from_row][from_col] = 0
    captured = cap_piece != 0
    if captured:
        # Remove the jumped-over piece from the board.
        board[cap_piece[0]][cap_piece[1]] = 0
    return board, captured
def game_status(board):
    """Check whether the game is over.

    A player loses when they have no pieces left, or when none of their
    remaining pieces has a legal move.

    :param board: board
    :return: (winner, running) -- winner is 0 and running is True while
             the game continues; otherwise winner is the winning player
             and running is False
    """
    # A side that still has pieces but no legal move for ANY of them loses.
    # (The previous check `len(all_moves) == 1 and all_moves[0] == {}` only
    # detected this when exactly one piece remained, so a player with
    # several pieces, all blocked, was missed.)
    moves1 = [possible_moves(p[0], p[1], board)
              for p in get_all_pieces(board, PLAYER1)]
    if moves1 and all(mv == {} for mv in moves1):
        return PLAYER2, False
    moves2 = [possible_moves(p[0], p[1], board)
              for p in get_all_pieces(board, PLAYER2)]
    if moves2 and all(mv == {} for mv in moves2):
        return PLAYER1, False
    # Count the remaining material of each side (pieces and kings).
    counter1 = counter2 = 0
    for i in range(Y):
        for j in range(Y):
            if board[i][j] == PLAYER1 or board[i][j] == KING1:
                counter1 += 1
            if board[i][j] == PLAYER2 or board[i][j] == KING2:
                counter2 += 1
    if counter1 != 0 and counter2 != 0:
        return 0, True
    elif counter1 == 0:
        return PLAYER2, False
    return PLAYER1, False
def crowing_king(board):
    """Promote every piece that reached the opposite edge to a king."""
    bottom = Y - 1
    for col in range(Y):
        # PLAYER2 is crowned on row 0, PLAYER1 on the last row.
        if board[0][col] == PLAYER2:
            board[0][col] = KING2
        if board[bottom][col] == PLAYER1:
            board[bottom][col] = KING1
    return board
def possible_moves(n, m, board, capture=False, old_n=Y, old_m=Y):
    # check the possible moves for a giving piece
    """
    :param n: y-axis piece position
    :param m: x-axis piece position
    :param board: board
    :param capture: (boolean) have we just capture a piece; when True,
                    quiet (non-capturing) moves are not offered
    :param old_n: y-axis old piece position (Y means "not supplied")
    :param old_m: x-axis old piece position (Y means "not supplied")
    :return: dict of possible destinations for this piece; each key is a
             destination square (y, x) and its value is the position of
             the enemy piece captured by that move, or 0 for a quiet
             (non-capturing) move
    """
    possible = {}
    # --- normal PLAYER1 piece ------------------------------------------
    # Quiet moves go only "forward" for PLAYER1 (toward larger n), while
    # 2-square jumps over an adjacent enemy piece are checked in all four
    # diagonal directions. Every branch first bounds-checks the landing
    # square against the board edges.
    if board[n][m] == PLAYER1:
        if n + 1 <= Y - 1 and m + 1 <= Y - 1:
            if board[n + 1][m + 1] == 0 and not capture:
                possible[(n+1, m+1)] = 0
        if n + 1 <= Y - 1 and m - 1 >= 0:
            if board[n + 1][m - 1] == 0 and not capture:
                possible[(n+1, m-1)] = 0
        # Jumps: landing square must be empty and the square in between
        # must hold an enemy piece or king.
        if n + 2 <= Y - 1 and m + 2 <= Y - 1:
            if board[n + 2][m + 2] == 0 and (board[n + 1][m + 1] == PLAYER2 or board[n + 1][m + 1] == KING2):
                possible[(n + 2, m + 2)] = (n + 1, m + 1)
        if n + 2 <= Y - 1 and m - 2 >= 0:
            if board[n + 2][m - 2] == 0 and (board[n + 1][m - 1] == PLAYER2 or board[n + 1][m - 1] == KING2):
                possible[(n + 2, m - 2)] = (n + 1, m - 1)
        if n - 2 >= 0 and m - 2 >= 0:
            if board[n - 2][m - 2] == 0 and (board[n - 1][m - 1] == PLAYER2 or board[n - 1][m - 1] == KING2):
                possible[(n - 2, m - 2)] = (n - 1, m - 1)
        if n - 2 >= 0 and m + 2 <= Y - 1:
            if board[n - 2][m + 2] == 0 and (board[n - 1][m + 1] == PLAYER2 or board[n - 1][m + 1] == KING2):
                possible[(n - 2, m + 2)] = (n - 1, m + 1)
    # --- normal PLAYER2 piece ------------------------------------------
    # Mirror image of the PLAYER1 case: quiet moves go toward smaller n,
    # jumps over PLAYER1/KING1 pieces are checked in all four directions.
    elif board[n][m] == PLAYER2:
        if n - 1 >= 0 and m - 1 >= 0:
            if board[n - 1][m - 1] == 0 and not capture:
                possible[(n-1, m-1)] = 0
        if n - 1 >= 0 and m + 1 <= Y - 1:
            if board[n - 1][m + 1] == 0 and not capture:
                possible[(n-1, m+1)] = 0
        if n + 2 <= Y - 1 and m + 2 <= Y - 1:
            if board[n + 2][m + 2] == 0 and (board[n + 1][m + 1] == PLAYER1 or board[n + 1][m + 1] == KING1):
                possible[(n + 2, m + 2)] = (n + 1, m + 1)
        if n + 2 <= Y - 1 and m - 2 >= 0:
            if board[n + 2][m - 2] == 0 and (board[n + 1][m - 1] == PLAYER1 or board[n + 1][m - 1] == KING1):
                possible[(n + 2, m - 2)] = (n + 1, m - 1)
        if n - 2 >= 0 and m - 2 >= 0:
            if board[n - 2][m - 2] == 0 and (board[n - 1][m - 1] == PLAYER1 or board[n - 1][m - 1] == KING1):
                possible[(n - 2, m - 2)] = (n - 1, m - 1)
        if n - 2 >= 0 and m + 2 <= Y - 1:
            if board[n - 2][m + 2] == 0 and (board[n - 1][m + 1] == PLAYER1 or board[n - 1][m + 1] == KING1):
                possible[(n - 2, m + 2)] = (n - 1, m + 1)
    # --- KING1 ----------------------------------------------------------
    # A king slides along each diagonal until blocked. con1..con4 flag
    # whether each direction is still open:
    #   con1: up-left  (n - i, m - i)    con2: up-right  (n - i, m + i)
    #   con3: down-left (n + i, m - i)   con4: down-right (n + i, m + i)
    elif board[n][m] == KING1:
        con4 = con2 = con3 = con1 = True
        # When continuing a multi-jump (old position supplied), close the
        # diagonal pointing back toward the previous square so the king
        # cannot immediately reverse direction.
        if old_n != Y and old_m != Y:
            if old_n - n < 0 and old_m - m < 0:
                con1 = False
            elif old_m - m < 0 < old_n - n:
                con3 = False
            elif old_n - n > 0 and old_m - m > 0:
                con4 = False
            elif old_m - m > 0 > old_n - n:
                con2 = False
        # NOTE(review): the look-back/look-ahead indices below
        # (e.g. board[n - i - 1][m - i - 1]) can become negative near the
        # board edge, which in Python wraps to the opposite side --
        # confirm the surrounding bound checks always prevent that.
        for i in range(Y):
            # Up-left diagonal scan: an empty square is a quiet landing
            # (unless mid-capture); if the square one step back toward the
            # king held an enemy, record it as a jump landing and stop the
            # scan; an own piece or the board edge also stops the scan; an
            # enemy with a non-empty square behind it blocks the diagonal.
            if n - i >= 0 and m - i >= 0 and con1:
                if board[n - i][m - i] == 0:
                    if not capture:
                        possible[(n - i, m - i)] = 0
                    if board[n - i + 1][m - i + 1] == PLAYER2 or board[n - i + 1][m - i + 1] == KING2:
                        possible[(n - i, m - i)] = (n - i + 1, m - i + 1)
                        con1 = False
                elif board[n - i][m - i] == PLAYER1 or (n - i == 0 or m - i == 0):
                    con1 = False
                elif board[n - i][m - i] == PLAYER2:
                    if board[n - i - 1][m - i - 1] != 0:
                        con1 = False
            # Up-right diagonal: same scheme.
            if n - i >= 0 and m + i <= Y - 1 and con2:
                if board[n - i][m + i] == 0:
                    if not capture:
                        possible[(n - i, m + i)] = 0
                    if board[n - i + 1][m + i - 1] == PLAYER2 or board[n - i + 1][m + i - 1] == KING2:
                        possible[(n - i, m + i)] = (n - i + 1, m + i - 1)
                        con2 = False
                elif board[n - i][m + i] == PLAYER1 or (n - i == 0 or m + i == Y - 1):
                    con2 = False
                elif board[n - i][m + i] == PLAYER2:
                    if board[n - i - 1][m + i + 1] != 0:
                        con2 = False
            # Down-right diagonal: same scheme.
            if n + i <= Y - 1 and m + i <= Y - 1 and con4:
                if board[n + i][m + i] == 0:
                    if not capture:
                        possible[(n + i, m + i)] = 0
                    if board[n + i - 1][m + i - 1] == PLAYER2 or board[n + i - 1][m + i - 1] == KING2:
                        possible[(n + i, m + i)] = (n + i - 1, m + i - 1)
                        con4 = False
                elif board[n + i][m + i] == PLAYER1 or (n + i == Y - 1 or m + i == Y - 1):
                    con4 = False
                elif board[n + i][m + i] == PLAYER2:
                    if board[n + i + 1][m + i + 1] != 0:
                        con4 = False
            # Down-left diagonal: same scheme.
            if n + i <= Y - 1 and m - i >= 0 and con3:
                if board[n + i][m - i] == 0:
                    if not capture:
                        possible[(n + i, m - i)] = 0
                    if board[n + i - 1][m - i + 1] == PLAYER2 or board[n + i - 1][m - i + 1] == KING2:
                        possible[(n + i, m - i)] = (n + i - 1, m - i + 1)
                        con3 = False
                elif board[n + i][m - i] == PLAYER1 or (n + i == Y - 1 or m - i == 0):
                    con3 = False
                elif board[n + i][m - i] == PLAYER2:
                    if board[n + i + 1][m - i - 1] != 0:
                        con3 = False
    # --- KING2 ----------------------------------------------------------
    # Identical to the KING1 case with the roles of the sides swapped:
    # PLAYER1/KING1 are now the capturable enemies and PLAYER2 blocks.
    elif board[n][m] == KING2:
        con1 = con2 = con3 = con4 = True
        # Same no-reverse rule during a multi-jump as for KING1.
        if old_n != Y and old_m != Y:
            if old_n - n < 0 and old_m - m < 0:
                con1 = False
            elif old_m - m < 0 < old_n - n:
                con3 = False
            elif old_n - n > 0 and old_m - m > 0:
                con4 = False
            elif old_m - m > 0 > old_n - n:
                con2 = False
        for i in range(Y):
            # Up-left diagonal.
            if n - i >= 0 and m - i >= 0 and con1:
                if board[n - i][m - i] == 0:
                    if not capture:
                        possible[(n - i, m - i)] = 0
                    if board[n - i + 1][m - i + 1] == PLAYER1 or board[n - i + 1][m - i + 1] == KING1:
                        possible[(n - i, m - i)] = (n - i + 1, m - i + 1)
                        con1 = False
                elif board[n - i][m - i] == PLAYER2 or (n - i == 0 or m - i == 0):
                    con1 = False
                elif board[n - i][m - i] == PLAYER1:
                    if board[n - i - 1][m - i - 1] != 0:
                        con1 = False
            # Up-right diagonal.
            if n - i >= 0 and m + i <= Y - 1 and con2:
                if board[n - i][m + i] == 0:
                    if not capture:
                        possible[(n - i, m + i)] = 0
                    if board[n - i + 1][m + i - 1] == PLAYER1 or board[n - i + 1][m + i - 1] == KING1:
                        possible[(n - i, m + i)] = (n - i + 1, m + i - 1)
                        con2 = False
                elif board[n - i][m + i] == PLAYER2 or (n - i == 0 or m + i == Y - 1):
                    con2 = False
                elif board[n - i][m + i] == PLAYER1:
                    if board[n - i - 1][m + i + 1] != 0:
                        con2 = False
            # Down-right diagonal.
            if n + i <= Y - 1 and m + i <= Y - 1 and con4:
                if board[n + i][m + i] == 0:
                    if not capture:
                        possible[(n + i, m + i)] = 0
                    if board[n + i - 1][m + i - 1] == PLAYER1 or board[n + i - 1][m + i - 1] == KING1:
                        possible[(n + i, m + i)] = (n + i - 1, m + i - 1)
                        con4 = False
                elif board[n + i][m + i] == PLAYER2 or (n + i == Y - 1 or m + i == Y - 1):
                    con4 = False
                elif board[n + i][m + i] == PLAYER1:
                    if board[n + i + 1][m + i + 1] != 0:
                        con4 = False
            # Down-left diagonal.
            if n + i <= Y - 1 and m - i >= 0 and con3:
                if board[n + i][m - i] == 0:
                    if not capture:
                        possible[(n + i, m - i)] = 0
                    if board[n + i - 1][m - i + 1] == PLAYER1 or board[n + i - 1][m - i + 1] == KING1:
                        possible[(n + i, m - i)] = (n + i - 1, m - i + 1)
                        con3 = False
                elif board[n + i][m - i] == PLAYER2 or (n + i == Y - 1 or m - i == 0):
                    con3 = False
                elif board[n + i][m - i] == PLAYER1:
                    if board[n + i + 1][m - i - 1] != 0:
                        con3 = False
    return possible
def ob_capture(board, player):
    """Tell whether *player* has at least one capture available on *board*.

    :param board: board
    :param player: player
    :return: True if some piece of the player can capture, else False
    """
    for piece_pos in get_all_pieces(board, player):
        moves = possible_moves(piece_pos[0], piece_pos[1], board)
        # A non-zero value marks the captured piece's position.
        if any(captured != 0 for captured in moves.values()):
            return True
    return False
| [
"[email protected]"
] | |
5de963ed9a19ea27981543f32e902ba33d6b85b8 | 031a7ac86ce97c257bb8dd1b0ef8ce625a38ddbb | /synthesize.py | b5ae838d369991f3e5a2f1607bd31b04f39881f4 | [
"MIT"
] | permissive | aidiary/tacotron-pytorch | 47f36ddd53f78d313e5104c513ca0679315e4951 | 8ea9b1bb61bf753a64ff611b441326ea8c001d20 | refs/heads/master | 2020-08-03T16:20:39.433942 | 2019-10-11T03:55:44 | 2019-10-11T03:55:44 | 211,811,774 | 0 | 0 | MIT | 2019-10-07T01:19:21 | 2019-09-30T08:24:15 | Jupyter Notebook | UTF-8 | Python | false | false | 1,833 | py | import argparse
import os
import numpy as np
import torch
from model import Tacotron
from utils.text import phoneme_to_sequence
from utils.text.symbols import phonemes
from utils.audio import AudioProcessor
# --- command-line interface and global run configuration -------------------
parser = argparse.ArgumentParser()
parser.add_argument('text', type=str, help='text to generate speech')
parser.add_argument('model_path', type=str, help='path to model file')
parser.add_argument('out_path', type=str,
                    help='path to save final wav file')
# NOTE(review): type=str but default=0 (an int) -- works here because the
# value is only interpolated into the device string below, but the types
# are inconsistent; consider type=int or default='0'.
parser.add_argument('--gpu_id', type=str, default=0, help='gpu_id')
args = parser.parse_args()
print(args)
# Select the GPU named by --gpu_id when CUDA is available, else the CPU.
device = torch.device('cuda:{}'.format(args.gpu_id)
                      if torch.cuda.is_available() else 'cpu')
# Make sure the output directory exists before any wav file is written.
if not os.path.exists(args.out_path):
    os.makedirs(args.out_path, exist_ok=True)
def main():
    """Restore the trained Tacotron model and synthesize `args.text` to a wav."""
    audio_proc = AudioProcessor()
    # Build the network and restore the trained weights.
    model = Tacotron(len(phonemes)).to(device)
    checkpoint = torch.load(args.model_path)
    model.load_state_dict(checkpoint['model'])
    model.eval()
    print('Text: {}'.format(args.text))
    wav = tts(model, args.text, audio_proc)
    # Derive the output file name from the input text.
    out_file = os.path.join(args.out_path,
                            args.text.replace(' ', '_') + '.wav')
    audio_proc.save_wav(wav, out_file)
def tts(model, text, ap):
    """Synthesize *text* with *model* and return the waveform.

    :param model: trained Tacotron network (in eval mode)
    :param text: input sentence
    :param ap: AudioProcessor used to invert the spectrogram
    :return: synthesized waveform
    """
    phoneme_ids = phoneme_to_sequence(text,
                                      ['phoneme_cleaners'],
                                      language='en-us',
                                      enable_eos_bos=False)
    # Shape the id sequence into a single-item batch on the target device.
    batch = torch.from_numpy(
        np.asarray(phoneme_ids, dtype=np.int32)).unsqueeze(0).long().to(device)
    _, postnet_output, _, _ = model.inference(batch)
    spectrogram = postnet_output[0].data.cpu().numpy()
    return ap.inv_spectrogram(spectrogram.T)
# Entry point: run the synthesis only when executed as a script.
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
73b3645926d9994b439b76f08e9e33f9edde5f68 | 5c86a796c32351c2f4de1170d6f16fb2f0e18d66 | /code/generate_data.py | 91da01e5f84f18860dd9adc2c0464f1a295a3545 | [
"MIT"
] | permissive | ilkhem/WDTW | 599c0ecac460f7a3302924eb72e057e929203a79 | b8bd27a4494aa52acb45ecd2427512e1a682edb7 | refs/heads/master | 2021-01-25T05:09:48.015195 | 2017-08-02T09:06:16 | 2017-08-02T09:06:16 | 93,514,363 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | import numpy as np
def generate_data(d1=10, d2=10, d3=10, m=5, n=4):
    """Create two sequences of random normalised 3-D distributions.

    :param d1: first spatial dimension
    :param d2: second spatial dimension
    :param d3: third spatial dimension
    :param m: length of the first sequence
    :param n: length of the second sequence
    :return: float64 arrays of shape (d1, d2, d3, m) and (d1, d2, d3, n);
             each slice sums to one over the three spatial axes
    """
    raw_a = np.random.rand(d1, d2, d3, m).astype(np.float64)
    raw_b = np.random.rand(d1, d2, d3, n).astype(np.float64)
    return (raw_a / raw_a.sum(axis=(0, 1, 2)),
            raw_b / raw_b.sum(axis=(0, 1, 2)))
def generate_multiple(d1=10, d2=10, d3=10, m=5, nb=5):
    """Create *nb* sequences of *m* random normalised 3-D distributions.

    :return: float64 array of shape (d1, d2, d3, m, nb); each slice sums
             to one over the three spatial axes
    """
    data = np.random.rand(d1, d2, d3, m, nb).astype(np.float64)
    return data / data.sum(axis=(0, 1, 2))
def generate_single(d1=10, d2=10, d3=10):
    """Create one pair of normalised 3-D distributions (m = n = 1)."""
    return generate_data(d1, d2, d3, 1, 1)
def generate_nice(d1=10, d2=10, d3=10, m=4, nb=3, thresh=1e-3):
    """Generate *nb* sparse, normalised distribution sequences.

    Entries strictly below *thresh* are zeroed out, then each
    distribution is re-normalised to sum to one over the three
    spatial axes.

    :param thresh: values below this threshold are set to zero
    :return: float64 array of shape (d1, d2, d3, m, nb)
    """
    y = generate_multiple(d1, d2, d3, m, nb)
    y[y < thresh] = 0
    # Re-normalise after thresholding so every slice sums to one again.
    # (A leftover debug print of the per-slice sums was removed here.)
    y = y / np.sum(y, axis=(0, 1, 2))
    return y
| [
"[email protected]"
] | |
79d414c018b7519a529149e6461afb25073f5448 | 78144baee82268a550400bbdb8c68de524adc68f | /Production/python/Summer16v3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8_cff.py | 50ebcb86f45450555db52fd14c26ab823c5a71af | [] | no_license | tklijnsma/TreeMaker | e6989c03189b849aff2007bad22e2bfc6922a244 | 248f2c04cc690ef2e2202b452d6f52837c4c08e5 | refs/heads/Run2_2017 | 2023-05-26T23:03:42.512963 | 2020-05-12T18:44:15 | 2020-05-12T18:44:15 | 263,960,056 | 1 | 2 | null | 2020-09-25T00:27:35 | 2020-05-14T15:57:20 | null | UTF-8 | Python | false | false | 46,425 | py | import FWCore.ParameterSet.Config as cms
# Run over every event in the input files (-1 means "no limit").
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
# Primary and secondary input file lists; readFiles is filled by the
# readFiles.extend(...) call below.
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
# PoolSource reads the ROOT files listed in readFiles/secFiles.
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/00D392C1-F4E4-E811-8695-0CC47A5FC2A1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/00E6A0A7-83E5-E811-9710-0017A4770448.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/02951320-01E5-E811-83DC-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/064273C0-EAE5-E811-843E-0CC47A5FC2A5.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/06B2EFCA-49E5-E811-B758-0025B3E025B6.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0833AB12-19E5-E811-9188-0017A4771064.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/084B6F06-2BE5-E811-A7AB-002481DE49B6.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/08588660-11E5-E811-9EC6-0025B3E01E66.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0A3B07C1-18E5-E811-AAC1-6CC2173D6B10.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0C294F76-62E5-E811-94EC-002481CFB40E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0C609221-3DE5-E811-A65E-002481D2C9DE.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0E1F4407-1CE5-E811-B86B-00269E95B17C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/0E822A72-76E5-E811-BC06-0017A4770440.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/10944CB5-02E5-E811-A14F-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/126AF66A-7FE5-E811-8C05-0025B3E025B6.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/12DE6C02-A3E5-E811-A266-0CC47A5FBDC1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/14F5E22C-F4E4-E811-86E8-0017A4771078.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/16CD32D2-42E5-E811-AF73-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/182BBDAC-ECE4-E811-84F3-1CC1DE1D0AD4.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1851A550-5DE5-E811-B871-002481DE4C6E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1A54CBBF-36E5-E811-8806-90B11CBCFF5B.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1C1E5C6C-ECE4-E811-AE42-B499BAA53EC2.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1CAACE8B-46E5-E811-9289-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1CE8F3FF-82E5-E811-BEF3-6CC2173D6E60.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/1EAEF986-47E5-E811-AE76-0017A4771054.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/20513CD3-72E5-E811-8F6D-0017A477104C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/209F67BE-77E5-E811-BFBA-90B11CBCFF68.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/20E2C357-35E5-E811-9ED6-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/224284A0-F2E4-E811-979F-0017A4770460.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/240DEA15-22E5-E811-B79B-0017A4770478.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/2476BC90-EEE4-E811-A08C-0017A477104C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/24C09532-2EE5-E811-B6F0-6CC2173D44D0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/26263E92-90E5-E811-8199-002481D2495A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/266DF904-35E5-E811-96A0-0017A4771078.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/26AD8EEE-11E5-E811-A7A5-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/2E5D4D5C-23E5-E811-A0E1-00269E95B128.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/30139619-83E5-E811-89B8-6CC2173D4980.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/30995867-74E5-E811-9BEA-002481D2C9DE.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/32048D30-78E5-E811-9C41-0017A4770444.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/32E12C39-F3E4-E811-BCD3-90B11CBCFFEA.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/3401FB2E-49E5-E811-BA4C-0017A4771074.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/343B5FD8-3DE5-E811-9911-B499BAA53EC2.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/38BCFCFC-5FE5-E811-8F26-68B5996BD98E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/3A962353-75E5-E811-83A0-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/3AECF32A-86E5-E811-B776-6CC2173C4580.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/401D2D56-F0E4-E811-99B5-6CC2173CAAE0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/408D7AF8-35E5-E811-93D8-0017A4770440.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/40BC2D5D-FEE4-E811-8A99-0CC47A5FC2A5.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/4215D1FA-7FE5-E811-B6AF-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/44D279E4-47E5-E811-AA89-90B11CBCFF82.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/44F83E42-22E5-E811-A04B-6CC2173C4580.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/460A9B77-36E5-E811-B8A9-90B11CBCFFEA.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/4A3188E5-A5E5-E811-A449-0017A477047C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/4ADAC275-00E5-E811-B630-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/4E02E5CE-30E5-E811-A866-6CC2173C39E0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/4E8FEBEB-2FE5-E811-A4CE-0017A477104C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/522C93E2-31E5-E811-93C7-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/54791180-53E5-E811-872B-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/5622DEB7-EFE4-E811-B91F-1CC1DE1CF622.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/564430FF-F2E4-E811-9E30-90B11CBCFFC3.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/5C0144FF-FDE4-E811-81D0-6CC2173D6B10.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/5CDDCEFE-ECE4-E811-A511-002481DE4818.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/5CF46AAF-38E5-E811-96C0-90B11CBCFF75.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/5EE3328E-A5E5-E811-963F-90B11CBCFF75.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6048755B-1BE5-E811-9632-90B11CBCFFC3.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6216D5D6-38E5-E811-B6EA-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/627CD38B-33E5-E811-88D6-1CC1DE1D0AD4.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6406A629-2DE5-E811-A734-002481CFB40E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/640C277E-06E5-E811-8A6D-6CC2173D46A0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/64571AB6-5DE5-E811-B580-0CC47A5FBDC1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/664FD2EB-48E5-E811-9CCD-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/66EA10AA-38E5-E811-B606-047D7B416516.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6A354F49-7AE5-E811-A7D2-002481DE4818.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6C7173B3-F4E4-E811-8509-0CC47A5FC2A1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6E709697-78E5-E811-A335-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/6E7CC1CD-42E5-E811-A0C4-002481DE4818.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/70AF5274-53E5-E811-B959-0017A4771050.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/7227BEAC-35E5-E811-95CB-90B11CBCFF68.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/72595691-7CE5-E811-A271-B499BAA53EC2.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/74759B1B-21E5-E811-9127-0017A4770474.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/74F76048-A2E5-E811-8414-0CC47A5FBE25.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/78CB0CE2-11E5-E811-9932-0017A4771050.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/7A863B53-F6E4-E811-8242-002481CFB40E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/7E636A8F-22E5-E811-A802-6CC2173CAAE0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/80ED01A1-65E5-E811-816F-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/82347C78-39E5-E811-B03D-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/82AA346E-F7E4-E811-937F-00269E95ACFC.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/84891DA2-53E5-E811-A694-6CC2173C9150.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/84AB6246-11E5-E811-987C-002481CFE708.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8634588C-13E5-E811-8540-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8637B55B-2AE5-E811-987B-0025B3E01F20.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/888CEEF7-1BE5-E811-9D88-0017A4770470.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8A1BD8B4-07E5-E811-B47B-90B11CBCFF82.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8C2D6C4E-6BE5-E811-AF70-1CC1DE1D0AD4.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8C5A2804-36E5-E811-92BA-0017A4770444.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/8E54693E-78E5-E811-84F9-0025B3E01E66.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/92493FC9-5FE5-E811-96E2-002481DE49B6.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/92A5878B-26E5-E811-B6CD-0CC47A5FBDC1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/94EC528F-04E5-E811-B8A6-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/96CA7C8D-EDE4-E811-9D16-002481D2495A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/9E4B8579-EDE4-E811-AB62-6CC2173C9150.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/9E806115-4AE5-E811-A0E1-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/A43F5F7C-20E5-E811-B722-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/A84F1E54-EEE4-E811-BBF8-B499BAA6776E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/A8E464A0-2AE5-E811-B5BA-B499BAA67780.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/AA867DA2-75E5-E811-BF01-90B11CBCFFF7.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B077D6D2-27E5-E811-A39C-ECB1D79E5C40.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B0CCB0E3-0FE5-E811-B04C-002481D2C9DE.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B640CF35-EEE4-E811-A489-90B11CBCFF9C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B647030E-85E5-E811-B25D-6CC2173D6E60.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B6DBEACE-34E5-E811-8499-0017A4770460.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B6E593E0-7EE5-E811-B3E0-90B11CBCFF4E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/B8F1750E-25E5-E811-B29C-6CC2173D6140.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/BCA5D062-0EE5-E811-A892-0CC47A5FA215.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/BCA7CAD7-1FE5-E811-BC96-6CC2173C9150.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/BCCA5091-4EE5-E811-A344-0CC47A5FC495.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/BE11699B-10E5-E811-AA8A-6CC2173D44D0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/BEA56DE3-3EE5-E811-AF45-90B11CBCFF4E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/C0107F50-5DE5-E811-B815-1CC1DE1CF622.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/C06B5642-F3E4-E811-A660-002481DE49B6.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/C2502400-21E5-E811-8342-6CC2173D6E60.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/C42872CB-6EE5-E811-ADED-0CC47A5FBE25.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/C48ACBFC-09E5-E811-8D57-002481CFE834.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CA23B79A-FAE4-E811-AAEC-002481CFE888.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CC3449AF-1DE5-E811-93F2-68B5996BD98E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CC447848-F5E4-E811-A0A5-90B11CBCFF5B.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CC930CBC-07E5-E811-824F-0017A4771054.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CCB0B7B1-35E5-E811-8F4C-90B11CBCFFF7.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/CEC941E1-10E5-E811-90EA-0CC47A5FA3B9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/D01031B6-FEE4-E811-BBF5-90B11CBCFF4E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/D49681D0-54E5-E811-B6E1-6CC2173C4580.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/D6F42851-EBE4-E811-BF1F-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/D6FEEE59-76E5-E811-92E3-0017A4771078.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/D8A03193-60E5-E811-97B4-0025B3E01F20.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/DE08BAC8-EEE4-E811-B092-78E7D1E4617C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/DEA031BA-EDE4-E811-AA06-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E00F1F3C-71E5-E811-B895-0CC47A5FBE35.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E07A266F-1CE5-E811-BD77-B499BAA53EC2.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E0F557B2-13E5-E811-9CD4-0CC47A5FC495.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E28F41DD-62E5-E811-A02A-6CC2173D44D0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E425F4D3-67E5-E811-9719-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/E8C65D6A-0EE5-E811-AB6F-0CC47A5FC491.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/EA94E83A-38E5-E811-A461-0CC47A5FBE25.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/EC1A8E47-26E5-E811-994B-1CC1DE1CF622.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/EC451748-58E5-E811-909C-002481D2495A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/EED8F765-7EE5-E811-8EFB-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F0B7C842-22E5-E811-8D21-002481D2495A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F0C1FF3B-74E5-E811-828C-047D7B416516.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F0F9DB0F-41E5-E811-9447-0025B3E01E66.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F2D124AB-00E5-E811-BCC1-6CC2173D8740.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F453B461-7CE5-E811-A207-90B11CBCFFEA.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F65CA30A-2FE5-E811-9AFE-00269E95ACFC.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F6780A8A-13E5-E811-B55A-0CC47A5FBE31.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F682520C-53E5-E811-B14E-6CC2173D6E60.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/F885E7B3-EEE4-E811-B03C-6CC2173D6E60.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/FC5A3CA2-86E5-E811-8786-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/120000/FE402953-38E5-E811-AE3F-0CC47A5FBE35.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/0093C728-02E5-E811-874D-90B11CBCFFC3.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/06BE1511-FFE4-E811-A6E5-047D7B416516.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/08244040-01E5-E811-9F3C-90B11CBCFFA9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/0A349D87-1CE1-E811-A822-6CC2173C3DD0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/0A6EA2FC-4AE5-E811-A324-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/0ED97CCF-0FE1-E811-AF37-B499BAA67C7E.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/102ADDDE-43E5-E811-B4AF-90B11CBCFFA9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/1421B5CA-11E5-E811-AB9B-0CC47A5FC495.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/146FCFAF-27E1-E811-BEB3-0017A4771078.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/181BA0ED-69E5-E811-9BCA-90B11CBCFF41.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/18562DB5-02E5-E811-81D6-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/2073DEA7-1FE1-E811-8F19-0CC47A5FBE35.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/20C975A5-49E5-E811-AB8C-0CC47A5FA3B9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/28325D67-1BE5-E811-B8CF-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/28972F9C-45E5-E811-BA19-002481CFE642.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/2ED60997-07E1-E811-A35F-0CC47A5FA211.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/3247D4E2-1CE1-E811-9534-ECB1D79E5C40.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/3E9FDFAB-12E5-E811-AAF9-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/4032DADB-7FE5-E811-88E6-002481D24972.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/463209A7-4DE5-E811-8AA7-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/468D859D-3EE5-E811-8215-D485646A4E1A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/4ACC099C-14E5-E811-86FE-0CC47A5FC679.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/4AE7C9CC-35E5-E811-BCBB-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/4CF12F7A-7FE5-E811-B68A-0CC47A5FC61D.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/503705FB-02E5-E811-B1BB-90B11CBCFF5B.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/56B9A3D2-45E5-E811-83E3-90B11CBCFF5B.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/5A256F50-87E5-E811-86CC-90B11CBCFFC3.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/5AD53F7E-42E5-E811-A016-90B11CBCFFC3.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/5CB2E4EF-A1E5-E811-BC05-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/60998537-14E1-E811-A7E0-90B11CBCFFA9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/62EFE1D8-A0E5-E811-88F7-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/62F5DFA1-97E5-E811-8643-6CC2173CAAE0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/663A47DD-A1E5-E811-8F56-0CC47A5FA211.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/68A29CF2-81E5-E811-972D-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/6AA987E9-4BE5-E811-ABFD-0CC47A5FC495.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/6ACFA446-9CE5-E811-BAD3-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/6E4912A1-4CE5-E811-A027-0CC47A5FBDC1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/76712CFC-1FE5-E811-AA37-90B11CBCFFEA.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/76F9D0FB-84E5-E811-93DA-90B11CBCFF5B.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/78B6CA9E-24E5-E811-A3A9-90B11CBCFF41.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/7A10BA14-12E1-E811-8264-0CC47A5FC491.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/848807B2-82E5-E811-87AF-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/863166B1-97E5-E811-AC48-6CC2173D46A0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/869F40DD-4BE5-E811-B994-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/924102B1-16E1-E811-99E7-6CC2173D6B10.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/981B9863-7BE5-E811-9597-0CC47A5FC679.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/98C62AAE-93E0-E811-AEAA-0017A4770440.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/9E7B9592-57E5-E811-A40E-0CC47A5FC2A5.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/A43DC9EE-27E5-E811-9160-6CC2173D46A0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/A4FFFE1F-86E5-E811-830A-90B11CBCFFA9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/A6A78DD5-7EE5-E811-8EF2-0CC47A5FA3B9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/AEDAAF4A-7AE5-E811-B7DF-002481CFE642.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/B40F7203-0FE1-E811-9A6F-0CC47A5FBE25.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/B6554EC8-3FE5-E811-982A-6CC2173D6B10.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/B80C6F9B-10E5-E811-AC6A-002481D24972.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/BCD2193D-13E5-E811-AC87-0CC47A5FA3B9.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/BE26958D-00E5-E811-90A3-D485646A4E1A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/BEBBF051-5AE5-E811-A58E-6CC2173D46A0.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/C0821931-F7E0-E811-BE33-0017A477107C.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/C462B5EE-78E5-E811-AC0D-D485646A4E1A.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/C682CE34-98E5-E811-A2FE-0017A4771064.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/CC32B2CB-12E5-E811-A3F4-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/D8C00554-24E5-E811-887F-0CC47A5FC2A5.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/DA880670-10E5-E811-BD7B-002481CFE642.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/E2430B72-4BE5-E811-B29E-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/E6DADFFB-65E5-E811-BDD6-90B11CBCFFEA.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F046E2DB-71E5-E811-88F7-6CC2173D6B10.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F2470E8E-9AE5-E811-833F-90B11CBCFF68.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F255259E-97E5-E811-8568-0017A4771074.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F2F3FA46-2FE1-E811-B9E1-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F673FE2A-15E5-E811-B17C-0CC47A5FBDC1.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/F815F473-82E5-E811-B597-0CC47A5FA3BD.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/FE99BCBC-4BE5-E811-8D19-002481D24972.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/FE9D2DE8-69E5-E811-8773-6CC2173C3E80.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/FEDDB8C4-12E5-E811-9645-0CC47A5FC285.root',
'/store/mc/RunIISummer16MiniAODv3/QCD_Pt-20to30_MuEnrichedPt5_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/PUMoriond17_94X_mcRun2_asymptotic_v3-v2/270000/FEF45BE4-47E5-E811-8192-0CC47A5FC679.root',
] )
| [
"[email protected]"
] | |
884d22df40efccdab6054745feb79110fdfb3e07 | 55a9f007886340a21cab8d5a86cad74b69d789ec | /blog/forms.py | 61b370300fe2097494d84a762db7bce0de06bb28 | [] | no_license | JackSnowdon/BlogTools | e14b5c4937c35d0e29420ca644d752bde56734f1 | 7077c7922917e589b157a8d71f5172e60cebe6b6 | refs/heads/master | 2021-09-25T14:04:51.023120 | 2020-04-09T13:14:01 | 2020-04-09T13:14:01 | 248,284,246 | 0 | 0 | null | 2021-09-22T18:51:06 | 2020-03-18T16:26:41 | Python | UTF-8 | Python | false | false | 329 | py | from django import forms
from .models import *
class PostForm(forms.ModelForm):
    """ModelForm for creating or editing a blog ``Post``.

    The excluded fields are presumably maintained by the views/model
    (timestamps, author, view counter) rather than entered by the user
    — confirm against ``Post`` in ``models.py``.
    """

    class Meta:
        model = Post
        # NOTE(review): ``exclude`` implicitly exposes any field added to
        # the model later; an explicit ``fields`` whitelist would be the
        # safer convention — confirm intent before changing.
        exclude = ['created_on', 'done_by', 'last_modified', 'views']
class CommentForm(forms.ModelForm):
    """ModelForm for adding a ``Comment`` to a post.

    ``post``, ``created_date`` and ``author`` are excluded — presumably
    filled in by the view from the request context rather than the form.
    """

    class Meta:
        model = Comment
        exclude = ['post', 'created_date', 'author']
| [
"[email protected]"
] | |
d5fbac6f749489b5b63810fa4d9a276aaaab110b | 88e7780f549c51e77dda336174e5d4df9af71bb4 | /valuation_undergraduate-spring_2021/session14/target_pnr.py | c37410763d98ced344af6914400fe81293bf810e | [] | no_license | webclinic017/valuation_course | 3db0de92acd4a933fa2ea938e0c5003eef0a9a92 | 14d70cf5b5eb1eb42ae5a51f286214328c035cff | refs/heads/main | 2023-05-10T13:07:12.071847 | 2021-06-11T11:14:57 | 2021-06-11T11:14:57 | 417,762,128 | 1 | 0 | null | 2021-10-16T08:08:26 | 2021-10-16T08:08:26 | null | UTF-8 | Python | false | false | 658 | py | # https://youtu.be/uj5xrGBFf44?t=4308
# Target price and returns calculation
"""
Assume that you believe that your valuation of Con Ed ($42.30) is a fair
estimate of the value, 7.70% is a reasonable estimate of Con Ed's cost of equity
and that your expected dividends for next year (2.32*1.021) is a fair estimate,
what is the expected stock price a year from now (assuming that the market
corrects its mistake?)
"""


def expected_target_price(current_price, cost_of_equity, expected_dividends):
    """Return the expected stock price one year from now.

    The value per share is assumed to grow at the cost of equity over the
    year; the expected price is that grown value minus the dividend paid
    out during the year: ``price * (1 + coe) - dividends``.

    Args:
        current_price: today's estimated value per share.
        cost_of_equity: annual cost of equity as a decimal (e.g. 0.077).
        expected_dividends: dividends expected over the coming year.

    Returns:
        The expected price a year out (float).
    """
    target_value_per_share = current_price * (1 + cost_of_equity)
    return target_value_per_share - expected_dividends


if __name__ == "__main__":
    # Con Ed example from the lecture: $42.30 value, 7.7% cost of equity,
    # next-year dividends of 2.32 growing at 2.1%.
    print(expected_target_price(42.3, 0.077, 2.32 * 1.021))
    # 43.188379999999995
# 43.188379999999995
| [
"[email protected]"
] | |
9b640140f21c870b89db32c924866bd0f75bc9b8 | 96afeeb22fad2a78edaff62ca0b1ebbc8c0b5a53 | /app.py | 40e5fd77d9a98c7a44fa281726e1009e5df4e653 | [] | no_license | GopalTewari/Diabeties_Prediction | 8b8c31b5e4911f5a58760b82cf31d086e8df0cec | fd8b3e2049fb1ffd0d53117444a2e386331f9917 | refs/heads/master | 2022-11-29T17:06:42.051095 | 2020-08-10T08:11:18 | 2020-08-10T08:11:18 | 286,405,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,617 | py | from wsgiref import simple_server
from flask import Flask, request, app, send_file,render_template
from flask import Response
from flask_cors import CORS
from flask_cors import CORS,cross_origin
import pickle
import pandas as pd
import seaborn as sns
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os
sns.set()
# WSGI application object; all routes below are registered on this instance.
app = Flask(__name__)
@app.route("/", methods=["GET"])
@cross_origin()
def homePage():
    """Serve the landing page for GET requests on ``/``."""
    index_template = "index.html"
    return render_template(index_template)
@app.route("/predict", methods=['POST'])
def predictRoute():
try:
Pregnancies = int(request.form['Pregnancies'])
Glucose = float(request.form['Glucose'])
BloodPressure = float(request.form['BloodPressure'])
SkinThickness = float(request.form['SkinThickness'])
Insulin = float(request.form['Insulin'])
DiabetesPedigreeFunction = float(request.form['DiabetesPedigreeFunction'])
BMI = float(request.form['BMI'])
Age = int(request.form['Age'])
with open("Model/sandardScalar.sav", 'rb') as f:
scalar = pickle.load(f)
with open("Model/modelForPrediction.sav", 'rb') as f:
model = pickle.load(f)
predict = model.predict(scalar.transform([[Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age]]))
if predict[0] ==1 :
result = 'Diabetic'
else:
result ='Non-Diabetic'
print('result is ',result)
return render_template('results.html',prediction=result)
except Exception as e:
print('exception is ',e)
return Response(e)
#---------------------------------------------------------------------------------------------------------
@app.route('/csv',methods=['POST','GET']) # batch prediction: upload a CSV, render results in the web UI
@cross_origin()
def csv():
    """Batch-prediction endpoint.

    GET renders the home page. POST expects an uploaded CSV with exactly
    the 8 training feature columns; it fills missing values, predicts
    with the persisted scaler + model, writes the annotated CSV to
    ``./csv_file/`` (served by ``/download``), regenerates the two
    seaborn plots under ``./static/statistics/`` and renders ``csv.html``.

    NOTE(review): this function shadows the stdlib ``csv`` module name
    within this file — confirm nothing here needs ``import csv``.
    """
    if request.method == 'POST':
        try:
            # Read the uploaded CSV file from the form field 'upload_file'.
            uploaded_file = request.files['upload_file']
            filename = uploaded_file.filename
            # Proceed only if a file was actually supplied.
            if uploaded_file.filename != '':
                # NOTE(review): saves under the client-supplied filename in
                # the working directory — a sanitized path would be safer.
                uploaded_file.save(filename)
                data = pd.read_csv(filename)
                # Proceed only if the file has exactly the 8 expected columns.
                if len(data.columns) == 8:
                    # Fill NaN values per column with rounded means
                    # (the same imputation presumably used in training —
                    # confirm against the training pipeline).
                    data['Pregnancies'].fillna(value=round(data['Pregnancies'].mean()), inplace=True)
                    data['Glucose'].fillna(value=round(data['Glucose'].mean()), inplace=True)
                    data['BloodPressure'].fillna(value=round(data['BloodPressure'].mean()), inplace=True)
                    data['SkinThickness'].fillna(value=round(data['SkinThickness'].mean()), inplace=True)
                    data['Insulin'].fillna(value=round(data['Insulin'].mean()), inplace=True)
                    data['BMI'].fillna(value=data['BMI'].mean(), inplace=True)
                    data['DiabetesPedigreeFunction'].fillna(value=data['DiabetesPedigreeFunction'].mean(), inplace=True)
                    data['Age'].fillna(value=round(data['Age'].mean()), inplace=True)
                    # Load the persisted scaler and model from storage.
                    with open("Model/sandardScalar.sav", 'rb') as f:
                        scaler = pickle.load(f)
                    with open("Model/modelForPrediction.sav", 'rb') as f:
                        loaded_model = pickle.load(f)
                    # Delete previous result files from the csv_file folder
                    # (best effort: deletion errors are logged and ignored).
                    csv_files = './csv_file'
                    list_of_files = os.listdir(csv_files)
                    for csfile in list_of_files:
                        try:
                            os.remove("./csv_file/" + csfile)
                        except Exception as e:
                            print('error in deleting: ', e)
                    # Predict for every row and append as a new column.
                    prediction = loaded_model.predict(scaler.transform(data))
                    data['Predictions'] = prediction
                    # Save the annotated dataframe; /download serves this path.
                    result_file = './csv_file/result_output_data.csv'
                    data.to_csv(result_file)
                    # Plots for prediction analysis: class counts and
                    # prediction-vs-age scatter.
                    sns.set_style("ticks", {"xtick.major.size": 8, "ytick.major.size": 8})
                    total_pridiction = sns.catplot(x='Predictions', kind='count', data=data)
                    age_relation=sns.catplot(x='Predictions', y='Age', data=data)
                    # Delete previous graph images from the statistics folder
                    # (best effort, same as above).
                    image_files = './static/statistics'
                    list_of_files = os.listdir(image_files)
                    for imgfile in list_of_files:
                        try:
                            os.remove("./static/statistics/" + imgfile)
                        except Exception as e:
                            print('error in deleting: ', e)
                    # Save the graphs inside static/ so templates can embed them.
                    output_path_total = './static/statistics/output_prediction.png'
                    output_path_age = './static/statistics/relationship_age.png'
                    total_pridiction.savefig(output_path_total)
                    age_relation.savefig(output_path_age)
                    return render_template('csv.html')
                else:
                    return 'Error: Please Make Sure that csv file is in standard acceptable format,Please go through given Sample csv file format'
            else:
                return 'File Not Found'
        except Exception as e:
            print('The Exception message is: ', e)
            return 'something is wrong'
    else:
        return render_template('index.html')
@app.route('/uploadfile', methods=['POST', 'GET'])
@cross_origin()
def uploadfile():
    """Render the CSV upload form page."""
    upload_template = 'upload.html'
    return render_template(upload_template)
@app.route('/download')
@cross_origin()
def download_file():
    """Send the most recent batch-prediction CSV as an attachment."""
    result_csv_path = './csv_file/result_output_data.csv'
    return send_file(result_csv_path, as_attachment=True)
@app.route('/statistics', methods=['POST', 'GET'])
@cross_origin()
def stat_graph():
    """Render the statistics page that embeds the saved plot images."""
    stats_template = 'show_statistics.html'
    return render_template(stats_template)
if __name__ == "__main__":
    # Entry point: run Flask's built-in development server on localhost:8001.
    # NOTE(review): debug=True is for development only — confirm this module is
    # not the production entry point before deploying.
    #clntApp = ClientApi()
    #host = '0.0.0.0'
    #port = 5000
    #app.run(debug=True)
    app.run(host='127.0.0.1', port=8001, debug=True)
    #httpd = simple_server.make_server(host, port, app)
    #print("Serving on %s %d" % (host, port))
    #httpd.serve_forever()
"[email protected]"
] | |
df473644bee8dc05da278b05f1e4406397899161 | 7a68ad190010c01f5a681074524fe30a830e336f | /IIC/datasets/__init__.py | 0d46346884af4cd26db5eea36bf2706d9c90c662 | [
"MIT"
] | permissive | jizongFox/IIC | c9817f355cf5b3281539d515defddd59d4c98da4 | 572076d5c0c26516ff3e807f2bad4e3498ab12c1 | refs/heads/master | 2022-02-24T18:53:47.271641 | 2022-02-08T23:14:39 | 2022-02-08T23:14:39 | 182,317,472 | 0 | 0 | MIT | 2022-02-02T23:32:37 | 2019-04-19T19:47:43 | Python | UTF-8 | Python | false | false | 54 | py | from .clustering import *
from .segmentation import *
| [
"[email protected]"
] | |
dab9e5915901da7382a754421dfa3c67aac61caf | eac3bc8813c8a27c3f828c82d3506327b1a5dde8 | /neutronclient/v2_0/uos.py | 8420e0dd677c1d2e2d71872a0575dbbd53c5b2b1 | [] | no_license | CingHu/neutronclient-ustack | a21143753321426cc168b3d3430ee1a97cdbbfa9 | 21cb3a1f405f4fc598336c1a30589b346f6479a5 | refs/heads/master | 2021-01-10T18:20:16.110116 | 2015-11-27T03:58:10 | 2015-11-27T03:58:10 | 46,958,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,583 | py | # Copyright 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from neutronclient.v2_0 import APIParamsCall
from neutronclient.common import exceptions
LOG = logging.getLogger(__name__)
class UosClientMixin(object):
    """Mixin adding the UnitedStack (uos) extension calls to the Neutron
    v2.0 client: VPN users, PPTP/OpenVPN connections, router helpers,
    floating-IP helpers and resource counters.

    Relies on the host client providing ``get``/``put``/``post``/``delete``
    and ``list`` HTTP helpers.
    """

    # URL templates for the extension resources; '%s' takes the resource id.
    vpnusers_path = "/vpn/vpnusers"
    vpnuser_path = "/vpn/vpnusers/%s"
    pptpconnections_path = "/vpn/pptpconnections"
    pptpconnection_path = "/vpn/pptpconnections/%s"
    openvpnconnections_path = "/vpn/openvpnconnections"
    openvpnconnection_path = "/vpn/openvpnconnections/%s"
    uos_resources = '/uos_resources'
    uos_resource = '/uos_resources/%s'
    add_portforwarding_path = uos_resource + '/add_router_portforwarding'
    remove_portforwarding_path = uos_resource + '/remove_router_portforwarding'
    update_ratelimit_path = uos_resource + '/update_floatingip_ratelimit'
    update_registerno_path = uos_resource + '/update_floatingip_registerno'
    get_router_details_path = uos_resource + '/get_router_details'
    associate_floatingip_router_path = (uos_resource +
                                        '/associate_floatingip_router')
    swap_router_path = (uos_resource + '/swap_router')
    change_router2ha_path = (uos_resource + '/change_router_to_ha')
    change_router2nonha_path = (uos_resource + '/change_router_to_nonha')
    ping_agent_path = (uos_resource + '/ping_agent')
    get_fip_usage_path = (uos_resource + '/get_fip_usage')
    get_resource_counter_path = uos_resource + '/get_resource_counter'
    get_resource_host_counter_path = uos_resource + '/get_resource_host_counter'

    #device_id=<uuid>& uuid is 36
    # URI length consumed by a single "device_id=<uuid>&" query filter.
    device_id_filter_len = 47

    @APIParamsCall
    def list_uos_resources(self, **_params):
        """Fetch all resources of a tenant.

        If the server rejects the request because too many device_id
        filters made the URI too long, the device_id list is split into
        chunks and one request is issued per chunk, merging the results
        (deduplicated by each item's 'id').
        """
        result = {}
        try:
            result = self.get(self.uos_resources, params=_params)
        except exceptions.RequestURITooLong as uri_len_exc:
            # (Zebra) support split one request to several requests
            # The URI is too long because of too many device_id filters
            # Use the excess attribute of the exception to know how many
            result = {}
            devices = _params.pop('device_id',[])
            if isinstance(devices, basestring):
                devices = [devices]
            device_count = len(devices)
            if device_count <= 0:
                LOG.error("device_count <=0 maybe the cause is not device_id")
                return result
            #uri_len_exc.excess is the length overload
            #so max_size is the max size for device_id for
            #the first(every) request
            if self.device_id_filter_len * device_count <= uri_len_exc.excess:
                # Even dropping every device_id filter would not shorten the
                # URI enough, so the overflow must come from something else.
                LOG.error("maybe the cause is not device_id please check more")
                return result
            max_size = ((self.device_id_filter_len * device_count) -
                        uri_len_exc.excess)
            # NOTE: integer division under Python 2 (this module also uses
            # basestring); on Python 3 this would be a float and break range().
            chunk_size = max_size / self.device_id_filter_len
            # check_ids maps resource type -> set of ids already merged,
            # so overlapping chunk responses are not duplicated.
            check_ids = {}
            for i in range(0, device_count, chunk_size):
                _params['device_id'] = devices[i: i + chunk_size]
                tmp = self.get(self.uos_resources, params=_params)
                # tmp is dict
                for key, value in tmp.items():
                    if key in result:
                        for _i in value:
                            if _i['id'] not in check_ids[key]:
                                result[key].append(_i)
                                check_ids[key].add(_i['id'])
                    else:
                        result[key] = value
                        check_ids[key] = set()
                        for _i in value:
                            check_ids[key].add(_i['id'])
        return result

    @APIParamsCall
    def show_router_detail(self, router_id, **_params):
        """Fetch router's details."""
        return self.get(self.get_router_details_path % (router_id),
                        params=_params)

    @APIParamsCall
    def add_router_portforwarding(self, router_id, body=None):
        """Add a port-forwarding rule to the given router."""
        return self.put(self.add_portforwarding_path % (router_id),
                        body=body)

    @APIParamsCall
    def remove_router_portforwarding(self, router_id, body=None):
        """Remove a port-forwarding rule from the given router."""
        return self.put(self.remove_portforwarding_path % (router_id),
                        body=body)

    @APIParamsCall
    def update_rate_limit(self, floatingip_id, body=None):
        """update floatingip's rate limit."""
        return self.put(self.update_ratelimit_path % (floatingip_id),
                        body=body)

    @APIParamsCall
    def update_floatingip_registerno(self, floatingip_id, body=None):
        """update floatingip's registerno."""
        return self.put(self.update_registerno_path % (floatingip_id),
                        body=body)

    @APIParamsCall
    def list_vpnusers(self, retrieve_all=True, **_params):
        """Fetches a list of all vpnusers for a tenant."""
        # Pass filters in "params" argument to do_request
        return self.list('vpnusers', self.vpnusers_path, retrieve_all,
                         **_params)

    @APIParamsCall
    def show_vpnuser(self, vpnuser, **_params):
        """Fetches information of a certain vpnuser."""
        return self.get(self.vpnuser_path % (vpnuser), params=_params)

    @APIParamsCall
    def create_vpnuser(self, body=None):
        """Creates a new vpnuser."""
        return self.post(self.vpnusers_path, body=body)

    @APIParamsCall
    def update_vpnuser(self, vpnuser, body=None):
        """Updates a vpnuser."""
        return self.put(self.vpnuser_path % (vpnuser), body=body)

    @APIParamsCall
    def delete_vpnuser(self, vpnuser):
        """Deletes the specified vpnuser."""
        return self.delete(self.vpnuser_path % (vpnuser))

    @APIParamsCall
    def list_pptpconnections(self, retrieve_all=True, **_params):
        """Fetches a list of all pptpconnections for a tenant."""
        # Pass filters in "params" argument to do_request
        return self.list('pptpconnections', self.pptpconnections_path,
                         retrieve_all, **_params)

    @APIParamsCall
    def show_pptpconnection(self, pptpconnection, **_params):
        """Fetches information of a certain pptpconnection."""
        return self.get(self.pptpconnection_path % (pptpconnection),
                        params=_params)

    @APIParamsCall
    def create_pptpconnection(self, body=None):
        """Creates a new pptpconnection."""
        return self.post(self.pptpconnections_path, body=body)

    @APIParamsCall
    def update_pptpconnection(self, pptpconnection, body=None):
        """Updates a pptpconnection."""
        return self.put(self.pptpconnection_path % (pptpconnection),
                        body=body)

    @APIParamsCall
    def delete_pptpconnection(self, pptpconnection):
        """Deletes the specified pptpconnection."""
        return self.delete(self.pptpconnection_path % (pptpconnection))

    @APIParamsCall
    def associate_floatingip_router(self, fip_id, body=None):
        """Associate a floatingip with a router."""
        return self.put(self.associate_floatingip_router_path % (fip_id),
                        body=body)

    @APIParamsCall
    def swap_router(self, router_id, body=None):
        """Swap router's master l3 agent."""
        return self.put(self.swap_router_path % (router_id),
                        body=body)

    @APIParamsCall
    def change_router_to_ha(self, router_id, body=None):
        """Change router into HA router."""
        return self.put(self.change_router2ha_path % (router_id),
                        body=body)

    @APIParamsCall
    def change_router_to_nonha(self, router_id, body=None):
        """Change router into non HA router."""
        return self.put(self.change_router2nonha_path % (router_id),
                        body=body)

    @APIParamsCall
    def ping_agent(self, body):
        """Ping agent."""
        # The path template still needs an id; the server ignores it here.
        return self.put(self.ping_agent_path % 'dumyid',
                        body=body)

    @APIParamsCall
    def get_fip_usages(self, **_params):
        """get fip usages."""
        return self.get(self.get_fip_usage_path % 'dumyid',
                        params=_params)

    @APIParamsCall
    def get_resource_counter(self, resource, **_params):
        """get resource counter."""
        return self.get(self.get_resource_counter_path % resource,
                        params=_params)

    @APIParamsCall
    def get_resource_host_counter(self, resource_id, **_params):
        """get list counter."""
        return self.get(self.get_resource_host_counter_path % resource_id,
                        params=_params)

    @APIParamsCall
    def list_openvpnconnections(self, retrieve_all=True, **_params):
        """Fetches a list of all openvpnconnections for a tenant."""
        # Pass filters in "params" argument to do_request
        return self.list('openvpnconnections', self.openvpnconnections_path,
                         retrieve_all, **_params)

    @APIParamsCall
    def show_openvpnconnection(self, openvpnconnection, **_params):
        """Fetches information of a certain openvpnconnection."""
        return self.get(self.openvpnconnection_path % (openvpnconnection),
                        params=_params)

    @APIParamsCall
    def create_openvpnconnection(self, body=None):
        """Creates a new openvpnconnection."""
        return self.post(self.openvpnconnections_path, body=body)

    @APIParamsCall
    def update_openvpnconnection(self, openvpnconnection, body=None):
        """Updates a openvpnconnection."""
        return self.put(self.openvpnconnection_path % (openvpnconnection),
                        body=body)

    @APIParamsCall
    def delete_openvpnconnection(self, openvpnconnection):
        """Deletes the specified openvpnconnection."""
        return self.delete(self.openvpnconnection_path % (openvpnconnection))
| [
"[email protected]"
] | |
29137ff2018cec0c6b0913e8b4d19ec7b709c5db | 9d49ae4c2aaabb4f0a41d5cebc736f08c33dcbfd | /comp_classes.py | 474aeb29597ccc1c8d472a59cec3c8f417c933e7 | [] | no_license | beva-empa/CentralOpt_Classes | 900525f23453f058ccbd3984093b6cfc38d87cf9 | e7b647e0fb210ba16923613c06c7def628e26aba | refs/heads/master | 2023-03-07T21:06:14.550978 | 2021-01-30T23:09:44 | 2021-01-30T23:09:44 | 334,528,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,548 | py | import cvxpy as cp
from test import *
import mosek
import datetime as dt
import time
class PV:
    """Container for the photovoltaic (PV) unit's cvxpy decision variables,
    one entry per optimisation step (num_opt_var steps)."""

    def __init__(self, num_opt_var):
        self.P_PV = cp.Variable(num_opt_var)                 # presumably electrical output — confirm
        self.b_PV = cp.Variable(num_opt_var, boolean=True)   # on/off indicator
        self.I_PV = [0] * num_opt_var                        # plain list, filled in by the caller
        self.TempEff_PV = [0] * num_opt_var                  # plain list, filled in by the caller
class PVT:
    """Container for the hybrid photovoltaic-thermal (PVT) unit's cvxpy
    decision variables, one entry per optimisation step."""

    def __init__(self, num_opt_var):
        self.P_PVT = cp.Variable(num_opt_var)                # presumably electrical output — confirm
        self.Q_PVT = cp.Variable(num_opt_var)                # presumably thermal output — confirm
        self.Out_PVT = cp.Variable(num_opt_var)
        self.b_PVT = cp.Variable(num_opt_var, boolean=True)  # on/off indicator
        self.I_PVT = [0] * num_opt_var                       # plain list, filled in by the caller
        self.TempEff_PVT = [0] * num_opt_var                 # plain list, filled in by the caller
class mCHP:
    """Container for the micro-CHP unit's cvxpy decision variables,
    one entry per optimisation step."""

    def __init__(self, num_opt_var):
        self.Out_mCHP = cp.Variable(num_opt_var)
        self.P_mCHP = cp.Variable(num_opt_var)               # presumably electrical output — confirm
        self.Q_mCHP = cp.Variable(num_opt_var)               # presumably thermal output — confirm
        self.C_mCHP = cp.Variable(num_opt_var)               # presumably cost — confirm
        self.F_mCHP = cp.Variable(num_opt_var)               # presumably fuel — confirm
        self.b_mCHP = cp.Variable(num_opt_var, boolean=True) # on/off indicator
class CHP:
    """Container for the CHP unit's cvxpy decision variables.

    The w*_CHP continuous variables and b*/yon/zoff booleans look like
    piecewise-linearisation weights and commitment (start/stop) indicators
    — confirm against the constraint builder that uses them.
    """

    def __init__(self, num_opt_var):
        self.C_CHP = cp.Variable(num_opt_var)
        self.F_CHP = cp.Variable(num_opt_var)
        self.P_CHP = cp.Variable(num_opt_var)
        self.Q_CHP = cp.Variable(num_opt_var)
        self.w11_CHP = cp.Variable(num_opt_var)
        self.w12_CHP = cp.Variable(num_opt_var)
        self.w13_CHP = cp.Variable(num_opt_var)
        self.w14_CHP = cp.Variable(num_opt_var)
        self.w21_CHP = cp.Variable(num_opt_var)
        self.w22_CHP = cp.Variable(num_opt_var)
        self.w23_CHP = cp.Variable(num_opt_var)
        self.w24_CHP = cp.Variable(num_opt_var)
        # Scalar (length-1) variables, unlike the per-step vectors above.
        self.R_CHP = cp.Variable(1)
        self.D_CHP = cp.Variable(1)
        self.b_CHP = cp.Variable(num_opt_var, boolean=True)
        self.b1_CHP = cp.Variable(num_opt_var, boolean=True)
        self.b2_CHP = cp.Variable(num_opt_var, boolean=True)
        self.yon_CHP = cp.Variable(num_opt_var, boolean=True)
        self.zoff_CHP = cp.Variable(num_opt_var, boolean=True)
        self.ysum_CHP = cp.Variable(num_opt_var)
        self.zsum_CHP = cp.Variable(num_opt_var)
class GSHP:
    """Decision variables for the ground-source heat pump unit."""

    def __init__(self, num_opt_var):
        # Per-step on/off indicator.
        self.b_GSHP = cp.Variable(num_opt_var, boolean=True)
        # Two independent continuous per-step profiles (P and Q).
        self.P_GSHP, self.Q_GSHP = (
            cp.Variable(num_opt_var) for _ in range(2)
        )
class GB:
    """Container for the gas boiler's cvxpy decision variables; the w*/b*
    pairs look like piecewise-linearisation weights and segment selectors
    — confirm against the constraint builder."""

    def __init__(self, num_opt_var):
        self.F_GB = cp.Variable(num_opt_var)
        self.C_GB = cp.Variable(num_opt_var)
        self.Q_GB = cp.Variable(num_opt_var)
        self.w0_GB = cp.Variable(num_opt_var)
        self.w1_GB = cp.Variable(num_opt_var)
        self.w2_GB = cp.Variable(num_opt_var)
        self.w3_GB = cp.Variable(num_opt_var)
        self.w4_GB = cp.Variable(num_opt_var)
        self.b_GB = cp.Variable(num_opt_var, boolean=True)
        self.b1_GB = cp.Variable(num_opt_var, boolean=True)
        self.b2_GB = cp.Variable(num_opt_var, boolean=True)
        self.b3_GB = cp.Variable(num_opt_var, boolean=True)
        self.b4_GB = cp.Variable(num_opt_var, boolean=True)
class Heat_Storage:
    """Decision variables for the thermal storage unit: charge/discharge/
    total profiles plus mutually-exclusive mode indicators."""

    def __init__(self, num_opt_var):
        # Continuous per-step heat profiles.
        for attr in ("Q_StorageCh", "Q_StorageDc", "Q_StorageTot"):
            setattr(self, attr, cp.Variable(num_opt_var))
        # Boolean charge / discharge mode flags.
        for attr in ("b_StorageCh", "b_StorageDc"):
            setattr(self, attr, cp.Variable(num_opt_var, boolean=True))
class Elec_Storage:
    """Container for the battery's cvxpy decision variables; the w*/p*
    groups look like piecewise-linearisation terms for the charge and
    discharge curves — confirm against the constraint builder."""

    def __init__(self, num_opt_var):
        self.C_Battery = cp.Variable(num_opt_var)
        self.P_BatteryCh = cp.Variable(num_opt_var)
        self.P_BatteryDc = cp.Variable(num_opt_var)
        self.P_BatteryTot = cp.Variable(num_opt_var)
        self.w1_Battery = cp.Variable(num_opt_var)
        self.w2_Battery = cp.Variable(num_opt_var)
        self.w3_Battery = cp.Variable(num_opt_var)
        self.w4_Battery = cp.Variable(num_opt_var)
        self.p1_BatteryCh = cp.Variable(num_opt_var)
        self.p2_BatteryCh = cp.Variable(num_opt_var)
        self.p3_BatteryCh = cp.Variable(num_opt_var)
        self.p4_BatteryCh = cp.Variable(num_opt_var)
        self.p1_BatteryDc = cp.Variable(num_opt_var)
        self.p2_BatteryDc = cp.Variable(num_opt_var)
        self.p3_BatteryDc = cp.Variable(num_opt_var)
        self.p4_BatteryDc = cp.Variable(num_opt_var)
        # Mutually-exclusive charge / discharge mode flags.
        self.b_BatteryCh = cp.Variable(num_opt_var, boolean=True)
        self.b_BatteryDc = cp.Variable(num_opt_var, boolean=True)
class Elec_Grid:
    """Container for the grid-exchange cvxpy decision variables: cost,
    import/export power, slack terms, and direction indicators."""

    def __init__(self, num_opt_var):
        self.C_Grid = cp.Variable(num_opt_var)
        self.P_GridIn = cp.Variable(num_opt_var)
        self.P_GridOut = cp.Variable(num_opt_var)
        self.P_Slack = cp.Variable(num_opt_var)
        self.Q_Slack = cp.Variable(num_opt_var)
        # Mutually-exclusive feed-in / draw direction flags.
        self.b_GridIn = cp.Variable(num_opt_var, boolean=True)
        self.b_GridOut = cp.Variable(num_opt_var, boolean=True)
# num_opt_var = 24
# cost = 0
# constr = []
# I_Solar = cp.Variable(num_opt_var)
# Pmax_PV = 0
# d_PV = 1
# P_Comp = cp.Variable(num_opt_var)
# Eff_PV = 1
# TempEff_PV = 1
# d = PV(I_Solar, num_opt_var, Pmax_PV, d_PV)
# print(d.I_PV)
#
# beta = 0.003
# d_PV = 0.15
# d_PVT = 0.18
# COP = 4.5
# C_Fuel = 0.115
# Tstc = 25
# Tnoct = 48.3
# Tstd = 20
# time_start = dt.datetime(2018, 1, 1, 0, 0, 0)
# time_end = dt.datetime(2018, 1, 1, 3, 0, 0)
# time_now = time_start
# start_time = dt.datetime(2018, 1, 1)
# num_opt_var = 24
# constr = []
# Pmax_PV = 2540
# Eff_PV = 0.3
# Pmin_PV = 0
# demand_data = get_data(start_time)
# Ta = demand_data.loc[time_now: time_now + dt.timedelta(hours=23), 'temp'].values.tolist()
# I_Solar = demand_data.loc[time_now: time_now + dt.timedelta(hours=23), 'solar_roof'].values.tolist()
# P_Demand = demand_data.loc[time_now: time_now + dt.timedelta(hours=23), 'elec_1'].values.tolist()
# li = []
# li.append(PV(num_opt_var))
#
#
# print(li[0].P_PV)
#
# for t in range(num_opt_var):
# li[0].I_PV[t] = I_Solar[t] * (Pmax_PV / d_PV)
# li[0].TempEff_PV[t] = 1 + ((-beta) * ((Ta[t] - Tstc) + (Tnoct - Ta[t]) * (I_Solar[t] / 0.8)))
#
# for t in range(num_opt_var):
#
# constr += [li[0].P_PV[t] >= 0,
# li[0].P_PV[t] <= li[0].b_PV[t] * Eff_PV * li[0].TempEff_PV[t] * li[0].I_PV[t]]
#
# print('ghb')
#
# C_Grid = cp.Variable(num_opt_var)
# P_GridIn = cp.Variable(num_opt_var)
# P_GridOut = cp.Variable(num_opt_var)
#
# R_GridOut = demand_data.loc[time_now: time_now + dt.timedelta(hours=23), 'el_tariff'].values.tolist()
# R_GridIn = demand_data.loc[time_now: time_now + dt.timedelta(hours=23), 'feed_in_tariff'].values.tolist()
#
# b_GridIn = cp.Variable(num_opt_var, boolean=True)
# b_GridOut = cp.Variable(num_opt_var, boolean=True)
#
# cost = 0
# for t in range(num_opt_var):
# # Demand
# cost += C_Grid[t]
# constr += [P_Demand[t] == (li[0].P_PV[t] + P_GridOut[t] - P_GridIn[t]),
# b_GridIn[t] + b_GridOut[t] <= 1,
# P_GridIn[t] >= 0, P_GridIn[t] <= 10000000000000 * b_GridIn[t],
# P_GridOut[t] >= 0, P_GridOut[t] <= 10000000000000 * b_GridOut[t],
# C_Grid[t] == R_GridOut[t] * P_GridOut[t] - R_GridIn[t] * P_GridIn[t]]
#
# # Solve with mosek or Gurobi
# problem = cp.Problem(cp.Minimize(cost), constr)
# problem.solve(solver=cp.MOSEK, verbose=True, save_file='opt_diagnosis.opf',
# mosek_params={mosek.iparam.intpnt_solve_form: mosek.solveform.dual,
# mosek.dparam.optimizer_max_time: 100.0})
# print('test')
# class test1:
# def __init__(self, cost1):
# self.cost1 = cost1 + 5
#
# cost = 5
# d = test1(cost)
# cost = d.cost1
# print(cost)
| [
"[email protected]"
] | |
bae99138058bb1e40dcd3bca86b40f95229b0410 | 148d9ebf9920f4943c63316a18646b7e9ecb5ff1 | /working_files/build_ALS2.py | 57d74214333f83a17898455fdcb5aa2a8c180306 | [
"MIT"
] | permissive | CoraJung/book-recommendation-system | 9b5f75243d24c77053e0118c5534f51d024758d0 | b526b8eac516bb72dabe6217b32d113e5541e798 | refs/heads/main | 2023-01-31T21:46:17.453503 | 2020-12-22T04:46:09 | 2020-12-22T04:46:09 | 323,443,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 317 | py | def build_ALS(X_train, rank=10, maxIter=10, regParam=0.1):
from pyspark.ml.recommendation import ALS
als = ALS(rank=rank, maxIter=maxIter, regParam=regParam, seed=42)
train = X_train.select(['user_id', 'book_id', 'rating']).toDF('user', 'item', 'rating')
model = als.fit(train)
return model
| [
"[email protected]"
] | |
88bbb17181d92b4ddca10a82b218dc98ed361eab | 9eb380a2d27a2ea8d1f8017281e6bafcdf143c4d | /while2.py | 45f0276ab9d9f740f90479ae1f1d057fb8a459e7 | [] | no_license | 1617193103/1617193103 | 8b4163a5023702f94c58418d842b6eba8411f5e5 | 4906b4a4a1b3641274af2a019b805df71adfc04a | refs/heads/master | 2021-09-03T18:34:45.242425 | 2018-01-11T04:12:05 | 2018-01-11T04:12:05 | 109,072,661 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | i = 1
while i < 10:
i += 1
if i % 2 > 0:
continue
print i
i = 1
while 1:
print i
i += 1
if i > 10:
break
| [
"[email protected]"
] | |
d9b7a3b1c81ca2dd45e8e6f0321506e014f68742 | e0946716de0780ce3a2e24220c60e9cca6bbcd0d | /ExtractData.py | 4dc2398061b412d9c0c51f262f087c1b4a51eae8 | [] | no_license | dubeamit/project | 36fe2babd4f0c6702245ade76385918b3167dd7d | a73b904546dfe961694278d0b5a191a6e5450a6a | refs/heads/main | 2023-06-16T08:33:16.626345 | 2021-07-11T14:36:00 | 2021-07-11T14:36:00 | 384,933,378 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,221 | py |
import cv2
import pytesseract
import os
import preprocessing
import matplotlib.pyplot as plt
class ExtractData:
    """Extract vehicle spec fields (body type and length/width/height) from
    OCR'd page images named page_1.jpg .. page_<filelimit-1>.jpg.

    Results accumulate in ``self.data_dict``; every field starts as the
    sentinel string 'NaN' until some page supplies a value.
    """

    # Body types recognised in the OCR text, checked in this order.
    _BODY_TYPES = ('suv', 'sedan', 'hatchback', 'muv')

    def __init__(self):
        self.data_dict = {}
        self.data_dict['type'] = 'NaN'
        self.data_dict['length'] = 'NaN'
        self.data_dict['width'] = 'NaN'
        self.data_dict['height'] = 'NaN'

    def get_info(self, line):
        """Return the first four digits appearing in *line*, or 'NaN'."""
        digits = ''.join(ch for ch in line if ch.isdigit())
        if digits:
            return digits[:4]
        return 'NaN'

    def _needs_value(self, key):
        """True while *key* is unset or holds fewer than four digits.

        Bug fix: the original width/height branches compared
        len(data_dict['length']) instead of their own key (copy-paste bug),
        so a short width/height value was never refreshed.
        """
        return self.data_dict[key] == 'NaN' or len(self.data_dict[key]) < 4

    def _store_dimensions(self, fragment):
        """Store length/width/height from the digit run 'LLLLWWWWHHHH'.

        Bug fix: the original indexed data[812] (an IndexError silently
        swallowed by a bare ``except: pass``), so height was never filled
        from the combined L x W x H lines. String slices cannot raise, so
        the try/except is no longer needed.
        """
        data = ''.join(ch for ch in fragment if ch.isdigit())
        self.data_dict['length'] = data[:4] if data[:4] else 'NaN'
        self.data_dict['width'] = data[4:8] if data[4:8] else 'NaN'
        self.data_dict['height'] = data[8:12] if data[8:12] else 'NaN'

    def extract_data(self, filelimit):
        """OCR pages 1..filelimit-1, fill self.data_dict, delete the page
        images, and return the dict."""
        for i in range(1, filelimit):
            filename = "page_" + str(i) + ".jpg"
            img = cv2.imread(filename, 0)  # read as grayscale
            # Clean the scan up before OCR.
            img = preprocessing.get_binary(img)
            img = preprocessing.blur(img)
            img = preprocessing.erode(img)
            img = preprocessing.dilate(img)
            text = str(pytesseract.image_to_string(img))
            # Re-join words that were hyphenated across line breaks.
            text = text.replace('-\n', '')
            if self.data_dict['type'] == 'NaN':
                lowered = text.lower()
                for body_type in self._BODY_TYPES:
                    if body_type in lowered:
                        self.data_dict['type'] = body_type
                        break
            for word in text.split('\n'):
                word = word.lower()
                if 'length x width x height' in word:
                    print('line', word)
                    self._store_dimensions(
                        word[word.find('length x width x height') + 23:])
                elif ('overall length' in word or 'length' in word) and self._needs_value('length'):
                    self.data_dict['length'] = self.get_info(word)
                elif ('overall width' in word or 'width' in word) and self._needs_value('width'):
                    self.data_dict['width'] = self.get_info(word)
                elif ('overall height' in word or 'height' in word) and self._needs_value('height'):
                    self.data_dict['height'] = self.get_info(word)
                elif 'l x w x h' in word:
                    print('line', word)
                    self._store_dimensions(word[word.find('l x w x h') + 9:])
        # Remove the processed page images.
        os.system('rm *.jpg')
        return self.data_dict
| [
"[email protected]"
] | |
df01e912260575aa91cd16ef76a04c578a104d73 | 8ef36a455c2e2f6fdc2d9d98ad56cd5e0a77aa70 | /do/urls.py | c17580bbebb2ef999cbcdd78eb57f6fd39e3e722 | [
"MIT"
] | permissive | esseti/dododo-dadada | 29ae43d05d756a2d55fd804955b0dcee06e2519b | 61cee321a7d5ddb88ca81af832a5e1c9c6ee94da | refs/heads/master | 2020-04-05T16:48:41.046846 | 2014-04-04T15:40:33 | 2014-04-04T15:40:33 | 13,617,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | from django.contrib.auth.decorators import login_required
from django.views.generic.base import TemplateView
__author__ = 'stefanotranquillini'
from django.conf.urls import patterns, include, url
from do import views
# URL routes for the "do" app (legacy django.conf.urls.patterns style).
urlpatterns = patterns('',
                       # Examples:
                       url(r'^todo/$', login_required(views.ToDo.as_view()), name='todo'),
                       url(r'^logout/$',views.logout_view, name='logout'),
                       url(r'^accounts/login/',TemplateView.as_view(template_name="login.html"), name='login'),
                       url(r'^task/(?P<pk>\d+)/done/$', views.Done, name='done'),
                       url(r'^$', TemplateView.as_view(template_name="home.html"), name='home'),
                       # url(r'^addtask/', views.AddTask, name="create" ),
                       # url(r'^tasks/', views.TaskList.as_view(), name="list" ),
                       # url(r'^percorso/$', views.percorso, name='percorso'),
                       # url(r'^stazione/$', views.stazione, name='stazione'),
                       # url(r'^trenitalia/', include('trenitalia.foo.urls')),
                       # Uncomment the admin/doc line below to enable admin documentation:
                       # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
                       # Uncomment the next line to enable the admin:
                       # url(r'^admin/', include(admin.site.urls)),
                       )
"[email protected]"
] | |
a2eb44ec4c93547c472f83caaf94adb5a266662d | 9ee1bd21df57e6c282c9df0d0e412946550de0c5 | /Task3.py | b5087da26865af2f8f42fa70fdd776f29241ea08 | [] | no_license | anukrit01/Udacity_Tasks | fd7c3d9c79d51581a430269f9332d35338732f2d | ffab10730b59472c66c881ffe30b6d64f8928e24 | refs/heads/master | 2022-09-13T10:56:23.818293 | 2020-06-03T04:28:02 | 2020-06-03T04:28:02 | 268,010,124 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,374 | py | """
Read file into texts and calls.
It's ok if you don't understand how to read files.
"""
import csv
# Load both CSVs fully into memory as lists of rows.
with open('texts.csv', 'r') as f:
    reader = csv.reader(f)
    texts = list(reader)

with open('calls.csv', 'r') as f:
    reader = csv.reader(f)
    calls = list(reader)  # each row: [caller, receiver, time, duration]
"""
TASK 3:
(080) is the area code for fixed line telephones in Bangalore.
Fixed line numbers include parentheses, so Bangalore numbers
have the form (080)xxxxxxx.)
Part A: Find all of the area codes and mobile prefixes called by people
in Bangalore.
- Fixed lines start with an area code enclosed in brackets. The area
codes vary in length but always begin with 0.
- Mobile numbers have no parentheses, but have a space in the middle
of the number to help readability. The prefix of a mobile number
is its first four digits, and they always start with 7, 8 or 9.
- Telemarketers' numbers have no parentheses or space, but they start
with the area code 140.
Print the answer as part of a message:
"The numbers called by people in Bangalore have codes:"
<list of codes>
The list of codes should be print out one per line in lexicographic order with no duplicates.
Part B: What percentage of calls from fixed lines in Bangalore are made
to fixed lines also in Bangalore? In other words, of all the calls made
from a number starting with "(080)", what percentage of these calls
were made to a number also starting with "(080)"?
Print the answer as a part of a message::
"<percentage> percent of calls from fixed lines in Bangalore are calls
to other fixed lines in Bangalore."
The percentage should have 2 decimal digits
"""
# Part A
# Collect every area code / mobile prefix dialled by Bangalore (080) callers.
prefixes = []
i = 0       # Bangalore-to-Bangalore fixed-line calls
total = 0   # all calls made from Bangalore fixed lines
for caller, reciever, time, duration in calls:
    if caller.startswith('(080)'):
        total += 1
        # Bug fix: match the exact '(080)' code — the previous '(080' prefix
        # also matched other area codes such as '(0801)'.
        if reciever.startswith('(080)'):
            i += 1
        if reciever.startswith('('):
            # Fixed line: the area code is everything between the parentheses.
            prefixes.append(reciever.split(')')[0][1:])
        elif reciever.startswith('140'):
            # Telemarketer numbers all share the 140 code.
            prefixes.append('140')
        else:
            # Mobile number: the prefix is its first four digits.
            prefixes.append(reciever[0:4])
# Deduplicate and print in lexicographic order.
prefixes = list(set(prefixes))
prefixes.sort()
print("The numbers called by people in Bangalore have codes:")
for codes in prefixes:
    print(codes)
# Part B
# Guard against a dataset containing no Bangalore fixed-line callers.
percentage = (i / total) * 100 if total else 0.0
print("{:.2f} percent of calls from fixed lines in Bangalore are calls to other fixed lines in Bangalore.".format(percentage))
"[email protected]"
] | |
a23ad0018a553812c8ccd7276151d57eebb62e2b | a0ed3890b44174f8d9b9d9b622737e518d8f4022 | /checkio_sendgrid/settings.py | 92bf3257d0cceb7d6f7e741a1b65adb24351451c | [
"MIT"
] | permissive | CheckiO/checkio-sendgrid | fc19ef4581252f42a59aa66c66bee711a036218a | 420e6694d094ffb6ec4c554425317a4e88e714f2 | refs/heads/master | 2020-04-19T22:54:53.052519 | 2015-05-18T14:51:31 | 2015-05-18T14:51:31 | 35,486,673 | 0 | 0 | null | 2015-05-18T14:51:31 | 2015-05-12T12:30:58 | Python | UTF-8 | Python | false | false | 170 | py | from django.conf import settings
# Timeout for SendGrid requests (presumably seconds — confirm at the call
# site); overridable via the SENDGRID_DEFAULT_TIMEOUT Django setting.
DEFAULT_TIMEOUT = getattr(settings, 'SENDGRID_DEFAULT_TIMEOUT', 10)
# Cap on users added per operation (presumably a batch-size limit — confirm);
# overridable via the SENDGRID_USER_ADD_LIMIT Django setting.
USER_ADD_LIMIT = getattr(settings, 'SENDGRID_USER_ADD_LIMIT', 1000)
| [
"[email protected]"
] | |
737229ad3167d7291f0ed6d47df9eff02ae43ba0 | e7d0d662df63c511fce474ce649696e64dc16f32 | /menu/translation.py | 8841d9326156c2c0c65a6d46514ac632414a1a02 | [] | no_license | jazdelu/minibobi | d78bb8e37ec3cbbc28a0e1af2647baff814ec1d2 | 4f49f893b753294ee22c8a126dd100dd7ba6ee71 | refs/heads/master | 2021-01-18T22:32:41.491151 | 2016-11-30T15:13:06 | 2016-11-30T15:13:06 | 19,132,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | from modeltranslation.translator import translator, TranslationOptions
from menu.models import *
class MenuItemTranslationOptions(TranslationOptions):
    """django-modeltranslation options for MenuItem: translate the 'name'
    field, with English and Simplified Chinese both required."""
    fields = ('name',)
    required_languages = ('en','zh-cn')

# Register so modeltranslation generates per-language fields for MenuItem.
translator.register(MenuItem,MenuItemTranslationOptions)
"[email protected]"
] | |
4ee2547dd2aee7dc4bb0cd7c7161b26457304d1a | af3f8f948e181276286d1f417fd7632826febc93 | /Publicar_trabajo/migrations/0041_auto_20190927_0013.py | 5454f8ce74c8457d31c24e58d5d8ab1cf89b2948 | [] | no_license | montecinosd/Proyecto_de_titutlo | c729e01b9ff8e751b0d72e2282360630edadeba9 | ab7f503f4ee9cf37a0e54437744c19e19aa1d2a2 | refs/heads/master | 2022-12-13T02:35:17.103736 | 2020-01-29T02:02:40 | 2020-01-29T02:02:40 | 173,991,278 | 0 | 0 | null | 2022-12-08T07:02:36 | 2019-03-05T17:30:51 | CSS | UTF-8 | Python | false | false | 1,068 | py | # Generated by Django 2.0.4 on 2019-09-27 03:13
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration re-pinning the default of each model's
    'Hora' TimeField.

    NOTE(review): each default is a datetime frozen at makemigrations time
    (2019-09-27), not "now" at insert time — presumably the models use a
    non-callable default; harmless here since migration files are historical.
    """

    dependencies = [
        ('Publicar_trabajo', '0040_auto_20190927_0012'),
    ]

    operations = [
        migrations.AlterField(
            model_name='calificaciones',
            name='Hora',
            field=models.TimeField(default=datetime.datetime(2019, 9, 27, 0, 13, 44, 47026)),
        ),
        migrations.AlterField(
            model_name='historial_trabajo',
            name='Hora',
            field=models.TimeField(default=datetime.datetime(2019, 9, 27, 0, 13, 44, 48961)),
        ),
        migrations.AlterField(
            model_name='postulantes',
            name='Hora',
            field=models.TimeField(default=datetime.datetime(2019, 9, 27, 0, 13, 44, 44118)),
        ),
        migrations.AlterField(
            model_name='trabajo_acordado',
            name='Hora',
            field=models.TimeField(default=datetime.datetime(2019, 9, 27, 0, 13, 44, 45684)),
        ),
    ]
"[email protected]"
] | |
1993897c753893421095630b3216822e7421ea12 | a07c95d4d9c680e3331a9098e54f01931fbc84e1 | /rougetest/bleu.py | d4518d836b654f03832177e7071624178176d63e | [] | no_license | cym1021/Citation_Recommendation | f16bad3bc39e4254bf36a15658d7a759146c7aa6 | 6f0d03a360f3ed5b56eac7568486335deb344df2 | refs/heads/master | 2020-04-08T04:15:29.873977 | 2018-11-25T06:08:02 | 2018-11-25T06:08:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,468 | py | import math
import rougetest.common as common
class Bleu(object):
    """Incremental BLEU scorer: feed (candidate, reference) pairs with
    add_inst(), then read the corpus-level score from get_score()."""

    def __init__(self, n_size=2):
        # match_ngram[n] / candi_ngram[n]: matched vs. total candidate
        # n-gram counts for each n-gram size n.
        self.match_ngram = {}
        self.candi_ngram = {}
        # Accumulated reference (r) and candidate (c) lengths for the
        # brevity penalty.
        self.bp_r = 0
        self.bp_c = 0
        self.n_size = n_size

    def add_inst(self, cand: str, ref: str):
        """Update the n-gram and length tallies from one instance.

        Arguments:
            cand {str} -- predicted answer
            ref {str} -- reference answer
        """
        for n_size in range(self.n_size):
            self.count_ngram(cand, ref, n_size + 1)
        self.count_bp(cand, ref)

    def count_ngram(self, cand: str, ref: str, n_size: int):
        """Count overlapping n-grams of size *n_size* and accumulate them.

        Arguments:
            cand {str} -- predicted answer
            ref {str} -- reference answer
            n_size {int} -- n-gram size
        """
        cand_ngram = common.get_ngram(cand, n_size)
        ref_ngram = common.get_ngram(ref, n_size)
        if n_size not in self.match_ngram:
            self.match_ngram[n_size] = 0
            self.candi_ngram[n_size] = 0
        # print("cand_ngram:",cand_ngram)
        # print("ref_ngram:",ref_ngram)
        match_size, cand_size = common.get_match_size(cand_ngram, ref_ngram)
        self.match_ngram[n_size] += match_size
        self.candi_ngram[n_size] += cand_size

    def count_bp(self, cand: str, ref: str):
        """Accumulate the candidate (c) and reference (r) lengths used by
        the brevity-penalty term.

        Arguments:
            cand {str} -- predicted answer
            ref {str} -- reference answer
        """
        self.bp_c += len(cand)
        self.bp_r += len(ref)

    def get_score(self) -> float:
        """Compute and return the BLEU score from the accumulated counts.

        Returns:
            bleu_score {float} -- BLEU score
        """
        # Modified n-gram precisions, one per n-gram size.
        prob_list = [
            self.match_ngram[n_size + 1] / float(self.candi_ngram[n_size + 1])
            for n_size in range(self.n_size)
        ]
        # Geometric mean of the precisions.
        bleu_score = prob_list[0]
        for n in range(1, self.n_size):
            bleu_score *= prob_list[n]
        bleu_score = bleu_score ** (1. / float(self.n_size))
        # Brevity penalty: exp(min(1 - r/c, 0)) — equals 1 when the
        # candidates are at least as long as the references.
        bp = math.exp(min(1 - self.bp_r / float(self.bp_c), 0))
        bleu_score = bp * bleu_score
        # print('bleu score: {}'.format(bleu_score))
        return bleu_score
| [
"[email protected]"
] | |
9cb28de1e9af79ca0a1490563bc7c6c981e70d4f | 2a4b2efeb11e700c7cc31b94f4503009b2edc6ae | /problem_111.py | d35dbbbb29ddcadf3670bddd033f3e1d4f4a5022 | [] | no_license | SeanLau/leetcode | 66b8daf2d5bfe720015e6f318f217c8892807f97 | 16b6fc4247c91a919d38bf18835f10fc29fccca7 | refs/heads/master | 2020-04-07T12:54:55.282465 | 2018-04-29T09:31:12 | 2018-04-29T09:31:12 | 124,213,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Find the minimum depth of a binary tree.
# Approach 1: a level-order traversal finds the shortest path.
from collections import deque
# Definition for a binary tree node.
# Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""

    def __init__(self, x):
        self.val = x
        self.left = None   # left child (TreeNode or None)
        self.right = None  # right child (TreeNode or None)
class Solution:
    def minDepth(self, root):
        """Return the depth of the shallowest leaf via level-order BFS.

        Improvement over the original: only the current frontier is kept,
        instead of appending every visited level to a growing list.

        :type root: TreeNode
        :rtype: int
        """
        if not root:
            return 0
        level = [root]
        depth = 1
        while level:
            nxt = []
            for node in level:
                # The first leaf found on a level gives the minimum depth.
                if node.left is None and node.right is None:
                    return depth
                if node.left:
                    nxt.append(node.left)
                if node.right:
                    nxt.append(node.right)
            level = nxt
            depth += 1

    def minDepth2(self, root):
        """Same BFS using a deque, counting one level per outer pass."""
        if not root:
            return 0
        depth = 1
        bfs = deque([root])
        while bfs:
            for _ in range(len(bfs)):
                node = bfs.popleft()
                if not node.left and not node.right:
                    return depth
                if node.left:
                    bfs.append(node.left)
                if node.right:
                    bfs.append(node.right)
            depth += 1
if __name__ == '__main__':
    # Build the small symmetric sample tree and print its minimum depth.
    root = TreeNode(1)
    root.left, root.right = TreeNode(2), TreeNode(2)
    root.left.left, root.left.right = TreeNode(3), TreeNode(4)
    root.right.left, root.right.right = TreeNode(4), TreeNode(3)
    print(Solution().minDepth(root))
"[email protected]"
] | |
be714139f5c6cd5dee96dfbaafa20e9b8cbf182f | 2704813c127d1351ddc2772b22333cf65ff1957f | /linea.py | 50b575678a1542d5683abdb616d9a2b6c622f953 | [] | no_license | GabrielZarate/EjemploPython | cb0f3ba7a382e4c173ba8dcd1fa65d75227263c4 | 8204cf1fcee6f47d2cf1ce341c72cd9cdf25cf2e | refs/heads/master | 2020-08-03T09:22:29.393251 | 2019-10-28T02:21:59 | 2019-10-28T02:21:59 | 211,700,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,654 | py | import math
class Linea:
    """One parsed assembler line: label (etiqueta), command (comando) and
    argument (argumento).

    Value prefix markers: '#' immediate, '@' octal, '$' hexadecimal,
    '%' binary, '´' ASCII character literal; no marker means decimal.

    Fixes over the original:
      * retornarBase answered "DECIMAL" when 2-3 different radix markers were
        mixed in one value; any ambiguous mix now yields "NULL".
      * ASCII ('´') values were only handled by retornarValorHex; all four
        conversion methods now accept them uniformly.
    """

    # Every marker character that may decorate a value.
    _MARCADORES = "#@$%´"

    def __init__(self):
        # All three fields default to the sentinel string "NULL".
        self.etiqueta = "NULL"
        self.comando = "NULL"
        self.argumento = "NULL"

    def toStr(self):
        """Return a one-line debug representation of the parsed line."""
        return ("LECTURA " + self.etiqueta + " " + self.comando + " " + self.argumento)

    @staticmethod
    def _normalizar(valor):
        # Empty string, a single space or the literal "NULL" all mean "no value".
        return "NULL" if valor in ("", "NULL", " ") else valor

    def asignarValores(self, etiquetaEntrada, comandoEntrada, argumentoEntrada):
        """Assign the three fields, normalizing empty/blank inputs to "NULL"."""
        self.etiqueta = self._normalizar(etiquetaEntrada)
        self.comando = self._normalizar(comandoEntrada)
        self.argumento = self._normalizar(argumentoEntrada)

    def rellenarFormatoPar(self, valor):
        """Left-pad ``valor`` with '0' until its length is even."""
        if len(valor) % 2 != 0:
            valor = "0" + valor
        return valor

    def rellenarFormatoByte(self, valor):
        """Left-pad ``valor`` with '0' until its length is a multiple of 8."""
        resto = len(valor) % 8
        if resto:
            valor = "0" * (8 - resto) + valor
        return valor

    @classmethod
    def _quitarMarcadores(cls, valor):
        # Strip every radix/immediate marker, leaving only the payload.
        for marca in cls._MARCADORES:
            valor = valor.replace(marca, "")
        return valor

    def retornarBase(self, valor):
        """Return the radix name implied by the markers found in ``valor``.

        Exactly one marker selects its base, no marker means "DECIMAL", and
        any ambiguous mix of markers (or the sentinel "NULL") yields "NULL".
        """
        if valor == "NULL":
            return "NULL"
        presentes = [base for marca, base in (("@", "OCTAL"),
                                              ("$", "HEXADECIMAL"),
                                              ("%", "BINARIO"),
                                              ("´", "ASCII"))
                     if marca in valor]
        if not presentes:
            return "DECIMAL"
        if len(presentes) == 1:
            return presentes[0]
        return "NULL"  # contradictory markers

    def _valorEntero(self, valor):
        # Parse ``valor`` into an int honoring its marker; None when invalid.
        base = self.retornarBase(valor)
        puro = self._quitarMarcadores(valor)
        if base == "DECIMAL":
            return int(puro)
        if base == "OCTAL":
            return int(puro, 8)
        if base == "HEXADECIMAL":
            return int(puro, 16)
        if base == "BINARIO":
            return int(puro, 2)
        if base == "ASCII":
            return ord(puro)
        return None

    def retornarValorDec(self, valor):
        """Return ``valor`` as a decimal string, zero-padded to even length."""
        numero = self._valorEntero(valor)
        if numero is None:
            return "NULL"
        return self.rellenarFormatoPar(str(numero))

    def retornarValorOctal(self, valor):
        """Return ``valor`` as an octal string, zero-padded to even length."""
        numero = self._valorEntero(valor)
        if numero is None:
            return "NULL"
        return self.rellenarFormatoPar(oct(numero)[2:])

    def retornarValorHex(self, valor):
        """Return ``valor`` as a lowercase hex string, zero-padded to even length."""
        numero = self._valorEntero(valor)
        if numero is None:
            return "NULL"
        return self.rellenarFormatoPar(hex(numero)[2:])

    def retornarValorBin(self, valor):
        """Return ``valor`` as a binary string, zero-padded to even length."""
        numero = self._valorEntero(valor)
        if numero is None:
            return "NULL"
        return self.rellenarFormatoPar(bin(numero)[2:])

    @staticmethod
    def _separarArgumentos(texto):
        # Split on commas, then collapse/discard whitespace-only pieces.
        piezas = texto.split(",")
        return ' '.join(piezas).split()

    def sonMultiplesQ(self):
        """Return True when the stored argument holds more than one operand."""
        return len(self._separarArgumentos(self.argumento)) > 1

    def listaDeArgumentosPuros(self):
        """Return the operands of ``self.argumento`` with all markers removed."""
        return self._separarArgumentos(self._quitarMarcadores(self.argumento))

    def listaDeArgumentosBrutos(self):
        """Return the operands of ``self.argumento`` with markers intact."""
        return self._separarArgumentos(self.argumento)

    def listaDeArgumentosAHEX(self, valores):
        """Convert every comma-separated operand in ``valores`` to hex."""
        sin_inmediato = valores.replace("#", "")
        return [self.retornarValorHex(pieza)
                for pieza in self._separarArgumentos(sin_inmediato)]

    def obtenerLongitudDeByte(self, valor):
        """Return how many bytes ``valor`` occupies (binary length / 8)."""
        binario = self.retornarValorBin(valor.replace("#", ""))
        return len(self.rellenarFormatoByte(binario)) / 8

    def LongitudDeArgumentos(self, valores):
        """Return the byte length of each comma-separated operand."""
        sin_inmediato = valores.replace("#", "")
        return [self.obtenerLongitudDeByte(pieza)
                for pieza in self._separarArgumentos(sin_inmediato)]

    def formato2HEX(self, valor):
        """Insert '_' between every pair of characters: "AABB" -> "AA_BB"."""
        pares = [valor[i:i + 2] for i in range(0, len(valor), 2)]
        return "_".join(pares)
| [
"[email protected]"
] | |
23ec3d6b581671203df9bb461f433a5231881885 | 4122f1fdf5f9493d91c4adccb4fcdaaa0a557c8c | /pgdrive/scene_creator/ego_vehicle/vehicle_module/PID_controller.py | 9d29063efc8af1a03fa5157a660231741f17b853 | [
"Apache-2.0"
class PIDController:
    """Discrete PID controller that accumulates error state between calls."""

    def __init__(self, k_p: float, k_i: float, k_d: float):
        """Store the three gains and zero the error history."""
        self.k_p, self.k_i, self.k_d = k_p, k_i, k_d
        # Proportional / integral / derivative error terms.
        self.p_error = self.i_error = self.d_error = 0

    def _update_error(self, current_error: float):
        # Integrate, differentiate against the previous sample, then store it.
        self.i_error += current_error
        self.d_error = current_error - self.p_error
        self.p_error = current_error

    def get_result(self, current_error: float, make_up_coefficient=1.0):
        """Update the error state and return the (negated) PID control output."""
        self._update_error(current_error)
        control = -(self.k_p * self.p_error + self.k_i * self.i_error + self.k_d * self.d_error)
        return control * make_up_coefficient

    def reset(self):
        """Clear the accumulated error state."""
        self.p_error = self.i_error = self.d_error = 0
class Target:
    """Mutable driving target: a lateral lane position and a speed."""

    def __init__(self, target_lateral, target_speed):
        self.lateral = target_lateral
        self.speed = target_speed

    def go_right(self):
        """Shift 0.25 to the right unless already at/above 0.625."""
        if self.lateral < 0.625:
            self.lateral += 0.25

    def go_left(self):
        """Shift 0.25 to the left unless already at/below 0.125."""
        if self.lateral > 0.125:
            self.lateral -= 0.25

    def faster(self):
        """Raise the target speed by 10."""
        self.speed = self.speed + 10

    def slower(self):
        """Lower the target speed by 10."""
        self.speed = self.speed - 10
"[email protected]"
] | |
f00a8f1073c9da5a3b7bb5c4ebe9ca55f99d2734 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-storage/azure/mgmt/storage/v2017_10_01/models/encryption_py3.py | d819a127105d9f1442bbb415f1d0950e4e69a13e | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 1,946 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Encryption(Model):
    """The encryption settings on the storage account.

    All required parameters must be populated in order to send to Azure.

    :param services: List of services which support encryption.
    :type services: ~azure.mgmt.storage.v2017_10_01.models.EncryptionServices
    :param key_source: Required. The encryption keySource (provider). Possible
     values (case-insensitive): Microsoft.Storage, Microsoft.Keyvault.
     Possible values include: 'Microsoft.Storage', 'Microsoft.Keyvault'.
     Default value: "Microsoft.Storage" .
    :type key_source: str or ~azure.mgmt.storage.v2017_10_01.models.KeySource
    :param key_vault_properties: Properties provided by key vault.
    :type key_vault_properties:
     ~azure.mgmt.storage.v2017_10_01.models.KeyVaultProperties
    """

    # msrest validation rules: key_source must always be supplied.
    _validation = {
        'key_source': {'required': True},
    }

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        'services': {'key': 'services', 'type': 'EncryptionServices'},
        'key_source': {'key': 'keySource', 'type': 'str'},
        'key_vault_properties': {'key': 'keyvaultproperties', 'type': 'KeyVaultProperties'},
    }

    def __init__(self, *, services=None, key_source="Microsoft.Storage", key_vault_properties=None, **kwargs) -> None:
        super(Encryption, self).__init__(**kwargs)
        self.services = services
        self.key_source = key_source
        self.key_vault_properties = key_vault_properties
| [
"[email protected]"
] | |
99d3016c9da8b142f9ceef69be30b50edf9d8161 | 5cf7564c09c137a9ed76d5f40edb8ae5fa7e85b7 | /scripts/gen_cpp.py | 34c3b1d6638ac5f368e548365cf053b37975412a | [] | no_license | dkw72n/mtv | 504c289b126f598323364a3a492c0801249a4d2e | e41295c2b74c99f307f4e5a0804eed71e579a159 | refs/heads/main | 2023-01-22T08:21:10.231702 | 2020-12-08T08:40:18 | 2020-12-08T08:40:18 | 314,186,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,702 | py | import argparse
import sys
import re
import base64
# CLI: embed a static asset into a generated C++ source file that registers
# an HTTP handler serving the asset's bytes.
parser = argparse.ArgumentParser(description='')
parser.add_argument('input', type=str, help='input')  # asset file to embed
parser.add_argument('path', type=str, help='input')  # NOTE(review): help says 'input' but this is the URL path to serve at
parser.add_argument('--output', type=str, default="", help='output')  # output .cpp file; stdout when empty
parser.add_argument('--mime', type=str, default="auto", help='mime')  # MIME type; 'auto' guesses from extension
args = parser.parse_args()  # parsed at import time
def ClassName(i):
    """Derive a deterministic C++ identifier from a path via URL-safe base64."""
    encoded = base64.urlsafe_b64encode(i.encode()).decode().rstrip("=")
    return "K_" + encoded
def GetMime(i, m):
    """Return the MIME type for file *i*; *m* overrides it unless 'auto'."""
    if m != "auto":
        return m
    known = {".html": "text/html", ".css": "text/css"}
    for suffix, mime in known.items():
        if i.endswith(suffix):
            return mime
    return "application/octet-stream"
def HexContent(c):
    """Render a byte sequence as comma-separated hex literals, e.g. '0x0,0xff'."""
    return ",".join(hex(b) for b in c)
def main(args):
    """Read args.input and emit a C++ translation unit that self-registers an
    HTTP GET handler serving the file's bytes at args.path.

    Writes to args.output when given, otherwise to stdout.

    Fixes: the input file was never closed, and when no --output was given the
    original close()d sys.stdout, breaking any later prints; only real files
    are opened/closed now, via context managers.
    """
    with open(args.input, "rb") as src:
        payload = src.read()
    classname = ClassName(args.input)
    mime = GetMime(args.input, args.mime)
    text = ("""#include "../httplib.h"
#include <vector>
typedef void (*SvrRegister)(httplib::Server&);
extern std::vector<SvrRegister>* v;
static const unsigned char content[] = {"""
    + HexContent(payload) +
    """};
namespace UI_GEN{
static void register_func(httplib::Server& srv){
srv.Get(\"""" + args.path + """\", [](const httplib::Request &req, httplib::Response &res) {
res.set_content((const char*)content, sizeof(content), \"""" + mime + """\");
});
}
struct """ + classname + """{
""" + classname + """(){
if (!v){
v = new std::vector<SvrRegister>();
}
v->push_back(register_func);
}
};
static """ + classname + """ _unused;
}
""")
    if args.output:
        with open(args.output, "w") as dst:
            dst.write(text)
    else:
        sys.stdout.write(text)
print(args)  # echo the parsed arguments for debugging
main(args)
| [
"[email protected]"
] | |
ec175e73ef8816773523193ceee84d1b05877584 | 759cdfcc72adbc3496b52b3f1c2c1bc7f09b138f | /app/main/signals.py | f3d0a029cfba2b377cc80a8d42c5030ace779dc7 | [
"Apache-2.0"
] | permissive | 395299296/liaotian-robot | a53ade68d843a89d79b955d3058f5f1c0283f8bc | 86d96b75e327d93dc5873c31e2852afaf1b19af8 | refs/heads/master | 2021-01-19T16:02:15.021880 | 2018-02-01T10:11:39 | 2018-02-01T10:11:39 | 88,241,301 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py | from __future__ import print_function
from flask import request, current_app
from blinker import Namespace
from . import models, ext
from blog.config import RobotBlogSettings
# Search-engine submission endpoints (e.g. Baidu) from the blog configuration.
search_engine_submit_urls = RobotBlogSettings['search_engine_submit_urls']
# Blinker namespace and the two custom signals this module listens to.
robotblog_signals = Namespace()
post_visited = robotblog_signals.signal('post-visited')
post_pubished = robotblog_signals.signal('post-published')  # NOTE(review): name typo "pubished" kept; callers depend on it
@post_visited.connect
def on_post_visited(sender, post, **extra):
    """Record one visit: persist a Tracker row and bump the post's counter."""
    tracker = models.Tracker()
    tracker.post = post
    # Prefer the first X-Forwarded-For hop (original client) when behind a
    # proxy; otherwise fall back to the direct peer address.
    proxy_list = request.headers.getlist('X-Forwarded-For')
    tracker.ip = request.remote_addr if not proxy_list else proxy_list[0]
    tracker.user_agent = request.headers.get('User-Agent')
    tracker.save()
    try:
        post_statistic = models.PostStatistics.objects.get(post=post)
    except models.PostStatistics.DoesNotExist:
        # First visit ever: create the statistics row with a random base so
        # displayed counts do not start from zero.
        post_statistic = models.PostStatistics()
        post_statistic.post = post
        from random import randint
        post_statistic.verbose_count_base = randint(500, 5000)
        post_statistic.save()
    # Atomic increment of the real visit counter.
    post_statistic.modify(inc__visit_count=1)
@post_pubished.connect
def on_post_pubished(sender, post, **extra):
    """On publish, submit the post's public URL to Baidu (when configured)."""
    post_url = request.host + post.get_absolute_url()
    baidu_url = search_engine_submit_urls['baidu']
    if not baidu_url:
        print('Not ready to submit urls yet')
        return
    res = ext.submit_url_to_baidu(baidu_url, post_url)
    print(res.status_code, res.text)
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.