Dataset schema (column name, type, and observed minimum/maximum per column):

| column | type | min | max |
|---|---|---|---|
| hexsha | string (length) | 40 | 40 |
| size | int64 | 3 | 1.03M |
| ext | string (10 classes) | | |
| lang | string (1 class) | | |
| max_stars_repo_path | string (length) | 3 | 972 |
| max_stars_repo_name | string (length) | 6 | 130 |
| max_stars_repo_head_hexsha | string (length) | 40 | 78 |
| max_stars_repo_licenses | list (length) | 1 | 10 |
| max_stars_count | int64, nullable | 1 | 191k |
| max_stars_repo_stars_event_min_datetime | string (length), nullable | 24 | 24 |
| max_stars_repo_stars_event_max_datetime | string (length), nullable | 24 | 24 |
| max_issues_repo_path | string (length) | 3 | 972 |
| max_issues_repo_name | string (length) | 6 | 130 |
| max_issues_repo_head_hexsha | string (length) | 40 | 78 |
| max_issues_repo_licenses | list (length) | 1 | 10 |
| max_issues_count | int64, nullable | 1 | 116k |
| max_issues_repo_issues_event_min_datetime | string (length), nullable | 24 | 24 |
| max_issues_repo_issues_event_max_datetime | string (length), nullable | 24 | 24 |
| max_forks_repo_path | string (length) | 3 | 972 |
| max_forks_repo_name | string (length) | 6 | 130 |
| max_forks_repo_head_hexsha | string (length) | 40 | 78 |
| max_forks_repo_licenses | list (length) | 1 | 10 |
| max_forks_count | int64, nullable | 1 | 105k |
| max_forks_repo_forks_event_min_datetime | string (length), nullable | 24 | 24 |
| max_forks_repo_forks_event_max_datetime | string (length), nullable | 24 | 24 |
| content | string (length) | 3 | 1.03M |
| avg_line_length | float64 | 1.13 | 941k |
| max_line_length | int64 | 2 | 941k |
| alphanum_fraction | float64 | 0 | 1 |
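The records that follow are rows of this schema. As a rough illustration only (the loading step and the `df` DataFrame are assumptions, not part of this dump), filtering such rows by extension and license might look like this:

```python
import pandas as pd

def permissive_python_files(df: pd.DataFrame) -> pd.DataFrame:
    """Return repo name, path and size for Python files under MIT or Apache-2.0."""
    is_py = df["ext"] == "py"
    is_permissive = df["max_stars_repo_licenses"].apply(
        lambda licenses: any(l in ("MIT", "Apache-2.0") for l in licenses)
    )
    return df.loc[is_py & is_permissive,
                  ["max_stars_repo_name", "max_stars_repo_path", "size"]]
```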
hexsha: d04d0980d9044bdddc259cee62b292bec59d8244 | size: 4,018 | ext: py | lang: Python
max_stars: path pinger/daemon.py | repo pedrospdc/pinger | head 72aaaacedab0b9af2aea1a01eb4e8049e43fd950 | licenses ["MIT"] | count 4 | events 2015-07-21T07:50:21.000Z to 2018-04-28T10:06:57.000Z
max_issues: path pinger/daemon.py | repo pedrospdc/pinger | head 72aaaacedab0b9af2aea1a01eb4e8049e43fd950 | licenses ["MIT"] | count 1 | events 2019-10-15T22:33:25.000Z to 2019-10-16T09:53:23.000Z
max_forks: path pinger/daemon.py | repo pedrospdc/pinger | head 72aaaacedab0b9af2aea1a01eb4e8049e43fd950 | licenses ["MIT"] | count 2 | events 2015-02-23T19:03:39.000Z to 2015-03-27T05:54:05.000Z
content:
import atexit
import sys
import os
import time
from signal import SIGTERM
class Daemon(object):
"""
A generic daemon class.
Usage: subclass the Daemon class and override the run() method
"""
def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.pidfile = pidfile
def daemonize(self):
"""
Fork a second child and exit immediately to prevent zombies. This
causes the second child process to be orphaned, making the init
process responsible for its cleanup. And, since the first child is
a session leader without a controlling terminal, it's possible for
it to acquire one by opening a terminal in the future (System V-
based systems). This second fork guarantees that the child is no
longer a session leader, preventing the daemon from ever acquiring
a controlling terminal.
"""
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# decouple from parent environment
os.chdir("/")
os.setsid()
os.umask(0)
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = file(self.stdin, 'r')
so = file(self.stdout, 'a+')
se = file(self.stderr, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# write pidfile
atexit.register(self.delete_pid)
pid = str(os.getpid())
file(self.pidfile, 'w+').write("%s\n" % pid)
def delete_pid(self):
os.remove(self.pidfile)
def start(self):
"""
Start the daemon
"""
# Check for a pidfile to see if the daemon already runs
try:
pf = file(self.pidfile, 'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if pid:
message = "pidfile %s already exist. Daemon already running?\n"
sys.stderr.write(message % self.pidfile)
sys.exit(1)
# Start the daemon
self.daemonize()
self.run()
def stop(self):
"""
Stop the daemon
"""
# Get the pid from the pidfile
try:
pf = file(self.pidfile, 'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if not pid:
message = "pidfile %s does not exist. Daemon not running?\n"
sys.stderr.write(message % self.pidfile)
return # not an error in a restart
# Try killing the daemon process
try:
while 1:
os.kill(pid, SIGTERM)
time.sleep(0.1)
except OSError, err:
err = str(err)
if err.find("No such process") > 0:
if os.path.exists(self.pidfile):
os.remove(self.pidfile)
else:
print str(err)
sys.exit(1)
def restart(self):
"""
Restart the daemon
"""
self.stop()
self.start()
def run(self):
"""
You should override this method when you subclass Daemon. It will be called after the process has been
daemonized by start() or restart().
"""
pass
avg_line_length: 28.295775 | max_line_length: 110 | alphanum_fraction: 0.523892
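A minimal usage sketch for the Daemon class in the file above, assuming the same Python 2 environment the file targets; the subclass name, import path, log path and pidfile path are illustrative, not part of the original repository:

```python
import sys
import time

from pinger.daemon import Daemon  # assumed import path within the pinger repo


class HeartbeatDaemon(Daemon):
    """Toy daemon: appends a line to a log file every five seconds."""

    def run(self):
        while True:
            with open('/tmp/heartbeat.log', 'a') as fh:
                fh.write('alive\n')
            time.sleep(5)


if __name__ == '__main__':
    daemon = HeartbeatDaemon('/tmp/heartbeat.pid')
    if len(sys.argv) == 2 and sys.argv[1] in ('start', 'stop', 'restart'):
        # start() daemonizes and then calls run(); stop() reads the pidfile and kills
        getattr(daemon, sys.argv[1])()
    else:
        sys.stderr.write('usage: %s start|stop|restart\n' % sys.argv[0])
        sys.exit(2)
```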
hexsha: 1d8a3d3e8a2011fad0b8cc06b025251a29a15a6e | size: 10,045 | ext: py | lang: Python
max_stars: path dupont/dupontutils.py | repo afeinstein20/ceres | head e55150c587782cbecfd45c21ba0ce0023e54c3a9 | licenses ["MIT"] | count 35 | events 2016-09-09T04:08:45.000Z to 2022-02-16T20:25:49.000Z
max_issues: path dupont/dupontutils.py | repo afeinstein20/ceres | head e55150c587782cbecfd45c21ba0ce0023e54c3a9 | licenses ["MIT"] | count 29 | events 2016-09-10T15:30:29.000Z to 2021-06-02T14:55:53.000Z
max_forks: path dupont/dupontutils.py | repo afeinstein20/ceres | head e55150c587782cbecfd45c21ba0ce0023e54c3a9 | licenses ["MIT"] | count 26 | events 2016-09-09T23:58:15.000Z to 2022-02-20T17:50:35.000Z
content:
import sys
import matplotlib
matplotlib.use("Agg")
base = '../'
sys.path.append(base+"utils/GLOBALutils")
import GLOBALutils
import numpy as np
import scipy
from astropy.io import fits as pyfits
import os
import glob
import scipy.signal
from scipy.signal import medfilt
from scipy import interpolate
import copy
from pylab import *
def milk_comb(ImgList, darks, zero='Bias.fits'):
n = len(ImgList)
if n==0:
raise ValueError("empty list provided!")
h = pyfits.open(ImgList[0])[0]
d = h.data
head = pyfits.getheader(ImgList[0])
expt = head['EXPTIME']
d = OverscanTrim(d,h.header['BIASSEC'])
Master = pyfits.getdata(zero)
if len(darks) > 0:
Dark = get_dark(darks,expt)
else:
Dark = np.zeros((d.shape[0],d.shape[1]),float)
d -= Master
d -= Dark
factor = 1.25
if (n < 3):
factor = 1
ron1 = h.header['ENOISE']
gain = h.header['EGAIN']
ronoise = factor * h.header['ENOISE'] / np.sqrt(n)
if (n == 1):
return d, ron1, gain
else:
for i in range(n-1):
h = pyfits.open(ImgList[i+1])[0]
head = pyfits.getheader(ImgList[i+1])
expt = head['EXPTIME']
if len(darks) > 0:
Dark = get_dark(darks,expt)
else:
Dark = np.zeros((d.shape[0],d.shape[1]),float)
rd = OverscanTrim(h.data,h.header['BIASSEC']) - Master - Dark
d = np.dstack((d,rd/np.median(rd)))
out = np.median(d,axis=2)
return out, ronoise, gain
def FileClassify(path,log):
biases = []
milkflat = []
objects = []
darks = []
thars = []
skflat = []
qflat = []
f = open(log,'w')
archs = glob.glob(path+'ccd*.fits')
if os.access(path+'bad_files.txt',os.F_OK):
ff = open(path + 'bad_files.txt', 'r')
bfiles = ff.readlines()
else:
bfiles = []
for arch in archs:
use = True
for bf in bfiles:
if arch == path + bf[:-1]:
print 'Dumped file', arch
use = False
break
if use:
h = pyfits.open(arch)
header = pyfits.getheader(arch)
name = header['OBJECT']
if True:
if header['EXPTYPE'] == 'Object':
if name.count('sky')>0:
skflat.append(arch)
elif (name.lower()).count('milky')>0:
milkflat.append(arch)
elif name.count('qFlat')>0:
qflat.append(arch)
else:
expt = header['EXPTIME']
ra = header['RA']
dec = header['DEC']
ams = header['AIRMASS']
date = header['DATE-OBS']
UT = header['UT-TIME']
line = "%-15s %10s %10s %8.2f %4.2f %8s %8s %s\n" % (name, ra, dec, expt, ams, date, UT, arch)
f.write(line)
cos = name.split('ThAr')
if 'thar' in name.lower() or len(cos)> 1 or 'comp' in name.lower():
thars.append(arch)
elif name.count('milky')>0 or name.count('Milky')>0 or name.count('flat')>0:
milkflat.append(arch)
else:
objects.append(arch)
elif header['EXPTYPE'] == 'Bias':
biases.append(arch)
elif header['EXPTYPE'] == 'Flat':
nam = header['OBJECT']
if nam.count('sky')>0 or nam.count('Sky')>0 :
skflat.append(arch)
elif nam.count('milky')>0 or nam.count('Milky')>0 or nam.count('flat')>0:
milkflat.append(arch)
elif header['EXPTYPE'] == 'Dark':
darks.append(arch)
h.close()
f.close()
return biases, milkflat, skflat, objects, thars, darks
def MedianCombine(ImgList, zero_bo=False, zero='Bias.fits', dark_bo=False, darks=[], flat_bo=False, flat='Flat.fits'):
"""
Median combine a list of images
"""
n = len(ImgList)
if n==0:
raise ValueError("empty list provided!")
h = pyfits.open(ImgList[0])[0]
d = h.data
d = OverscanTrim(d,h.header['BIASSEC'])
if zero_bo:
Master = pyfits.getdata(zero)
else:
Master = np.zeros((d.shape[0],d.shape[1]),float)
if dark_bo and len(darks)!=0:
hd = pyfits.getheader(ImgList[0])
time = hd['EXPTIME']
Dark = get_dark(darks, time)
else:
Dark = np.zeros((d.shape[0],d.shape[1]),float)
if flat_bo:
Flat = pyfits.getdata(flat)
else:
Flat = np.zeros((d.shape[0],d.shape[1]),float) + 1.0
if flat_bo:
d = (d - Master - Dark)/Flat
else:
d = (d - Master - Dark)
factor = 1.25
if (n < 3):
factor = 1
ronoise = factor * h.header['ENOISE'] / np.sqrt(n)
gain = h.header['EGAIN']
if (n == 1):
return d, ronoise, gain
else:
for i in range(n-1):
h = pyfits.open(ImgList[i+1])[0]
if flat_bo:
d = np.dstack((d,(OverscanTrim(h.data,h.header['BIASSEC'])-Master-Dark)/Flat))
else:
d = np.dstack((d,OverscanTrim(h.data,h.header['BIASSEC'])-Master-Dark))
return np.median(d,axis=2), ronoise, gain
def OverscanTrim(d,bsec):
"""
Overscan correct and Trim a refurbished DuPont image
"""
bsec = bsec[1:-1]
bsec1 = bsec.split(',')[0]
bsec2 = bsec.split(',')[1]
b11 = int(bsec1.split(':')[0])
b12 = int(bsec1.split(':')[1])
b21 = int(bsec2.split(':')[0])
b22 = int(bsec2.split(':')[1])
t1 = d[:1500,:b11-1]
t2 = d[b21-1:,:b11-1]
nd = np.zeros((t1.shape[0],t1.shape[1]),float)
nd[:1500,:] = t1
overscan1 = np.median(t2,axis=0)
newdata = nd - overscan1
return newdata
def get_dark(darks,t):
exact = 0
dts = []
for dark in darks:
hd = pyfits.getheader(dark)
dt = hd['EXPTIME']
dts.append(dt)
if dt == t:
DARK = pyfits.getdata(dark)
exact = 1
dts = np.array(dts)
if exact == 0:
if t < dts.min():
I = np.where( dts == dts.min() )[0]
DARK = pyfits.getdata(darks[I[0]])*t/dts[I[0]]
elif t > dts.max():
I = np.where( dts == dts.max() )[0]
DARK = pyfits.getdata(darks[I[0]])*t/dts[I[0]]
else:
tmin = dts.min()
tmax = dts.max()
I = np.where( dts == dts.min() )[0]
Dmin = pyfits.getdata(darks[I[0]])
Dminname=darks[I[0]]
I = np.where( dts == dts.max() )[0]
Dmax = pyfits.getdata(darks[I[0]])
Dmaxname = darks[I[0]]
i = 0
while i < len(dts):
if dts[i] < t and dts[i] > tmin:
tmin = dts[i]
Dminname = darks[i]
Dmin = pyfits.getdata(darks[i])
elif dts[i] > t and dts[i] < tmax:
tmax = dts[i]
Dmaxname = darks[i]
Dmax = pyfits.getdata(darks[i])
i+=1
num = Dmax - Dmin
den = tmax-tmin
m = num/den
n = Dmax - m*tmax
DARK = m*t+n
return DARK
def get_blaze(LL,FF, low=1.0, hi=3.0, n = 6):
NF = FF.copy()
for j in range(LL.shape[0]):
L = LL[j]
F = FF[j]
ejex = np.arange(len(F))
F[:150] = 0.0
F[-150:] = 0.0
Z = np.where(F!=0)[0]
F = scipy.signal.medfilt(F[Z],31)
ejexx = ejex.copy()
ejex = ejex[Z]
L = L[Z]
I = np.where((L>5870) & (L<5890))[0]
if len(I)>0:
W = np.where(L<5870)[0]
R = np.where(L>5890)[0]
ejetemp = np.hstack((ejex[W],ejex[R]))
Ftemp = np.hstack((F[W],F[R]))
coefs = np.polyfit(ejetemp,Ftemp,n)
fit = np.polyval(coefs,ejetemp)
else:
ejetemp=ejex
Ftemp=F
coefs = np.polyfit(ejex,F,n)
fit = np.polyval(coefs,ejex)
i = 0
while i < 30:
res = Ftemp - fit
IP = np.where((res>=0) & (Ftemp!=0.0))[0]
IN = np.where((res<0) & (Ftemp!=0.0))[0]
devp = np.mean(res[IP])
devn = np.mean(res[IN])
I = np.where((res > -low*abs(devn)) & (res < hi*abs(devp)) & (Ftemp!=0))[0]
coefs = np.polyfit(ejetemp[I],Ftemp[I],n)
fit = np.polyval(coefs,ejetemp)
i+=1
fit = np.polyval(coefs,ejexx)
NF[j]=fit
NNF = NF.copy()
for j in range(LL.shape[0]):
L = LL[j]
I = np.where((L>6520) & (L<6600))[0]
if len(I)>0:
if j+2 < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-2,i],NF[j-1,i],NF[j+1,i],NF[j+2,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,3.0,4.0]),vec,k=2)
NNF[j,i] = scipy.interpolate.splev(2.0,tck,der=0)
elif j+1 < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-2,i],NF[j-1,i],NF[j+1,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,3.0]),vec,k=1)
NNF[j,i] = scipy.interpolate.splev(2.0,tck,der=0)
elif j < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-3,i],NF[j-2,i],NF[j-1,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,2.0]),vec,k=1)
NNF[j,i] = scipy.interpolate.splev(3.0,tck,der=0)
I = np.where((L>4870) & (L<4880))[0]
if len(I)>0:
if j+2 < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-2,i],NF[j-1,i],NF[j+1,i],NF[j+2,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,3.0,4.0]),vec,k=2)
NNF[j,i] = scipy.interpolate.splev(2.0,tck,der=0)
elif j+1 < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-2,i],NF[j-1,i],NF[j+1,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,3.0]),vec,k=1)
NNF[j,i] = scipy.interpolate.splev(2.0,tck,der=0)
else:
for i in range(len(L)):
vec = np.array([NF[j-3,i],NF[j-2,i],NF[j-1,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,2.0]),vec,k=1)
NNF[j,i] = scipy.interpolate.splev(3.0,tck,der=0)
I = np.where((L>4320) & (L<4325))[0]
if len(I)>0:
if j+2 < LL.shape[0]:
for i in range(len(L)):
vec = np.array([NF[j-2,i],NF[j-1,i],NF[j+1,i],NF[j+2,i]])
tck = scipy.interpolate.splrep(np.array([0.0,1.0,3.0,4.0]),vec,k=2)
NNF[j,i] = scipy.interpolate.splev(2.0,tck,der=0)
return NNF
def get_close(tht,rat,dect,fits):
t0 = 1000000.
close = fits[0]
for fit in fits:
#print close
hd = pyfits.getheader(fit)
sct,mjd0 = mjd_fromheader(hd)
expt = hd['EXPTIME']/(3600.*24.)
dec = hd['DEC-D']
ra = hd['RA-D']
if abs(dec - dect)<0.05 and abs(ra - rat)<0.05:
#print sct+expt,tht
if abs(sct+expt-tht) < t0:
t0 = abs(sct+expt-tht)
close = fit
return close
def b_col(d):
d[:,746] = 0.5*(d[:,745]+d[:,748])
d[:,747] = 0.5*(d[:,745]+d[:,748])
return d
def mjd_fromheader(h):
"""
return modified Julian date from header
"""
datetu = h['UT-DATE']
timetu = h['UT-TIME']
mjd0,mjd,i = GLOBALutils.iau_cal2jd(int(datetu[:4]),int(datetu[5:7]),int(datetu[8:]))
ho = int(timetu[:2])
mi = int(timetu[3:5])
se = float(timetu[7:])
ut = float(ho) + float(mi)/60.0 + float(se)/3600.0
mjd_start = mjd + ut/24.0
secinday = 24*3600.0
fraction = 0.5
texp = h['EXPTIME'] #sec
mjd = mjd_start + (fraction * texp) / secinday
return mjd, mjd0
avg_line_length: 24.741379 | max_line_length: 118 | alphanum_fraction: 0.579393
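A small, self-contained sketch (synthetic arrays, not instrument data) of the linear interpolation in exposure time that get_dark() in the file above performs when no dark frame matches the requested exposure exactly and the request falls between the available darks:

```python
import numpy as np

def interpolate_dark(dark_lo, t_lo, dark_hi, t_hi, t):
    """Scale two dark frames taken at exposure times t_lo < t < t_hi to time t."""
    slope = (dark_hi - dark_lo) / (t_hi - t_lo)  # per-pixel dark rate
    intercept = dark_hi - slope * t_hi           # per-pixel offset
    return slope * t + intercept

dark_10s = np.full((2, 2), 10.0)    # synthetic 10 s dark
dark_100s = np.full((2, 2), 100.0)  # synthetic 100 s dark
print(interpolate_dark(dark_10s, 10.0, dark_100s, 100.0, 55.0))  # -> all pixels 55.0
```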
hexsha: f7f058f44a4c23fbbabf4370511ade85e94cefd1 | size: 712 | ext: py | lang: Python
max_stars: path archived/pyostack/pyostack/metering.py | repo DistributedSystemsGroup/sysadmin-dsp | head baa3cfddcf34a0ef9e4f13f44593aa7fbb85750a | licenses ["Apache-2.0"] | count null | events null
max_issues: path archived/pyostack/pyostack/metering.py | repo DistributedSystemsGroup/sysadmin-dsp | head baa3cfddcf34a0ef9e4f13f44593aa7fbb85750a | licenses ["Apache-2.0"] | count null | events null
max_forks: path archived/pyostack/pyostack/metering.py | repo DistributedSystemsGroup/sysadmin-dsp | head baa3cfddcf34a0ef9e4f13f44593aa7fbb85750a | licenses ["Apache-2.0"] | count null | events null
content:
import ceilometerclient.client as clclient
import logging
log = logging.getLogger(__name__)
class Metering:
'''Wrapper for the OpenStack Metering service (Ceilometer)'''
def __init__(self, conf):
creds = self._get_creds(conf)
self.ceilo = clclient.get_client(2, **creds)
def _get_creds(self, conf):
d = {}
d['os_username'] = conf.get("environment", "OS_USERNAME")
d['os_password'] = conf.get("environment", "OS_PASSWORD")
d['os_auth_url'] = conf.get("environment", "OS_AUTH_URL")
d['os_tenant_name'] = conf.get("environment", "OS_TENANT_NAME")
return d
def meter_list(self, query=None):
return self.ceilo.meters.list()
avg_line_length: 30.956522 | max_line_length: 71 | alphanum_fraction: 0.650281
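A hypothetical usage sketch for the Metering wrapper above; the config section and option names follow what _get_creds() reads, but the concrete values, the import path and the printed attribute are assumptions:

```python
import ConfigParser  # the wrapper targets Python 2 / python-ceilometerclient

from pyostack.metering import Metering  # assumed import path

# Build the ConfigParser-style object that Metering._get_creds() expects.
conf = ConfigParser.ConfigParser()
conf.add_section("environment")
conf.set("environment", "OS_USERNAME", "admin")
conf.set("environment", "OS_PASSWORD", "secret")
conf.set("environment", "OS_AUTH_URL", "http://keystone:5000/v2.0")
conf.set("environment", "OS_TENANT_NAME", "admin")

metering = Metering(conf)
for meter in metering.meter_list():
    print meter.name  # each entry is a ceilometer Meter resource
```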
hexsha: 1825980da1a157e2bf99be4f4dc5fd252bc8f7a5 | size: 4,323 | ext: py | lang: Python
max_stars: path locations/spiders/noahsnybagels.py | repo mfjackson/alltheplaces | head 37c90b4041c80a574e6e4c2f886883e97df4b636 | licenses ["MIT"] | count null | events null
max_issues: path locations/spiders/noahsnybagels.py | repo mfjackson/alltheplaces | head 37c90b4041c80a574e6e4c2f886883e97df4b636 | licenses ["MIT"] | count null | events null
max_forks: path locations/spiders/noahsnybagels.py | repo mfjackson/alltheplaces | head 37c90b4041c80a574e6e4c2f886883e97df4b636 | licenses ["MIT"] | count null | events null
content:
# -*- coding: utf-8 -*-
import scrapy
import datetime
import re
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
DAY_MAPPING = {
"Monday": "Mo",
"Tuesday": "Tu",
"Wednesday": "We",
"Thursday": "Th",
"Friday": "Fr",
"Saturday": "Sa",
"Sunday": "Su",
}
class NoahsNYBagelsSpider(scrapy.Spider):
"""Copy of Einstein Bros. Bagels - all brands of the same parent company Coffee & Bagels"""
name = "noahsnybagels"
item_attributes = {"brand": "Noah's New York Bagels"}
allowed_domains = ["noahs.com"]
start_urls = ("https://locations.noahs.com/us",)
def parse_hours(self, elements):
opening_hours = OpeningHours()
for elem in elements:
day = elem.xpath(
'.//td[@class="c-location-hours-details-row-day"]/text()'
).extract_first()
intervals = elem.xpath(
'.//td[@class="c-location-hours-details-row-intervals"]'
)
if intervals.xpath("./text()").extract_first() == "Closed":
continue
if intervals.xpath("./span/text()").extract_first() == "Open 24 hours":
opening_hours.add_range(
day=DAY_MAPPING[day], open_time="0:00", close_time="23:59"
)
else:
start_time = elem.xpath(
'.//span[@class="c-location-hours-details-row-intervals-instance-open"]/text()'
).extract_first()
end_time = elem.xpath(
'.//span[@class="c-location-hours-details-row-intervals-instance-close"]/text()'
).extract_first()
opening_hours.add_range(
day=day[:2],
open_time=start_time,
close_time=end_time,
time_format="%H:%M %p",
)
return opening_hours.as_opening_hours()
def parse_store(self, response):
ref = re.search(r".+/(.+)$", response.url).group(1)
address1 = response.xpath(
'//span[@class="c-address-street-1"]/text()'
).extract_first()
address2 = (
response.xpath('//span[@class="c-address-street-2"]/text()').extract_first()
or ""
)
properties = {
"addr_full": " ".join([address1, address2]).strip(),
"phone": response.xpath(
'//span[@itemprop="telephone"]/text()'
).extract_first(),
"city": response.xpath(
'//span[@class="c-address-city"]/text()'
).extract_first(),
"state": response.xpath(
'//span[@itemprop="addressRegion"]/text()'
).extract_first(),
"postcode": response.xpath(
'//span[@itemprop="postalCode"]/text()'
).extract_first(),
"country": response.xpath(
'//abbr[@itemprop="addressCountry"]/text()'
).extract_first(),
"ref": ref,
"website": response.url,
"lat": float(
response.xpath('//meta[@itemprop="latitude"]/@content').extract_first()
),
"lon": float(
response.xpath('//meta[@itemprop="longitude"]/@content').extract_first()
),
"name": response.xpath('//h1[@id="location-name"]/text()').extract_first(),
}
hours = self.parse_hours(
response.xpath('//table[@class="c-location-hours-details"]//tbody/tr')
)
if hours:
properties["opening_hours"] = hours
yield GeojsonPointItem(**properties)
def parse(self, response):
urls = response.xpath('//a[@class="Directory-listLink"]/@href').extract()
is_store_list = response.xpath(
'//section[contains(@class,"LocationList")]'
).extract()
if not urls and is_store_list:
urls = response.xpath(
'//a[contains(@class,"Teaser-titleLink")]/@href'
).extract()
for url in urls:
if re.search(r"us/.{2}/.+/.+", url):
yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
else:
yield scrapy.Request(response.urljoin(url))
avg_line_length: 34.309524 | max_line_length: 100 | alphanum_fraction: 0.52186
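An illustrative check, independent of scrapy, of the URL filter used in parse() above: only paths with a country, two-letter state, city and street segment are handed to parse_store(), everything else is re-crawled as a directory page. The sample URLs are made up:

```python
import re

sample_urls = [
    "/us",                              # country directory -> crawled further
    "/us/ca",                           # state directory   -> crawled further
    "/us/ca/berkeley",                  # city directory    -> crawled further
    "/us/ca/berkeley/1883-solano-ave",  # store page        -> parse_store()
]
for url in sample_urls:
    is_store_page = bool(re.search(r"us/.{2}/.+/.+", url))
    print(url, "->", "store" if is_store_page else "directory")
```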
hexsha: 77d885ddbeba5f5aaccd5d35154a92cce21c59f5 | size: 397 | ext: py | lang: Python
max_stars: path scripts/reformatGff.py | repo hatimalmutairi/LMGAAP | head bb3914de6113cbafad76f0552ed4d54286164187 | licenses ["MIT"] | count 2 | events 2022-01-17T14:01:24.000Z to 2022-01-20T04:27:37.000Z
max_issues: path scripts/reformatGff.py | repo hatimalmutairi/LMGAAP | head bb3914de6113cbafad76f0552ed4d54286164187 | licenses ["MIT"] | count null | events null
max_forks: path scripts/reformatGff.py | repo hatimalmutairi/LMGAAP | head bb3914de6113cbafad76f0552ed4d54286164187 | licenses ["MIT"] | count null | events null
content:
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1],header=None)
df.columns = ['column_1']
rows = df.loc[0:1].copy()
df2 = df.loc[2:].copy()
df2['ID'] = range(1,len(df2)+1)
df2['ID'] = 'ID=' + df2['ID'].astype(str)
df2['column_1'] = df2['column_1'].str.cat(df2['ID'],sep=";")
df2 = rows.append(df2)
df2 = df2.drop( columns='ID')
df2.to_csv(sys.argv[2], index=False,header=False)
avg_line_length: 33.083333 | max_line_length: 61 | alphanum_fraction: 0.624685
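A worked example, with made-up lines rather than a real GFF file, of the transformation reformatGff.py applies: the first two rows pass through unchanged and every later row gains a running ";ID=&lt;n&gt;" suffix. pd.concat is used here in place of the deprecated DataFrame.append:

```python
import pandas as pd

df = pd.DataFrame({"column_1": [
    "##gff-version 3",            # header rows (kept as-is)
    "##source example",
    "chr1\t.\tgene\t1\t100",      # feature rows (get ID suffixes)
    "chr1\t.\tgene\t200\t300",
]})
rows = df.loc[0:1].copy()
df2 = df.loc[2:].copy()
df2["ID"] = "ID=" + pd.Series(range(1, len(df2) + 1), index=df2.index).astype(str)
df2["column_1"] = df2["column_1"].str.cat(df2["ID"], sep=";")
out = pd.concat([rows, df2.drop(columns="ID")])
print(out["column_1"].tolist())
# ['##gff-version 3', '##source example',
#  'chr1\t.\tgene\t1\t100;ID=1', 'chr1\t.\tgene\t200\t300;ID=2']
```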
hexsha: be0a675c0ae447329cc6e6934e7424cb60d5675d | size: 47 | ext: py | lang: Python
max_stars: path src/python/pydetranutils/track_plot.py | repo baklanovp/libdetran | head 820efab9d03ae425ccefb9520bdb6c086fdbf939 | licenses ["MIT"] | count 4 | events 2015-03-07T16:20:23.000Z to 2020-02-10T13:40:16.000Z
max_issues: path src/python/pydetranutils/track_plot.py | repo baklanovp/libdetran | head 820efab9d03ae425ccefb9520bdb6c086fdbf939 | licenses ["MIT"] | count 3 | events 2018-02-27T21:24:22.000Z to 2020-12-16T00:56:44.000Z
max_forks: path src/python/pydetranutils/track_plot.py | repo baklanovp/libdetran | head 820efab9d03ae425ccefb9520bdb6c086fdbf939 | licenses ["MIT"] | count 9 | events 2015-03-07T16:20:26.000Z to 2022-01-29T00:14:23.000Z
content:
# Utilities for visualizing tracked geometries
avg_line_length: 23.5 | max_line_length: 46 | alphanum_fraction: 0.851064
hexsha: a141da4cffa4eddc00424406a5122f276f052116 | size: 42,609 | ext: py | lang: Python
max_stars: path rs/login.py | repo alexander-marquardt/lexalink | head d554f3a00699c8a4cdf1b28dd033655f929470fa | licenses ["Apache-2.0", "BSD-3-Clause"] | count 1 | events 2017-02-09T07:12:25.000Z to 2017-02-09T07:12:25.000Z
max_issues: path rs/login.py | repo alexander-marquardt/lexalink | head d554f3a00699c8a4cdf1b28dd033655f929470fa | licenses ["Apache-2.0", "BSD-3-Clause"] | count null | events null
max_forks: path rs/login.py | repo alexander-marquardt/lexalink | head d554f3a00699c8a4cdf1b28dd033655f929470fa | licenses ["Apache-2.0", "BSD-3-Clause"] | count null | events null
content:
# -*- coding: utf-8 -*-
################################################################################
# LexaLink Copyright information - do not remove this copyright notice
# Copyright (C) 2012
#
# Lexalink - a free social network and dating website platform for the Google App Engine.
#
# Original author: Alexander Marquardt
# Documentation and additional information: http://www.LexaLink.com
# Git source code repository: https://github.com/alexander-marquardt/lexalink
#
# Please consider contributing your enhancements and modifications to the LexaLink community,
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import uuid
import logging, StringIO, pickle, datetime, os
import json
from google.appengine.api import users
from rs import utils, localizations, login_utils, forms, admin, constants, views, common_data_structs, user_profile_main_data
from rs import store_data, channel_support, lang_settings
from rs import models, error_reporting, messages, utils_top_level
from django import template, shortcuts, http
from django.utils import simplejson
from django.core.validators import email_re
from django.utils.translation import ugettext
from django.core.urlresolvers import reverse
from localeurl import utils as localeurl_utils
try:
from rs.proprietary import search_engine_overrides
except:
pass
#############################################
# to prevent possible problems with multiple sessions being active at once, make sure that the
# user is logged out before letting them log in. Also, this should make it more difficult for
# jealous spouse to accidently discover that their partner has been logged in, since previous session
# will be automatically logged out if the user goes to the home page.
def check_if_login_country_allowed(request):
login_allowed = True
message_for_client = ''
country_encoded = None
region_encoded = None
http_country_code = request.META.get('HTTP_X_APPENGINE_COUNTRY', None)
if http_country_code:
# check if country is valid and is allowed to register profiles on our website
if http_country_code in localizations.forbidden_countries:
# user will not be allowed to register or login from un-supported countries.
forbidden_country_name = localizations.forbidden_countries[http_country_code]
message_for_client = u"We do not currently allow users from %s" % forbidden_country_name
logging.warning(message_for_client)
login_allowed = False
else:
tmp_country_encoded = "%s,," % http_country_code
if tmp_country_encoded in localizations.location_dict[0]:
# make sure that it is a country that we support.
country_encoded = tmp_country_encoded
else:
logging.info("Logging in user in unknown country: %s" % http_country_code)
http_region_code = request.META.get('HTTP_X_APPENGINE_REGION', None)
if country_encoded and http_region_code:
http_region_code = http_region_code.upper()
# check if the region code matches a region key
tmp_region_encoded = "%s,%s," % (http_country_code, http_region_code)
if tmp_region_encoded in localizations.location_dict[0]:
region_encoded = tmp_region_encoded
else:
logging.warning("Region code %s not found in location_dict" % http_region_code)
return (login_allowed, message_for_client)
def landing_page(request):
# Redirects to search results, and adds a get parameter to indicate that the registration/enter
# dialog should be shown to the user.
try:
redirect_to_search_results = reverse('search_gen') + "?query_order=unique_last_login"
for key in request.GET:
value = request.GET[key]
redirect_to_search_results += "&%s=%s" % (key, value)
return http.HttpResponseRedirect(redirect_to_search_results)
except:
return utils.return_and_report_internal_error(request)
def get_registration_html(request):
try:
http_country_code = request.META.get('HTTP_X_APPENGINE_COUNTRY', None)
signup_template = template.loader.get_template('login_helpers/registration.html')
html_for_signup = forms.MyHTMLLoginGenerator.as_table_rows(localizations.input_field_lang_idx[request.LANGUAGE_CODE], 'signup_fields')
context = template.Context (dict({
'html_for_signup': html_for_signup,
'minimum_registration_age' : constants.minimum_registration_age,
'is_popup_login_registration' : True,
'http_country_code' : http_country_code,
}, **constants.template_common_fields))
signup_html = signup_template.render(context)
login_template = template.loader.get_template('login_helpers/login.html')
context = template.Context (dict({
'is_popup_login_registration' : True
}, **constants.template_common_fields))
login_html = login_template.render(context)
response_dict = { 'signup_html' : signup_html,
'login_html' : login_html}
json_response = simplejson.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
except:
return utils.return_and_report_internal_error(request)
def store_authorization_info_and_send_email_wrapper(request, login_dict, encrypted_password, password_salt, currently_displayed_url):
# Receives the user_login values and forces user to verify a registration code sent to their email address before
# registration data is stored in the permanent data structures.
try:
generated_html = ''
(username, email_address) = login_utils.extract_data_from_login_dict(login_dict)
assert(username); assert(email_address)
# if someone calls the associated URL without using our website (i.e hacking us), it is possible that they could pass in
# bad values, and register invalid emails and usernames -- catch this.
if (not email_re.match(email_address) or constants.rematch_non_alpha.search(username) != None or len(username) < 3):
error_message="Invalid data passed in: username: %s email_address: %s" % (username, email_address)
error_reporting.log_exception(logging.error, error_message=error_message)
raise Exception(error_message)
# pickle the GET string for re-insertion into the request object when the user clicks on the email link to
# validate their account.
# We create a StringIO object because the pickle module expects files to pickle the objects into. This is like a
# fake file.
pickled_login_get_dict_fake_file = StringIO.StringIO()
dump_login_dict = login_dict.copy()
# remove the password from the login_dict before pickling it so that we don't have un-encrypted passwords stored in the database.
dump_login_dict['password'] = "Unencrypted password is not stored - you should be using the encrypted_password field"
pickle.dump(dump_login_dict, pickled_login_get_dict_fake_file)
pickled_login_get_dict = pickled_login_get_dict_fake_file.getvalue()
authorization_info_status = login_utils.store_authorization_info_and_send_email(
currently_displayed_url, username, email_address, encrypted_password, password_salt,
pickled_login_get_dict, request.LANGUAGE_CODE)
pickled_login_get_dict_fake_file.close()
if authorization_info_status != "OK":
error_reporting.log_exception(logging.error, error_message = authorization_info_status)
return authorization_info_status
except:
error_reporting.log_exception(logging.critical)
return "Critical error in verify_user_email"
def process_login(request):
# Note that this function can be called from a POST or a GET (URL-passing in login parameters),
# or on initial loading without POST information.
# If it is called without POST information, then the default login page is simply displayed. Otherwise
# the POST data is analyzed to see if the login/signup is successful.
# GET is used for sending an incorrect login back to the original login page along with parameters
# that have previously been entered (such as a username)
#
try:
# If this is an administrator (as defined by the admins in the Google App Engine console), then the user
# will have extra privelidges such as not having to enter a password to enter into an account.
is_admin_login = users.is_current_user_admin()
response_dict = {}
error_dict = {}
lang_idx = localizations.input_field_lang_idx[request.LANGUAGE_CODE]
if request.method != 'POST':
error_message = "process_login was not called with POST data"
error_reporting.log_exception(logging.error, error_message = error_message)
return http.HttpResponseBadRequest(error_message)
(login_allowed, message_for_client) = check_if_login_country_allowed(request)
if not login_allowed:
logging.critical("Must handle countries that are not allowed")
assert(0)
login_dict = {'username_email' : request.POST.get('username_email', '----'),
'password' : request.POST.get('password', '----'),
}
for key in login_dict.keys():
if not login_dict[key]: login_dict[key] = "----"
#clear_old_session(request)
userobject = None
username = ''
# remove spaces in the username/email field
login_dict['username_email'] = login_dict['username_email'].replace(' ', '')
# Ensure that the most recently
# accessed object will be returned in case of a conflict (which might occur if a person uses the same email
# address for multiple accounts)
q_order_by_last_login = models.UserModel.query().order(-models.UserModel.last_login_string)
email_or_username_login = None
if email_re.match(login_dict['username_email']):
email_or_username_login = "email"
q_username_email = q_order_by_last_login.filter(models.UserModel.email_address == login_dict['username_email'].lower())
else:
email_or_username_login = "username"
username = login_dict['username_email'].upper()
q_username_email = q_order_by_last_login.filter(models.UserModel.username == username)
if (len(username) < 3 or username == "----"):
error_dict['username_email'] = u"%s" % constants.ErrorMessages.username_too_short
elif (constants.rematch_non_alpha.search(username) != None ):
error_dict['username_email'] = u"%s" % constants.ErrorMessages.username_alphabetic
if is_admin_login and login_dict['password'] == "----":
# If administrator has entered in a password, then we assume that they are testing the "normal" login
# flow. Only if the password is not entered (as indicated by "----") will we login as admin.
# There should only be a single "active" (not eliminated) user for each username/email.
userobject = q_username_email.get()
else:
q_not_eliminated = q_username_email.filter(models.UserModel.user_is_marked_for_elimination == False)
userobject = q_not_eliminated.get()
if userobject:
# Verify that the password is not empty.
if login_dict['password'] == "----":
error_dict['password'] = u"%s" % constants.ErrorMessages.password_required
else:
# userobject not found; just for informational purposes, we check to see if the user profile has been
# eliminated, and if so we provide some feedback to the user about the reason for elimination of their
# profile.
q_is_eliminated = q_username_email.filter(models.UserModel.user_is_marked_for_elimination == True)
eliminated_userobject = q_is_eliminated.get()
show_reason_for_elimination = False
if eliminated_userobject and email_or_username_login == 'username':
# if the user has entered a username for an eliminated account, show them the reason for elimination, even
# if the password was incorrect - this does not violate anyones privacy
show_reason_for_elimination = True
elif eliminated_userobject and (eliminated_userobject.password == utils.old_passhash(login_dict['password']) or \
eliminated_userobject.password == utils.new_passhash(login_dict['password'], eliminated_userobject.password_salt)):
# The username_login is an email address, this needs more privacy protection.
# Let user know that the profile was eliminated (but only if they have entered in the correct password).
# To protect users privacy, we don't want to confirm that an email address was registered unless the
# correct password was entered.
show_reason_for_elimination = True
if show_reason_for_elimination:
message_for_client = utils.get_removed_user_reason_html(eliminated_userobject)
error_dict['reason_for_removal_message'] = message_for_client
correct_username_password = False
if userobject:
if is_admin_login and login_dict['password'] == "----":
correct_username_password = True
elif userobject.password == utils.old_passhash(login_dict['password']):
# All "normal" (non admin) logins MUST check the password!!
correct_username_password = True
# Now we have to reset the user's password to the new_passhash algorithm to make it more secure.
# This requires that we generate a salt in addition to hashing with the new algorithm.
userobject.password_salt = uuid.uuid4().hex
userobject.password = utils.new_passhash(login_dict['password'], userobject.password_salt)
elif userobject.password == utils.new_passhash(login_dict['password'], userobject.password_salt):
# All "normal" (non admin) logins MUST check the password!!
correct_username_password = True
elif userobject.password_reset and userobject.password_reset == utils.new_passhash(login_dict['password'], userobject.password_salt):
# Note: if the password has been reset, then the 'password_reset' value will contain
# the new password (as opposed to directly overwriting the 'password' field). This is done to prevent
# random people from resetting other peoples passwords. -- Once the user has
# logged in using the new 'reset_password', then we copy this field over to the 'password'
# field. If the user never logs in with this 'reset_password', then the original password
# is not over-written -- and we instead erase the 'reset_password' value (lower down in this function)
correct_username_password = True
userobject.password = userobject.password_reset
else:
correct_username_password = False
if not correct_username_password:
error_dict['incorrect_username_password_message'] = u"%s" % constants.ErrorMessages.incorrect_username_password
if not 'username_email' in error_dict:
error_dict['username_email'] = ''
if not 'password' in error_dict:
error_dict['password'] = ''
if not error_dict:
assert(userobject)
# success, user is in database and has entered correct data
owner_uid = userobject.key.urlsafe()
owner_nid = utils.get_nid_from_uid(owner_uid)
# make sure that the userobject has all the parts that the code expects it to have.
store_data.check_and_fix_userobject(userobject, request.LANGUAGE_CODE)
# if administrator is logging in, do not update any of the user login times, or other data that should only be updated
# if the real user logs in. However, if the administrator is logging in, and has entered a password, then they
# would like to be recognized as a standard login, and therefore we should update the login times.
if not is_admin_login or (is_admin_login and login_dict['password'] != "----"):
userobject.password_reset = None # if the user has successfully logged in, then we know that the "reset_password" is no longer needed
userobject.previous_last_login = userobject.last_login
userobject.last_login = datetime.datetime.now()
userobject.last_login_string = str(userobject.last_login)
if not utils.get_client_vip_status(userobject):
# client has lost their VIP status - clear from both the userobject
userobject.client_paid_status = None
# this user up until now has not had to solve any captchas since he was a VIP member - therefore, it is possible
# that his spam_tracker has accumulated a number of times being reported as spammer. We don't want to punish people
# after they lose their vip status, and so we set the number of captchas solved to be equal to the number of times
# reported as a spammer (this means that any previous spam messages will not require that a new captcha be solved).
spam_tracker = userobject.spam_tracker.get()
spam_tracker.number_of_captchass_solved_total = spam_tracker.num_times_reported_as_spammer_total
spam_tracker.put()
userobject.unique_last_login = login_utils.compute_unique_last_login(userobject)
# remove chat boxes from previous sessions.
channel_support.close_all_chatboxes_internal(owner_uid)
# reset the new_messages_since_last_notification data strutures since the user
# is logging in, and is obviously aware of new messages etc.
store_data.reset_new_contact_or_mail_counter_notification_settings(userobject.unread_mail_count_ref)
store_data.reset_new_contact_or_mail_counter_notification_settings(userobject.new_contact_counter_ref)
# log information about this users login time, and IP address
utils.update_ip_address_on_user_tracker(userobject.user_tracker)
utils.store_login_ip_information(request, userobject)
utils.put_userobject(userobject)
# update session to point to the current userobject
login_utils.store_session(request, userobject)
http_country_code = request.META.get('HTTP_X_APPENGINE_COUNTRY', None)
logging.info("Logging in User: %s IP: %s country code: %s -re-directing to edit_profile_url" % (userobject.username, os.environ['REMOTE_ADDR'], http_country_code))
# Set language to whatever the user used the last time they were logged in.
search_preferences = userobject.search_preferences2.get()
lang_code = search_preferences.lang_code
assert(lang_settings.set_language_in_session(request, lang_code))
current_path = request.POST.get('current_path', None)
if current_path:
locale, path = localeurl_utils.strip_path(current_path)
if path in constants.URLS_THAT_NEED_REDIRECT_AFTER_ENTRY:
# Note: we "manually" set the language in the URL on purpose, because we need to guarantee that the language
# stored in the profile, session and URL are consistent (so that the user can change it if it is not correct)
destination_url = "/%(lang_code)s/edit_profile/%(owner_nid)s/" % {
'lang_code': lang_code, 'owner_nid':owner_nid}
else:
destination_url = current_path
else:
# This is an error condition that should not occur if the client-side javascript is behaving properly
error_reporting.log_exception(logging.critical, error_message = "process_login did not receive a current_path value")
# Send them to the search results page so that they have something interesting to look at (since they may or may not
# now be logged in, we don't want to leave them sitting on the landing page)
destination_url = reverse('search_gen')
response_dict['Login_OK_Redirect_URL'] = destination_url
else:
assert(error_dict)
# there were errors - report them
response_dict['Login_Error'] = error_dict
json_response = simplejson.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
except:
error_message = "process_login unknown error"
error_reporting.log_exception(logging.critical, error_message = error_message)
return http.HttpResponseBadRequest(error_message)
def process_registration(request):
# new user is signing up
# We pass in the currently_displayed_url so that when the user registers, they will be directed back to
# the exact same page they were on when they initially signed up -- this could be almost any page
# since we now have pop-up dialogs that are shown overtop of the various pages.
lang_idx = localizations.input_field_lang_idx[request.LANGUAGE_CODE]
response_dict = {}
try:
if request.method != 'POST':
error_message = "process_registration was not called with POST data"
error_reporting.log_exception(logging.critical, error_message = error_message)
return http.HttpResponseBadRequest(error_message)
currently_displayed_url = request.POST.get('currently_displayed_url', None)
(login_allowed, message_for_client) = check_if_login_country_allowed(request)
if not login_allowed:
logging.critical("Must handle countries that are not allowed")
assert(0)
login_dict = login_utils.get_registration_dict_from_post(request)
login_dict['country'] = request.POST.get('country', '----')
login_dict['sub_region'] = request.POST.get('sub_region', '----')
login_dict['region'] = request.POST.get('region', '----')
# re-write all user names to upper-case to prevent confusion
# and amateur users from not being able to log in.
login_dict['username'] = login_dict['username'].upper().replace(' ', '')
username = login_dict['username']
# setup default email_address for developer testing
if login_dict['email_address'] == "----" and utils.is_exempt_user():
# for testing and debugging, we allow developers to bypass the check on the email address, and
# we just assign their google email address to this field automatically (if it is empty)
login_dict['email_address'] = users.User().email()
logging.warning("\n**** Warning: Setting registration email address to %s\n" % login_dict['email_address'])
if login_dict['password'] == "----" and utils.is_exempt_user():
# setup default password for developer testing
login_dict['password'] = constants.DEFAULT_PROFILE_PASSWORD
logging.warning("\n**** Warning: Setting registration password to %s\n" % login_dict['password'])
# if email address is given, make sure that it is valid
# remove blank spaces from the email address -- to make it more likely to be acceptable
login_dict['email_address'] = login_dict['email_address'].replace(' ', '')
login_dict['email_address'] = login_dict['email_address'].lower()
email_address = login_dict['email_address']
(error_dict) = login_utils.error_check_signup_parameters(login_dict, lang_idx)
# Now check if username is already taken
query = models.UserModel.query().filter(models.UserModel.username == username)
query_result = query.fetch(limit=1)
if len(query_result) > 0:
error_dict['username'] = u"%s" % constants.ErrorMessages.username_taken
else:
# now check if the username is in the process of being registered (in EmailAuthorization model)
query = models.EmailAutorizationModel.query().filter(models.EmailAutorizationModel.username == username)
query_result = query.fetch(limit=1)
if len(query_result) > 0:
error_dict['username'] = u"%s" % constants.ErrorMessages.username_taken
# if there are no errors, then store the signup information.
if not error_dict:
password_salt = uuid.uuid4().hex
# encrypt the password
encrypted_password = utils.new_passhash(login_dict['password'], password_salt)
authorization_info_status = store_authorization_info_and_send_email_wrapper(request, login_dict, encrypted_password, password_salt, currently_displayed_url)
if authorization_info_status == 'OK':
response_dict['Registration_OK'] = {'username': username,
'verification_email' : email_address}
if utils.is_exempt_user():
response_dict['Registration_OK']['allow_empty_code'] = "true"
else:
response_dict['Registration_Error'] = {'message': authorization_info_status}
else:
response_dict['Registration_Error'] = error_dict
json_response = simplejson.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
except:
error_message = "Unknown error"
error_reporting.log_exception(logging.critical, error_message = error_message)
return http.HttpResponseBadRequest(error_message)
def store_new_user_after_verify(request, lang_idx, login_dict, encrypted_password, password_salt):
# Store the user information passed in the request into a userobject.
# Does some validation to prevent attacks
try:
# The following error-checking should never fail unless the user has modified their login parameters
# after signing up - this is here just for peace of mind.
error_dict = login_utils.error_check_signup_parameters(login_dict, lang_idx)
if error_dict:
# if there is an error, make them re-do login process (this should never happen though)
error_message = repr(error_dict)
error_reporting.log_exception(logging.error, error_message=error_message)
return ("Error", None)
login_dict['username'] = login_dict['username'].upper()
username = login_dict['username']
password = encrypted_password
# if the username is already registered, then do not add another user into the database.
# Re-direct the user to to the login screen and indicate that they must enter in their
# username and password to login.
q = models.UserModel.query().order(-models.UserModel.last_login_string)
q = q.filter(models.UserModel.username == username)
q = q.filter(models.UserModel.user_is_marked_for_elimination == False)
userobject = q.get()
if userobject:
# user is already authorized -- send back to login
return ("username_already_registered", None)
q = models.UserModel.query().order(-models.UserModel.last_login_string)
q = q.filter(models.UserModel.username == username)
q = q.filter(models.UserModel.user_is_marked_for_elimination == True)
userobject = q.get()
if userobject:
# user has been deleted - return the userobject so that we can later provide
# additional information about why this userobject was deleted
return ("username_deleted", userobject)
# make sure that the user name is not already registered. (this should not happen
# under normal circumstances, but could possibly happen if someone is hacking our system or if two users have gone through
# the registration process and attempted to register the same username at the same moment)
query = models.UserModel.gql("WHERE username = :username", username = username)
if query.get():
error_reporting.log_exception(logging.warning, error_message = 'Registered username encountered in storing user - sending back to main login')
return ("Error", None)
except:
error_reporting.log_exception(logging.critical)
return ("Error", None)
# do not change the order of the following calls. Userobject is written twice because this
# is necessary to get a database key value. Also, since this is only on signup, efficiency is
# not an issue.
try:
# Cleanup the login_dict before passing it in to the UserModel
if 'login_type' in login_dict:
del login_dict['login_type']
# passing in the login_dict to the following declaration will copy the values into the user object.
userobject = models.UserModel(**login_dict)
userobject.password = encrypted_password
userobject.password_salt = password_salt
userobject.username_combinations_list = utils.get_username_combinations_list(username)
utils.put_userobject(userobject)
userobject.search_preferences2 = login_utils.create_search_preferences2_object(userobject, request.LANGUAGE_CODE)
userobject = login_utils.setup_new_user_defaults_and_structures(userobject, login_dict['username'], request.LANGUAGE_CODE)
userobject.viewed_profile_counter_ref = login_utils.create_viewed_profile_counter_object(userobject.key)
userobject.accept_terms_and_rules_key = login_utils.create_terms_and_rules_object()
# store indication of email address validity (syntactically valid )
if login_dict['email_address'] == '----':
userobject.email_address_is_valid = False
else:
userobject.email_address_is_valid = True
# We can update the user_tracker object with the
# email address, since we have now confirmed that it is truly verified.
utils.update_email_address_on_user_tracker(userobject, login_dict['email_address'])
try:
# make sure that the email address is a valid email address.
assert(email_re.match(login_dict['email_address']))
except:
error_reporting.log_exception(logging.warning, error_message = 'Email address %s is invalid' % login_dict['email_address'])
userobject.registration_ip_address = os.environ['REMOTE_ADDR']
userobject.registration_city = userobject.last_login_city = request.META.get('HTTP_X_APPENGINE_CITY', None)
userobject.registration_country_code = userobject.last_login_city = request.META.get('HTTP_X_APPENGINE_COUNTRY', None)
utils.store_login_ip_information(request, userobject)
utils.put_userobject(userobject)
logging.info("New userobject stored: Username: %s Email: %s" % (userobject.username, userobject.email_address))
login_utils.store_session(request, userobject)
lang_set_in_session = lang_settings.set_language_in_session(request, request.LANGUAGE_CODE)
assert(lang_set_in_session)
# send the user a welcome email and key and wink from Alex
messages.welcome_new_user(request)
except:
# if there is any failure in the signup process, clean up all the data stored, and send the user back to the login page with the data that they
# previously entered.
try:
error_message = "Error storing user -- cleaning up and sending back to login screen"
error_reporting.log_exception(logging.critical, request = request, error_message = error_message )
utils.delete_sub_object(userobject, 'search_preferences2')
utils.delete_sub_object(userobject, 'spam_tracker')
utils.delete_sub_object(userobject, 'unread_mail_count_ref')
utils.delete_sub_object(userobject, 'new_contact_counter_ref')
utils.delete_sub_object(userobject, 'user_tracker')
utils.delete_sub_object(userobject, 'viewed_profile_counter_ref')
utils.delete_sub_object(userobject, 'user_photos_tracker_key')
try:
error_message = "Deleting userobject: %s : %s" % (userobject.username, repr(userobject))
userobject.key.delete() # (Finally - remove the userobject)
error_reporting.log_exception(logging.critical, error_message = error_message)
except:
error_message = "Unable to delete userobject: %s : %s" % (userobject.username, repr(userobject))
error_reporting.log_exception(logging.critical, request = request, error_message = error_message)
except:
error_reporting.log_exception(logging.critical, error_message = "Unable to clean up after failed sign-up attempt" )
return ("Error", None)
# log information about this users login time, and IP address
utils.update_ip_address_on_user_tracker(userobject.user_tracker)
logging.info("Registered/Logging in User: %s IP: %s country code: %s " % (
userobject.username, os.environ['REMOTE_ADDR'], request.META.get('HTTP_X_APPENGINE_COUNTRY', None)))
return ("OK", userobject)
def check_verification_and_authorize_user(request):
# Note, this function is called directly as a URL from a user clicking on an email link OR
# it is called from the user entering the verification_code in a popup dialog.
# We direct them to a web page after verification.
#
# We return a json object containing the URL that the client-side javascript will then redirect to.
try:
if request.method != 'POST':
error_message = "check_verification_and_authorize_user was not called with POST data"
raise Exception(error_message)
username = request.POST.get("username", None)
secret_verification_code = request.POST.get("secret_verification_code", None)
current_path = request.POST.get("current_path", None)
logging.info("username: %s entered code: %s" % (username, secret_verification_code))
# remove spaces from the verification code - if the user copies and pastes it incorrectly
# it might have a space before or after.
secret_verification_code = secret_verification_code.replace(' ' , '')
if current_path:
locale, path = localeurl_utils.strip_path(current_path)
if path in constants.URLS_THAT_NEED_REDIRECT_AFTER_ENTRY:
destination_url = "/"
else:
destination_url = current_path
else:
destination_url = "/"
authorization_info = login_utils.query_authorization_info_for_username(username)
if authorization_info:
if authorization_info.secret_verification_code != secret_verification_code:
authorization_status = "Incorrect code"
if utils.is_exempt_user() and not secret_verification_code:
# If this is an exempt (admin) user and he has left the input empty, then for testing
# purposes, we continue with the registration process. Note: that in order to submit an "empty"
# string from the client side, the user must actually press the Enter key inside the text box -
# otherwise if the user presses the submit button, then the text box will contain the text
# "Verification code" which will not work.
authorization_status = "Authorization OK"
else:
# secret codes match
authorization_status = "Authorization OK" # User has sucessfully authorized their account
else:
authorization_status = "No authorization_info"
if authorization_status == "Authorization OK":
login_get_dict = pickle.load(StringIO.StringIO(authorization_info.pickled_login_get_dict))
encrypted_password = authorization_info.encrypted_password
password_salt = authorization_info.password_salt
lang_idx = localizations.input_field_lang_idx[request.LANGUAGE_CODE]
(store_user_status, userobject) = store_new_user_after_verify(request, lang_idx, login_get_dict, encrypted_password, password_salt)
if store_user_status == "OK":
if destination_url == "/":
# if destination_url is not defined (ie. = "/"), then we will direct the user to edit their profile
# otherwise, we just send the user to whatever destination_url we have already assigned.
destination_url = reverse("edit_profile_url", kwargs={'display_nid' : userobject.key.id()})
elif store_user_status == "Error":
destination_url = "/"
elif store_user_status == "username_already_registered":
destination_url = '/?already_registered_username=%s&show_registration_login_popup=true' % username
elif store_user_status == "username_deleted":
response_dict = {"username_deleted" : utils.get_removed_user_reason_html(userobject)}
json_response = json.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
else:
destination_url = "/"
error_reporting.log_exception(logging.critical, error_message = "unknown status %s returned from store_new_user_after_verify" % store_user_status)
else:
# The verification code does not match the username that is being verified.
# could happen if the user clicks on the link to authorize their account at some point after
# we have erased the authorization info, or if the code really doesn't match
# We want to let the user know that the code they have entered is incorrect, without redirecting to
# another page.
if authorization_status == "Incorrect code" :
warning_message= ugettext("Incorrect code")
elif authorization_status == "No authorization_info":
warning_message = ugettext("Verification code is invalid or expired")
else :
error_reporting.log_exception(logging.critical)
warning_message = ugettext("Internal error - this error has been logged, and will be investigated immediately")
response_dict = {"warning_html" : u'<strong><span class="cl-warning-text">%(warning_message)s</span></strong>' % {
'warning_message' : warning_message}
}
json_response = json.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
except:
destination_url = "/"
error_reporting.log_exception(logging.critical)
logging.info('Username:"%s". Will be redirected by javascript client to to %s' % (username, destination_url))
response_dict = {"User_Stored_Redirect_URL" : destination_url}
json_response = simplejson.dumps(response_dict)
return http.HttpResponse(json_response, mimetype='text/javascript')
avg_line_length: 54.697047 | max_line_length: 175 | alphanum_fraction: 0.648173
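The correct_username_password branches in process_login() above implement a hash upgrade: a password that still verifies against the legacy unsalted hash is accepted once and immediately re-stored under a fresh per-user salt. A minimal, self-contained sketch of that pattern follows; the hash functions are generic stand-ins, not LexaLink's actual utils.old_passhash / utils.new_passhash:

```python
import hashlib
import uuid

def old_passhash(password):
    # legacy unsalted hash (illustrative stand-in only)
    return hashlib.sha1(password.encode("utf-8")).hexdigest()

def new_passhash(password, salt):
    # salted hash (illustrative stand-in only)
    return hashlib.sha256((salt + password).encode("utf-8")).hexdigest()

def check_and_upgrade_password(user, password):
    """user is any object with mutable .password and .password_salt attributes."""
    if user.password_salt and user.password == new_passhash(password, user.password_salt):
        return True                            # already on the salted scheme
    if user.password == old_passhash(password):
        user.password_salt = uuid.uuid4().hex  # migrate on successful login
        user.password = new_passhash(password, user.password_salt)
        return True
    return False
```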
hexsha: c868e9a61c22c86cee4b5bf5c671d1b97f0dbc87 | size: 5,140 | ext: py | lang: Python
max_stars: path lexicon/providers/linode.py | repo 1500cloud/lexicon | head 8fa65a4e8c844d5d7c33f55ac6e66242f7d415d9 | licenses ["MIT"] | count 1 | events 2019-08-17T23:09:24.000Z to 2019-08-17T23:09:24.000Z
max_issues: path lexicon/providers/linode.py | repo 1500cloud/lexicon | head 8fa65a4e8c844d5d7c33f55ac6e66242f7d415d9 | licenses ["MIT"] | count null | events null
max_forks: path lexicon/providers/linode.py | repo 1500cloud/lexicon | head 8fa65a4e8c844d5d7c33f55ac6e66242f7d415d9 | licenses ["MIT"] | count 1 | events 2021-01-06T16:04:36.000Z to 2021-01-06T16:04:36.000Z
content:
"""Module provider for Linode"""
from __future__ import absolute_import
import json
import logging
import requests
from lexicon.providers.base import Provider as BaseProvider
LOGGER = logging.getLogger(__name__)
NAMESERVER_DOMAINS = ['linode.com']
def provider_parser(subparser):
"""Module provider for Linode"""
subparser.add_argument(
"--auth-token", help="specify api key for authentication")
class Provider(BaseProvider):
"""Provider class for Linode"""
def __init__(self, config):
super(Provider, self).__init__(config)
self.domain_id = None
self.api_endpoint = 'https://api.linode.com/api/'
def _authenticate(self):
self.domain_id = None
payload = self._get('domain.list')
for domain in payload['DATA']:
if domain['DOMAIN'] == self.domain:
self.domain_id = domain['DOMAINID']
if self.domain_id is None:
raise Exception('Domain not found')
def _create_record(self, rtype, name, content):
if not self._list_records(rtype, name, content):
self._get('domain.resource.create', query_params={
'DomainID': self.domain_id,
'Name': self._relative_name(name),
'Type': rtype,
'Target': content,
'TTL_sec': 0
})
return True
# List all records. Return an empty list if no records found
# type, name and content are used to filter records.
# If possible filter during the query, otherwise filter after response is received.
def _list_records(self, rtype=None, name=None, content=None):
payload = self._get('domain.resource.list',
query_params={'DomainID': self.domain_id})
resource_list = payload['DATA']
if rtype:
resource_list = [
resource for resource in resource_list if resource['TYPE'] == rtype]
if name:
cmp_name = self._relative_name(name.lower())
resource_list = [
resource for resource in resource_list if resource['NAME'] == cmp_name]
if content:
resource_list = [
resource for resource in resource_list if resource['TARGET'] == content]
processed_records = []
for resource in resource_list:
processed_records.append({
'id': resource['RESOURCEID'],
'type': resource['TYPE'],
'name': self._full_name(resource['NAME']),
'ttl': resource['TTL_SEC'],
'content': resource['TARGET']
})
LOGGER.debug('list_records: %s', processed_records)
return processed_records
    # Update a record. If no identifier is given, the first record matching rtype/name is updated.
def _update_record(self, identifier, rtype=None, name=None, content=None):
if not identifier:
resources = self._list_records(rtype, name, None)
identifier = resources[0]['id'] if resources else None
LOGGER.debug('update_record: %s', identifier)
self._get('domain.resource.update', query_params={
'DomainID': self.domain_id,
'ResourceID': identifier,
'Name': self._relative_name(name).lower() if name else None,
'Type': rtype if rtype else None,
'Target': content if content else None
})
return True
# Delete an existing record.
# If record does not exist, do nothing.
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
delete_resource_id = []
if not identifier:
resources = self._list_records(rtype, name, content)
delete_resource_id = [resource['id'] for resource in resources]
else:
delete_resource_id.append(identifier)
LOGGER.debug('delete_records: %s', delete_resource_id)
for resource_id in delete_resource_id:
self._get('domain.resource.delete', query_params={
'DomainID': self.domain_id,
'ResourceID': resource_id
})
return True
# Helpers
def _request(self, action='GET', url='', data=None, query_params=None):
if data is None:
data = {}
if query_params is None:
query_params = {}
default_headers = {
'Accept': 'application/json',
'Content-Type': 'application/json'
}
query_params['api_key'] = self._get_provider_option('auth_token')
query_params['resultFormat'] = 'JSON'
query_params['api_action'] = url
response = requests.request(action, self.api_endpoint, params=query_params,
data=json.dumps(data),
headers=default_headers)
# if the request fails for any reason, throw an error.
response.raise_for_status()
if action == 'DELETE':
return ''
result = response.json()
if result['ERRORARRAY']:
raise Exception('Linode api error: {0}'.format(result['ERRORARRAY']))
return result
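# Standalone sketch (not part of the provider) of the request shape that _request()
# assembles for the legacy Linode API; "api_key" is a placeholder assumption and a real
# key would be required for the call to succeed.
def _example_domain_list(api_key):
    params = {
        'api_key': api_key,
        'resultFormat': 'JSON',
        'api_action': 'domain.list',
    }
    response = requests.get('https://api.linode.com/api/', params=params)
    response.raise_for_status()
    return response.json()['DATA']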
| 35.944056 | 88 | 0.593969 |
df43151939b9eb8b422de5ed83d901dbb0d2c561
| 212 |
py
|
Python
|
load_intraday.py
|
eduardodisanti/stock_market_agents
|
207e4d30ff3f826250636fca2e750b6e5b6b0b51
|
[
"MIT"
] | null | null | null |
load_intraday.py
|
eduardodisanti/stock_market_agents
|
207e4d30ff3f826250636fca2e750b6e5b6b0b51
|
[
"MIT"
] | null | null | null |
load_intraday.py
|
eduardodisanti/stock_market_agents
|
207e4d30ff3f826250636fca2e750b6e5b6b0b51
|
[
"MIT"
] | null | null | null |
from gather_history_data import load_intraday_data
SYMBOLS = ['FCAU', 'JNJ', 'SPCE', 'TLRD']
for symbol in SYMBOLS:
print("Loading", symbol)
load_intraday_data(symbol, interval=15)
| 30.285714 | 72 | 0.75 |
cdb3490a054e3aa3106f29bcc36f812c6ac70490
| 10,287 |
py
|
Python
|
letsencrypt-nginx/docs/conf.py
|
BillKeenan/lets-encrypt-preview
|
de182946b5cf09a3486f38d3df0c43e2280b3654
|
[
"Apache-2.0"
] | null | null | null |
letsencrypt-nginx/docs/conf.py
|
BillKeenan/lets-encrypt-preview
|
de182946b5cf09a3486f38d3df0c43e2280b3654
|
[
"Apache-2.0"
] | null | null | null |
letsencrypt-nginx/docs/conf.py
|
BillKeenan/lets-encrypt-preview
|
de182946b5cf09a3486f38d3df0c43e2280b3654
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# letsencrypt-nginx documentation build configuration file, created by
# sphinx-quickstart on Sun Oct 18 13:39:39 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
here = os.path.abspath(os.path.dirname(__file__))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(here, '..')))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance', 'private-members']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'letsencrypt-nginx'
copyright = u'2014-2015, Let\'s Encrypt Project'
author = u'Let\'s Encrypt Project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0'
# The full version, including alpha/beta/rc tags.
release = '0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'py:obj'
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# http://docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'letsencrypt-nginxdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'letsencrypt-nginx.tex', u'letsencrypt-nginx Documentation',
u'Let\'s Encrypt Project', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'letsencrypt-nginx', u'letsencrypt-nginx Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'letsencrypt-nginx', u'letsencrypt-nginx Documentation',
author, 'letsencrypt-nginx', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'acme': ('https://acme-python.readthedocs.org/en/latest/', None),
'letsencrypt': ('https://letsencrypt.readthedocs.org/en/latest/', None),
}
| 32.971154 | 97 | 0.719063 |
cb829a868f99a827969aefee4c34fb11d53b78b9
| 744 |
py
|
Python
|
amicleaner/resources/config.py
|
indyaah/aws-amicleaner
|
718dbd889a2576b291dc211b82ffd784372869ba
|
[
"MIT"
] | null | null | null |
amicleaner/resources/config.py
|
indyaah/aws-amicleaner
|
718dbd889a2576b291dc211b82ffd784372869ba
|
[
"MIT"
] | null | null | null |
amicleaner/resources/config.py
|
indyaah/aws-amicleaner
|
718dbd889a2576b291dc211b82ffd784372869ba
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# set your aws env vars to production
from blessings import Terminal
# terminal colors
TERM = Terminal()
# Number of previous amis to keep based on grouping strategy
# not including the ami currently running by an ec2 instance
KEEP_PREVIOUS = 4
# the way to regroup AMIs, the default filtering pattern is the creation date
# and the possible other values are :
#
# name : with a grep into the ami name
# ex: ubuntu => ["ubuntu-20160122", "ubuntu-20160123"]
# tags : with keys provided in GROUPING_STRATEGY_TAGS_KEYS, it filters AMI tags
# ex: ["Role", "Env"] => ["ubuntu-20160122"]
#
MAPPING_KEY = "tags"
MAPPING_VALUES = ["environment", "role"]
EXCLUDED_MAPPING_VALUES = []
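# Illustrative sketch (not used by amicleaner itself) of what the "tags" grouping
# strategy amounts to: AMIs sharing the same values for the configured tag keys land in
# the same candidate group. The input shape ({"Tags": {...}}) is an assumption made for
# this example only.
def example_group_by_tags(amis, keys=MAPPING_VALUES):
    groups = {}
    for ami in amis:
        group_key = tuple(ami.get("Tags", {}).get(key) for key in keys)
        groups.setdefault(group_key, []).append(ami)
    return groups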
| 24.8 | 79 | 0.708333 |
1ffa0e58b5be7b2143ad7a8d67192bf8af913e31
| 58 |
py
|
Python
|
HW7/Vaulin/CW7.6.py
|
kolyasalubov/Lv-677.PythonCore
|
c9f9107c734a61e398154a90b8a3e249276c2704
|
[
"MIT"
] | null | null | null |
HW7/Vaulin/CW7.6.py
|
kolyasalubov/Lv-677.PythonCore
|
c9f9107c734a61e398154a90b8a3e249276c2704
|
[
"MIT"
] | null | null | null |
HW7/Vaulin/CW7.6.py
|
kolyasalubov/Lv-677.PythonCore
|
c9f9107c734a61e398154a90b8a3e249276c2704
|
[
"MIT"
] | 6 |
2022-02-22T22:30:49.000Z
|
2022-03-28T12:51:19.000Z
|
def bool_to_word(boolean):
    return "Yes" if boolean else "No"
| 29 | 34 | 0.689655 |
d9c93d17c190f767943b8c9d44ab37ad2799594b
| 1,643 |
py
|
Python
|
setup.py
|
SnehalD14/autolab_core
|
c271f1f84283ab5d368618eb85754a549aeae4a3
|
[
"Apache-2.0"
] | 2 |
2019-05-27T11:32:31.000Z
|
2019-06-13T21:46:34.000Z
|
setup.py
|
SnehalD14/autolab_core
|
c271f1f84283ab5d368618eb85754a549aeae4a3
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
SnehalD14/autolab_core
|
c271f1f84283ab5d368618eb85754a549aeae4a3
|
[
"Apache-2.0"
] | null | null | null |
"""
Setup of core python codebase
Author: Jeff Mahler
"""
from setuptools import setup
requirements = [
'numpy',
'scipy',
'scikit-learn',
'ruamel.yaml',
'matplotlib<=2.2.0',
'multiprocess',
'setproctitle',
'joblib',
'colorlog'
]
exec(open('autolab_core/version.py').read())
setup(
name='autolab_core',
version = __version__,
description = 'Core utilities for the Berkeley AutoLab',
long_description = 'Core utilities for the Berkeley AutoLab. Includes rigid transformations, loggers, and 3D data wrappers.',
author = 'Jeff Mahler',
author_email = '[email protected]',
license = 'Apache Software License',
url = 'https://github.com/BerkeleyAutomation/autolab_core',
keywords = 'robotics grasping transformations',
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Natural Language :: English',
'Topic :: Scientific/Engineering'
],
packages = ['autolab_core'],
install_requires = requirements,
extras_require = { 'docs' : [
'sphinx',
'sphinxcontrib-napoleon',
'sphinx_rtd_theme'
],
'ros' : [
'rospkg',
'catkin_pkg',
'empy'
],
}
)
| 29.339286 | 129 | 0.545953 |
6efed9c571a8eeaa22acb2db06a6463329d0fedc
| 249 |
py
|
Python
|
validadores.py
|
OrlandoBitencourt/Jokenpy
|
81b405f8bf465eafc7e81e68010948a57a7e51a3
|
[
"MIT"
] | null | null | null |
validadores.py
|
OrlandoBitencourt/Jokenpy
|
81b405f8bf465eafc7e81e68010948a57a7e51a3
|
[
"MIT"
] | null | null | null |
validadores.py
|
OrlandoBitencourt/Jokenpy
|
81b405f8bf465eafc7e81e68010948a57a7e51a3
|
[
"MIT"
] | null | null | null |
def valida_carta_escolhida(carta_escolhida):
if 0 < carta_escolhida <= 3:
return True
else:
print("\nDigite uma opção válida!\n")
return False
def valida_rodada(rodada):
rodada += 1
return rodada
| 20.75 | 46 | 0.610442 |
85dbe9e8eeae6cb66ac3720054ea54bf50bb8a44
| 1,188 |
py
|
Python
|
app/views_stream.py
|
fossabot/stream_vod_indexer
|
58bff60cc4adb1b8e5966134d2e560e59464d196
|
[
"MIT"
] | null | null | null |
app/views_stream.py
|
fossabot/stream_vod_indexer
|
58bff60cc4adb1b8e5966134d2e560e59464d196
|
[
"MIT"
] | null | null | null |
app/views_stream.py
|
fossabot/stream_vod_indexer
|
58bff60cc4adb1b8e5966134d2e560e59464d196
|
[
"MIT"
] | 1 |
2021-02-18T14:25:39.000Z
|
2021-02-18T14:25:39.000Z
|
from django.shortcuts import render
import datetime
from django.core.serializers import serialize
from app.models import GameStorage, StreamStorage
from django.http import JsonResponse
NotImplemented = JsonResponse({"error": "NotImplemented"})
def __is_date_valid(year: int, month: int, day: int):
try:
datetime.datetime(year=year, month=month, day=day)
return True
except ValueError:
return False
def date(request, year: int, month: int, day: int):
return JsonResponse(
{
'data': [obj for obj in StreamStorage.objects.filter(
date_of_stream__year=year,
date_of_stream__month=month,
date_of_stream__day=day,
vod_status=True
).values()]
}
)
def slug(request, slug: str):
return JsonResponse(
{
'data': [obj for obj in StreamStorage.objects.filter(
game__game_slug__iexact=slug,
vod_status=True
).values()]
}
)
def id(request, id: int):
return JsonResponse(
{
'data': StreamStorage.objects.filter(id=id).values()[0]
}
)
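# Minimal URLconf sketch (assumed, not part of the original app) showing one way these
# views could be wired up; the route prefixes and names below are illustrative only.
from django.urls import path
urlpatterns = [
    path('streams/date/<int:year>/<int:month>/<int:day>/', date, name='streams-by-date'),
    path('streams/game/<slug:slug>/', slug, name='streams-by-game'),
    path('streams/<int:id>/', id, name='stream-by-id'),
]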
| 24.244898 | 67 | 0.600168 |
ca3b14a23e5738582a19a7398d75d6f239e8412b
| 1,006 |
py
|
Python
|
tasks.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | 2 |
2015-03-10T14:33:16.000Z
|
2015-03-10T14:33:18.000Z
|
tasks.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | null | null | null |
tasks.py
|
LisaDawn/TextBlob
|
6b76c3f568a3cea16389d6bdb1b3e5a9dc6043e9
|
[
"MIT"
] | null | null | null |
import os
from invoke import task, run
docs_dir = 'docs'
build_dir = os.path.join(docs_dir, '_build')
@task
def test():
run("python run_tests.py", pty=True)
@task
def deps():
print("Vendorizing nltk...")
run("git clone https://github.com/nltk/nltk.git")
run("rm -rf text/nltk")
run("mv nltk/nltk text/")
run("rm -rf nltk")
@task
def clean():
run("rm -rf build")
run("rm -rf dist")
run("rm -rf textblob.egg-info")
clean_docs()
print("Cleaned up.")
@task
def clean_docs():
run("rm -rf %s" % build_dir)
@task
def browse_docs():
run("open %s" % os.path.join(build_dir, 'index.html'))
@task
def build_docs(clean=False, browse=False):
if clean:
clean_docs()
run("sphinx-build %s %s" % (docs_dir, build_dir), pty=True)
if browse:
browse_docs()
@task
def build_readme():
run("rst2html.py README.rst > README.html", pty=True)
run("open README.html")
@task
def doctest():
os.chdir(docs_dir)
run("make doctest")
| 19.346154 | 63 | 0.621272 |
48cdaa8fd31a196dd0d50e26d256f14c51d9d948
| 1,604 |
py
|
Python
|
phiseg_train.py
|
MiguelMonteiro/PHiSeg-code
|
86436f84310e30eecedd3a656fdc528646a496c6
|
[
"Apache-2.0"
] | 4 |
2020-08-26T07:37:10.000Z
|
2021-10-13T03:50:23.000Z
|
phiseg_train.py
|
MiguelMonteiro/PHiSeg-code
|
86436f84310e30eecedd3a656fdc528646a496c6
|
[
"Apache-2.0"
] | null | null | null |
phiseg_train.py
|
MiguelMonteiro/PHiSeg-code
|
86436f84310e30eecedd3a656fdc528646a496c6
|
[
"Apache-2.0"
] | 1 |
2021-05-11T07:24:31.000Z
|
2021-05-11T07:24:31.000Z
|
import logging
from importlib.machinery import SourceFileLoader
import argparse
from data.data_switch import data_switch
import os
import config.system as sys_config
import shutil
import utils
from phiseg import phiseg_model
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
def main(exp_config):
logging.info('**************************************************************')
logging.info(' *** Running Experiment: %s', exp_config.experiment_name)
logging.info('**************************************************************')
# Get Data
data_loader = data_switch(exp_config.data_identifier)
data = data_loader(exp_config)
# Create Model
phiseg = phiseg_model.phiseg(exp_config)
# Fit model to data
phiseg.train(data)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Script for training")
parser.add_argument("--exp-path", type=str, help="Path to experiment config file")
parser.add_argument("--device", type=str, help="device for computation")
args = parser.parse_args()
config_file = args.exp_path
    # splitext is safer than rstrip('.py'), which strips any trailing '.', 'p' or 'y' characters
    config_module = os.path.splitext(os.path.basename(config_file))[0]
os.environ["CUDA_VISIBLE_DEVICES"] = args.device
exp_config = SourceFileLoader(config_module, config_file).load_module()
log_dir = os.path.join(sys_config.log_root, exp_config.log_dir_name, exp_config.experiment_name)
utils.makefolder(log_dir)
shutil.copy(exp_config.__file__, log_dir)
logging.info('!!!! Copied exp_config file to experiment folder !!!!')
main(exp_config=exp_config)
| 31.45098 | 100 | 0.674564 |
13e433c3c3af14a00cca437177659f6d30e6976e
| 768 |
py
|
Python
|
src/main/python/spider/RequestsCookie.py
|
photowey/python-study
|
218456a0d661709a49fb060659664102b9287de8
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/spider/RequestsCookie.py
|
photowey/python-study
|
218456a0d661709a49fb060659664102b9287de8
|
[
"Apache-2.0"
] | null | null | null |
src/main/python/spider/RequestsCookie.py
|
photowey/python-study
|
218456a0d661709a49fb060659664102b9287de8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
# ---------------------------------------------
# @file Requests.py
# @description RequestsCookie
# @author WcJun
# @date 2020/07/05
# ---------------------------------------------
import src.main.python.spider.SpiderUtils as SpiderUtils
import requests
def main():
http_headers = SpiderUtils.populate_headers()
hello_url = "http://192.168.0.5:939/hello/greet"
cookie_url = "http://192.168.0.5:939/hello/cookie"
session = requests.Session()
hello_response = session.get(hello_url, verify=False, headers=http_headers)
print("the hello response is:", hello_response.text)
cookie_response = session.get(cookie_url, verify=False, headers=http_headers)
print("the user response is:", cookie_response.text)
| 28.444444 | 81 | 0.628906 |
2eff26dd39eb731830a792cc34b01c2d2263557d
| 1,145 |
py
|
Python
|
logadempirical/logdeep/dataset/__init__.py
|
LogIntelligence/LogADEmpirical
|
48458aee65c1c84466b04dd4092fae79a7f341fd
|
[
"MIT"
] | 11 |
2022-02-06T23:54:42.000Z
|
2022-03-30T06:41:39.000Z
|
logadempirical/logdeep/dataset/__init__.py
|
LogIntelligence/LogADEmpirical
|
48458aee65c1c84466b04dd4092fae79a7f341fd
|
[
"MIT"
] | 1 |
2022-02-13T23:24:56.000Z
|
2022-02-14T03:57:50.000Z
|
logadempirical/logdeep/dataset/__init__.py
|
LogIntelligence/LogADEmpirical
|
48458aee65c1c84466b04dd4092fae79a7f341fd
|
[
"MIT"
] | 5 |
2022-02-16T12:58:59.000Z
|
2022-03-21T04:59:40.000Z
|
from .vocab import Vocab
import torch
from transformers import BertTokenizer, BertModel
import re
import string
bert_tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
bert_model = BertModel.from_pretrained('bert-base-uncased')
def clean(s):
# s = re.sub(r'(\d+\.){3}\d+(:\d+)?', " ", s)
# s = re.sub(r'(\/.*?\.[\S:]+)', ' ', s)
s = re.sub('\]|\[|\)|\(|\=|\,|\;', ' ', s)
s = " ".join([word.lower() if word.isupper() else word for word in s.strip().split()])
s = re.sub('([A-Z][a-z]+)', r' \1', re.sub('([A-Z]+)', r' \1', s))
s = " ".join([word for word in s.split() if not bool(re.search(r'\d', word))])
trantab = str.maketrans(dict.fromkeys(list(string.punctuation)))
content = s.translate(trantab)
s = " ".join([word.lower().strip() for word in content.strip().split()])
return s
def bert_encoder(s, E):
s = clean(s)
if s in E.keys():
return E[s]
inputs = bert_tokenizer(s, return_tensors='pt', max_length=512, truncation=True)
outputs = bert_model(**inputs)
v = torch.mean(outputs.last_hidden_state, dim=1)
E[s] = v[0].detach().numpy()
return E[s]
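if __name__ == '__main__':
    # Minimal usage sketch (assumed): embed one raw log message, caching the result in a
    # plain dict keyed by the cleaned sentence; the sample message below is illustrative.
    embedding_cache = {}
    vec = bert_encoder("Received block blk_3587508140051953248 of size 67108864", embedding_cache)
    print(vec.shape)  # (768,) for bert-base-uncased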
| 34.69697 | 90 | 0.593886 |
31417d69d162bf56a47a1caac63d7990fcb3f6ab
| 3,072 |
py
|
Python
|
minesgenerate_v1.3.py
|
rakanann/minesgenerate
|
a5810bc91ecdacb8743720f8039a8611412b7b32
|
[
"BSD-3-Clause"
] | null | null | null |
minesgenerate_v1.3.py
|
rakanann/minesgenerate
|
a5810bc91ecdacb8743720f8039a8611412b7b32
|
[
"BSD-3-Clause"
] | null | null | null |
minesgenerate_v1.3.py
|
rakanann/minesgenerate
|
a5810bc91ecdacb8743720f8039a8611412b7b32
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import random
import math
class Map():
def __init__(self,num_mine,num_hero):
self.num_mine = num_mine
self.num_hero = num_hero
self.mines = []
self.heros = []
self.align = []
self.flag=0
self.init_map()
for i in self.heros:
self.align.append([])
def mines_generate(self,num_cluster=5,num_mines_each_cluster=100):
mines_list = []
        # randomly generate the parameters of num_cluster bivariate normal distributions
for i in range(num_cluster):
mu = np.random.randn(2)
std = np.random.random([2,2])
std_limit = np.sqrt(std[0,0]*std[1,1])
std[0,1] = -std_limit + 2*std_limit*np.random.random()
std[1,0] = std[0,1]
#print(mu)
#print(std)
mines = np.random.multivariate_normal(mu,std,size=num_mines_each_cluster)
#print(mines)
mines_list.append(mines)
return mines_list
def init_map(self):
mines_list=self.mines_generate(self.num_hero,self.num_mine)
for i in range(len(mines_list)):
j=mines_list[i].tolist()
for k in j:
self.mines.append(k)
for i in range(self.num_hero):
self.heros.append(random.choice(self.mines))
def hero_move(self):
k=0
for i in self.align:
x_num=0
y_num=0
for j in i:
x_num+=j[0]
y_num+=j[1]
x_num/=len(i)
y_num/=len(i)
            if int((self.heros[k][0]-x_num)*10000)==0 and int((self.heros[k][1]- y_num)*10000)==0:  # if unchanged from the previous position (to ~1e-4 precision), skip the update
self.flag+=1
else:
self.heros[k]=[x_num,y_num]
k+=1
return self.flag,len(self.heros)
def mine_align(self):
        for i in range(self.num_hero):  # clear each hero's assigned mines from the previous round
self.align[i]=[]
for i in self.mines:
self.path=[]
for j in self.heros:
self.path.append(math.sqrt((i[0]-j[0])**2+(i[1]-j[1])**2))
self.min=self.path.index(min(self.path))
self.align[self.min].append(i)
def map_visualization(self):
color=['r','b','g']
k=0
for i in self.align:
tmp=np.array(i)
X=tmp[:,0]
Y=tmp[:,1]
plt.plot(X,Y,color[k]+'*')
k+=1
k=0
color1=['y','k','m']
for i in self.heros:
tmp=np.array(i)
X = tmp[0]
Y = tmp[1]
plt.plot(X, Y, color1[k]+'x')
k+=1
plt.show()
def main():
map = Map(num_mine=100,num_hero=3)
while True:
map.mine_align()
i,j=map.hero_move()
#map.map_visualization()
        if i==j:  # exit the loop once all hero positions have stabilized
break
map.map_visualization()
if __name__=='__main__':
main()
| 28.981132 | 127 | 0.483073 |
7ec4a44bc1de5bf300499e0300184f175bd119a2
| 820 |
py
|
Python
|
api/src/validator/CommonValidator.py
|
SamuelJansen/health-check-manager-api
|
d75ecd3a8141a181bf458916b8c0cebb1ed73cb9
|
[
"MIT"
] | 1 |
2021-10-30T20:27:51.000Z
|
2021-10-30T20:27:51.000Z
|
api/src/validator/CommonValidator.py
|
SamuelJansen/health-check-manager-api
|
d75ecd3a8141a181bf458916b8c0cebb1ed73cb9
|
[
"MIT"
] | null | null | null |
api/src/validator/CommonValidator.py
|
SamuelJansen/health-check-manager-api
|
d75ecd3a8141a181bf458916b8c0cebb1ed73cb9
|
[
"MIT"
] | null | null | null |
from python_helper import Constant as c
from python_framework import Validator, ValidatorMethod, GlobalException, HttpStatus
@Validator()
class CommonValidator:
@ValidatorMethod(requestClass=bool)
def isBoolean(self, booleanObject):
...
@ValidatorMethod(requestClass=[str, str])
def strNotNull(self, key, attributeName):
if not key or c.NOTHING == key :
raise GlobalException(message=f'''The atribute "{attributeName}" cannot be empty''', status=HttpStatus.BAD_REQUEST)
@ValidatorMethod(requestClass=[str, str])
def pathVariableNotNull(self, pathVariable, pathVariableName):
if not pathVariable or c.NOTHING == pathVariable :
raise GlobalException(message=f'''The path variable "{pathVariableName}" cannot be null''', status=HttpStatus.BAD_REQUEST)
| 41 | 134 | 0.729268 |
8a5b281ef61a4da1681503dd3243551d95405286
| 3,553 |
py
|
Python
|
cmapPy/pandasGEXpress/tests/python3_tests/test_gctx2gct.py
|
Cellular-Longevity/cmapPy
|
abd4349f28af6d035f69fe8c399fde7bef8dd635
|
[
"BSD-3-Clause"
] | null | null | null |
cmapPy/pandasGEXpress/tests/python3_tests/test_gctx2gct.py
|
Cellular-Longevity/cmapPy
|
abd4349f28af6d035f69fe8c399fde7bef8dd635
|
[
"BSD-3-Clause"
] | 10 |
2022-03-14T18:40:45.000Z
|
2022-03-22T12:45:02.000Z
|
cmapPy/pandasGEXpress/tests/python3_tests/test_gctx2gct.py
|
Cellular-Longevity/cmapPy
|
abd4349f28af6d035f69fe8c399fde7bef8dd635
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import logging
import pandas as pd
import os
import cmapPy.pandasGEXpress.gctx2gct as gctx2gct
import cmapPy.pandasGEXpress.setup_GCToo_logger as setup_logger
import cmapPy.pandasGEXpress.parse_gct as parse_gct
import cmapPy.pandasGEXpress.parse_gctx as parse_gctx
logger = logging.getLogger(setup_logger.LOGGER_NAME)
import sys
sys.exit('NOT TESTING GCT PARSING FUNCTIONS YET')
class TestGCTx2GCT(unittest.TestCase):
def test_gctx2gct_main(self):
in_name = "cmapPy/pandasGEXpress/tests/functional_tests/mini_gctoo_for_testing.gctx"
out_name = "cmapPy/pandasGEXpress/tests/functional_tests/test_gctx2gct_out.gct"
args_string = "-f {} -o {}".format(in_name, out_name)
args = gctx2gct.build_parser().parse_args(args_string.split())
gctx2gct.gctx2gct_main(args)
# Make sure the input is identical to output
in_gctx = parse_gctx.parse(in_name)
out_gct = parse_gct.parse(out_name)
pd.util.testing.assert_frame_equal(in_gctx.data_df, out_gct.data_df, check_less_precise=3)
pd.util.testing.assert_frame_equal(in_gctx.col_metadata_df, out_gct.col_metadata_df)
pd.util.testing.assert_frame_equal(in_gctx.row_metadata_df, out_gct.row_metadata_df)
no_meta = "cmapPy/pandasGEXpress/tests/functional_tests/mini_gctoo_for_testing_nometa.gctx"
added_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_gctx2gct_out_annotated.gct"
row_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_rowmeta_n6.txt"
col_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_colmeta_n6.txt"
args_string = "-f {} -o {} -row_annot_path {} -col_annot_path {}".format(no_meta, added_meta, row_meta, col_meta )
args = gctx2gct.build_parser().parse_args(args_string.split())
gctx2gct.gctx2gct_main(args)
annotated_gct = parse_gct.parse(added_meta)
# Check added annotations are the same as original input GCTX
pd.util.testing.assert_frame_equal(in_gctx.data_df, annotated_gct.data_df, check_less_precise=3)
pd.util.testing.assert_frame_equal(in_gctx.col_metadata_df, annotated_gct.col_metadata_df)
pd.util.testing.assert_frame_equal(in_gctx.row_metadata_df, annotated_gct.row_metadata_df)
# Clean up
os.remove(out_name)
os.remove(added_meta)
def test_missing_annotations(self):
with self.assertRaises(Exception) as context:
no_meta = "cmapPy/pandasGEXpress/tests/functional_tests/mini_gctoo_for_testing_nometa.gctx"
added_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_gctx2gct_out_annotated.gct"
row_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_missing_rowmeta.txt"
args_string = "-f {} -o {} -row_annot_path {}".format(no_meta, added_meta, row_meta)
args = gctx2gct.build_parser().parse_args(args_string.split())
gctx2gct.gctx2gct_main(args)
print(context.exception)
self.assertTrue('Row ids in matrix missing from annotations file', context.exception)
with self.assertRaises(Exception) as context:
no_meta = "cmapPy/pandasGEXpress/tests/functional_tests/mini_gctoo_for_testing_nometa.gctx"
added_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_gctx2gct_out_annotated.gct"
col_meta = "cmapPy/pandasGEXpress/tests/functional_tests/test_missing_colmeta.txt"
args_string = "-f {} -o {} -col_annot_path {}".format(no_meta, added_meta, col_meta)
args = gctx2gct.build_parser().parse_args(args_string.split())
gctx2gct.gctx2gct_main(args)
self.assertTrue('Column ids in matrix missing from annotations file', context.exception)
if __name__ == "__main__":
setup_logger.setup(verbose=True)
unittest.main()
| 43.329268 | 116 | 0.804672 |
983a307150caeb53369cd8c0058d70adc342c34a
| 3,327 |
py
|
Python
|
code/LinearBinaryClassification/HelperClass/NeuralNet_1_2.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | 3 |
2021-05-25T10:18:23.000Z
|
2022-02-09T08:55:14.000Z
|
code/LinearBinaryClassification/HelperClass/NeuralNet_1_2.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | null | null | null |
code/LinearBinaryClassification/HelperClass/NeuralNet_1_2.py
|
Knowledge-Precipitation-Tribe/Neural-network
|
eac2e66cdde85b34ddf9313ce4d2b123cc1b8be8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-#
'''
# Name: NeuralNet
# Description:  Defines the neural network
#               Supports selecting the network type
#               Abstracts the loss function into a separate class
# Author: super
# Date: 2020/5/16
'''
import numpy as np
import matplotlib.pyplot as plt
import math
from pathlib import Path
from matplotlib.colors import LogNorm
from HelperClass.DataReader_1_1 import *
from HelperClass.HyperParameters_1_1 import *
from HelperClass.TrainingHistory_1_0 import *
from HelperClass.LossFunction_1_0 import *
from HelperClass.ClassifierFunction_1_0 import *
class NeuralNet_1_2(object):
def __init__(self, hp):
self.hp = hp
self.W = np.zeros((self.hp.input_size, self.hp.output_size))
self.B = np.zeros((1, self.hp.output_size))
def forwardBatch(self, batch_x):
Z = np.dot(batch_x, self.W) + self.B
        # compute the activation according to the network type
if self.hp.net_type == NetType.BinaryClassifier:
A = Logistic().forward(Z)
return A
else:
return Z
def backwardBatch(self, batch_x, batch_y, batch_a):
m = batch_x.shape[0]
dZ = batch_a - batch_y
dB = dZ.sum(axis=0, keepdims=True)/m
dW = np.dot(batch_x.T, dZ)/m
return dW, dB
def update(self, dW, dB):
self.W = self.W - self.hp.eta * dW
self.B = self.B - self.hp.eta * dB
def inference(self, x):
return self.forwardBatch(x)
def train(self, dataReader, checkpoint=0.1):
# calculate loss to decide the stop condition
loss_history = TrainingHistory_1_0()
loss_function = LossFunction_1_0(self.hp.net_type)
loss = 10
if self.hp.batch_size == -1:
self.hp.batch_size = dataReader.num_train
max_iteration = math.ceil(dataReader.num_train / self.hp.batch_size)
checkpoint_iteration = (int)(max_iteration * checkpoint)
for epoch in range(self.hp.max_epoch):
#print("epoch=%d" %epoch)
dataReader.Shuffle()
for iteration in range(max_iteration):
# get x and y value for one sample
batch_x, batch_y = dataReader.GetBatchTrainSamples(self.hp.batch_size, iteration)
# get z from x,y
batch_a = self.forwardBatch(batch_x)
# calculate gradient of w and b
dW, dB = self.backwardBatch(batch_x, batch_y, batch_a)
# update w,b
self.update(dW, dB)
total_iteration = epoch * max_iteration + iteration
if (total_iteration+1) % checkpoint_iteration == 0:
loss = self.checkLoss(loss_function, dataReader)
print(epoch, iteration, loss)
loss_history.AddLossHistory(epoch*max_iteration+iteration, loss)
if loss < self.hp.eps:
break
#end if
#end if
# end for
if loss < self.hp.eps:
break
# end for
loss_history.ShowLossHistory(self.hp)
print("W=", self.W)
print("B=", self.B)
def checkLoss(self, loss_fun, dataReader):
X,Y = dataReader.GetWholeTrainSamples()
m = X.shape[0]
A = self.forwardBatch(X)
loss = loss_fun.CheckLoss(A, Y)
return loss
| 33.606061 | 97 | 0.579501 |
05559888c9723a39355210c666b23d66ea0e2880
| 7,009 |
py
|
Python
|
height_warp.py
|
FaiZaman/Mars-Topography-Visualisation
|
761d021507030cee94a9bd3a8d071d2af426a195
|
[
"MIT"
] | null | null | null |
height_warp.py
|
FaiZaman/Mars-Topography-Visualisation
|
761d021507030cee94a9bd3a8d071d2af426a195
|
[
"MIT"
] | null | null | null |
height_warp.py
|
FaiZaman/Mars-Topography-Visualisation
|
761d021507030cee94a9bd3a8d071d2af426a195
|
[
"MIT"
] | null | null | null |
import sys
import cv2
import vtk
import time
from preprocessing import load_obj
# texture and render height map of Mars using elevation data
def compute_height_map(elevation_data_path, texture_data_path):
start = time.time()
sphere_height, sphere_width = 1959, 1962
# create sphere and set values
mars = vtk.vtkSphereSource()
mars.SetCenter(0.0, 0.0, 0.0)
mars.SetRadius(978)
mars.SetThetaResolution(sphere_width)
mars.SetPhiResolution(sphere_height)
mars.Update()
    colours = vtk.vtkNamedColors() # initialise colours
num_points = mars.GetOutput().GetPoints().GetNumberOfPoints()
# load data
height_list = load_obj('data/elevation_map')
# create data structure for heights to set scalars
height_scalars = vtk.vtkDoubleArray()
height_scalars.SetNumberOfTuples(num_points)
# set the height values in the data structure
for index in range(0, len(height_list)):
height = height_list[index] + 8
height_scalars.SetTuple1(index, height)
# assign to sphere
mars.GetOutput().GetPointData().SetScalars(height_scalars)
# creating a warp based on height values and setting the colours
warp = vtk.vtkWarpScalar()
warp.SetInputConnection(mars.GetOutputPort())
warp.SetScaleFactor(5)
# initialise a mapper to map sphere data
height_mapper = vtk.vtkPolyDataMapper()
height_mapper.SetInputConnection(warp.GetOutputPort())
height_mapper.ScalarVisibilityOff()
# use actor to set colours
height_actor = vtk.vtkActor()
height_actor.SetMapper(height_mapper)
# read the image data from a file
reader = vtk.vtkJPEGReader()
reader.SetFileName(texture_data_path)
# create texture object
texture = vtk.vtkTexture()
texture.SetInputConnection(reader.GetOutputPort())
# map texture coordinates onto warped geometry
map_to_sphere = vtk.vtkTextureMapToSphere()
map_to_sphere.SetInputConnection(warp.GetOutputPort())
map_to_sphere.PreventSeamOff()
# create mapper and set the mapped texture as input
texture_mapper = vtk.vtkPolyDataMapper()
texture_mapper.SetInputConnection(map_to_sphere.GetOutputPort())
texture_mapper.ScalarVisibilityOff()
# create actor and set the mapper and the texture
texture_actor = vtk.vtkActor()
texture_actor.SetMapper(texture_mapper)
texture_actor.SetTexture(texture)
# generate water sphere
water = vtk.vtkSphereSource()
water.SetCenter(0.0, 0.0, 0.0)
water.SetRadius(950)
water.SetThetaResolution(sphere_width)
water.SetPhiResolution(sphere_height)
water.Update()
# set water mapper
water_mapper = vtk.vtkPolyDataMapper()
water_mapper.SetInputConnection(water.GetOutputPort())
# create water actor and set to blue
water_actor = vtk.vtkActor()
water_actor.SetMapper(water_mapper)
water_actor.GetProperty().SetColor(colours.GetColor3d("DeepSkyBlue"))
# set camera perspective
camera = vtk.vtkCamera()
camera.SetPosition(1000, 1000, 1000)
camera.SetFocalPoint(0, 978, 0)
# initialise a renderer and set parameters
renderer = vtk.vtkRenderer()
#renderer.SetActiveCamera(camera)
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetWindowName("Mars Elevation Map")
renderWindow.SetSize(1500, 700)
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
# add actors and set background colour
renderer.AddActor(height_actor)
renderer.AddActor(texture_actor)
renderer.AddActor(water_actor)
renderer.SetBackground(colours.GetColor3d("Black"))
# changes scale factor based on slider
def update_scale_factor(obj, event):
scale_factor = obj.GetRepresentation().GetValue()
warp.SetScaleFactor(scale_factor)
# parameters for scale factor slider
SliderRepresentation = vtk.vtkSliderRepresentation2D()
SliderRepresentation.SetMinimumValue(0)
SliderRepresentation.SetMaximumValue(10)
SliderRepresentation.SetValue(5)
# set coordinates of slider
SliderRepresentation.GetPoint1Coordinate().SetCoordinateSystemToNormalizedDisplay()
SliderRepresentation.GetPoint1Coordinate().SetValue(0.25, 0.1)
SliderRepresentation.GetPoint2Coordinate().SetCoordinateSystemToNormalizedDisplay()
SliderRepresentation.GetPoint2Coordinate().SetValue(0.75, 0.1)
# more slider parameters
SliderRepresentation.SetTitleText('Scale Factor')
SliderRepresentation.SetSliderLength(0.02)
SliderRepresentation.SetSliderWidth(0.03)
# create slider widget, assign parameters, and update scale factor based on slider changes
SliderWidget = vtk.vtkSliderWidget()
SliderWidget.SetInteractor(renderWindowInteractor)
SliderWidget.SetRepresentation(SliderRepresentation)
SliderWidget.SetEnabled(True)
SliderWidget.AddObserver("InteractionEvent", update_scale_factor)
    # changes water radius based on slider
def update_water_radius(obj, event):
water_radius = obj.GetRepresentation().GetValue()
water.SetRadius(water_radius)
# parameters for water radius slider
WaterSliderRepresentation = vtk.vtkSliderRepresentation2D()
WaterSliderRepresentation.SetMinimumValue(950)
WaterSliderRepresentation.SetMaximumValue(1100)
WaterSliderRepresentation.SetValue(950)
# set coordinates of slider
WaterSliderRepresentation.GetPoint1Coordinate().SetCoordinateSystemToNormalizedDisplay()
WaterSliderRepresentation.GetPoint1Coordinate().SetValue(0.25, 0.9)
WaterSliderRepresentation.GetPoint2Coordinate().SetCoordinateSystemToNormalizedDisplay()
WaterSliderRepresentation.GetPoint2Coordinate().SetValue(0.75, 0.9)
# more slider parameters
WaterSliderRepresentation.SetTitleText('Water Radius')
WaterSliderRepresentation.SetSliderLength(0.02)
WaterSliderRepresentation.SetSliderWidth(0.03)
# create slider widget, assign parameters, and update water radius based on slider changes
WaterSliderWidget = vtk.vtkSliderWidget()
WaterSliderWidget.SetInteractor(renderWindowInteractor)
WaterSliderWidget.SetRepresentation(WaterSliderRepresentation)
WaterSliderWidget.SetEnabled(True)
WaterSliderWidget.AddObserver("InteractionEvent", update_water_radius)
end = time.time()
print("Total time taken:", round(end - start, 2), "seconds")
# render the planet
renderWindow.Render()
renderWindowInteractor.Start()
if __name__ == '__main__':
# get arguments
file_paths = sys.argv
elevation_data_path, texture_path = file_paths[1], file_paths[2]
# flip the texture and save it to align properly with warp
texture = cv2.imread(texture_path, cv2.IMREAD_COLOR)
flipped_texture = cv2.flip(texture, 1)
flipped_texture_path = 'data/flipped_texture.jpg'
cv2.imwrite(flipped_texture_path, flipped_texture)
compute_height_map(elevation_data_path, flipped_texture_path)
| 35.57868 | 94 | 0.758168 |
628c15c61fb77ecda4e2b693e8b9ca9f366275bc
| 1,458 |
py
|
Python
|
data_augmentation/util/annotation.py
|
Ribosome-rbx/Medical-Mask-Detection-Based-on-Faster-RCNN
|
5fb1c6671a7d83ccc4c242e0261e88d5995c33ab
|
[
"MIT"
] | 2 |
2021-05-20T05:06:47.000Z
|
2022-01-14T04:30:48.000Z
|
data_augmentation/util/annotation.py
|
Ribosome-rbx/Realtime-Medical-Mask-Detection-Based-on-Faster-RCNN
|
5fb1c6671a7d83ccc4c242e0261e88d5995c33ab
|
[
"MIT"
] | null | null | null |
data_augmentation/util/annotation.py
|
Ribosome-rbx/Realtime-Medical-Mask-Detection-Based-on-Faster-RCNN
|
5fb1c6671a7d83ccc4c242e0261e88d5995c33ab
|
[
"MIT"
] | null | null | null |
import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np
import xml.etree.ElementTree as ET
import glob
import shutil
EMPTY_DIR = 'empty'
def parse_xml(filename):
tree = ET.parse(filename)
elem = tree.getroot()
result = {
'filename': elem.find('.//filename').text,
'size': {
'width': elem.find('.//size/width').text,
'height': elem.find('.//size/height').text,
'depth': elem.find('.//size/depth').text,
},
'objects': []
}
for e in elem.findall('.//object'):
obj = {
'name': e.find('.//name').text,
'xmin': e.find('.//bndbox/xmin').text,
'ymin': e.find('.//bndbox/ymin').text,
'xmax': e.find('.//bndbox/xmax').text,
'ymax': e.find('.//bndbox/ymax').text
}
result['objects'].append(obj)
return result
def inspect(filename):
annotation = parse_xml(filename)
for obj in annotation['objects']:
x1=int(obj['xmin'])
y1=int(obj['ymin'])
x2=int(obj['xmax'])
y2=int(obj['ymax'])
if int((x2-x1)*(y2-y1)) <= 0:
print('File {} -- ERROR: Zero bbox occured!!!'.format(filename))
shutil.move(filename, EMPTY_DIR)
print('Zero bbox file %s is moved.' % filename)
if len(annotation['objects']) == 0:
print('Empty annotation file %s is moved.' % filename)
shutil.move(filename, EMPTY_DIR)
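if __name__ == '__main__':
    # Minimal usage sketch (assumed): validate every Pascal VOC-style annotation in a
    # hypothetical "annotations/" directory, moving empty or zero-area files to EMPTY_DIR.
    for xml_file in glob.glob('annotations/*.xml'):
        inspect(xml_file)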
| 28.038462 | 72 | 0.548697 |
9a4222960bc764779d97dcbc2b4359bd67e56dce
| 468 |
py
|
Python
|
accounts/backends.py
|
Lucasfeelix/ong-joao-de-barro
|
be20042714883dac0a75a97f5ff9fd7804e6e218
|
[
"MIT"
] | null | null | null |
accounts/backends.py
|
Lucasfeelix/ong-joao-de-barro
|
be20042714883dac0a75a97f5ff9fd7804e6e218
|
[
"MIT"
] | null | null | null |
accounts/backends.py
|
Lucasfeelix/ong-joao-de-barro
|
be20042714883dac0a75a97f5ff9fd7804e6e218
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from django.contrib.auth.backends import ModelBackend as BaseModelBackend
from accounts.models import User
class ModelBackend(BaseModelBackend):
def authenticate(self, username=None, password=None):
        if username is not None:
try:
user = User.objects.get(email=username)
if user.check_password(password):
return user
except User.DoesNotExist:
pass
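# To activate this email-based backend, it would typically be listed in the project
# settings (sketch; assumes the app is installed as "accounts"):
#   AUTHENTICATION_BACKENDS = [
#       'accounts.backends.ModelBackend',
#       'django.contrib.auth.backends.ModelBackend',
#   ]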
| 29.25 | 73 | 0.632479 |
8bac7b8c1c487370c8ec6b9cb6ce93521251b5d2
| 1,791 |
py
|
Python
|
tests/test_migrate.py
|
sinonkt/pachelm
|
ca0c9c6efc36d18b58db35d17c466ed154098bdd
|
[
"MIT"
] | 1 |
2019-06-08T13:36:34.000Z
|
2019-06-08T13:36:34.000Z
|
tests/test_migrate.py
|
sinonkt/pachydelm
|
ca0c9c6efc36d18b58db35d17c466ed154098bdd
|
[
"MIT"
] | 4 |
2020-03-24T17:12:54.000Z
|
2021-06-01T23:49:53.000Z
|
tests/test_migrate.py
|
sinonkt/pachydelm
|
ca0c9c6efc36d18b58db35d17c466ed154098bdd
|
[
"MIT"
] | null | null | null |
import pytest
from pachelm.migration import PachydermMigration
updated_config_path = './tests/updated_configs/2019_06_04_221735_test-pipeline_pipeline_test-pipeline.json'
def test_get_pipeline(ctx):
emptyMigration = PachydermMigration(ctx)
assert emptyMigration.get_pipeline('test-pipeline') != None
def test_get_repo(ctx):
emptyMigration = PachydermMigration(ctx)
assert emptyMigration.get_repo('test-input') != None
def test_not_exist_pipeline(ctx):
emptyMigration = PachydermMigration(ctx)
assert emptyMigration.get_pipeline('not-existed-pipeline') == None
def test_not_exist_repo(ctx):
emptyMigration = PachydermMigration(ctx)
assert emptyMigration.get_repo('not-existed-repo') == None
def test_diff_and_has_changed(ctx):
emptyMigration = PachydermMigration(ctx)
testPipeline = emptyMigration.get_pipeline('test-pipeline')
diff = emptyMigration._diff('test-pipeline', updated_config_path)
has_changed = emptyMigration._has_pipeline_config_changed('test-pipeline', updated_config_path)
expected_changed = {
'values_changed': {
"root['parallelism_spec']['constant']": {
'new_value': 3,
'old_value': 1
}
}
}
assert diff == expected_changed
assert has_changed == True
def test_none_diff_compare_to_current_config(ctx):
emptyMigration = PachydermMigration(ctx)
diff = emptyMigration._diff('test-pipeline')
assert diff == {}
def test_is_resource_already_exist(ctx):
emptyMigration = PachydermMigration(ctx)
assert emptyMigration.is_resource_already_exist('test-pipeline') == True
assert emptyMigration.is_resource_already_exist('test-input') == True
assert emptyMigration.is_resource_already_exist('not-exist') == False
| 36.55102 | 107 | 0.742044 |
7ebca5e63d1f23540a3fe7842e841061cf18a699
| 1,620 |
py
|
Python
|
tests/pyre.pkg/calc/algebra.py
|
rtburns-jpl/pyre
|
ffc4fc1b2936e355f709d084eb4055954960b3a2
|
[
"BSD-3-Clause"
] | null | null | null |
tests/pyre.pkg/calc/algebra.py
|
rtburns-jpl/pyre
|
ffc4fc1b2936e355f709d084eb4055954960b3a2
|
[
"BSD-3-Clause"
] | 1 |
2021-06-10T23:42:13.000Z
|
2021-06-10T23:42:13.000Z
|
tests/pyre.pkg/calc/algebra.py
|
jlmaurer/pyre
|
6af38a83621d7d6228d147b4bb94f97fbb10f6e2
|
[
"BSD-3-Clause"
] | 2 |
2020-08-31T18:07:52.000Z
|
2021-12-10T08:54:39.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2020 all rights reserved
#
"""
Exercise node algebra
"""
def test():
import pyre.calc
# declare a couple of nodes
n1 = pyre.calc.var(value=1)
n2 = pyre.calc.var(value=2)
# unary operators
assert (- n1).value == -1
assert (+ n2).value == 2
assert (abs(n1)).value == 1
# basic arithmetic with two operands
assert (n1 + n2).value == 1 + 2
assert (n1 - n2).value == 1 - 2
assert (n1 * n2).value == 1 * 2
assert (n1 / n2).value == 1 / 2
# assert (n1 // n2).value == 1 // 2 # NYI
# basic arithmetic with more than two operands
assert (n1 + n2 - n1).value == 1 + 2 - 1
assert (n1 * n2 / n1).value == 1 * 2 / 1
assert ((n1 - n2)*n2).value == (1 - 2)*2
# basic arithmetic with floats
assert (1 + n2).value == 1 + 2
assert (n2 + 1).value == 2 + 1
assert (1 - n2).value == 1 - 2
assert (n2 - 1).value == 2 - 1
assert (2 * n1).value == 2 * 1
assert (n1 * 2).value == 1 * 2
assert (3 / n2).value == 3 / 2
assert (n2 / 3).value == 2 / 3
assert (n2 ** 3).value == 2**3
assert (3 ** n2).value == 3**2
# more complicated forms
assert ((n1**2 + 2*n1*n2 + n2**2)).value == ((n1+n2)**2).value
assert ((n1**2 - 2*n1*n2 + n2**2)).value == ((n1-n2)**2).value
assert (2*(.5 - n1*n2 + n2**2)*n1).value == 2*(.5 - 1*2 + 2**2)*1
return
# main
if __name__ == "__main__":
# skip pyre initialization since we don't rely on the executive
pyre_noboot = True
# run the test
test()
# end of file
| 25.714286 | 69 | 0.535185 |
5f2c5d4be384f472f7e7d74df443c08eb720d6af
| 11,263 |
py
|
Python
|
see/context/resources/test/network_test.py
|
security-geeks/see
|
900472b8b3e45fbb414f3beba4df48e86eaa4b3a
|
[
"Apache-2.0"
] | 851 |
2015-10-28T09:32:05.000Z
|
2022-03-31T02:32:28.000Z
|
see/context/resources/test/network_test.py
|
security-geeks/see
|
900472b8b3e45fbb414f3beba4df48e86eaa4b3a
|
[
"Apache-2.0"
] | 29 |
2015-12-21T15:43:28.000Z
|
2021-05-16T10:57:09.000Z
|
see/context/resources/test/network_test.py
|
security-geeks/see
|
900472b8b3e45fbb414f3beba4df48e86eaa4b3a
|
[
"Apache-2.0"
] | 110 |
2015-10-26T13:05:18.000Z
|
2021-11-17T18:00:17.000Z
|
import mock
import random
import libvirt
import difflib
import unittest
import itertools
import ipaddress
from see.context.resources import network
def compare(text1, text2):
"""Utility function for comparing text and returning differences."""
diff = difflib.ndiff(str(text1).splitlines(True),
str(text2).splitlines(True))
return '\n' + '\n'.join(diff)
class NetworkXMLTest(unittest.TestCase):
def test_ip(self):
"""NETWORK XML with given IP."""
config = """<network>
<forward mode="nat"/>
<ip address="192.168.235.1" netmask="255.255.255.0">
<dhcp>
<range start="192.168.235.2" end="192.168.235.128"/>
</dhcp>
</ip>
</network>
"""
expected = """<network>
<forward mode="nat" />
<ip address="192.168.235.1" netmask="255.255.255.0">
<dhcp>
<range end="192.168.235.128" start="192.168.235.2" />
</dhcp>
</ip>
<name>foo</name><uuid>foo</uuid><bridge name="virbr-foo" /></network>"""
results = network.network_xml('foo', config)
self.assertEqual(results, expected, compare(results, expected))
def test_ip_modifies(self):
"""NETWORK Name and UUID are modified if existing."""
config = """<network>
<name>bar</name>
<uuid>bar</uuid>
<bridge name="virbr-bar"/>
<forward mode="nat"/>
<ip address="192.168.235.1" netmask="255.255.255.0">
<dhcp>
<range start="192.168.235.2" end="192.168.235.128"/>
</dhcp>
</ip>
</network>
"""
expected = """<network>
<name>foo</name>
<uuid>foo</uuid>
<bridge name="virbr-foo" />
<forward mode="nat" />
<ip address="192.168.235.1" netmask="255.255.255.0">
<dhcp>
<range end="192.168.235.128" start="192.168.235.2" />
</dhcp>
</ip>
</network>"""
results = network.network_xml('foo', config)
self.assertEqual(results, expected, compare(results, expected))
def test_ip_address(self):
"""NETWORK RuntimeError is raised if both address and <ip> are specified."""
config = """<network>
<forward mode="nat"/>
<ip address="192.168.235.1" netmask="255.255.255.0">
<dhcp>
<range start="192.168.235.2" end="192.168.235.128"/>
</dhcp>
</ip>
</network>
"""
with self.assertRaises(RuntimeError):
network.network_xml('foo', config, address=True)
def test_no_ip_address(self):
"""NETWORK XML with address."""
config = """<network>
<forward mode="nat"/>
</network>
"""
expected = """<network>
<forward mode="nat" />
<name>foo</name><uuid>foo</uuid><bridge name="virbr-foo" />""" + \
"""<ip address="192.168.1.1" netmask="255.255.255.0">""" + \
"""<dhcp><range end="192.168.1.254" start="192.168.1.2" />""" +\
"""</dhcp></ip></network>"""
address = ipaddress.IPv4Network(u'192.168.1.0/24')
results = network.network_xml('foo', config, address=address)
self.assertEqual(results, expected, compare(results, expected))
class ValidAddressTest(unittest.TestCase):
def test_valid(self):
"""NETWORK A valid address is retrieved."""
virnetwork = mock.Mock()
hypervisor = mock.Mock()
virnetwork.XMLDesc.side_effect = (
lambda x:
'<a><ip address="192.168.%s.1" netmask="255.255.255.0"/></a>'
% random.randint(1, 255))
hypervisor.listNetworks.return_value = ('foo', 'bar', 'baz')
hypervisor.networkLookupByName.return_value = virnetwork
configuration = {'ipv4': '192.168.0.0',
'prefix': 16,
'subnet_prefix': 24}
self.assertTrue(network.generate_address(hypervisor, configuration) in
[ipaddress.IPv4Network(u'192.168.{}.0/24'.format(i))
for i in range(1, 255)])
def test_randomised(self):
"""NETWORK Address generation is randomised."""
virnetwork = mock.Mock()
hypervisor = mock.Mock()
virnetwork.XMLDesc.side_effect = (
lambda x:
'<a><ip address="192.168.%s.1" netmask="255.255.255.0"/></a>'
% random.randint(1, 255))
hypervisor.listNetworks.return_value = ('foo', 'bar', 'baz')
hypervisor.networkLookupByName.return_value = virnetwork
configuration = {'ipv4': '192.168.0.0',
'prefix': 16,
'subnet_prefix': 24}
addresses = set(network.generate_address(hypervisor, configuration)
for _ in range(10))
self.assertTrue(len(addresses) > 1)
def test_invalid(self):
"""NETWORK ValueError is raised if configuration address is invalid."""
virnetwork = mock.Mock()
hypervisor = mock.Mock()
virnetwork.XMLDesc.side_effect = (
lambda x:
'<a><ip address="192.168.%s.1" netmask="255.255.255.0"/></a>'
% random.randint(1, 255))
hypervisor.listNetworks.return_value = ('foo', 'bar', 'baz')
hypervisor.networkLookupByName.return_value = virnetwork
configuration = {'ipv4': '192.168.0.1',
'prefix': 16,
'subnet_prefix': 24}
with self.assertRaises(ValueError):
network.generate_address(hypervisor, configuration)
def test_no_ip(self):
"""NETWORK RuntimeError is raised if all IPs are taken."""
counter = itertools.count()
virnetwork = mock.Mock()
hypervisor = mock.Mock()
virnetwork.XMLDesc.side_effect = (
lambda x:
'<a><ip address="192.168.%s.1" netmask="255.255.255.0"/></a>'
% next(counter))
hypervisor.listNetworks.return_value = range(0, 256)
hypervisor.networkLookupByName.return_value = virnetwork
configuration = {'ipv4': '192.168.0.0',
'prefix': 16,
'subnet_prefix': 24}
with self.assertRaises(RuntimeError):
network.generate_address(hypervisor, configuration)
class CreateTest(unittest.TestCase):
def test_create_too_many_attempts(self):
"""NETWORK RuntimeError is raised if too many fails to create a network."""
xml = '<network><forward mode="nat"/></network>'
network.MAX_ATTEMPTS = 3
hypervisor = mock.Mock()
hypervisor.listNetworks.return_value = []
hypervisor.networkCreateXML.side_effect = libvirt.libvirtError('BOOM')
configuration = {'configuration': 'bar',
'dynamic_address': {'ipv4': '10.0.0.0',
'prefix': 16,
'subnet_prefix': 24}}
with mock.patch('see.context.resources.network.open',
mock.mock_open(read_data=xml), create=True):
try:
network.create(hypervisor, 'foo', configuration)
except RuntimeError as error:
self.assertEqual(
error.args,
("Exceeded failed attempts (3) to get IP address.",
"Last error: BOOM"))
def test_create_xml(self):
"""NETWORK Provided XML is used."""
xml = """<network><forward mode="nat"/><ip address="192.168.1.1" netmask="255.255.255.0">""" + \
"""<dhcp><range end="192.168.1.128" start="192.168.1.2"/></dhcp></ip></network>"""
expected = """<network><forward mode="nat" /><ip address="192.168.1.1" netmask="255.255.255.0">""" + \
"""<dhcp><range end="192.168.1.128" start="192.168.1.2" /></dhcp></ip>""" + \
"""<name>foo</name><uuid>foo</uuid><bridge name="virbr-foo" /></network>"""
hypervisor = mock.Mock()
hypervisor.listNetworks.return_value = []
with mock.patch('see.context.resources.network.open', mock.mock_open(read_data=xml), create=True):
network.create(hypervisor, 'foo', {'configuration': '/foo'})
results = hypervisor.networkCreateXML.call_args_list[0][0][0]
self.assertEqual(results, expected, compare(results, expected))
def test_create_no_xml_file(self):
"""NETWORK Default XML is used if none is provided."""
expected = """<forward mode="nat" />"""
hypervisor = mock.Mock()
hypervisor.listNetworks.return_value = []
network.create(hypervisor, 'foo', {'dynamic_address':
{'ipv4': '192.168.0.0',
'prefix': 16,
'subnet_prefix': 24}})
results = hypervisor.networkCreateXML.call_args_list[0][0][0]
self.assertTrue(expected in results, compare(results, expected))
def test_create_xml_error(self):
"""NETWORK RuntimeError is raised in case of creation error."""
xml = """<network><forward mode="nat"/><ip address="192.168.1.1" netmask="255.255.255.0">""" + \
"""<dhcp><range end="192.168.1.128" start="192.168.1.2"/></dhcp></ip></network>"""
hypervisor = mock.Mock()
hypervisor.listNetworks.return_value = []
hypervisor.networkCreateXML.side_effect = libvirt.libvirtError('BOOM')
with mock.patch('see.context.resources.network.open', mock.mock_open(read_data=xml), create=True):
with self.assertRaises(RuntimeError) as error:
network.create(hypervisor, 'foo', {'configuration': '/foo'})
self.assertEqual(str(error), "Unable to create new network: BOOM.")
def test_create_empty_config(self):
"""NETWORK RuntimeError raised if empty configuration."""
hypervisor = mock.Mock()
with self.assertRaises(RuntimeError):
network.create(hypervisor, 'foo', {})
def test_delete(self):
"""NETWORK Network is destroyed on delete()."""
net = mock.Mock()
network.delete(net)
self.assertTrue(net.destroy.called)
class LookupTest(unittest.TestCase):
def test_lookup(self):
"""NETWORK Network lookup passes correct parameters to hypervisor."""
xml = """<domain><interface type="network">""" +\
"""<source network="foo" /></interface></domain>"""
domain = mock.Mock()
hypervisor = mock.Mock()
domain.XMLDesc.return_value = xml
domain.connect.return_value = hypervisor
network.lookup(domain)
hypervisor.networkLookupByName.assert_called_with('foo')
def test_lookup_no_network(self):
"""NETWORK None is return if domain is not associated with any Network."""
xml = """<domain></domain>"""
domain = mock.Mock()
hypervisor = mock.Mock()
domain.XMLDesc.return_value = xml
domain.connect.return_value = hypervisor
self.assertEqual(network.lookup(domain), None)
| 41.408088 | 110 | 0.56184 |
1d16dd12cc2979dd4d06eb97fefe9efadca42b1e
| 12,703 |
py
|
Python
|
homeassistant/components/keyboard_remote/__init__.py
|
basicpail/core
|
5cc54618c5af3f75c08314bf2375cc7ac40d2b7e
|
[
"Apache-2.0"
] | 11 |
2018-02-16T15:35:47.000Z
|
2020-01-14T15:20:00.000Z
|
homeassistant/components/keyboard_remote/__init__.py
|
basicpail/core
|
5cc54618c5af3f75c08314bf2375cc7ac40d2b7e
|
[
"Apache-2.0"
] | 77 |
2020-07-16T16:43:09.000Z
|
2022-03-31T06:14:37.000Z
|
homeassistant/components/keyboard_remote/__init__.py
|
Vaarlion/core
|
f3de8b9f28de01abf72c0f5bb0b457eb1841f201
|
[
"Apache-2.0"
] | 11 |
2020-12-16T13:48:14.000Z
|
2022-02-01T00:28:05.000Z
|
"""Receive signals from a keyboard and use it as a remote control."""
# pylint: disable=import-error
import asyncio
from contextlib import suppress
import logging
import os
import aionotify
from evdev import InputDevice, categorize, ecodes, list_devices
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEVICE_DESCRIPTOR = "device_descriptor"
DEVICE_ID_GROUP = "Device description"
DEVICE_NAME = "device_name"
DOMAIN = "keyboard_remote"
ICON = "mdi:remote"
KEY_CODE = "key_code"
KEY_VALUE = {"key_up": 0, "key_down": 1, "key_hold": 2}
KEYBOARD_REMOTE_COMMAND_RECEIVED = "keyboard_remote_command_received"
KEYBOARD_REMOTE_CONNECTED = "keyboard_remote_connected"
KEYBOARD_REMOTE_DISCONNECTED = "keyboard_remote_disconnected"
TYPE = "type"
EMULATE_KEY_HOLD = "emulate_key_hold"
EMULATE_KEY_HOLD_DELAY = "emulate_key_hold_delay"
EMULATE_KEY_HOLD_REPEAT = "emulate_key_hold_repeat"
DEVINPUT = "/dev/input"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Exclusive(DEVICE_DESCRIPTOR, DEVICE_ID_GROUP): cv.string,
vol.Exclusive(DEVICE_NAME, DEVICE_ID_GROUP): cv.string,
vol.Optional(TYPE, default=["key_up"]): vol.All(
cv.ensure_list, [vol.In(KEY_VALUE)]
),
vol.Optional(EMULATE_KEY_HOLD, default=False): cv.boolean,
vol.Optional(EMULATE_KEY_HOLD_DELAY, default=0.250): float,
vol.Optional(EMULATE_KEY_HOLD_REPEAT, default=0.033): float,
}
),
cv.has_at_least_one_key(DEVICE_DESCRIPTOR, DEVICE_ID_GROUP),
],
)
},
extra=vol.ALLOW_EXTRA,
)
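# Illustrative sketch (not part of the original module): a configuration.yaml
# entry that the CONFIG_SCHEMA above would accept. The device path and the
# key event types are hypothetical examples, not defaults.
#
# keyboard_remote:
#   - device_descriptor: /dev/input/event0
#     type:
#       - key_up
#       - key_hold
#     emulate_key_hold: true
#     emulate_key_hold_delay: 0.25
#     emulate_key_hold_repeat: 0.033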
async def async_setup(hass, config):
"""Set up the keyboard_remote."""
config = config.get(DOMAIN)
remote = KeyboardRemote(hass, config)
remote.setup()
return True
class KeyboardRemote:
"""Manage device connection/disconnection using inotify to asynchronously monitor."""
def __init__(self, hass, config):
"""Create handlers and setup dictionaries to keep track of them."""
self.hass = hass
self.handlers_by_name = {}
self.handlers_by_descriptor = {}
self.active_handlers_by_descriptor = {}
self.watcher = None
self.monitor_task = None
for dev_block in config:
handler = self.DeviceHandler(hass, dev_block)
descriptor = dev_block.get(DEVICE_DESCRIPTOR)
if descriptor is not None:
self.handlers_by_descriptor[descriptor] = handler
else:
name = dev_block.get(DEVICE_NAME)
self.handlers_by_name[name] = handler
def setup(self):
"""Listen for Home Assistant start and stop events."""
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, self.async_start_monitoring
)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, self.async_stop_monitoring
)
async def async_start_monitoring(self, event):
"""Start monitoring of events and devices.
Start inotify watching for events, start event monitoring for those already
connected, and start monitoring for device connection/disconnection.
"""
# start watching
self.watcher = aionotify.Watcher()
self.watcher.watch(
alias="devinput",
path=DEVINPUT,
flags=aionotify.Flags.CREATE
| aionotify.Flags.ATTRIB
| aionotify.Flags.DELETE,
)
await self.watcher.setup(self.hass.loop)
# add initial devices (do this AFTER starting watcher in order to
# avoid race conditions leading to missing device connections)
initial_start_monitoring = set()
descriptors = await self.hass.async_add_executor_job(list_devices, DEVINPUT)
for descriptor in descriptors:
dev, handler = await self.hass.async_add_executor_job(
self.get_device_handler, descriptor
)
if handler is None:
continue
self.active_handlers_by_descriptor[descriptor] = handler
initial_start_monitoring.add(handler.async_start_monitoring(dev))
if initial_start_monitoring:
await asyncio.wait(initial_start_monitoring)
self.monitor_task = self.hass.async_create_task(self.async_monitor_devices())
async def async_stop_monitoring(self, event):
"""Stop and cleanup running monitoring tasks."""
_LOGGER.debug("Cleanup on shutdown")
if self.monitor_task is not None:
if not self.monitor_task.done():
self.monitor_task.cancel()
await self.monitor_task
handler_stop_monitoring = set()
for handler in self.active_handlers_by_descriptor.values():
handler_stop_monitoring.add(handler.async_stop_monitoring())
if handler_stop_monitoring:
await asyncio.wait(handler_stop_monitoring)
def get_device_handler(self, descriptor):
"""Find the correct device handler given a descriptor (path)."""
# devices are often added and then correct permissions set after
try:
dev = InputDevice(descriptor)
except OSError:
return (None, None)
handler = None
if descriptor in self.handlers_by_descriptor:
handler = self.handlers_by_descriptor[descriptor]
elif dev.name in self.handlers_by_name:
handler = self.handlers_by_name[dev.name]
else:
# check for symlinked paths matching descriptor
for test_descriptor, test_handler in self.handlers_by_descriptor.items():
if test_handler.dev is not None:
fullpath = test_handler.dev.path
else:
fullpath = os.path.realpath(test_descriptor)
if fullpath == descriptor:
handler = test_handler
return (dev, handler)
async def async_monitor_devices(self):
"""Monitor asynchronously for device connection/disconnection or permissions changes."""
try:
while True:
event = await self.watcher.get_event()
descriptor = f"{DEVINPUT}/{event.name}"
descriptor_active = descriptor in self.active_handlers_by_descriptor
if (event.flags & aionotify.Flags.DELETE) and descriptor_active:
handler = self.active_handlers_by_descriptor[descriptor]
del self.active_handlers_by_descriptor[descriptor]
await handler.async_stop_monitoring()
elif (
(event.flags & aionotify.Flags.CREATE)
or (event.flags & aionotify.Flags.ATTRIB)
) and not descriptor_active:
dev, handler = await self.hass.async_add_executor_job(
self.get_device_handler, descriptor
)
if handler is None:
continue
self.active_handlers_by_descriptor[descriptor] = handler
await handler.async_start_monitoring(dev)
except asyncio.CancelledError:
return
class DeviceHandler:
"""Manage input events using evdev with asyncio."""
def __init__(self, hass, dev_block):
"""Fill configuration data."""
self.hass = hass
key_types = dev_block.get(TYPE)
self.key_values = set()
for key_type in key_types:
self.key_values.add(KEY_VALUE[key_type])
self.emulate_key_hold = dev_block.get(EMULATE_KEY_HOLD)
self.emulate_key_hold_delay = dev_block.get(EMULATE_KEY_HOLD_DELAY)
self.emulate_key_hold_repeat = dev_block.get(EMULATE_KEY_HOLD_REPEAT)
self.monitor_task = None
self.dev = None
async def async_keyrepeat(self, path, name, code, delay, repeat):
"""Emulate keyboard delay/repeat behaviour by sending key events on a timer."""
await asyncio.sleep(delay)
while True:
self.hass.bus.async_fire(
KEYBOARD_REMOTE_COMMAND_RECEIVED,
{KEY_CODE: code, DEVICE_DESCRIPTOR: path, DEVICE_NAME: name},
)
await asyncio.sleep(repeat)
async def async_start_monitoring(self, dev):
"""Start event monitoring task and issue event."""
if self.monitor_task is None:
self.dev = dev
self.monitor_task = self.hass.async_create_task(
self.async_monitor_input(dev)
)
self.hass.bus.async_fire(
KEYBOARD_REMOTE_CONNECTED,
{DEVICE_DESCRIPTOR: dev.path, DEVICE_NAME: dev.name},
)
_LOGGER.debug("Keyboard (re-)connected, %s", dev.name)
async def async_stop_monitoring(self):
"""Stop event monitoring task and issue event."""
if self.monitor_task is not None:
with suppress(OSError):
await self.hass.async_add_executor_job(self.dev.ungrab)
            # removal of the device from the event loop and closing of the
            # device have to occur before cancelling the task to avoid
            # triggering unhandled exceptions inside evdev coroutines
asyncio.get_event_loop().remove_reader(self.dev.fileno())
self.dev.close()
if not self.monitor_task.done():
self.monitor_task.cancel()
await self.monitor_task
self.monitor_task = None
self.hass.bus.async_fire(
KEYBOARD_REMOTE_DISCONNECTED,
{DEVICE_DESCRIPTOR: self.dev.path, DEVICE_NAME: self.dev.name},
)
_LOGGER.debug("Keyboard disconnected, %s", self.dev.name)
self.dev = None
async def async_monitor_input(self, dev):
"""Event monitoring loop.
Monitor one device for new events using evdev with asyncio,
start and stop key hold emulation tasks as needed.
"""
repeat_tasks = {}
try:
_LOGGER.debug("Start device monitoring")
await self.hass.async_add_executor_job(dev.grab)
async for event in dev.async_read_loop():
if event.type is ecodes.EV_KEY:
if event.value in self.key_values:
_LOGGER.debug(categorize(event))
self.hass.bus.async_fire(
KEYBOARD_REMOTE_COMMAND_RECEIVED,
{
KEY_CODE: event.code,
DEVICE_DESCRIPTOR: dev.path,
DEVICE_NAME: dev.name,
},
)
if (
event.value == KEY_VALUE["key_down"]
and self.emulate_key_hold
):
repeat_tasks[event.code] = self.hass.async_create_task(
self.async_keyrepeat(
dev.path,
dev.name,
event.code,
self.emulate_key_hold_delay,
self.emulate_key_hold_repeat,
)
)
elif (
event.value == KEY_VALUE["key_up"]
and event.code in repeat_tasks
):
repeat_tasks[event.code].cancel()
del repeat_tasks[event.code]
except (OSError, asyncio.CancelledError):
# cancel key repeat tasks
for task in repeat_tasks.values():
task.cancel()
if repeat_tasks:
await asyncio.wait(repeat_tasks.values())
| 38.728659 | 96 | 0.575218 |
0a7a9496d35c274b2b7ca39c00067d53703b2316
| 29,490 |
py
|
Python
|
tests/test_ctl.py
|
ahachete/patroni
|
d2d49907ad5008fa2ac213eacb9132bf253cf326
|
[
"MIT"
] | null | null | null |
tests/test_ctl.py
|
ahachete/patroni
|
d2d49907ad5008fa2ac213eacb9132bf253cf326
|
[
"MIT"
] | null | null | null |
tests/test_ctl.py
|
ahachete/patroni
|
d2d49907ad5008fa2ac213eacb9132bf253cf326
|
[
"MIT"
] | null | null | null |
import etcd
import json
import os
import sys
import unittest
from click.testing import CliRunner
from datetime import datetime, timedelta
from mock import patch, Mock
from patroni.ctl import ctl, store_config, load_config, output_members, get_dcs, parse_dcs, \
get_all_members, get_any_member, get_cursor, query_member, configure, PatroniCtlException, apply_config_changes, \
format_config_for_editing, show_diff, invoke_editor, format_pg_version, find_executable
from patroni.dcs.etcd import Client, Failover
from patroni.utils import tzutc
from psycopg2 import OperationalError
from urllib3 import PoolManager
from . import MockConnect, MockCursor, MockResponse, psycopg2_connect
from .test_etcd import etcd_read, socket_getaddrinfo
from .test_ha import get_cluster_initialized_without_leader, get_cluster_initialized_with_leader, \
get_cluster_initialized_with_only_leader, get_cluster_not_initialized_without_leader, get_cluster, Member
CONFIG_FILE_PATH = './test-ctl.yaml'
def test_rw_config():
runner = CliRunner()
with runner.isolated_filesystem():
sys.argv = ['patronictl.py', '']
load_config(CONFIG_FILE_PATH + '/dummy', None)
store_config({'etcd': {'host': 'localhost:2379'}}, CONFIG_FILE_PATH + '/dummy')
load_config(CONFIG_FILE_PATH + '/dummy', '0.0.0.0')
os.remove(CONFIG_FILE_PATH + '/dummy')
os.rmdir(CONFIG_FILE_PATH)
@patch('patroni.ctl.load_config',
Mock(return_value={'scope': 'alpha', 'postgresql': {'data_dir': '.', 'parameters': {}, 'retry_timeout': 5},
'restapi': {'listen': '::', 'certfile': 'a'}, 'etcd': {'host': 'localhost:2379'}}))
class TestCtl(unittest.TestCase):
@patch('socket.getaddrinfo', socket_getaddrinfo)
def setUp(self):
with patch.object(Client, 'machines') as mock_machines:
mock_machines.__get__ = Mock(return_value=['http://remotehost:2379'])
self.runner = CliRunner()
self.e = get_dcs({'etcd': {'ttl': 30, 'host': 'ok:2379', 'retry_timeout': 10}}, 'foo')
@patch('psycopg2.connect', psycopg2_connect)
def test_get_cursor(self):
self.assertIsNone(get_cursor(get_cluster_initialized_without_leader(), {}, role='master'))
self.assertIsNotNone(get_cursor(get_cluster_initialized_with_leader(), {}, role='master'))
# MockCursor returns pg_is_in_recovery as false
self.assertIsNone(get_cursor(get_cluster_initialized_with_leader(), {}, role='replica'))
self.assertIsNotNone(get_cursor(get_cluster_initialized_with_leader(), {'database': 'foo'}, role='any'))
def test_parse_dcs(self):
assert parse_dcs(None) is None
assert parse_dcs('localhost') == {'etcd': {'host': 'localhost:2379'}}
assert parse_dcs('') == {'etcd': {'host': 'localhost:2379'}}
assert parse_dcs('localhost:8500') == {'consul': {'host': 'localhost:8500'}}
assert parse_dcs('zookeeper://localhost') == {'zookeeper': {'hosts': ['localhost:2181']}}
assert parse_dcs('exhibitor://dummy') == {'exhibitor': {'hosts': ['dummy'], 'port': 8181}}
assert parse_dcs('consul://localhost') == {'consul': {'host': 'localhost:8500'}}
self.assertRaises(PatroniCtlException, parse_dcs, 'invalid://test')
def test_output_members(self):
scheduled_at = datetime.now(tzutc) + timedelta(seconds=600)
cluster = get_cluster_initialized_with_leader(Failover(1, 'foo', 'bar', scheduled_at))
self.assertIsNone(output_members(cluster, name='abc', fmt='pretty'))
self.assertIsNone(output_members(cluster, name='abc', fmt='json'))
self.assertIsNone(output_members(cluster, name='abc', fmt='yaml'))
self.assertIsNone(output_members(cluster, name='abc', fmt='tsv'))
@patch('patroni.ctl.get_dcs')
@patch.object(PoolManager, 'request', Mock(return_value=MockResponse()))
def test_switchover(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_get_dcs.return_value.set_failover_value = Mock()
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\ny')
assert 'leader' in result.output
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n2300-01-01T12:23:00\ny')
assert result.exit_code == 0
with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)):
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--force', '--scheduled', '2015-01-01T12:00:00'])
assert result.exit_code == 1
# Aborting switchover, as we answer NO to the confirmation
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\nN')
assert result.exit_code == 1
# Aborting scheduled switchover, as we answer NO to the confirmation
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--scheduled', '2015-01-01T12:00:00+01:00'],
input='leader\nother\n\nN')
assert result.exit_code == 1
# Target and source are equal
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nleader\n\ny')
assert result.exit_code == 1
# Reality is not part of this cluster
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nReality\n\ny')
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--force'])
assert 'Member' in result.output
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--force', '--scheduled', '2015-01-01T12:00:00+01:00'])
assert result.exit_code == 0
# Invalid timestamp
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--force', '--scheduled', 'invalid'])
assert result.exit_code != 0
# Invalid timestamp
result = self.runner.invoke(ctl, ['switchover', 'dummy', '--force', '--scheduled', '2115-02-30T12:00:00+01:00'])
assert result.exit_code != 0
# Specifying wrong leader
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='dummy')
assert result.exit_code == 1
with patch.object(PoolManager, 'request', Mock(side_effect=Exception)):
# Non-responding patroni
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n2300-01-01T12:23:00\ny')
assert 'falling back to DCS' in result.output
with patch.object(PoolManager, 'request') as mocked:
mocked.return_value.status = 500
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\ny')
assert 'Switchover failed' in result.output
mocked.return_value.status = 501
mocked.return_value.data = b'Server does not support this operation'
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\ny')
assert 'Switchover failed' in result.output
# No members available
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_only_leader
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\ny')
assert result.exit_code == 1
# No master available
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_without_leader
result = self.runner.invoke(ctl, ['switchover', 'dummy'], input='leader\nother\n\ny')
assert result.exit_code == 1
@patch('patroni.ctl.get_dcs')
@patch.object(PoolManager, 'request', Mock(return_value=MockResponse()))
def test_failover(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_get_dcs.return_value.set_failover_value = Mock()
result = self.runner.invoke(ctl, ['failover', 'dummy'], input='\n')
assert 'Failover could be performed only to a specific candidate' in result.output
def test_get_dcs(self):
self.assertRaises(PatroniCtlException, get_dcs, {'dummy': {}}, 'dummy')
@patch('psycopg2.connect', psycopg2_connect)
@patch('patroni.ctl.query_member', Mock(return_value=([['mock column']], None)))
@patch('patroni.ctl.get_dcs')
@patch.object(etcd.Client, 'read', etcd_read)
def test_query(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
# Mutually exclusive
result = self.runner.invoke(ctl, ['query', 'alpha', '--member', 'abc', '--role', 'master'])
assert result.exit_code == 1
with self.runner.isolated_filesystem():
with open('dummy', 'w') as dummy_file:
dummy_file.write('SELECT 1')
# Mutually exclusive
result = self.runner.invoke(ctl, ['query', 'alpha', '--file', 'dummy', '--command', 'dummy'])
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['query', 'alpha', '--file', 'dummy'])
assert result.exit_code == 0
os.remove('dummy')
result = self.runner.invoke(ctl, ['query', 'alpha', '--command', 'SELECT 1'])
assert 'mock column' in result.output
# --command or --file is mandatory
result = self.runner.invoke(ctl, ['query', 'alpha'])
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['query', 'alpha', '--command', 'SELECT 1', '--username', 'root',
'--password', '--dbname', 'postgres'], input='ab\nab')
assert 'mock column' in result.output
def test_query_member(self):
with patch('patroni.ctl.get_cursor', Mock(return_value=MockConnect().cursor())):
rows = query_member(None, None, None, 'master', 'SELECT pg_catalog.pg_is_in_recovery()', {})
self.assertTrue('False' in str(rows))
rows = query_member(None, None, None, 'replica', 'SELECT pg_catalog.pg_is_in_recovery()', {})
self.assertEqual(rows, (None, None))
with patch.object(MockCursor, 'execute', Mock(side_effect=OperationalError('bla'))):
rows = query_member(None, None, None, 'replica', 'SELECT pg_catalog.pg_is_in_recovery()', {})
with patch('patroni.ctl.get_cursor', Mock(return_value=None)):
rows = query_member(None, None, None, None, 'SELECT pg_catalog.pg_is_in_recovery()', {})
self.assertTrue('No connection to' in str(rows))
rows = query_member(None, None, None, 'replica', 'SELECT pg_catalog.pg_is_in_recovery()', {})
self.assertTrue('No connection to' in str(rows))
with patch('patroni.ctl.get_cursor', Mock(side_effect=OperationalError('bla'))):
rows = query_member(None, None, None, 'replica', 'SELECT pg_catalog.pg_is_in_recovery()', {})
@patch('patroni.ctl.get_dcs')
def test_dsn(self, mock_get_dcs):
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['dsn', 'alpha'])
assert 'host=127.0.0.1 port=5435' in result.output
# Mutually exclusive options
result = self.runner.invoke(ctl, ['dsn', 'alpha', '--role', 'master', '--member', 'dummy'])
assert result.exit_code == 1
# Non-existing member
result = self.runner.invoke(ctl, ['dsn', 'alpha', '--member', 'dummy'])
assert result.exit_code == 1
@patch.object(PoolManager, 'request')
@patch('patroni.ctl.get_dcs')
def test_reload(self, mock_get_dcs, mock_post):
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['reload', 'alpha'], input='y')
assert 'Failed: reload for member' in result.output
mock_post.return_value.status = 200
result = self.runner.invoke(ctl, ['reload', 'alpha'], input='y')
assert 'No changes to apply on member' in result.output
mock_post.return_value.status = 202
result = self.runner.invoke(ctl, ['reload', 'alpha'], input='y')
assert 'Reload request received for member' in result.output
@patch.object(PoolManager, 'request')
@patch('patroni.ctl.get_dcs')
def test_restart_reinit(self, mock_get_dcs, mock_post):
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_post.return_value.status = 503
result = self.runner.invoke(ctl, ['restart', 'alpha'], input='now\ny\n')
assert 'Failed: restart for' in result.output
assert result.exit_code == 0
result = self.runner.invoke(ctl, ['reinit', 'alpha'], input='y')
assert result.exit_code == 1
# successful reinit
result = self.runner.invoke(ctl, ['reinit', 'alpha', 'other'], input='y\ny')
assert result.exit_code == 0
# Aborted restart
result = self.runner.invoke(ctl, ['restart', 'alpha'], input='now\nN')
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['restart', 'alpha', '--pending', '--force'])
assert result.exit_code == 0
# Aborted scheduled restart
result = self.runner.invoke(ctl, ['restart', 'alpha', '--scheduled', '2019-10-01T14:30'], input='N')
assert result.exit_code == 1
# Not a member
result = self.runner.invoke(ctl, ['restart', 'alpha', 'dummy', '--any'], input='now\ny')
assert result.exit_code == 1
# Wrong pg version
result = self.runner.invoke(ctl, ['restart', 'alpha', '--any', '--pg-version', '9.1'], input='now\ny')
assert 'Error: Invalid PostgreSQL version format' in result.output
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['restart', 'alpha', '--pending', '--force', '--timeout', '10min'])
assert result.exit_code == 0
# normal restart, the schedule is actually parsed, but not validated in patronictl
result = self.runner.invoke(ctl, ['restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30'])
assert 'Failed: flush scheduled restart' in result.output
with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)):
result = self.runner.invoke(ctl,
['restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30'])
assert result.exit_code == 1
# force restart with restart already present
result = self.runner.invoke(ctl, ['restart', 'alpha', 'other', '--force', '--scheduled', '2300-10-01T14:30'])
assert result.exit_code == 0
ctl_args = ['restart', 'alpha', '--pg-version', '99.0', '--scheduled', '2300-10-01T14:30']
# normal restart, the schedule is actually parsed, but not validated in patronictl
mock_post.return_value.status = 200
result = self.runner.invoke(ctl, ctl_args, input='y')
assert result.exit_code == 0
# get restart with the non-200 return code
# normal restart, the schedule is actually parsed, but not validated in patronictl
mock_post.return_value.status = 204
result = self.runner.invoke(ctl, ctl_args, input='y')
assert result.exit_code == 0
# get restart with the non-200 return code
# normal restart, the schedule is actually parsed, but not validated in patronictl
mock_post.return_value.status = 202
result = self.runner.invoke(ctl, ctl_args, input='y')
assert 'Success: restart scheduled' in result.output
assert result.exit_code == 0
# get restart with the non-200 return code
# normal restart, the schedule is actually parsed, but not validated in patronictl
mock_post.return_value.status = 409
result = self.runner.invoke(ctl, ctl_args, input='y')
assert 'Failed: another restart is already' in result.output
assert result.exit_code == 0
@patch('patroni.ctl.get_dcs')
def test_remove(self, mock_get_dcs):
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['-k', 'remove', 'alpha'], input='alpha\nslave')
assert 'Please confirm' in result.output
assert 'You are about to remove all' in result.output
# Not typing an exact confirmation
assert result.exit_code == 1
# master specified does not match master of cluster
result = self.runner.invoke(ctl, ['remove', 'alpha'], input='alpha\nYes I am aware\nslave')
assert result.exit_code == 1
# cluster specified on cmdline does not match verification prompt
result = self.runner.invoke(ctl, ['remove', 'alpha'], input='beta\nleader')
assert result.exit_code == 1
result = self.runner.invoke(ctl, ['remove', 'alpha'], input='alpha\nYes I am aware\nleader')
assert result.exit_code == 0
def test_ctl(self):
self.runner.invoke(ctl, ['list'])
result = self.runner.invoke(ctl, ['--help'])
assert 'Usage:' in result.output
def test_get_any_member(self):
self.assertIsNone(get_any_member(get_cluster_initialized_without_leader(), role='master'))
m = get_any_member(get_cluster_initialized_with_leader(), role='master')
self.assertEqual(m.name, 'leader')
def test_get_all_members(self):
self.assertEqual(list(get_all_members(get_cluster_initialized_without_leader(), role='master')), [])
r = list(get_all_members(get_cluster_initialized_with_leader(), role='master'))
self.assertEqual(len(r), 1)
self.assertEqual(r[0].name, 'leader')
r = list(get_all_members(get_cluster_initialized_with_leader(), role='replica'))
self.assertEqual(len(r), 1)
self.assertEqual(r[0].name, 'other')
self.assertEqual(len(list(get_all_members(get_cluster_initialized_without_leader(), role='replica'))), 2)
@patch('patroni.ctl.get_dcs')
def test_members(self, mock_get_dcs):
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['list'])
assert '127.0.0.1' in result.output
assert result.exit_code == 0
with patch('patroni.ctl.load_config', Mock(return_value={})):
self.runner.invoke(ctl, ['list'])
def test_configure(self):
result = self.runner.invoke(configure, ['--dcs', 'abc', '-c', 'dummy', '-n', 'bla'])
assert result.exit_code == 0
@patch('patroni.ctl.get_dcs')
def test_scaffold(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_not_initialized_without_leader
mock_get_dcs.return_value.initialize = Mock(return_value=True)
mock_get_dcs.return_value.touch_member = Mock(return_value=True)
mock_get_dcs.return_value.attempt_to_acquire_leader = Mock(return_value=True)
mock_get_dcs.return_value.delete_cluster = Mock()
with patch.object(self.e, 'initialize', return_value=False):
result = self.runner.invoke(ctl, ['scaffold', 'alpha'])
assert result.exception
with patch.object(mock_get_dcs.return_value, 'touch_member', Mock(return_value=False)):
result = self.runner.invoke(ctl, ['scaffold', 'alpha'])
assert result.exception
result = self.runner.invoke(ctl, ['scaffold', 'alpha'])
assert result.exit_code == 0
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['scaffold', 'alpha'])
assert result.exception
@patch('patroni.ctl.get_dcs')
def test_list_extended(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
cluster = get_cluster_initialized_with_leader(sync=('leader', 'other'))
mock_get_dcs.return_value.get_cluster = Mock(return_value=cluster)
result = self.runner.invoke(ctl, ['list', 'dummy', '--extended', '--timestamp'])
assert '2100' in result.output
assert 'Scheduled restart' in result.output
@patch('patroni.ctl.get_dcs')
@patch.object(PoolManager, 'request', Mock(return_value=MockResponse()))
def test_flush(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
result = self.runner.invoke(ctl, ['flush', 'dummy', 'restart', '-r', 'master'], input='y')
assert 'No scheduled restart' in result.output
result = self.runner.invoke(ctl, ['flush', 'dummy', 'restart', '--force'])
assert 'Success: flush scheduled restart' in result.output
with patch.object(PoolManager, 'request', return_value=MockResponse(404)):
result = self.runner.invoke(ctl, ['flush', 'dummy', 'restart', '--force'])
assert 'Failed: flush scheduled restart' in result.output
@patch.object(PoolManager, 'request')
@patch('patroni.ctl.get_dcs')
@patch('patroni.ctl.polling_loop', Mock(return_value=[1]))
def test_pause_cluster(self, mock_get_dcs, mock_post):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_post.return_value.status = 500
result = self.runner.invoke(ctl, ['pause', 'dummy'])
assert 'Failed' in result.output
mock_post.return_value.status = 200
with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)):
result = self.runner.invoke(ctl, ['pause', 'dummy'])
assert 'Cluster is already paused' in result.output
result = self.runner.invoke(ctl, ['pause', 'dummy', '--wait'])
assert "'pause' request sent" in result.output
mock_get_dcs.return_value.get_cluster = Mock(side_effect=[get_cluster_initialized_with_leader(),
get_cluster(None, None, [], None, None)])
self.runner.invoke(ctl, ['pause', 'dummy', '--wait'])
member = Member(1, 'other', 28, {})
mock_get_dcs.return_value.get_cluster = Mock(side_effect=[get_cluster_initialized_with_leader(),
get_cluster(None, None, [member], None, None)])
self.runner.invoke(ctl, ['pause', 'dummy', '--wait'])
@patch.object(PoolManager, 'request')
@patch('patroni.ctl.get_dcs')
def test_resume_cluster(self, mock_get_dcs, mock_post):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_post.return_value.status = 200
with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=False)):
result = self.runner.invoke(ctl, ['resume', 'dummy'])
assert 'Cluster is not paused' in result.output
with patch('patroni.dcs.Cluster.is_paused', Mock(return_value=True)):
result = self.runner.invoke(ctl, ['resume', 'dummy'])
assert 'Success' in result.output
mock_post.return_value.status = 500
result = self.runner.invoke(ctl, ['resume', 'dummy'])
assert 'Failed' in result.output
mock_post.side_effect = Exception
result = self.runner.invoke(ctl, ['resume', 'dummy'])
assert 'Can not find accessible cluster member' in result.output
def test_apply_config_changes(self):
config = {"postgresql": {"parameters": {"work_mem": "4MB"}, "use_pg_rewind": True}, "ttl": 30}
before_editing = format_config_for_editing(config)
# Spaces are allowed and stripped, numbers and booleans are interpreted
after_editing, changed_config = apply_config_changes(before_editing, config,
["postgresql.parameters.work_mem = 5MB",
"ttl=15", "postgresql.use_pg_rewind=off", 'a.b=c'])
self.assertEqual(changed_config, {"a": {"b": "c"}, "postgresql": {"parameters": {"work_mem": "5MB"},
"use_pg_rewind": False}, "ttl": 15})
# postgresql.parameters namespace is flattened
after_editing, changed_config = apply_config_changes(before_editing, config,
["postgresql.parameters.work_mem.sub = x"])
self.assertEqual(changed_config, {"postgresql": {"parameters": {"work_mem": "4MB", "work_mem.sub": "x"},
"use_pg_rewind": True}, "ttl": 30})
# Setting to null deletes
after_editing, changed_config = apply_config_changes(before_editing, config,
["postgresql.parameters.work_mem=null"])
self.assertEqual(changed_config, {"postgresql": {"use_pg_rewind": True}, "ttl": 30})
after_editing, changed_config = apply_config_changes(before_editing, config,
["postgresql.use_pg_rewind=null",
"postgresql.parameters.work_mem=null"])
self.assertEqual(changed_config, {"ttl": 30})
self.assertRaises(PatroniCtlException, apply_config_changes, before_editing, config, ['a'])
@patch('sys.stdout.isatty', return_value=False)
@patch('cdiff.markup_to_pager')
def test_show_diff(self, mock_markup_to_pager, mock_isatty):
show_diff("foo:\n bar: 1\n", "foo:\n bar: 2\n")
mock_markup_to_pager.assert_not_called()
mock_isatty.return_value = True
show_diff("foo:\n bar: 1\n", "foo:\n bar: 2\n")
mock_markup_to_pager.assert_called_once()
# Test that unicode handling doesn't fail with an exception
show_diff(b"foo:\n bar: \xc3\xb6\xc3\xb6\n".decode('utf-8'),
b"foo:\n bar: \xc3\xbc\xc3\xbc\n".decode('utf-8'))
@patch('subprocess.call', return_value=1)
def test_invoke_editor(self, mock_subprocess_call):
os.environ.pop('EDITOR', None)
for e in ('', '/bin/vi'):
with patch('patroni.ctl.find_executable', Mock(return_value=e)):
self.assertRaises(PatroniCtlException, invoke_editor, 'foo: bar\n', 'test')
@patch('patroni.ctl.get_dcs')
def test_show_config(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
self.runner.invoke(ctl, ['show-config', 'dummy'])
@patch('patroni.ctl.get_dcs')
def test_edit_config(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
mock_get_dcs.return_value.set_config_value = Mock(return_value=False)
os.environ['EDITOR'] = 'true'
self.runner.invoke(ctl, ['edit-config', 'dummy'])
self.runner.invoke(ctl, ['edit-config', 'dummy', '-s', 'foo=bar'])
self.runner.invoke(ctl, ['edit-config', 'dummy', '--replace', 'postgres0.yml'])
self.runner.invoke(ctl, ['edit-config', 'dummy', '--apply', '-'], input='foo: bar')
self.runner.invoke(ctl, ['edit-config', 'dummy', '--force', '--apply', '-'], input='foo: bar')
mock_get_dcs.return_value.set_config_value.return_value = True
self.runner.invoke(ctl, ['edit-config', 'dummy', '--force', '--apply', '-'], input='foo: bar')
@patch('patroni.ctl.get_dcs')
def test_version(self, mock_get_dcs):
mock_get_dcs.return_value = self.e
mock_get_dcs.return_value.get_cluster = get_cluster_initialized_with_leader
with patch.object(PoolManager, 'request') as mocked:
result = self.runner.invoke(ctl, ['version'])
assert 'patronictl version' in result.output
mocked.return_value.data = b'{"patroni":{"version":"1.2.3"},"server_version": 100001}'
result = self.runner.invoke(ctl, ['version', 'dummy'])
assert '1.2.3' in result.output
with patch.object(PoolManager, 'request', Mock(side_effect=Exception)):
result = self.runner.invoke(ctl, ['version', 'dummy'])
assert 'failed to get version' in result.output
@patch('patroni.ctl.get_dcs')
def test_history(self, mock_get_dcs):
mock_get_dcs.return_value.get_cluster = Mock()
mock_get_dcs.return_value.get_cluster.return_value.history.lines = [[1, 67176, 'no recovery target specified']]
result = self.runner.invoke(ctl, ['history'])
assert 'Reason' in result.output
def test_format_pg_version(self):
self.assertEqual(format_pg_version(100001), '10.1')
self.assertEqual(format_pg_version(90605), '9.6.5')
@patch('sys.platform', 'win32')
def test_find_executable(self):
with patch('os.path.isfile', Mock(return_value=True)):
self.assertEqual(find_executable('vim'), 'vim.exe')
with patch('os.path.isfile', Mock(return_value=False)):
self.assertIsNone(find_executable('vim'))
with patch('os.path.isfile', Mock(side_effect=[False, True])):
self.assertEqual(find_executable('vim', '/'), '/vim.exe')
| 50.153061 | 120 | 0.641607 |
fc38e38ec099d0d04145aa51f0b05d450c47c9ef
| 3,129 |
py
|
Python
|
pythonforandroid/recipes/vlc/__init__.py
|
Joreshic/python-for-android
|
c60e02d2e32e31a3a754838c51e9242cbadcd9e8
|
[
"MIT"
] | 38 |
2016-03-09T08:48:19.000Z
|
2021-11-28T15:31:22.000Z
|
pythonforandroid/recipes/vlc/__init__.py
|
Joreshic/python-for-android
|
c60e02d2e32e31a3a754838c51e9242cbadcd9e8
|
[
"MIT"
] | 2 |
2018-12-18T14:30:11.000Z
|
2021-06-21T20:15:47.000Z
|
pythonforandroid/recipes/vlc/__init__.py
|
Joreshic/python-for-android
|
c60e02d2e32e31a3a754838c51e9242cbadcd9e8
|
[
"MIT"
] | 18 |
2016-07-13T17:30:05.000Z
|
2022-02-22T07:04:33.000Z
|
from pythonforandroid.toolchain import Recipe, current_directory
from pythonforandroid.logger import info, debug, shprint, warning
from os.path import exists, join
from os import environ
import sh
from colorama import Fore, Style
class VlcRecipe(Recipe):
version = '3.0.0'
url = None
name = 'vlc'
depends = []
port_git = 'http://git.videolan.org/git/vlc-ports/android.git'
vlc_git = 'http://git.videolan.org/git/vlc.git'
ENV_LIBVLC_AAR = 'LIBVLC_AAR'
aars = {} # for future use of multiple arch
def prebuild_arch(self, arch):
super(VlcRecipe, self).prebuild_arch(arch)
build_dir = self.get_build_dir(arch.arch)
port_dir = join(build_dir, 'vlc-port-android')
if self.ENV_LIBVLC_AAR in environ:
self.aars[arch] = aar = environ.get(self.ENV_LIBVLC_AAR)
if not exists(aar):
warning("Error: libvlc-<ver>.aar bundle " \
"not found in {}".format(aar))
info("check {} environment!".format(self.ENV_LIBVLC_AAR))
exit(1)
else:
aar_path = join(port_dir, 'libvlc', 'build', 'outputs', 'aar')
self.aars[arch] = aar = join(aar_path, 'libvlc-{}.aar'.format(self.version))
warning("HINT: set path to precompiled libvlc-<ver>.aar bundle " \
"in {} environment!".format(self.ENV_LIBVLC_AAR))
info("libvlc-<ver>.aar should build " \
"from sources at {}".format(port_dir))
if not exists(join(port_dir, 'compile.sh')):
info("clone vlc port for android sources from {}".format(
self.port_git))
shprint(sh.git, 'clone', self.port_git, port_dir,
_tail=20, _critical=True)
vlc_dir = join(port_dir, 'vlc')
if not exists(join(vlc_dir, 'Makefile.am')):
info("clone vlc sources from {}".format(self.vlc_git))
shprint(sh.git, 'clone', self.vlc_git, vlc_dir,
_tail=20, _critical=True)
def build_arch(self, arch):
super(VlcRecipe, self).build_arch(arch)
build_dir = self.get_build_dir(arch.arch)
port_dir = join(build_dir, 'vlc-port-android')
aar = self.aars[arch]
if not exists(aar):
with current_directory(port_dir):
env = dict(environ)
env.update({
'ANDROID_ABI': arch.arch,
'ANDROID_NDK': self.ctx.ndk_dir,
'ANDROID_SDK': self.ctx.sdk_dir,
})
info("compiling vlc from sources")
debug("environment: {}".format(env))
if not exists(join('bin', 'VLC-debug.apk')):
shprint(sh.Command('./compile.sh'), _env=env,
_tail=50, _critical=True)
shprint(sh.Command('./compile-libvlc.sh'), _env=env,
_tail=50, _critical=True)
shprint(sh.cp, '-a', aar, self.ctx.aars_dir)
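# Illustrative sketch (not part of the original recipe): if a prebuilt
# libvlc-<ver>.aar already exists, pointing the LIBVLC_AAR environment
# variable at it lets prebuild_arch()/build_arch() skip the long VLC source
# build. The path below is a hypothetical example.
#
#   export LIBVLC_AAR=/path/to/libvlc-3.0.0.aar
#   p4a apk --requirements=vlc ...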
recipe = VlcRecipe()
| 43.458333 | 88 | 0.552892 |
abbc2df9061a3af12a895d514effc97a7e18e68f
| 6,972 |
py
|
Python
|
homeassistant/components/airvisual/config_flow.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 6 |
2020-07-18T16:33:25.000Z
|
2021-09-26T09:52:04.000Z
|
homeassistant/components/airvisual/config_flow.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47 |
2020-07-23T07:14:33.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/airvisual/config_flow.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 5 |
2020-03-29T00:29:13.000Z
|
2021-09-06T20:58:40.000Z
|
"""Define a config flow manager for AirVisual."""
import asyncio
from pyairvisual import Client
from pyairvisual.errors import InvalidKeyError, NodeProError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY,
CONF_IP_ADDRESS,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_SHOW_ON_MAP,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from . import async_get_geography_id
from .const import ( # pylint: disable=unused-import
CONF_GEOGRAPHIES,
CONF_INTEGRATION_TYPE,
DOMAIN,
INTEGRATION_TYPE_GEOGRAPHY,
INTEGRATION_TYPE_NODE_PRO,
LOGGER,
)
class AirVisualFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle an AirVisual config flow."""
VERSION = 2
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@property
def geography_schema(self):
"""Return the data schema for the cloud API."""
return vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Required(
CONF_LATITUDE, default=self.hass.config.latitude
): cv.latitude,
vol.Required(
CONF_LONGITUDE, default=self.hass.config.longitude
): cv.longitude,
}
)
@property
def pick_integration_type_schema(self):
"""Return the data schema for picking the integration type."""
return vol.Schema(
{
vol.Required("type"): vol.In(
[INTEGRATION_TYPE_GEOGRAPHY, INTEGRATION_TYPE_NODE_PRO]
)
}
)
@property
def node_pro_schema(self):
"""Return the data schema for a Node/Pro."""
return vol.Schema(
{vol.Required(CONF_IP_ADDRESS): str, vol.Required(CONF_PASSWORD): str}
)
async def _async_set_unique_id(self, unique_id):
"""Set the unique ID of the config flow and abort if it already exists."""
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Define the config flow to handle options."""
return AirVisualOptionsFlowHandler(config_entry)
async def async_step_geography(self, user_input=None):
"""Handle the initialization of the integration via the cloud API."""
if not user_input:
return self.async_show_form(
step_id="geography", data_schema=self.geography_schema
)
geo_id = async_get_geography_id(user_input)
await self._async_set_unique_id(geo_id)
self._abort_if_unique_id_configured()
# Find older config entries without unique ID:
for entry in self._async_current_entries():
if entry.version != 1:
continue
if any(
geo_id == async_get_geography_id(geography)
for geography in entry.data[CONF_GEOGRAPHIES]
):
return self.async_abort(reason="already_configured")
websession = aiohttp_client.async_get_clientsession(self.hass)
client = Client(session=websession, api_key=user_input[CONF_API_KEY])
# If this is the first (and only the first) time we've seen this API key, check
# that it's valid:
checked_keys = self.hass.data.setdefault("airvisual_checked_api_keys", set())
check_keys_lock = self.hass.data.setdefault(
"airvisual_checked_api_keys_lock", asyncio.Lock()
)
async with check_keys_lock:
if user_input[CONF_API_KEY] not in checked_keys:
try:
await client.api.nearest_city()
except InvalidKeyError:
return self.async_show_form(
step_id="geography",
data_schema=self.geography_schema,
errors={CONF_API_KEY: "invalid_api_key"},
)
checked_keys.add(user_input[CONF_API_KEY])
return self.async_create_entry(
title=f"Cloud API ({geo_id})",
data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY},
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_geography(import_config)
async def async_step_node_pro(self, user_input=None):
"""Handle the initialization of the integration with a Node/Pro."""
if not user_input:
return self.async_show_form(
step_id="node_pro", data_schema=self.node_pro_schema
)
await self._async_set_unique_id(user_input[CONF_IP_ADDRESS])
websession = aiohttp_client.async_get_clientsession(self.hass)
client = Client(session=websession)
try:
await client.node.from_samba(
user_input[CONF_IP_ADDRESS],
user_input[CONF_PASSWORD],
include_history=False,
include_trends=False,
)
except NodeProError as err:
LOGGER.error("Error connecting to Node/Pro unit: %s", err)
return self.async_show_form(
step_id="node_pro",
data_schema=self.node_pro_schema,
errors={CONF_IP_ADDRESS: "unable_to_connect"},
)
return self.async_create_entry(
title=f"Node/Pro ({user_input[CONF_IP_ADDRESS]})",
data={**user_input, CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO},
)
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
if not user_input:
return self.async_show_form(
step_id="user", data_schema=self.pick_integration_type_schema
)
if user_input["type"] == INTEGRATION_TYPE_GEOGRAPHY:
return await self.async_step_geography()
return await self.async_step_node_pro()
class AirVisualOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle an AirVisual options flow."""
def __init__(self, config_entry):
"""Initialize."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required(
CONF_SHOW_ON_MAP,
default=self.config_entry.options.get(CONF_SHOW_ON_MAP),
): bool
}
),
)
| 34.514851 | 87 | 0.619048 |
01f61ca001559dcfc47e105decb6aa2a86eff9be
| 305 |
py
|
Python
|
upybleutils/appble.py
|
Carglglz/upyble
|
e95ecc235e7d1d863efaa80eb092aeb070c6de3a
|
[
"MIT"
] | 5 |
2021-02-16T06:32:25.000Z
|
2022-03-18T00:26:08.000Z
|
upybleutils/appble.py
|
Carglglz/upyble
|
e95ecc235e7d1d863efaa80eb092aeb070c6de3a
|
[
"MIT"
] | null | null | null |
upybleutils/appble.py
|
Carglglz/upyble
|
e95ecc235e7d1d863efaa80eb092aeb070c6de3a
|
[
"MIT"
] | 2 |
2020-05-24T09:19:37.000Z
|
2021-02-16T06:32:28.000Z
|
import bluetooth
from ble_temp_amb import BLE_Battery_Temp
ble = bluetooth.BLE()
def main(**kargs):
ble_temp_batt = BLE_Battery_Temp(ble, **kargs)
return ble_temp_batt
def set_ble_flag(flag):
    with open('ble_flag.py', 'w') as bleconfig:
        bleconfig.write('BLE = {}'.format(flag))
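# Illustrative sketch (not part of the original script): a typical call
# sequence on the board might be to persist the flag and then start the
# BLE peripheral; the keyword arguments accepted by BLE_Battery_Temp are
# device-specific and not assumed here.
#
#   set_ble_flag(True)        # writes "BLE = True" to ble_flag.py
#   ble_server = main()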
| 19.0625 | 50 | 0.701639 |
e71381f1077c9450389d1384c265521fb5682e06
| 593 |
py
|
Python
|
XML/XML_to_python_objects__untagle__examples/from_url.py
|
DazEB2/SimplePyScripts
|
1dde0a42ba93fe89609855d6db8af1c63b1ab7cc
|
[
"CC-BY-4.0"
] | 117 |
2015-12-18T07:18:27.000Z
|
2022-03-28T00:25:54.000Z
|
XML/XML_to_python_objects__untagle__examples/from_url.py
|
DazEB2/SimplePyScripts
|
1dde0a42ba93fe89609855d6db8af1c63b1ab7cc
|
[
"CC-BY-4.0"
] | 8 |
2018-10-03T09:38:46.000Z
|
2021-12-13T19:51:09.000Z
|
XML/XML_to_python_objects__untagle__examples/from_url.py
|
DazEB2/SimplePyScripts
|
1dde0a42ba93fe89609855d6db8af1c63b1ab7cc
|
[
"CC-BY-4.0"
] | 28 |
2016-08-02T17:43:47.000Z
|
2022-03-21T08:31:12.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# SOURCE: https://github.com/stchris/untangle
# pip install untangle
# OR:
# pip install git+https://github.com/stchris/untangle.git
import untangle
obj = untangle.parse('https://news.yandex.ru/games.rss')
channel = obj.rss.channel
print(channel.title.cdata) # Яндекс.Новости: Игры
print(channel.link.cdata) # https://news.yandex.ru/games.html?from=rss
print(channel.image.url.cdata) # https://company.yandex.ru/i/50x23.gif
print()
for item in channel.item:
print(item.title.cdata, item.link.cdata)
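# Illustrative note (not part of the original example): untangle also exposes
# XML attributes through dict-style access; for a hypothetical child element
# such as <enclosure url="..."/> this would look roughly like:
#
#   item.enclosure['url']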
| 24.708333 | 76 | 0.713322 |
af63c4b4181ceb7095bd81146341f10412506518
| 219 |
py
|
Python
|
utils/__init__.py
|
naivete5656/BFP
|
74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18
|
[
"MIT"
] | 8 |
2020-07-31T15:20:01.000Z
|
2021-09-18T08:42:07.000Z
|
utils/__init__.py
|
naivete5656/BFP
|
74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
naivete5656/BFP
|
74c5604a9ba4eaa3ec3e2c76ef5e1282d7d10f18
|
[
"MIT"
] | 5 |
2020-10-04T02:02:13.000Z
|
2021-11-14T23:37:08.000Z
|
from .for_vis import Visdom
from .load import *
from .utils import local_maxima, gaus_filter, optimum, gather_path
from .load_for_CMP import *
from .load_image import visuarize_img, load_image
from .cmp_library import *
| 36.5 | 66 | 0.817352 |
b6c57be79d4cfea43df6e6533d79748bbcfb8335
| 97 |
py
|
Python
|
apps/transaccion/apps.py
|
mariomtzjr/podemos_test
|
5efaf02a19aa8c4849e3ad0108546e95af524126
|
[
"MIT"
] | null | null | null |
apps/transaccion/apps.py
|
mariomtzjr/podemos_test
|
5efaf02a19aa8c4849e3ad0108546e95af524126
|
[
"MIT"
] | 8 |
2021-03-30T13:39:24.000Z
|
2022-03-12T00:36:15.000Z
|
apps/transaccion/apps.py
|
mariomtzjr/podemos_test
|
5efaf02a19aa8c4849e3ad0108546e95af524126
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class TransaccionConfig(AppConfig):
name = 'transaccion'
| 16.166667 | 35 | 0.773196 |
d9cf20124fa745a4bd534cf170b01492da4e897a
| 5,871 |
py
|
Python
|
aws_xray_sdk/core/models/segment.py
|
Cloudzero/aws-xray-sdk-python
|
41b776d57bf4b3a47ddf993a9c2999f527ca0ede
|
[
"Apache-2.0"
] | null | null | null |
aws_xray_sdk/core/models/segment.py
|
Cloudzero/aws-xray-sdk-python
|
41b776d57bf4b3a47ddf993a9c2999f527ca0ede
|
[
"Apache-2.0"
] | null | null | null |
aws_xray_sdk/core/models/segment.py
|
Cloudzero/aws-xray-sdk-python
|
41b776d57bf4b3a47ddf993a9c2999f527ca0ede
|
[
"Apache-2.0"
] | null | null | null |
import copy
import traceback
from .entity import Entity
from .traceid import TraceId
from ..utils.atomic_counter import AtomicCounter
from ..exceptions.exceptions import SegmentNameMissingException
ORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'
class SegmentContextManager:
"""
Wrapper for segment and recorder to provide segment context manager.
"""
def __init__(self, recorder, name=None, **segment_kwargs):
self.name = name
self.segment_kwargs = segment_kwargs
self.recorder = recorder
self.segment = None
def __enter__(self):
self.segment = self.recorder.begin_segment(
name=self.name, **self.segment_kwargs)
return self.segment
def __exit__(self, exc_type, exc_val, exc_tb):
if self.segment is None:
return
if exc_type is not None:
self.segment.add_exception(
exc_val,
traceback.extract_tb(
exc_tb,
limit=self.recorder.max_trace_back,
)
)
self.recorder.end_segment()
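# Illustrative sketch (not part of the original module): the context manager is
# normally obtained through the recorder rather than constructed directly; a
# rough usage outline, where the segment name and annotation are examples and
# put_annotation is assumed from the Entity base class:
#
#   with SegmentContextManager(recorder, name='example-handler') as segment:
#       segment.put_annotation('example_key', 'example_value')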
class Segment(Entity):
"""
The compute resources running your application logic send data
about their work as segments. A segment provides the resource's name,
details about the request, and details about the work done.
"""
def __init__(self, name, entityid=None, traceid=None,
parent_id=None, sampled=True):
"""
Create a segment object.
:param str name: segment name. If not specified a
SegmentNameMissingException will be thrown.
:param str entityid: hexdigits segment id.
:param str traceid: The trace id of the segment.
:param str parent_id: The parent id of the segment. It comes
from id of an upstream segment or subsegment.
:param bool sampled: If False this segment will not be sent
to the X-Ray daemon.
"""
if not name:
raise SegmentNameMissingException("Segment name is required.")
super(Segment, self).__init__(name)
if not traceid:
traceid = TraceId().to_id()
self.trace_id = traceid
if entityid:
self.id = entityid
self.in_progress = True
self.sampled = sampled
self.user = None
self.ref_counter = AtomicCounter()
self._subsegments_counter = AtomicCounter()
if parent_id:
self.parent_id = parent_id
def add_subsegment(self, subsegment):
"""
Add input subsegment as a child subsegment and increment
reference counter and total subsegments counter.
"""
super(Segment, self).add_subsegment(subsegment)
self.increment()
def increment(self):
"""
        Increment the reference counter that tracks open subsegments
        and the total subsegments counter that tracks the total number
        of subsegments this segment currently holds.
"""
self.ref_counter.increment()
self._subsegments_counter.increment()
def decrement_ref_counter(self):
"""
Decrement reference counter by 1 when a subsegment is closed.
"""
self.ref_counter.decrement()
def ready_to_send(self):
"""
Return True if the segment doesn't have any open subsegments
and itself is not in progress.
"""
return self.ref_counter.get_current() <= 0 and not self.in_progress
def get_total_subsegments_size(self):
"""
Return the number of total subsegments regardless of open or closed.
"""
return self._subsegments_counter.get_current()
def decrement_subsegments_size(self):
"""
Decrement total subsegments by 1. This usually happens when
a subsegment is streamed out.
"""
return self._subsegments_counter.decrement()
def remove_subsegment(self, subsegment):
"""
Remove the reference of input subsegment.
"""
super(Segment, self).remove_subsegment(subsegment)
self.decrement_subsegments_size()
def set_user(self, user):
"""
set user of a segment. One segment can only have one user.
User is indexed and can be later queried.
"""
super(Segment, self)._check_ended()
self.user = user
def set_service(self, service_info):
"""
Add python runtime and version info.
This method should be only used by the recorder.
"""
self.service = service_info
def set_rule_name(self, rule_name):
"""
Add the matched centralized sampling rule name
if a segment is sampled because of that rule.
This method should be only used by the recorder.
"""
if not self.aws.get('xray', None):
self.aws['xray'] = {}
self.aws['xray']['rule_name'] = rule_name
def save_origin_trace_header(self, trace_header):
"""
        Temporarily store additional trace header data fields
        on the segment for later propagation. The data will be
        cleaned up upon serialization.
"""
setattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, trace_header)
def get_origin_trace_header(self):
"""
Retrieve saved trace header data.
"""
return getattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, None)
def __getstate__(self):
"""
        Used by jsonpickle to remove unwanted fields.
"""
properties = copy.copy(self.__dict__)
super(Segment, self)._delete_empty_properties(properties)
if not self.user:
del properties['user']
del properties['ref_counter']
del properties['_subsegments_counter']
properties.pop(ORIGIN_TRACE_HEADER_ATTR_KEY, None)
return properties
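# --- Editor's usage sketch (not part of the original module) ---
# A minimal illustration of the reference-counting protocol above. It assumes
# this file is the Segment model of aws-xray-sdk-python and that close(),
# inherited from Entity, marks the segment as no longer in progress. In normal
# operation the recorder drives these calls via add_subsegment() and
# decrement_ref_counter() rather than the caller doing so directly.
if __name__ == "__main__":
    segment = Segment(name="checkout")           # trace_id is generated automatically
    segment.increment()                          # a subsegment was opened
    segment.decrement_ref_counter()              # ...and later closed
    segment.close()                              # in_progress -> False (Entity.close)
    print(segment.ready_to_send())               # True: no open subsegments, segment ended
    print(segment.get_total_subsegments_size())  # 1: the total counter is not decremented here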
| 31.907609 | 76 | 0.628172 |
53273b0788a2f92daef8741b7065531f0c211845
| 1,566 |
py
|
Python
|
benchbuild/projects/benchbuild/lammps.py
|
simbuerg/benchbuild
|
e7b0c8d1e26c1d60b8dfab66526dcf6d0c0b6563
|
[
"MIT"
] | null | null | null |
benchbuild/projects/benchbuild/lammps.py
|
simbuerg/benchbuild
|
e7b0c8d1e26c1d60b8dfab66526dcf6d0c0b6563
|
[
"MIT"
] | 3 |
2017-02-02T15:54:52.000Z
|
2017-06-08T03:52:45.000Z
|
benchbuild/projects/benchbuild/lammps.py
|
simbuerg/benchbuild
|
e7b0c8d1e26c1d60b8dfab66526dcf6d0c0b6563
|
[
"MIT"
] | 1 |
2017-04-01T15:30:16.000Z
|
2017-04-01T15:30:16.000Z
|
from benchbuild.utils.wrapping import wrap
from benchbuild.projects.benchbuild.group import BenchBuildGroup
from benchbuild.utils.compiler import lt_clang_cxx
from benchbuild.utils.downloader import Git
from benchbuild.utils.run import run
from benchbuild.utils.versions import get_version_from_cache_dir
from plumbum import local
from benchbuild.utils.cmd import cp, make
from os import path
from glob import glob
class Lammps(BenchBuildGroup):
""" LAMMPS benchmark """
NAME = 'lammps'
DOMAIN = 'scientific'
SRC_FILE = 'lammps.git'
def prepare(self):
super(Lammps, self).prepare()
cp("-vr", self.testdir, "test")
def run_tests(self, experiment, run):
lammps_dir = path.join(self.builddir, self.src_dir, "src")
exp = wrap(path.join(lammps_dir, "lmp_serial"), experiment)
with local.cwd("test"):
tests = glob(path.join(self.testdir, "in.*"))
for test in tests:
cmd = (exp < test)
run(cmd, None)
src_dir = SRC_FILE
src_uri = "https://github.com/lammps/lammps"
def download(self):
Git(self.src_uri, self.src_dir)
def configure(self):
pass
def build(self):
self.ldflags += ["-lgomp"]
clang_cxx = lt_clang_cxx(self.cflags, self.ldflags,
self.compiler_extension)
with local.cwd(path.join(self.src_dir, "src")):
run(make[
"CC=" + str(clang_cxx), "LINK=" + str(
clang_cxx), "clean", "serial"])
| 27.964286 | 67 | 0.623883 |
c3578dbb989475177b7a11073e531935b3ae19ef
| 5,283 |
py
|
Python
|
core/platform/email/dev_mode_email_services_test.py
|
lheureuxe13/oppia
|
7110e3e5d5a53527c31d7b33e14d25e8d5b981f9
|
[
"Apache-2.0"
] | 4 |
2021-09-16T16:46:53.000Z
|
2022-02-06T13:00:14.000Z
|
core/platform/email/dev_mode_email_services_test.py
|
lheureuxe13/oppia
|
7110e3e5d5a53527c31d7b33e14d25e8d5b981f9
|
[
"Apache-2.0"
] | 80 |
2020-10-31T09:14:46.000Z
|
2021-01-12T23:38:15.000Z
|
core/platform/email/dev_mode_email_services_test.py
|
lheureuxe13/oppia
|
7110e3e5d5a53527c31d7b33e14d25e8d5b981f9
|
[
"Apache-2.0"
] | 1 |
2020-10-02T13:28:26.000Z
|
2020-10-02T13:28:26.000Z
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the email services API wrapper in DEV_MODE."""
from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import textwrap
from core import feconf
from core.platform.email import dev_mode_email_services
from core.tests import test_utils
from typing import Any, Dict, Union
class EmailTests(test_utils.GenericTestBase):
"""Tests for sending emails."""
def test_send_mail_logs_to_terminal(self) -> None:
"""In DEV Mode, platforms email_service API that sends a singular email
logs the correct email info to terminal.
"""
observed_log_messages = []
def _mock_logging_function(msg: str, *args: Any) -> None:
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
msg_body = (
"""
EmailService.SendMail
From: %s
To: %s
Subject: %s
Body:
Content-type: text/plain
Data length: %d
Body:
Content-type: text/html
Data length: %d
Bcc: None
Reply_to: None
Recipient Variables:
Length: 0
""" % (
feconf.SYSTEM_EMAIL_ADDRESS, feconf.ADMIN_EMAIL_ADDRESS,
'subject', 4, 4))
logging_info_email_body = textwrap.dedent(msg_body)
logging_info_notification = (
'You are not currently sending out real emails since this is a ' +
'dev environment. Emails are sent out in the production' +
' environment.')
allow_emailing = self.swap(feconf, 'CAN_SEND_EMAILS', True)
with allow_emailing, (
self.swap(logging, 'info', _mock_logging_function)):
dev_mode_email_services.send_email_to_recipients(
feconf.SYSTEM_EMAIL_ADDRESS, [feconf.ADMIN_EMAIL_ADDRESS],
'subject', 'body', 'html')
self.assertEqual(len(observed_log_messages), 2)
self.assertEqual(
observed_log_messages,
[logging_info_email_body, logging_info_notification])
def test_send_mail_to_multiple_recipients_logs_to_terminal(self) -> None:
"""In DEV Mode, platform email_services that sends mail to multiple
recipients logs the correct info to terminal.
"""
observed_log_messages = []
def _mock_logging_function(msg: str, *args: Any) -> None:
"""Mocks logging.info()."""
observed_log_messages.append(msg % args)
recipient_email_list_str = '[email protected] [email protected] [email protected]... Total: 4 emails.'
bcc_email_list_str = '[email protected] [email protected] [email protected]... Total: 4 emails.'
recipient_variables: Dict[str, Dict[str, Union[str, float]]] = (
{
'[email protected]': {'first': 'Bob', 'id': 1},
'[email protected]': {'first': 'Jane', 'id': 2},
'[email protected]': {'first': 'Rob', 'id': 3},
'[email protected]': {'first': 'Emily', 'id': 4},
})
msg_body = (
"""
EmailService.SendMail
From: %s
To: %s
Subject: %s
Body:
Content-type: text/plain
Data length: %d
Body:
Content-type: text/html
Data length: %d
Bcc: %s
Reply_to: %s
Recipient Variables:
Length: %d
""" % (
feconf.SYSTEM_EMAIL_ADDRESS, recipient_email_list_str,
'subject', 4, 4, bcc_email_list_str, '123',
len(recipient_variables)))
logging_info_email_body = textwrap.dedent(msg_body)
logging_info_notification = (
'You are not currently sending out real emails since this is a ' +
'dev environment. Emails are sent out in the production' +
' environment.')
allow_emailing = self.swap(feconf, 'CAN_SEND_EMAILS', True)
with allow_emailing, (
self.swap(logging, 'info', _mock_logging_function)):
dev_mode_email_services.send_email_to_recipients(
feconf.SYSTEM_EMAIL_ADDRESS,
['[email protected]', '[email protected]', '[email protected]', '[email protected]'],
'subject', 'body', 'html',
bcc=['[email protected]', '[email protected]', '[email protected]', '[email protected]'],
reply_to='123',
recipient_variables=recipient_variables)
self.assertEqual(len(observed_log_messages), 2)
self.assertEqual(
observed_log_messages,
[logging_info_email_body, logging_info_notification])
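# --- Editor's sketch (not part of the Oppia test suite) ---
# The logging-capture pattern used by both tests above, shown standalone with
# unittest.mock playing the role of self.swap(); only the standard library is
# assumed.
if __name__ == "__main__":
    import logging
    from unittest import mock
    observed_log_messages = []
    def _capture(msg, *args):
        observed_log_messages.append(msg % args)
    with mock.patch.object(logging, "info", _capture):
        logging.info("Sent %d emails from %s", 4, "admin")
    assert observed_log_messages == ["Sent 4 emails from admin"]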
| 37.204225 | 80 | 0.586788 |
1e4a67c5320104d748908e71bd783844b06e1a3a
| 6,754 |
py
|
Python
|
openks/models/model.py
|
HIT-SCIR-xuanxuan/OpenKS
|
a7f2ce0890822113322aad22e98d6c961e63caef
|
[
"Apache-2.0"
] | null | null | null |
openks/models/model.py
|
HIT-SCIR-xuanxuan/OpenKS
|
a7f2ce0890822113322aad22e98d6c961e63caef
|
[
"Apache-2.0"
] | null | null | null |
openks/models/model.py
|
HIT-SCIR-xuanxuan/OpenKS
|
a7f2ce0890822113322aad22e98d6c961e63caef
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 OpenKS Authors, DCD Research Lab, Zhejiang University.
# All Rights Reserved.
"""
An abstract class for openks models to be trained with Paddle
"""
import logging
from typing import Tuple, List, Any
import torch
import torch.nn as nn
from torch.utils import data
import paddle.fluid as fluid
from paddle.fluid import Variable
from ..common.register import Register
from ..abstract.mtg import MTG
from ..abstract.mmd import MMD
logger = logging.getLogger(__name__)
class PaddleModel(Register):
def __init__(self, **kwargs):
self.forward()
def forward(self, *args):
return NotImplemented
def train_forward(self, *args):
return NotImplemented
def test_forward(self, *args):
return NotImplemented
def backward(self, loss, opt):
return NotImplemented
def loss(self, *args):
return NotImplemented
@staticmethod
def _algorithm(*args):
return NotImplemented
class TorchModel(nn.Module, Register):
def __init__(self, **kwargs):
super(TorchModel, self).__init__()
def forward(self, *args):
return NotImplemented
def loss(self, *args):
return NotImplemented
def predict(self, *args):
return NotImplemented
def _algorithm(self, *args):
return NotImplemented
# getter and setter for Ray distributed training
def get_weights(self):
return {k: v.cpu() for k, v in self.state_dict().items()}
def set_weights(self, weights):
self.load_state_dict(weights)
def get_gradients(self):
grads = []
for p in self.parameters():
grad = None if p.grad is None else p.grad.data.cpu().numpy()
grads.append(grad)
return grads
def set_gradients(self, gradients):
for g, p in zip(gradients, self.parameters()):
if g is not None:
p.grad = torch.from_numpy(g)
class KGC1TorchModel(nn.Module, Register):
def __init__(self, **kwargs):
super(KGC1TorchModel, self).__init__()
class KGC2TorchModel(nn.Module, Register):
def __init__(self, **kwargs):
super(KGC2TorchModel, self).__init__()
class TorchDataset(data.Dataset):
def __init__(self, samples):
self.samples = samples
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
item = self.samples[index]
return item
class TFModel(Register):
def __init__(self, **kwargs):
return NotImplemented
class MLModel(Register):
def __init__(self, **kwargs):
self.process()
def process(self, *args):
return NotImplemented
class OpenKSModel(Register):
def __init__(self):
pass
class KGLearnModel(OpenKSModel):
''' Base class for knowledge graph representation learning trainer '''
def __init__(self, name: str = 'model-name', graph: MTG = None, args: List = None):
self.name = name
self.graph = graph
def parse_args(self):
return NotImplemented
def triples_reader(self, *args):
return NotImplemented
def triples_generator(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class KELearnModel(OpenKSModel):
''' Base class for knowledge graph building trainer, such as text and image information extraction '''
def __init__(self, name: str = 'model-name', dataset: MMD = None, args: List = None):
self.name = name
self.dataset = dataset
def parse_args(self):
return NotImplemented
def data_reader(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class RecModel(OpenKSModel):
    ''' Base class for recommendation model trainer '''
def __init__(self, name: str = 'model-name', dataset: MMD = None, args: List = None):
self.name = name
self.dataset = dataset
def parse_args(self):
return NotImplemented
def data_reader(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class KGC1LearnModel(OpenKSModel):
''' Base class for knowledge graph compensation learning trainer '''
def __init__(self, name: str = 'model-name', graph: MTG = None, args: List = None):
self.name = name
self.graph = graph
def parse_args(self):
return NotImplemented
def triples_reader(self, *args):
return NotImplemented
def triples_generator(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class KGC2LearnModel(OpenKSModel):
''' Base class for knowledge graph compensation learning trainer '''
def __init__(self, name: str = 'model-name', graph: MTG = None, args: List = None):
self.name = name
self.graph = graph
def parse_args(self):
return NotImplemented
def triples_reader(self, *args):
return NotImplemented
def triples_generator(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class NerModel(OpenKSModel):
''' Base class for ner trainer '''
def __init__(self, name: str = 'model-name', args: List = None):
self.name = name
def data_reader(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class Relation_ExtractionModel(OpenKSModel):
''' Base class for relation extraction trainer '''
def __init__(self, name: str = 'model-name', args: List = None):
self.name = name
def data_reader(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def load_model(self, *args):
return NotImplemented
def save_model(self, *args):
return NotImplemented
def run(self, *args):
return NotImplemented
class HypernymDiscoveryModel(OpenKSModel):
def __init__(self):
super().__init__()
def data_reader(self, *args):
return NotImplemented
def evaluate(self, *args):
return NotImplemented
def run(self):
return NotImplemented
class HypernymExtractModel(OpenKSModel):
def __init__(self):
super().__init__()
def entity2hyper_lst(self, *args):
return NotImplemented
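# --- Editor's sketch (not part of OpenKS) ---
# How a concrete model could use the TorchModel weight/gradient helpers above
# for Ray-style parameter exchange. LinearRegressor is a hypothetical subclass
# written only for illustration; torch and nn are already imported at the top
# of this module.
if __name__ == "__main__":
    class LinearRegressor(TorchModel):
        def __init__(self):
            super(LinearRegressor, self).__init__()
            self.fc = nn.Linear(4, 1)
        def forward(self, x):
            return self.fc(x)
    model = LinearRegressor()
    model(torch.randn(8, 4)).sum().backward()   # populate .grad on the parameters
    grads = model.get_gradients()               # list of CPU numpy arrays (or None)
    model.set_gradients(grads)                  # e.g. after averaging across workers
    model.set_weights(model.get_weights())      # round-trip the CPU state_dict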
| 21.10625 | 103 | 0.726384 |
c076b909cf10b399de405e13dc09731809c94902
| 22 |
py
|
Python
|
tests/__init__.py
|
JayveeHe/senz.app.staticinfo.inferrence
|
98748506adcbb28b074337e261fe79b2141f31a5
|
[
"MIT"
] | 1 |
2015-12-16T02:33:08.000Z
|
2015-12-16T02:33:08.000Z
|
config/__init__.py
|
JayveeHe/JiabeiGongfang
|
87de2b1912bfbb643e81020c29c34f9f0aba44cd
|
[
"MIT"
] | null | null | null |
config/__init__.py
|
JayveeHe/JiabeiGongfang
|
87de2b1912bfbb643e81020c29c34f9f0aba44cd
|
[
"MIT"
] | null | null | null |
__author__ = 'Jayvee'
| 11 | 21 | 0.727273 |
87265d851ddd082585439286dad501f5b73d0ce8
| 1,225 |
py
|
Python
|
protgraph/export/csv.py
|
Luxxii/ProtGraph
|
68c3b362e2b487403fa02cfc25d456a272004a1f
|
[
"BSD-2-Clause"
] | null | null | null |
protgraph/export/csv.py
|
Luxxii/ProtGraph
|
68c3b362e2b487403fa02cfc25d456a272004a1f
|
[
"BSD-2-Clause"
] | null | null | null |
protgraph/export/csv.py
|
Luxxii/ProtGraph
|
68c3b362e2b487403fa02cfc25d456a272004a1f
|
[
"BSD-2-Clause"
] | null | null | null |
import csv
from protgraph.export.generic_file_exporter import GenericFileExporter
class CSV(GenericFileExporter):
""" A simple CSV exporter. This export is compatible with Gephi."""
def __init__(self):
super(CSV, self).__init__(
self._lambda_export
)
def _lambda_export(self, pg, path):
self.write_nodes_data(path + "_nodes.csv", pg)
self.write_edges_data(path + "_edges.csv", pg)
def write_nodes_data(self, out_file, graph):
with open(out_file, "w") as csvfile:
writer = csv.writer(csvfile)
# Write Header
header = ["Id", *graph.vs.attributes()]
writer.writerow(header)
# Write "Body"
for n in graph.vs:
writer.writerow([n.index, *n.attributes().values()])
def write_edges_data(self, out_file, graph):
with open(out_file, "w") as csvfile:
writer = csv.writer(csvfile)
# Write Header
header = ["Source", "Target", *graph.es.attributes()]
writer.writerow(header)
# Write "Body"
for e in graph.es:
writer.writerow([e.source, e.target, *e.attributes().values()])
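# --- Editor's sketch (not part of ProtGraph) ---
# Exercising the two writers above directly on a small python-igraph graph;
# python-igraph is assumed to be installed and the attribute name is made up.
# Inside ProtGraph the exporter is normally driven through GenericFileExporter.
if __name__ == "__main__":
    import igraph
    g = igraph.Graph(directed=True)
    g.add_vertices(3)
    g.vs["aminoacid"] = ["M", "K", "R"]       # hypothetical vertex attribute
    g.add_edges([(0, 1), (1, 2)])
    exporter = CSV()
    exporter.write_nodes_data("example_nodes.csv", g)   # Id + vertex attributes
    exporter.write_edges_data("example_edges.csv", g)   # Source, Target + edge attributes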
| 31.410256 | 79 | 0.58449 |
e6b80e40134da7570770d290a0ebe39d4af84633
| 42,535 |
py
|
Python
|
platform/hwconf_data/mgm13/MGM13_srcgen.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | null | null | null |
platform/hwconf_data/mgm13/MGM13_srcgen.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1 |
2020-08-25T02:36:22.000Z
|
2020-08-25T02:36:22.000Z
|
platform/hwconf_data/mgm13/MGM13_srcgen.py
|
lenloe1/v2.7
|
9ac9c4a7bb37987af382c80647f42d84db5f2e1d
|
[
"Zlib"
] | 1 |
2020-08-25T01:56:04.000Z
|
2020-08-25T01:56:04.000Z
|
#!/usr/bin/env python3
## Make sure that our path has just top level of ddml
#@PydevCodeAnalysisIgnore
import sys
import os
import glob
import copy
# get module mapping
from mgm13.modules.PIN.PIN_Class import *
import mgm13.PythonSnippet.ExporterModel as ExporterModel
import mgm13.PythonSnippet.Metadata as Metadata
import mgm13.halconfig.halconfig_dependency as dep
from mgm13.PythonSnippet.CodeGen import *
from mgm13.PythonSnippet.StudioProject import *
import mgm13.modules.ACMP0.ACMP_behavior as ACMP_behavior
import mgm13.modules.ACMP1.ACMP_behavior as ACMP_behavior
import mgm13.modules.ADC0.ADC_behavior as ADC_behavior
import mgm13.modules.ANTDIV.ANTDIV_behavior as ANTDIV_behavior
import mgm13.modules.BATTERYMON.BATTERYMON_behavior as BATTERYMON_behavior
import mgm13.modules.BTL_BUTTON.BTL_BUTTON_behavior as BTL_BUTTON_behavior
import mgm13.modules.BULBPWM.BULBPWM_behavior as BULBPWM_behavior
import mgm13.modules.BULBPWM_COLOR.BULBPWM_COLOR_behavior as BULBPWM_COLOR_behavior
import mgm13.modules.BUTTON.BUTTON_behavior as BUTTON_behavior
import mgm13.modules.CMU.CMU_behavior as CMU_behavior
import mgm13.modules.COEX.COEX_behavior as COEX_behavior
import mgm13.modules.CS5463.CS5463_behavior as CS5463_behavior
import mgm13.modules.CSEN.CSEN_behavior as CSEN_behavior
import mgm13.modules.DCDC.DCDC_behavior as DCDC_behavior
import mgm13.modules.EMU.EMU_behavior as EMU_behavior
import mgm13.modules.EXTFLASH.EXTFLASH_behavior as EXTFLASH_behavior
import mgm13.modules.EZRADIOPRO.EZRADIOPRO_behavior as EZRADIOPRO_behavior
import mgm13.modules.FEM.FEM_behavior as FEM_behavior
import mgm13.modules.GPIO.GPIO_behavior as GPIO_behavior
import mgm13.modules.I2C0.I2C_behavior as I2C_behavior
import mgm13.modules.I2C1.I2C_behavior as I2C_behavior
import mgm13.modules.I2CSENSOR.I2CSENSOR_behavior as I2CSENSOR_behavior
import mgm13.modules.IDAC0.IDAC_behavior as IDAC_behavior
import mgm13.modules.IOEXP.IOEXP_behavior as IOEXP_behavior
import mgm13.modules.LED.LED_behavior as LED_behavior
import mgm13.modules.LEUART0.LEUART_behavior as LEUART_behavior
import mgm13.modules.MODEM.MODEM_behavior as MODEM_behavior
import mgm13.modules.PA.PA_behavior as PA_behavior
import mgm13.modules.PRS.PRS_behavior as PRS_behavior
import mgm13.modules.PTI.PTI_behavior as PTI_behavior
import mgm13.modules.PYD1698.PYD1698_behavior as PYD1698_behavior
import mgm13.modules.SERIAL.SERIAL_behavior as SERIAL_behavior
import mgm13.modules.SPIDISPLAY.SPIDISPLAY_behavior as SPIDISPLAY_behavior
import mgm13.modules.SPINCP.SPINCP_behavior as SPINCP_behavior
import mgm13.modules.TIMER0.TIMER_behavior as TIMER_behavior
import mgm13.modules.TIMER1.TIMER_behavior as TIMER_behavior
import mgm13.modules.UARTNCP.UARTNCP_behavior as UARTNCP_behavior
import mgm13.modules.USART0.USART_behavior as USART_behavior
import mgm13.modules.USART1.USART_behavior as USART_behavior
import mgm13.modules.USART2.USART_behavior as USART_behavior
import mgm13.modules.VCOM.VCOM_behavior as VCOM_behavior
import mgm13.modules.VDAC0.VDAC_behavior as VDAC_behavior
import mgm13.modules.VUART.VUART_behavior as VUART_behavior
import mgm13.modules.WDOG.WDOG_behavior as WDOG_behavior
import mgm13.modules.WTIMER0.WTIMER_behavior as WTIMER_behavior
def generate(context):
"""
Generates the relevant defines for hwconf setup to hal-config/hal-config.h
"""
root = initSession(context)
# Get output directory from config or fall back to 'hal-config' directory
output_dir = getVariable('output_dir')
if not output_dir:
output_dir = 'hal-config'
# Get output filename from config or fall back to 'hal-config.h'
output_file = getVariable('output_file')
if not output_file:
output_file = 'hal-config.h'
hal_conf_dir = newDirectory(root, output_dir)
hal_conf_h = newFile(hal_conf_dir, output_file)
define_guard = output_file.upper().replace('-','_').replace('.','_')
# Generate header and start of cloaking
text = ""
text += "#ifndef {}\n".format(define_guard)
text += "#define {}\n".format(define_guard)
text += "\n"
text += "#include \"em_device.h\"\n"
text += "#include \"hal-config-types.h\"\n"
text += "\n"
text += "// This file is auto-generated by Hardware Configurator in Simplicity Studio.\n"
text += "// Any content between $[ and ]$ will be replaced whenever the file is regenerated.\n"
text += "// Content outside these regions will be preserved.\n"
# Push location for eval'd text
pushContext(location=hal_conf_h)
newContribution(hal_conf_h, text)
# Initializing lists
define_list = []
region_list = []
module_list = []
# Generate regions in file
for transition in modeTransitions():
# Get region names from transition.modules
for module in transition.modules:
module_type = module.obj.object.getComponentId().split(".")[2]
if not (module_type == "port" or module_type == "pin"):
region_list.append(module.name)
# Generate regions alphabetically in file
region_list.sort()
for region in region_list:
if not getLocation(region):
startRegion(region)
endRegion()
# Adding portio defines to define_list
portio = transition.getModule("PORTIO").obj.object
routes = add_portio_defines(portio, define_list)
available_modules = Metadata.get_available_modules_for_family()
familyobj = dep.Family('mgm13')
# Generate a module list from '_behaviour' files
mod_inst = ACMP_behavior.ACMP('ACMP0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = ACMP_behavior.ACMP('ACMP1')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = ADC_behavior.ADC('ADC0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = ANTDIV_behavior.ANTDIV('ANTDIV')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = BATTERYMON_behavior.BATTERYMON('BATTERYMON')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = BTL_BUTTON_behavior.BTL_BUTTON('BTL_BUTTON')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = BULBPWM_behavior.BULBPWM('BULBPWM')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = BULBPWM_COLOR_behavior.BULBPWM_COLOR('BULBPWM_COLOR')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = BUTTON_behavior.BUTTON('BUTTON')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = CMU_behavior.CMU('CMU')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = COEX_behavior.COEX('COEX')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = CS5463_behavior.CS5463('CS5463')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = CSEN_behavior.CSEN('CSEN')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = DCDC_behavior.DCDC('DCDC')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = EMU_behavior.EMU('EMU')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = EXTFLASH_behavior.EXTFLASH('EXTFLASH')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = EZRADIOPRO_behavior.EZRADIOPRO('EZRADIOPRO')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = FEM_behavior.FEM('FEM')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = GPIO_behavior.GPIO('GPIO')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = I2C_behavior.I2C('I2C0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = I2C_behavior.I2C('I2C1')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = I2CSENSOR_behavior.I2CSENSOR('I2CSENSOR')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = IDAC_behavior.IDAC('IDAC0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = IOEXP_behavior.IOEXP('IOEXP')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = LED_behavior.LED('LED')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = LEUART_behavior.LEUART('LEUART0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = MODEM_behavior.MODEM('MODEM')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = PA_behavior.PA('PA')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = PRS_behavior.PRS('PRS')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = PTI_behavior.PTI('PTI')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = PYD1698_behavior.PYD1698('PYD1698')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = SERIAL_behavior.SERIAL('SERIAL')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = SPIDISPLAY_behavior.SPIDISPLAY('SPIDISPLAY')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = SPINCP_behavior.SPINCP('SPINCP')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = TIMER_behavior.TIMER('TIMER0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = TIMER_behavior.TIMER('TIMER1')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = UARTNCP_behavior.UARTNCP('UARTNCP')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = USART_behavior.USART('USART0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = USART_behavior.USART('USART1')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = USART_behavior.USART('USART2')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = VCOM_behavior.VCOM('VCOM')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = VDAC_behavior.VDAC('VDAC0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = VUART_behavior.VUART('VUART')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = WDOG_behavior.WDOG('WDOG')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
mod_inst = WTIMER_behavior.WTIMER('WTIMER0')
mod_inst.load_halconfig_model(available_modules, familyobj)
module_list.append(mod_inst)
# Adding peripheral defines to list
add_peripheral_defines(module_list, transition, define_list, routes)
# Printing the list to hal-config.h
print_defines_from_list(define_list)
# Add module include
text = ""
text += "#if defined(_SILICON_LABS_MODULE)\n"
text += "#include \"sl_module.h\"\n"
text += "#endif\n"
text += "\n"
newContribution(hal_conf_h, text)
# Ending the #ifndef
newContribution(hal_conf_h, "#endif /* {} */".format(define_guard))
popContext()
# Sync project modules
try:
framework = detect_framework(context)
refresh_studio_modules(framework, module_list)
except:
print("project module sync failed")
def add_portio_defines(portio, define_list):
"""
Adding defines for the active routes found in portio to define_list
:type portio: Studio._private.StudioClasses.com_silabs_ide_halconf_core_client_accessors_IPortIOAccessor
:param define_list: list of dictionaries containing the relevant info for #defines
"""
routes = {}
common_routes = []
# Iterating through active routes, adding them to defineList
for route in activePinRoutes(portio):
mod_label = route.getLabel()
pin_name = route.getPinName()
location = route.location.getLocationNumber() if route.selector.locationPropertyReference else None
mod_name = route.getModuleName()
if len(route.selector.routes) > 1:
# Multiple routes on this selector
if mod_name == 'GPIO':
# DBG -> DBGROUTE, ETM -> ETMROUTE, etc
common_route = "{}ROUTE".format(route.selector.getName())
elif route.selector.getName().endswith('LOC'):
# ETH_RMIILOC -> RMII, etc
common_route = route.selector.getName().split('_')[1][:-3]
elif mod_name == 'PRS':
common_route = 'PRSROUTE'
else:
common_route = 'ROUTE'
else:
common_route = None
# If route is connected to the APORT generate a different location to match data sheet
aport_location = None
if route.getAportName():
aport_name = route.getAportName()
mod_label = mod_label.rsplit("_", 1)[0]
aport_location = location
location = generate_aport_loc_define(aport_name, mod_name, mod_label, location)
routes[mod_label] = {
"module": mod_name,
"pin_name": pin_name,
"common_route": common_route,
"location": location,
}
if aport_location is not None:
routes[mod_label]['aport_location'] = aport_location
# Append define dict to define_list
def_label = "{}_{}".format("PORTIO", mod_label)
def_common_label = "{}_{}_{}".format("PORTIO", mod_name, common_route)
define_list.append({"region": mod_name, "label": def_label, "pin_name": pin_name})
if location is not None:
if common_route:
if def_common_label not in common_routes:
# Not seen this common route before
define_list.append({"region": mod_name, "label": def_common_label, "loc": location})
common_routes.append(def_common_label)
else:
define_list.append({"region": mod_name, "label": def_label, "loc": location})
define_list.append({"region": mod_name, "label": "", "def_value": "newline"})
# Looking at all pins, searching for custom pin names
for port_bank in portio.getPortBanks():
for pin in port_bank.getPins():
# If pin has custom name generate #defines for this name also
if pin.ref(PORT_PINS.ports_settings_custompinname.S).get():
custom_pin_name = pin.ref(PORT_PINS.ports_settings_custompinname.S).get()
if pin.ref(PORT_PINS.ports_settings_custompinname.S).isReadOnly():
continue
def_label = custom_pin_name
define_list.append({"region": "Custom pin names", "label": def_label, "pin_name": pin.getName()})
define_list.append({"region": "Custom pin names", "label": "", "def_value": "newline"})
return routes
def generate_peripheral(exp_module,
studio_module,
define_list,
routes,
mode,
def_region,
output_prefix,
custom_name = None,
prevent_override = False):
peripheral_pins = []
peripheral_props = []
peripheral_custom_props = []
inherited_props = []
override_props = []
inherit_prop_list = []
# This peripheral has overridden configuration options (e.g. internal SPI flash)
overrides = {}
if hasattr(exp_module, "generate_define_override"):
for prop in exp_module.generate_define_override(studio_module):
overrides[prop['label']] = prop
override_props.append(prop)
common_routes = []
show_advanced = studio_module.ref(exp_module.get_property('showadvanced').id).get() == '1'
for prop in exp_module.get_properties():
if prop.mode:
# Make sure property exists in the active mode
if isinstance(prop.mode, list):
if mode not in prop.mode:
continue
elif prop.mode != mode:
continue
# Only create #defines from uppercase properties
if not prop.name.isupper():
continue
# Only generate hidden properties if they should be generated when hidden
if not prop.generate_if_hidden and studio_module.ref(prop.id).isHidden():
continue
# Only print advanced properties if we are in advanced mode
if not show_advanced and prop.is_advanced:
continue
def_label = prop.define_name.replace(prop.parent.namespace, output_prefix)
if custom_name:
custom_label = prop.define_name.replace(prop.parent.namespace, custom_name).replace('HAL_','').replace('BSP_','')
prop_value = studio_module.ref(prop.id).get()
# Filter out disabled/none values
if prop_value == "Disabled" or prop_value == "None":
continue
if def_label in overrides:
# Don't generate native defines for properties that will be overridden
continue
if prevent_override:
# Prevent this define generation from overriding an existing one
already_defined = False
for define in define_list:
if define['region'] == def_region and define['label'] == def_label:
already_defined = True
if already_defined:
continue
# If property is a pin
if isinstance(prop, ExporterModel.PinProperty):
if not prop.mode and studio_module.ref(prop.id).isHidden():
# Don't generate defines for hidden pin properties unless they are in the active mode
continue
pin_name = prop_value
pin_obj = {"region": def_region, "label": def_label, "pin_name": pin_name}
common_route_obj = None
# If prop has a referenced route to portio
custom_route = None
if prop.referenced_route:
route = routes.get("{}_{}".format(prop.referenced_module, prop.referenced_route), None)
if route:
if route['common_route']:
if route['common_route'] not in common_routes:
common_routes.append(route['common_route'])
def_label = "{}_{}".format(def_label.rsplit('_', 1)[0], route['common_route'])
common_route_obj = {"region": def_region, "label": def_label, "loc": route['location']}
else:
pin_obj['loc'] = route['location']
else:
print("Error: referenced route, but no route found for " + prop.name)
peripheral_pins.append(pin_obj)
if common_route_obj:
peripheral_pins.append(common_route_obj)
# If pin has em4 attribute True
if prop.em4:
peripheral_pins.append({"region": def_region, "label": def_label + "_EM4WU", "def_value": Metadata.em4_pin_to_loc(pin_name)["define"]})
# If peripheral has a non-empty custom name
if custom_name:
custom_pin_obj = copy.deepcopy(pin_obj)
custom_pin_obj['label'] = custom_label
peripheral_pins.append(custom_pin_obj)
if common_route_obj:
custom_route_obj = copy.deepcopy(common_route_obj)
custom_route_obj['label'] = custom_route_obj['label'].replace(output_prefix, custom_name).replace('HAL_','').replace('BSP_','')
peripheral_pins.append(custom_route_obj)
if prop.em4:
peripheral_pins.append({"region": def_region, "label": custom_label + "_EM4WU", "def_value": Metadata.em4_pin_to_loc(pin_name)["define"]})
peripheral_pins.append({"region": def_region, "label": "", "def_value":"newline"})
elif isinstance(prop, ExporterModel.AportBusProperty):
def_value = prop_value
for value in prop.values.values():
if value.value == studio_module.ref(prop.id).get():
def_value = value.define_value
if def_value.startswith("APORT"):
route = routes.get("{}_{}".format(prop.parent.name, prop.signal), None)
if not route:
# APORT selected but no channel output enabled. Skip this property
continue
ch = "CH{}".format(route['aport_location'])
def_value = def_value + ch
# Handle prefix
if hasattr(prop, "define_value_prefix"):
def_value = prop.define_value_prefix + def_value
# Putting the define strings into define_list
peripheral_props.append({"region": def_region, "label": def_label, "def_value": str(def_value),
"is_array": False, "is_str_prop": False})
if custom_name:
peripheral_custom_props.append({"region": def_region, "label": custom_label, "def_value": str(def_value),
"is_array": False, "is_str_prop": False})
else:
def_value = prop_value
if isinstance(prop, ExporterModel.IntegerProperty):
if prop.format:
# Format integer
def_value = prop.format.format(def_value)
else:
# Handle integer properties that should have literal suffixes
if prop.min >= 0:
def_value = str(def_value) + 'U'
if prop.max >= 65535:
def_value = str(def_value) + 'L'
if isinstance(prop, ExporterModel.EnumProperty):
for value in prop.values.values():
if value.value == studio_module.ref(prop.id).get():
def_value = value.define_value
# Handle prefix
if hasattr(prop, "define_value_prefix"):
def_value = prop.define_value_prefix + str(def_value)
# Handle array properties that should print {} rather than ()
if isinstance(prop, ExporterModel.ArrayProperty):
is_array = True
else:
is_array = False
# Handle string properties that should not print ()
if isinstance(prop, ExporterModel.StringProperty):
is_str_prop = True
else:
is_str_prop = False
# Putting the define strings into define_list
peripheral_props.append({"region": def_region, "label": def_label, "def_value": str(def_value),
"is_array": is_array, "is_str_prop": is_str_prop})
if custom_name:
peripheral_custom_props.append({"region": def_region, "label": custom_label, "def_value": str(def_value),
"is_array": is_array, "is_str_prop": is_str_prop})
if hasattr(prop, "inherit_options"):
if prop.inherit_options:
inherit_prop_list.append((prop, studio_module.ref(prop.id).get()))
define_list += peripheral_pins
define_list += peripheral_props
define_list += peripheral_custom_props
define_list += override_props
return inherit_prop_list
def add_peripheral_defines(exp_module_list, transition, define_list, routes):
"""
Adding defines specified in the peripheral view to define_list
:param exp_module_list: list of ExporterModel Modules
:param transition: chip mode change from reset to DefaultMode only (EFM32)
:param define_list: list of dictionaries with information about defines to be generated
:param routes: Dict containing all active pin routes with pin <-> location mapping
:return: None
"""
modules = {}
inherit = []
enabledModules = []
# Iterating through peripheral modules
for exp_module in exp_module_list:
# Check if module is enabled in peripheral view
for mod in transition.modules:
if exp_module.name == mod.name:
if mod.isEnabled():
enabledModules.append(str(mod.name))
modules[mod.name] = (exp_module, mod)
properties = exp_module.get_properties()
if mod.name == "PRS":
add_prs_defines(mod, properties, define_list, routes)
continue
# Generate enable define if property wasn't force enabled
if exp_module.enabled_define and mod.ref(exp_module.get_property('forceenable').id).get() != '1':
define_list.append({"region": mod.name, "label": exp_module.enabled_define.replace(exp_module.namespace, mod.name), "def_value": "1"})
define_list.append({"region": mod.name, "label": "", "def_value":"newline"})
# Checking if ModeProperty instance in properties
mode = None
for prop in properties:
if isinstance(prop, ExporterModel.ModeProperty):
mode = mod.ref(prop.id).get()
# Checking if we have a custom name
custom_name = None
for prop in properties:
if exp_module.has_custom_name and 'customname.STRING' in prop.id:
custom_name = mod.ref(prop.id).get()
inherit += generate_peripheral(exp_module,
mod,
define_list,
routes,
mode,
mod.name,
mod.name,
custom_name=custom_name)
# studio module found, jumping to next exp_module
break
print("Enabled HWCONF modules:" + str(enabledModules))
# Iterate through properties that inherit properties from other modules
for prop, inherit_from in inherit:
module = prop.parent.name
if hasattr(prop, 'owned_mode'):
# We are owning the other module in a specific mode
mode = prop.owned_mode
else:
mode = None
output_name = module + prop.define_name_postfix if hasattr(prop, "define_name_postfix") else module
# Generate defines from owned module into this module, using the correct mode
generate_peripheral(modules[inherit_from][0],
modules[inherit_from][1],
define_list,
routes,
mode,
module,
output_name,
prevent_override=True)
def add_prs_defines(prs_mod, properties, define_list, routes):
def_region = prs_mod.name
prs_properties = {}
# Iterating through properties
for prop in properties:
if prop.name.isupper() or "custom_name" in prop.name:
def_label = prop.name
def_value = prs_mod.ref(prop.id).get()
# Filtering out disabled/none/empty values
if def_value == "Disabled" or def_value == "None" or def_value == "":
continue
# Only print the visible properties
if prs_mod.ref(prop.id).isHidden():
continue
# If property is a pin
is_pin = False
if isinstance(prop, ExporterModel.PinProperty):
is_pin = True
route = routes.get("{}_{}".format(prop.referenced_module, prop.referenced_route), None)
# Checking if enum values have define_names - this can really be removed from prs
if isinstance(prop, ExporterModel.EnumProperty):
for value in prop.values.values():
if value.value == prs_mod.ref(prop.id).get():
def_value = value.define_value
# Determining PRS channel current property is valid for
chan = None
if "CH" in def_label:
chan = def_label.split("CH")[1]
for i in range(len(chan)):
if not chan[i].isdigit():
chan = chan[:i]
break
# Updating property dict
if chan not in prs_properties.keys():
prs_properties[chan] = {"custom_name": "", "source": "", "signal": "", "pin": ""}
if "_SIGNAL_" in def_label:
prs_properties[chan]["signal"] = def_value
elif "_SOURCE" in def_label:
prs_properties[chan]["source"] = def_value
elif "custom_name" in def_label:
prs_properties[chan]["custom_name"] = def_value
elif is_pin:
prs_properties[chan]["pin"] = def_value
prs_properties[chan]["route"] = route
seen_common_route = False
prs_define_prefix = 'PRS'
for chan, options in prs_properties.items():
if options["source"] and options["signal"]:
define_list.append({"region": def_region, "label": "BSP_PRS_CH{}_SOURCE".format(chan), "def_value": "{}_CH_CTRL_SOURCESEL_{}".format(prs_define_prefix, options["source"])})
define_list.append({"region": def_region, "label": "BSP_PRS_CH{}_SIGNAL".format(chan), "def_value": "{}_CH_CTRL_SIGSEL_{}".format(prs_define_prefix, options["signal"].replace("_", ""))})
define_list.append({"region": def_region, "label": "BSP_PRS_CH{}_SOURCESIGNAL".format(chan),"def_value": "{}_{}".format(prs_define_prefix, options["signal"])})
if options["pin"]:
if options["route"]["common_route"]:
define_list.append({"region": def_region, "label": "BSP_PRS_CH{}".format(chan), "pin_name": options["pin"]})
if not seen_common_route:
define_list.append({"region": def_region, "label": "BSP_PRS_{}".format(options["route"]["common_route"]), "loc": options["route"]["location"]})
seen_common_route = True
else:
define_list.append({"region": def_region, "label": "BSP_PRS_CH{}".format(chan), "pin_name": options["pin"], "loc": options["route"]["location"]})
if options["custom_name"]:
if (options["source"] and options["signal"]) or options["pin"]:
define_list.append({"region": def_region, "label": "", "def_value": "newline"})
if options["source"] and options["signal"]:
define_list.append({"region": def_region, "label": "BSP_PRS_{}_CHANNEL".format(options["custom_name"]), "def_value": chan})
define_list.append({"region": def_region, "label": "BSP_PRS_{}_SOURCE".format(options["custom_name"]), "def_value": "{}_CH_CTRL_SOURCESEL_{}".format(prs_define_prefix, options["source"])})
define_list.append({"region": def_region, "label": "BSP_PRS_{}_SIGNAL".format(options["custom_name"]),"def_value": "{}_CH_CTRL_SIGSEL_{}".format(prs_define_prefix, options["signal"].replace("_", ""))})
define_list.append({"region": def_region, "label": "BSP_PRS_{}_SOURCESIGNAL".format(options["custom_name"]),"def_value": "{}_{}".format(prs_define_prefix, options["signal"])})
if options["pin"]:
if options["route"]["common_route"]:
define_list.append({"region": def_region, "label": "BSP_PRS_{}".format(options["custom_name"]), "pin_name": options["pin"]})
else:
define_list.append({"region": def_region, "label": "BSP_PRS_{}".format(options["custom_name"]), "pin_name": options["pin"], "loc": options["route"]["location"]})
if (options["source"] and options["signal"]) or options["pin"]:
define_list.append({"region": def_region, "label": "", "def_value": "newline"})
def generate_aport_loc_define(aport_name, mod_name, mod_label, location):
"""
Generates pin locations for APORT connected routes
:type aport_name: str
:param aport_name: the routes APORT name
:type mod_name: str
:param mod_name: module name
:type mod_label: str
:param mod_label: module label
:type location: int
:param location: the routes pin location
:return: The generated location
"""
# rename aport names: AN -> 1Y
renamed_aport_name = ''
letter = ''
num = aport_name[len(aport_name) - 2]
if "IDAC" in mod_name:
num = "1"
if len(aport_name) > 2:
num = "0"
if aport_name.endswith("N"):
letter = "Y"
elif aport_name.endswith("P"):
letter = "X"
num = re.sub("A", "1", num)
num = re.sub("B", "2", num)
num = re.sub("C", "3", num)
num = re.sub("D", "4", num)
renamed_aport_name += str(num) + letter
# generate defines
aport_location = "_{}_".format(mod_name.rstrip('0123456789'))
if "OPA" in mod_label:
if "OUT" in mod_label:
aport_location += "OPA_OUT_APORTOUTSEL_"
else:
aport_location += "OPA_MUX_"
elif "ACMP" in mod_label:
aport_location += "INPUTSEL_"
elif "ADC" in mod_label:
aport_location += "SINGLECTRL_"
elif "IDAC" in mod_label:
aport_location += "CTRL_APORTOUTSEL_"
elif "CSEN" in mod_label:
aport_location += "SINGLECTRL_SINGLESEL_"
if "NEG" in mod_label:
aport_location += "NEGSEL_"
elif "POS" in mod_label:
aport_location += "POSSEL_"
aport_location += "APORT{}CH{}".format(renamed_aport_name, location)
return aport_location
def print_defines_from_list(def_list):
"""
Creates correct #define structures from a def_list
:param def_list: A list containing dictionaries of defines. Each dictionary contains a region and a label,
and one or more of the following: pin_name, location, def_value.
:return: None
"""
# Ensures only one of each define is generated
index = 0
while index < len(def_list):
if "def_value" in def_list[index]:
if def_list[index]["def_value"] == "newline":
index += 1
continue
if def_list.count(def_list[index]) > 1:
print("Duplicate in define list removed: " + def_list[index]["label"])
def_list.remove(def_list[index])
index -= 1
index += 1
# Find the longest label length, used to evenly space the #defines
longest_label_length = 0
for i in range(len(def_list)):
if len(def_list[i]["label"]) > longest_label_length:
longest_label_length = len(def_list[i]["label"])
skip_prefixes = getVariable('skip_prefix')
if not skip_prefixes:
skip_prefixes = []
# Print the #defines
for define in def_list:
region = define["region"]
label = define["label"]
is_array = define.get("is_array", False)
is_str_prop = define.get("is_str_prop", False)
# Skip defines with given prefixes
skip = False
for skip_prefix in skip_prefixes:
if label.startswith(skip_prefix):
skip = True
if skip:
print("skipped {}".format(label))
continue
# Print different types of #defines depending on the type
if "pin_name" in define:
pin_name = define["pin_name"]
pin_num = str(int(pin_name[2:], 10)) + 'U'
port = "gpioPort" + pin_name[1]
print_define(region, label + "_PIN", pin_num, longest_label_length, is_array, is_str_prop)
print_define(region, label + "_PORT", port, longest_label_length, is_array, is_str_prop)
if "loc" in define:
loc = define["loc"]
try:
int(loc)
loc = str(loc) + 'U'
except ValueError:
pass
print_define(region, label + "_LOC", loc, longest_label_length, is_array, is_str_prop)
if "def_value" in define:
def_val = define["def_value"]
if def_val == "newline":
newContribution(getLocation(region), label)
else:
print_define(region, label, def_val, longest_label_length, is_array, is_str_prop)
def print_define(region, label, define_value, longest_label_length, is_array=False, is_str_prop=False):
"""
Prints a define in a given region, if the region does not exist it is created
:param region: Name of region for define to go in
:param label: name of the define
:param define_value: value to be define
:param longest_label_length: length of longest label, used to make even spacing
:param is_array: bool value indicating if the define should be an array
:param is_str_prop: bool value indicating if the define originates from a StringProperty
:return: None
"""
if not getLocation(region):
startRegion(region)
endRegion()
# aligning the values. adding 10 to get some distance
spaces = longest_label_length - len(label) + 10
if is_array:
define_value = "{{ {} }}".format(define_value)
elif is_str_prop:
define_value = " {}".format(define_value)
else:
define_value = "({})".format(define_value)
newContribution(getLocation(region), "#define {}{}{}".format(label, " " * spaces, define_value))
def detect_framework(context):
# First attempt to get framework from global context
protocol_names = getVariable("protocolNames")
if protocol_names:
return protocol_names
# Then attempt to get framework from project info
proj = getProject()
if proj:
proj_info = getProjectInfo(proj)
protocol_names = proj_info.get("generalProj.esf.protocolNames")
if protocol_names:
return protocol_names
# Finally attempt to get framework from esf-specific context
protocol_names = getVariable("generalProj.esf.protocolNames")
if protocol_names:
return protocol_names
# Didn't find a framework
return None
def refresh_studio_modules(framework, exp_module_list):
"""
Ensures all studio modules associated with hwconf modules are added/removed
:param framework: framework specifier, should match halconfig_types.Framework
:param exp_module_list: list of ExporterModel Modules
"""
# If the context contains a project model, use that one. In the case where
# we are passed a project model, we don't need to update the project.
proj_needs_update = False
proj_model = getVariable("projectModel")
if proj_model == None:
proj_model = getProjectModel()
proj_needs_update = True
newModules = []
print("framework:{}".format(framework if framework else "none"))
for transition in modeTransitions():
# Iterating through peripheral modules
for exp_module in exp_module_list:
# Check if module is enabled in peripheral view
for mod in transition.modules:
# Match module name
if exp_module.name != mod.name:
continue
# Only work with enabled modules with associated studio modules
if not mod.isEnabled():
continue
if exp_module.studio_module == None:
continue
moduleId = exp_module.studio_module.getModuleId(framework)
if moduleId:
newModules.append(moduleId)
removeModules(proj_model, "SDK.HAL.*")
print("Adding modules:" + str(newModules))
for mod in newModules:
addModules(proj_model, mod)
if proj_needs_update:
updateProjectFromModel(proj_model)
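# --- Editor's sketch (not part of the generated hwconf sources) ---
# generate_aport_loc_define() is the only helper above that does not need the
# Simplicity Studio runtime, so it can be exercised standalone (assuming `re`
# is available via the wildcard imports at the top of this file). The inputs
# are hypothetical and the expected string is traced from the code above, not
# from running the tool.
if __name__ == "__main__":
    loc = generate_aport_loc_define(
        aport_name="BN",          # hypothetical APORT bus name (negative input)
        mod_name="ACMP0",
        mod_label="ACMP0_NEG",
        location=5,
    )
    print(loc)   # expected: _ACMP_INPUTSEL_NEGSEL_APORT2YCH5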
| 42.365538 | 217 | 0.630093 |
8ff7acaf73d3ad0896c1644713189462886295b9
| 354 |
py
|
Python
|
ProjectApplication/grant_management/migrations/0003_remove_socialnetwork_identifier.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5 |
2020-07-29T10:00:11.000Z
|
2022-02-19T11:00:34.000Z
|
ProjectApplication/grant_management/migrations/0003_remove_socialnetwork_identifier.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 471 |
2019-09-20T14:37:28.000Z
|
2022-03-25T14:16:34.000Z
|
ProjectApplication/grant_management/migrations/0003_remove_socialnetwork_identifier.py
|
code-review-doctor/project-application
|
d85b40b69572efbcda24ce9c40803f76d8ffd192
|
[
"MIT"
] | 5 |
2020-03-15T12:42:47.000Z
|
2022-02-15T18:06:52.000Z
|
# Generated by Django 3.0.3 on 2020-04-06 07:27
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('grant_management', '0002_socialnetwork_identifier'),
]
operations = [
migrations.RemoveField(
model_name='socialnetwork',
name='identifier',
),
]
| 19.666667 | 62 | 0.621469 |
61cbcfcfa916e65e3a52051234b91b9717cfec5d
| 4,342 |
py
|
Python
|
app/api/v2/views/product_view.py
|
andrewhingah/store_manager_v2
|
1cb52f8826b7343858cfe88b9ca1a94c77e0620c
|
[
"MIT"
] | null | null | null |
app/api/v2/views/product_view.py
|
andrewhingah/store_manager_v2
|
1cb52f8826b7343858cfe88b9ca1a94c77e0620c
|
[
"MIT"
] | 2 |
2018-11-09T06:57:14.000Z
|
2018-11-13T16:44:56.000Z
|
app/api/v2/views/product_view.py
|
andrewhingah/store_manager_3
|
1cb52f8826b7343858cfe88b9ca1a94c77e0620c
|
[
"MIT"
] | null | null | null |
"""This module contains resources for products"""
from datetime import datetime
from flask import Flask, jsonify, make_response, request
from flask_restful import Api, Resource, reqparse
from flask_jwt_extended import (jwt_required, create_access_token, get_jwt_identity, get_raw_jwt)
from app.api.v2.models.product_model import Product
from app.api.v2.models.helpers import get_user, get_products, get_product, delete_product, edit_product
from app.api.v2.utils.validate import validate_email, verify_name_details, validate_all
parser = reqparse.RequestParser()
parser.add_argument('category', required = True, help = "Category must be provided")
parser.add_argument('name', required = True, help = "Name must be provided")
parser.add_argument('quantity', type=int, help='Quantity must be an integer')
parser.add_argument('price', type=int, help='Price must be an integer')
class AllProducts(Resource):
"""All products class"""
@jwt_required
def get(self):
"""gets all products"""
products = get_products()
if products is None:
return make_response(jsonify(
{
"message": "No products available"
}))
return make_response(jsonify(
{
"message":"success",
"status":"ok",
"products":products}), 200)
@jwt_required
def post(self):
"""posts a single product"""
email = get_jwt_identity()
user = get_user(email)
if user['role'] != 'admin':
return {"message": "You don't have access to this page"}, 403
args = parser.parse_args()
category = args['category']
name = args['name']
quantity = args['quantity']
price = args['price']
date_created = datetime.now()
if not quantity:
return {"message": "Quantity must be provided"}, 400
if not price:
return {"message": "Price must be provided"}, 400
if verify_name_details(category):
return verify_name_details(category)
if verify_name_details(name):
return verify_name_details(name)
newproduct = Product(category, name, quantity, price, date_created)
newproduct.save()
return make_response(jsonify(
{"message":"Product created successfully",
"status":"created",
"product":newproduct.__dict__}
), 201)
class SingleProduct(Resource):
'''This class has all operations related to a single product'''
@jwt_required
def get(self, id):
'''gets single product by id'''
email = get_jwt_identity()
user = get_user(email)
product = get_product(id)
if product is None:
return make_response(jsonify({"message": "Product unavailable"}), 404)
return make_response(jsonify({"message": "success", "Product": product}), 200)
@jwt_required
def delete(self, id):
'''deletes a single product by id'''
email = get_jwt_identity()
user = get_user(email)
if user['role'] != "admin":
return {"message": "You are not permitted to perform this action"}, 403
product = get_product(id)
if product is None:
return jsonify({"message": "You requested to delete an unavailable product"})
delete_product(id)
return jsonify({"message": "product has been deleted"})
@jwt_required
def put(self, id):
'''
updates details of an existing product
        creates a new one if it does not exist
'''
email = get_jwt_identity()
user = get_user(email)
if user['role'] != 'admin':
return {"message": "You are not permitted to perform this action"}, 403
product = get_product(id)
args = parser.parse_args()
if product is None:
product = Product(
category = args['category'],
name = args['name'],
quantity = args['quantity'],
price = args['price'],
date_created = datetime.now())
product.save()
return make_response(jsonify({'Product': product.__dict__,
'message': "New product created"}), 201)
else:
product['category'] = args['category']
product['name'] = args['name']
            product['quantity'] = args['quantity']
            product['price'] = args['price']
product['date_created'] = datetime.now()
edit_product(id, product)
return make_response(jsonify({"Product":product,
"message":"Updated successfully"}), 200)
| 28.565789 | 103 | 0.698065 |
50aed70bf275d92254745cb80f24d15261c098cb
| 13,613 |
py
|
Python
|
pandas/core/common.py
|
oricou/pandas
|
9405e58d9268041f5416711c051cf5429a19bf49
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/core/common.py
|
oricou/pandas
|
9405e58d9268041f5416711c051cf5429a19bf49
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/core/common.py
|
oricou/pandas
|
9405e58d9268041f5416711c051cf5429a19bf49
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 |
2021-06-04T09:25:40.000Z
|
2021-06-04T09:25:40.000Z
|
"""
Misc tools for implementing data structures
Note: pandas.core.common is *not* part of the public API.
"""
from collections import (
abc,
defaultdict,
)
import contextlib
from functools import partial
import inspect
from typing import (
Any,
Callable,
Collection,
Iterable,
Iterator,
List,
Optional,
Tuple,
Union,
cast,
)
import warnings
import numpy as np
from pandas._libs import lib
from pandas._typing import (
AnyArrayLike,
NpDtype,
Scalar,
T,
)
from pandas.compat import np_version_under1p18
from pandas.core.dtypes.cast import construct_1d_object_array_from_listlike
from pandas.core.dtypes.common import (
is_array_like,
is_bool_dtype,
is_extension_array_dtype,
is_integer,
)
from pandas.core.dtypes.generic import (
ABCExtensionArray,
ABCIndex,
ABCSeries,
)
from pandas.core.dtypes.inference import iterable_not_string
from pandas.core.dtypes.missing import ( # noqa
isna,
isnull,
notnull,
)
class SettingWithCopyError(ValueError):
pass
class SettingWithCopyWarning(Warning):
pass
def flatten(line):
"""
Flatten an arbitrarily nested sequence.
Parameters
----------
line : sequence
        The non-string sequence to flatten
Notes
-----
    This doesn't consider string sequences.
Returns
-------
flattened : generator
"""
for element in line:
if iterable_not_string(element):
yield from flatten(element)
else:
yield element
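# Usage sketch (illustrative, not part of the original module): nested lists
# and tuples are flattened while strings are kept whole. Defined as a helper
# so nothing runs at import time.
def _flatten_example() -> None:
    assert list(flatten([1, [2, ("a", [3])], "xy"])) == [1, 2, "a", 3, "xy"]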
def consensus_name_attr(objs):
name = objs[0].name
for obj in objs[1:]:
try:
if obj.name != name:
name = None
except ValueError:
name = None
return name
def is_bool_indexer(key: Any) -> bool:
"""
Check whether `key` is a valid boolean indexer.
Parameters
----------
key : Any
Only list-likes may be considered boolean indexers.
All other types are not considered a boolean indexer.
For array-like input, boolean ndarrays or ExtensionArrays
with ``_is_boolean`` set are considered boolean indexers.
Returns
-------
bool
Whether `key` is a valid boolean indexer.
Raises
------
ValueError
When the array is an object-dtype ndarray or ExtensionArray
and contains missing values.
See Also
--------
check_array_indexer : Check that `key` is a valid array to index,
and convert to an ndarray.
"""
if isinstance(key, (ABCSeries, np.ndarray, ABCIndex)) or (
is_array_like(key) and is_extension_array_dtype(key.dtype)
):
if key.dtype == np.object_:
key = np.asarray(key)
if not lib.is_bool_array(key):
na_msg = "Cannot mask with non-boolean array containing NA / NaN values"
if lib.infer_dtype(key) == "boolean" and isna(key).any():
# Don't raise on e.g. ["A", "B", np.nan], see
# test_loc_getitem_list_of_labels_categoricalindex_with_na
raise ValueError(na_msg)
return False
return True
elif is_bool_dtype(key.dtype):
return True
elif isinstance(key, list):
try:
arr = np.asarray(key)
return arr.dtype == np.bool_ and len(arr) == len(key)
except TypeError: # pragma: no cover
return False
return False
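# Usage sketch (illustrative, not part of the original module): only
# boolean-valued list-likes qualify as boolean indexers.
def _is_bool_indexer_example() -> None:
    assert is_bool_indexer([True, False, True])
    assert not is_bool_indexer([1, 0, 1])  # integer lists are not boolean masks
    assert is_bool_indexer(np.array([True, False]))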
def cast_scalar_indexer(val, warn_float=False):
"""
To avoid numpy DeprecationWarnings, cast float to integer where valid.
Parameters
----------
val : scalar
warn_float : bool, default False
If True, issue deprecation warning for a float indexer.
Returns
-------
outval : scalar
"""
# assumes lib.is_scalar(val)
if lib.is_float(val) and val.is_integer():
if warn_float:
warnings.warn(
"Indexing with a float is deprecated, and will raise an IndexError "
"in pandas 2.0. You can manually convert to an integer key instead.",
FutureWarning,
stacklevel=3,
)
return int(val)
return val
def not_none(*args):
"""
Returns a generator consisting of the arguments that are not None.
"""
return (arg for arg in args if arg is not None)
def any_none(*args) -> bool:
"""
Returns a boolean indicating if any argument is None.
"""
return any(arg is None for arg in args)
def all_none(*args) -> bool:
"""
Returns a boolean indicating if all arguments are None.
"""
return all(arg is None for arg in args)
def any_not_none(*args) -> bool:
"""
Returns a boolean indicating if any argument is not None.
"""
return any(arg is not None for arg in args)
def all_not_none(*args) -> bool:
"""
Returns a boolean indicating if all arguments are not None.
"""
return all(arg is not None for arg in args)
def count_not_none(*args) -> int:
"""
Returns the count of arguments that are not None.
"""
return sum(x is not None for x in args)
def asarray_tuplesafe(values, dtype: Optional[NpDtype] = None) -> np.ndarray:
if not (isinstance(values, (list, tuple)) or hasattr(values, "__array__")):
values = list(values)
elif isinstance(values, ABCIndex):
return values._values
if isinstance(values, list) and dtype in [np.object_, object]:
return construct_1d_object_array_from_listlike(values)
result = np.asarray(values, dtype=dtype)
if issubclass(result.dtype.type, str):
result = np.asarray(values, dtype=object)
if result.ndim == 2:
# Avoid building an array of arrays:
values = [tuple(x) for x in values]
result = construct_1d_object_array_from_listlike(values)
return result
def index_labels_to_array(labels, dtype: Optional[NpDtype] = None) -> np.ndarray:
"""
Transform label or iterable of labels to array, for use in Index.
Parameters
----------
dtype : dtype
If specified, use as dtype of the resulting array, otherwise infer.
Returns
-------
array
"""
if isinstance(labels, (str, tuple)):
labels = [labels]
if not isinstance(labels, (list, np.ndarray)):
try:
labels = list(labels)
except TypeError: # non-iterable
labels = [labels]
labels = asarray_tuplesafe(labels, dtype=dtype)
return labels
def maybe_make_list(obj):
if obj is not None and not isinstance(obj, (tuple, list)):
return [obj]
return obj
def maybe_iterable_to_list(obj: Union[Iterable[T], T]) -> Union[Collection[T], T]:
"""
If obj is Iterable but not list-like, consume into list.
"""
if isinstance(obj, abc.Iterable) and not isinstance(obj, abc.Sized):
return list(obj)
obj = cast(Collection, obj)
return obj
def is_null_slice(obj) -> bool:
"""
We have a null slice.
"""
return (
isinstance(obj, slice)
and obj.start is None
and obj.stop is None
and obj.step is None
)
def is_true_slices(line):
"""
Find non-trivial slices in "line": return a list of booleans with same length.
"""
return [isinstance(k, slice) and not is_null_slice(k) for k in line]
# TODO: used only once in indexing; belongs elsewhere?
def is_full_slice(obj, line) -> bool:
"""
We have a full length slice.
"""
return (
isinstance(obj, slice)
and obj.start == 0
and obj.stop == line
and obj.step is None
)
def get_callable_name(obj):
# typical case has name
if hasattr(obj, "__name__"):
return getattr(obj, "__name__")
# some objects don't; could recurse
if isinstance(obj, partial):
return get_callable_name(obj.func)
# fall back to class name
if hasattr(obj, "__call__"):
return type(obj).__name__
# everything failed (probably because the argument
# wasn't actually callable); we return None
# instead of the empty string in this case to allow
# distinguishing between no name and a name of ''
return None
def apply_if_callable(maybe_callable, obj, **kwargs):
"""
Evaluate possibly callable input using obj and kwargs if it is callable,
otherwise return as it is.
Parameters
----------
maybe_callable : possibly a callable
obj : NDFrame
**kwargs
"""
if callable(maybe_callable):
return maybe_callable(obj, **kwargs)
return maybe_callable
def standardize_mapping(into):
"""
Helper function to standardize a supplied mapping.
Parameters
----------
into : instance or subclass of collections.abc.Mapping
Must be a class, an initialized collections.defaultdict,
or an instance of a collections.abc.Mapping subclass.
Returns
-------
mapping : a collections.abc.Mapping subclass or other constructor
a callable object that can accept an iterator to create
the desired Mapping.
See Also
--------
DataFrame.to_dict
Series.to_dict
"""
if not inspect.isclass(into):
if isinstance(into, defaultdict):
return partial(defaultdict, into.default_factory)
into = type(into)
if not issubclass(into, abc.Mapping):
raise TypeError(f"unsupported type: {into}")
elif into == defaultdict:
raise TypeError("to_dict() only accepts initialized defaultdicts")
return into
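# Usage sketch (illustrative, not part of the original module): classes pass
# through unchanged, while an initialized defaultdict keeps its
# default_factory via functools.partial.
def _standardize_mapping_example() -> None:
    assert standardize_mapping(dict) is dict
    factory = standardize_mapping(defaultdict(list))
    assert factory().default_factory is list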
def random_state(state=None):
"""
Helper function for processing random_state arguments.
Parameters
----------
state : int, array-like, BitGenerator (NumPy>=1.17), np.random.RandomState, None.
If receives an int, array-like, or BitGenerator, passes to
np.random.RandomState() as seed.
If receives an np.random.RandomState object, just returns object.
If receives `None`, returns np.random.
If receives anything else, raises an informative ValueError.
.. versionchanged:: 1.1.0
array-like and BitGenerator (for NumPy>=1.18) object now passed to
np.random.RandomState() as seed
Default None.
Returns
-------
np.random.RandomState
"""
if (
is_integer(state)
or is_array_like(state)
or (not np_version_under1p18 and isinstance(state, np.random.BitGenerator))
):
return np.random.RandomState(state)
elif isinstance(state, np.random.RandomState):
return state
elif state is None:
return np.random
else:
raise ValueError(
"random_state must be an integer, array-like, a BitGenerator, "
"a numpy RandomState, or None"
)
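# Usage sketch (illustrative, not part of the original module) covering the
# three accepted kinds of input.
def _random_state_example() -> None:
    assert isinstance(random_state(42), np.random.RandomState)  # seed -> RandomState
    rs = np.random.RandomState(0)
    assert random_state(rs) is rs           # an existing RandomState passes through
    assert random_state(None) is np.random  # None -> the global np.random module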
def pipe(
obj, func: Union[Callable[..., T], Tuple[Callable[..., T], str]], *args, **kwargs
) -> T:
"""
Apply a function ``func`` to object ``obj`` either by passing obj as the
first argument to the function or, in the case that the func is a tuple,
interpret the first element of the tuple as a function and pass the obj to
that function as a keyword argument whose key is the value of the second
element of the tuple.
Parameters
----------
func : callable or tuple of (callable, str)
Function to apply to this object or, alternatively, a
``(callable, data_keyword)`` tuple where ``data_keyword`` is a
string indicating the keyword of `callable`` that expects the
object.
*args : iterable, optional
Positional arguments passed into ``func``.
**kwargs : dict, optional
A dictionary of keyword arguments passed into ``func``.
Returns
-------
object : the return type of ``func``.
"""
if isinstance(func, tuple):
func, target = func
if target in kwargs:
msg = f"{target} is both the pipe target and a keyword argument"
raise ValueError(msg)
kwargs[target] = obj
return func(*args, **kwargs)
else:
return func(obj, *args, **kwargs)
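# Usage sketch (illustrative, not part of the original module): obj is passed
# positionally to a plain callable, or under the named keyword when func is a
# (callable, keyword) tuple.
def _pipe_example() -> None:
    assert pipe(10, lambda x, y: x + y, 5) == 15
    assert pipe(10, (lambda scale, data: data * scale, "data"), 3) == 30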
def get_rename_function(mapper):
"""
    Returns a function that will map names/labels, depending on whether
    mapper is a dict, Series or just a function.
"""
if isinstance(mapper, (abc.Mapping, ABCSeries)):
def f(x):
if x in mapper:
return mapper[x]
else:
return x
else:
f = mapper
return f
def convert_to_list_like(
values: Union[Scalar, Iterable, AnyArrayLike]
) -> Union[List, AnyArrayLike]:
"""
Convert list-like or scalar input to list-like. List, numpy and pandas array-like
inputs are returned unmodified whereas others are converted to list.
"""
if isinstance(values, (list, np.ndarray, ABCIndex, ABCSeries, ABCExtensionArray)):
# np.ndarray resolving as Any gives a false positive
return values # type: ignore[return-value]
elif isinstance(values, abc.Iterable) and not isinstance(values, str):
return list(values)
return [values]
@contextlib.contextmanager
def temp_setattr(obj, attr: str, value) -> Iterator[None]:
"""Temporarily set attribute on an object.
Args:
obj: Object whose attribute will be modified.
attr: Attribute to modify.
value: Value to temporarily set attribute to.
Yields:
obj with modified attribute.
"""
old_value = getattr(obj, attr)
setattr(obj, attr, value)
yield obj
setattr(obj, attr, old_value)
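# Usage sketch (illustrative, not part of the original module): the attribute
# is restored when the context exits.
def _temp_setattr_example() -> None:
    class _Box:
        value = 1
    box = _Box()
    with temp_setattr(box, "value", 99) as same_box:
        assert same_box is box and box.value == 99
    assert box.value == 1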
| 26.229287 | 88 | 0.627856 |
1e6c1867756ee328859f70e63fb082b329281c53
| 1,898 |
py
|
Python
|
src/OTLMOW/OTLModel/Classes/ProefWeerstandAfschilfering.py
|
davidvlaminck/OTLClassPython
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | 2 |
2022-02-01T08:58:11.000Z
|
2022-02-08T13:35:17.000Z
|
src/OTLMOW/OTLModel/Classes/ProefWeerstandAfschilfering.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
src/OTLMOW/OTLModel/Classes/ProefWeerstandAfschilfering.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut
from OTLMOW.OTLModel.Classes.Proef import Proef
from OTLMOW.OTLModel.Datatypes.DtcDocument import DtcDocument
from OTLMOW.GeometrieArtefact.PuntGeometrie import PuntGeometrie
from OTLMOW.GeometrieArtefact.LijnGeometrie import LijnGeometrie
from OTLMOW.GeometrieArtefact.VlakGeometrie import VlakGeometrie
# Generated with OTLClassCreator. To modify: extend, do not edit
class ProefWeerstandAfschilfering(Proef, PuntGeometrie, LijnGeometrie, VlakGeometrie):
"""Controle van de vorst-dooiweerstand volgens CEN/TS 12390-9."""
typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/proefenmeting#ProefWeerstandAfschilfering'
"""De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
def __init__(self):
Proef.__init__(self)
LijnGeometrie.__init__(self)
PuntGeometrie.__init__(self)
VlakGeometrie.__init__(self)
self._weerstandAfschilfering = OTLAttribuut(field=DtcDocument,
naam='weerstandAfschilfering',
label='weerstand afschilfering',
objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/proefenmeting#ProefWeerstandAfschilfering.weerstandAfschilfering',
definition='Proef om de weerstand/afschilfering van de laag te bepalen.',
owner=self)
@property
def weerstandAfschilfering(self):
"""Proef om de weerstand/afschilfering van de laag te bepalen."""
return self._weerstandAfschilfering.get_waarde()
@weerstandAfschilfering.setter
def weerstandAfschilfering(self, value):
self._weerstandAfschilfering.set_waarde(value, owner=self)
| 49.947368 | 174 | 0.68019 |
ea0d1d24e59e9eb2b01a810014b2fa971a244b9e
| 1,030 |
py
|
Python
|
zeit_epaper.py
|
mlux86/zeit-to-tolino
|
0a352cbd2185f2c4a75f12828554b0e55b10918f
|
[
"MIT"
] | null | null | null |
zeit_epaper.py
|
mlux86/zeit-to-tolino
|
0a352cbd2185f2c4a75f12828554b0e55b10918f
|
[
"MIT"
] | null | null | null |
zeit_epaper.py
|
mlux86/zeit-to-tolino
|
0a352cbd2185f2c4a75f12828554b0e55b10918f
|
[
"MIT"
] | null | null | null |
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
class ZeitIssue:
def __init__(self, driver):
self.driver = driver
def get_download_url(self):
link_elem: WebElement = self.driver.find_element(By.LINK_TEXT, 'EPUB FÜR E-READER LADEN')
return link_elem.get_attribute('href')
class ZeitEPaper:
url = 'https://epaper.zeit.de/abo/diezeit/'
def __init__(self, driver: WebDriver):
self.driver = driver
self.driver.get(self.url)
def login(self, username, password):
self.driver.find_element(By.NAME, 'email').send_keys(username)
self.driver.find_element(By.NAME, 'pass').send_keys(password)
self.driver.find_element(By.CSS_SELECTOR, 'input[type=\'submit\']').click()
return self
def current_issue(self):
self.driver.find_element(By.LINK_TEXT, 'ZUR AKTUELLEN AUSGABE').click()
return ZeitIssue(self.driver)
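# Usage sketch (illustrative, not from the original file): fetch the EPUB
# download link for the current issue. The Chrome driver setup and the
# credentials are assumptions for illustration only.
def fetch_current_epub_url(username, password):
    from selenium import webdriver
    driver = webdriver.Chrome()
    try:
        issue = ZeitEPaper(driver).login(username, password).current_issue()
        return issue.get_download_url()
    finally:
        driver.quit()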
| 33.225806 | 97 | 0.702913 |
b21d5213dd5a9a685f2e75c6bd37259944d08025
| 4,880 |
py
|
Python
|
alternate_fingers.py
|
anthologen/audio-sandbox
|
e5aa6f28c0db002520273c55c453390e163dad81
|
[
"MIT"
] | null | null | null |
alternate_fingers.py
|
anthologen/audio-sandbox
|
e5aa6f28c0db002520273c55c453390e163dad81
|
[
"MIT"
] | null | null | null |
alternate_fingers.py
|
anthologen/audio-sandbox
|
e5aa6f28c0db002520273c55c453390e163dad81
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
A tool to find and draw all possible chord finger positions on
a string instrument. Partially inspired by a finger injury.
"""
import logging
import sys
import argparse
logger = logging.getLogger()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
SEMITONE_INDEX = {
"A": 0,
"A#": 1, "Bb": 1,
"B": 2,
"C": 3,
"C#": 4, "Db": 4,
"D": 5,
"D#": 6, "Eb": 6,
"E": 7,
"F": 8,
"F#": 9, "Gb": 9,
"G": 10,
"G#": 11, "Ab": 11
}
INVERTED_SEMITONE_INDEX = {val: key for key, val in SEMITONE_INDEX.items()}
SEMITONE_DIVISOR = 12
CHORD_INTERVALS = {
    "major": [0, 4, 7, 12],
    "minor": [0, 3, 7, 12],
    "7": [0, 4, 7, 10],       # dominant 7th: minor seventh = 10 semitones
    "major7": [0, 4, 7, 11],  # major 7th: major seventh = 11 semitones
    "minor7": [0, 3, 7, 10]
}
class FrettedStringInstrument:
def __init__(self, string_list, num_frets):
self.string_list = string_list
self.num_frets = num_frets
class SopranoUkulele(FrettedStringInstrument):
def __init__(self):
FrettedStringInstrument.__init__(self, ["G", "C", "E", "A"], 12)
# TODO: Non-standard tuning
class StandardGuitar(FrettedStringInstrument):
def __init__(self):
FrettedStringInstrument.__init__(self, ["E", "A", "D", "G", "B", "E"], 19)
class StandardHand:
def __init__(self):
self.finger_list = [1, 2, 3, 4]
# TODO: Non-standard hand
class FingeringSolver:
def __init__(self, instrument, hand):
self.instrument = instrument
self.hand = hand # currently unused
def get_chord_notes(self, root, chord):
"""
Return a list of valid notes for the given root and chord
"""
note_list = []
interval_distance_list = CHORD_INTERVALS[chord]
for interval_distance in interval_distance_list:
note_idx = SEMITONE_INDEX[root]
interval_note_idx = (note_idx + interval_distance) % SEMITONE_DIVISOR
note_list.append(INVERTED_SEMITONE_INDEX[interval_note_idx])
return note_list
def find_valid_frets_on_string(self, string_tone, num_frets, root, chord):
"""
        Return a list of fret indices that can be pressed on a string
of the given tone and number of frets for the given root and chord
"""
fret_list = []
target_note_set = set(self.get_chord_notes(root, chord))
logger.debug(root + " " + chord + " = " + str(target_note_set))
string_note_idx = SEMITONE_INDEX[string_tone]
for fret_idx in range(num_frets + 1): # + 1 to account for open string
fretted_note_idx = (string_note_idx + fret_idx) % SEMITONE_DIVISOR
fretted_note = INVERTED_SEMITONE_INDEX[fretted_note_idx]
if fretted_note in target_note_set:
fret_list.append(fret_idx)
return fret_list
def draw_all_possible_frets_for(self, root, chord):
"""
        Draw a diagram of all possible finger positions on this instrument
for the given root and chord.
"""
print(root + " " + chord)
for string_tone in reversed(self.instrument.string_list):
valid_fret_list = self.find_valid_frets_on_string(
string_tone, self.instrument.num_frets, root, chord)
# draw the frets to be pressed on this string
string_output_line = string_tone + "|"
if 0 in valid_fret_list:
string_output_line += "o" # play open string
else:
string_output_line += " "
for fret_idx in range(1, self.instrument.num_frets + 1):
if fret_idx in valid_fret_list:
string_output_line += "x" # valid fret
else:
string_output_line += "-"
print(string_output_line)
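# Usage sketch (illustrative, not from the original file): the C major chord
# resolves to root, major third, fifth and octave.
def _chord_notes_example():
    solver = FingeringSolver(SopranoUkulele(), StandardHand())
    assert solver.get_chord_notes("C", "major") == ["C", "E", "G", "C"]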
if __name__ == "__main__":
ukulele = SopranoUkulele()
guitar = StandardGuitar()
hand = StandardHand()
solver = FingeringSolver(ukulele, hand)
parser = argparse.ArgumentParser(description=
'A tool to draw all possible chord finger positions on a soprano ukulele')
parser.add_argument('root', choices=SEMITONE_INDEX.keys(), help='the root of the chord')
parser.add_argument('chord', choices=CHORD_INTERVALS.keys(), help='the type of chord')
args = parser.parse_args()
solver.draw_all_possible_frets_for(args.root, args.chord)
"""
solver = FingeringSolver(guitar, hand)
# I–V–vi–IV progression (B)
solver.draw_all_possible_frets_for("B", "major")
solver.draw_all_possible_frets_for("F#", "major")
solver.draw_all_possible_frets_for("Ab", "minor")
solver.draw_all_possible_frets_for("E", "major")
"""
| 32.972973 | 92 | 0.632582 |
0333fb58de83af9f4c78d078b1d9fe8f1eb438ea
| 5,959 |
py
|
Python
|
qsimcirq/qsim_circuit.py
|
Thenerdstation/qsim
|
4c16822ee56614b917434c3b7bc7b06b07dc8e02
|
[
"Apache-2.0"
] | null | null | null |
qsimcirq/qsim_circuit.py
|
Thenerdstation/qsim
|
4c16822ee56614b917434c3b7bc7b06b07dc8e02
|
[
"Apache-2.0"
] | null | null | null |
qsimcirq/qsim_circuit.py
|
Thenerdstation/qsim
|
4c16822ee56614b917434c3b7bc7b06b07dc8e02
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import cirq
from qsimcirq import qsim
def _cirq_gate_kind(gate):
if isinstance(gate, cirq.ops.identity.IdentityGate):
if gate.num_qubits() == 1:
return qsim.kI
if gate.num_qubits() == 2:
return qsim.kI2
raise NotImplementedError(
f'Received identity on {gate.num_qubits()} qubits; '
+ 'only 1- or 2-qubit gates are supported.')
if isinstance(gate, cirq.ops.XPowGate):
# cirq.rx also uses this path.
if gate.exponent == 1:
return qsim.kX
return qsim.kXPowGate
if isinstance(gate, cirq.ops.YPowGate):
# cirq.ry also uses this path.
if gate.exponent == 1:
return qsim.kY
return qsim.kYPowGate
if isinstance(gate, cirq.ops.ZPowGate):
# cirq.rz also uses this path.
if gate.exponent == 1:
return qsim.kZ
if gate.exponent == 0.5:
return qsim.kS
if gate.exponent == 0.25:
return qsim.kT
return qsim.kZPowGate
if isinstance(gate, cirq.ops.HPowGate):
if gate.exponent == 1:
return qsim.kH
return qsim.kHPowGate
if isinstance(gate, cirq.ops.CZPowGate):
if gate.exponent == 1:
return qsim.kCZ
return qsim.kCZPowGate
if isinstance(gate, cirq.ops.CXPowGate):
if gate.exponent == 1:
return qsim.kCX
return qsim.kCXPowGate
if isinstance(gate, cirq.ops.PhasedXPowGate):
return qsim.kPhasedXPowGate
if isinstance(gate, cirq.ops.PhasedXZGate):
return qsim.kPhasedXZGate
if isinstance(gate, cirq.ops.XXPowGate):
if gate.exponent == 1:
return qsim.kXX
return qsim.kXXPowGate
if isinstance(gate, cirq.ops.YYPowGate):
if gate.exponent == 1:
return qsim.kYY
return qsim.kYYPowGate
if isinstance(gate, cirq.ops.ZZPowGate):
if gate.exponent == 1:
return qsim.kZZ
return qsim.kZZPowGate
if isinstance(gate, cirq.ops.SwapPowGate):
if gate.exponent == 1:
return qsim.kSWAP
return qsim.kSwapPowGate
if isinstance(gate, cirq.ops.ISwapPowGate):
# cirq.riswap also uses this path.
if gate.exponent == 1:
return qsim.kISWAP
return qsim.kISwapPowGate
if isinstance(gate, cirq.ops.PhasedISwapPowGate):
# cirq.givens also uses this path.
return qsim.kPhasedISwapPowGate
if isinstance(gate, cirq.ops.FSimGate):
return qsim.kFSimGate
if isinstance(gate, cirq.ops.MatrixGate):
if gate.num_qubits() == 1:
return qsim.kMatrixGate1
if gate.num_qubits() == 2:
return qsim.kMatrixGate2
raise NotImplementedError(
f'Received matrix on {gate.num_qubits()} qubits; '
+ 'only 1- or 2-qubit gates are supported.')
# Unrecognized gates will be decomposed.
return None
class QSimCircuit(cirq.Circuit):
def __init__(self,
cirq_circuit: cirq.Circuit,
device: cirq.devices = cirq.devices.UNCONSTRAINED_DEVICE,
allow_decomposition: bool = False):
if allow_decomposition:
super().__init__([], device=device)
for moment in cirq_circuit:
for op in moment:
# This should call decompose on the gates
self.append(op)
else:
super().__init__(cirq_circuit, device=device)
def __eq__(self, other):
if not isinstance(other, QSimCircuit):
return False
# equality is tested, for the moment, for cirq.Circuit
return super().__eq__(other)
def _resolve_parameters_(self, param_resolver: cirq.study.ParamResolver):
qsim_circuit = super()._resolve_parameters_(param_resolver)
qsim_circuit.device = self.device
return qsim_circuit
def translate_cirq_to_qsim(
self,
qubit_order: cirq.ops.QubitOrderOrList = cirq.ops.QubitOrder.DEFAULT
) -> qsim.Circuit:
"""
Translates this Cirq circuit to the qsim representation.
:qubit_order: Ordering of qubits
:return: a C++ qsim Circuit object
"""
qsim_circuit = qsim.Circuit()
qsim_circuit.num_qubits = len(self.all_qubits())
ordered_qubits = cirq.ops.QubitOrder.as_qubit_order(qubit_order).order_for(
self.all_qubits())
# qsim numbers qubits in reverse order from cirq
ordered_qubits = list(reversed(ordered_qubits))
qubit_to_index_dict = {q: i for i, q in enumerate(ordered_qubits)}
time_offset = 0
for moment in self:
moment_length = 1
for op in moment:
qsim_ops = cirq.decompose(
            op, keep=lambda x: _cirq_gate_kind(x.gate) is not None)
moment_length = max(moment_length, len(qsim_ops))
for gi, qsim_op in enumerate(qsim_ops):
gate_kind = _cirq_gate_kind(qsim_op.gate)
time = time_offset + gi
qubits = [qubit_to_index_dict[q] for q in qsim_op.qubits]
params = {
p.strip('_'): val for p, val in vars(qsim_op.gate).items()
if isinstance(val, float)
}
if gate_kind == qsim.kMatrixGate1:
qsim.add_matrix1(time, qubits,
cirq.unitary(qsim_op.gate).tolist(),
qsim_circuit)
elif gate_kind == qsim.kMatrixGate2:
qsim.add_matrix2(time, qubits,
cirq.unitary(qsim_op.gate).tolist(),
qsim_circuit)
else:
qsim.add_gate(gate_kind, time, qubits, params, qsim_circuit)
time_offset += moment_length
return qsim_circuit
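# Usage sketch (illustrative, not from the original file): translate a small
# cirq circuit into the C++ qsim representation. The qubit layout and gate
# choice are assumptions for illustration only.
def _example_translate_bell_pair():
    q0, q1 = cirq.LineQubit.range(2)
    bell = cirq.Circuit([cirq.H(q0), cirq.CNOT(q0, q1)])
    return QSimCircuit(bell).translate_cirq_to_qsim()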
| 32.922652 | 79 | 0.663031 |
8c44b34a018ad9e9f023d894e11a7a4ce39128f1
| 532 |
py
|
Python
|
Code/GraphEvoDef/main/utils.py
|
vijaybw/graphevodef
|
5dcde8ea6b5a754fa15a370fe3bc73e4bd0a94a9
|
[
"MIT"
] | null | null | null |
Code/GraphEvoDef/main/utils.py
|
vijaybw/graphevodef
|
5dcde8ea6b5a754fa15a370fe3bc73e4bd0a94a9
|
[
"MIT"
] | null | null | null |
Code/GraphEvoDef/main/utils.py
|
vijaybw/graphevodef
|
5dcde8ea6b5a754fa15a370fe3bc73e4bd0a94a9
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
"""
Copyright (c) 2020 - present Rakan Alanazi, Vijay Walunj
"""
from werkzeug.utils import secure_filename
from flask import current_app
import os
def save_file(uploaded_file, project_name, last_file_flag=False):
    # Store the uploaded file under the project name, keeping its original extension.
    filename = secure_filename(uploaded_file.filename)
    _, f_ext = os.path.splitext(filename)
    prediction_fn = str(project_name) + f_ext
    uploaded_file.save(os.path.join(current_app.config['PROJECT_LOCATION'], prediction_fn))
    # The stored filename is only returned for the last file of a batch.
    if last_file_flag:
        return prediction_fn
| 28 | 92 | 0.746241 |
1e432282e630ec77bcf5c1c3a074baf0b0cf57b4
| 13,309 |
py
|
Python
|
test/test_remote_io.py
|
pytorch/data
|
32e447f1890437fa9707bc2e517cfa8b3e19dfca
|
[
"BSD-3-Clause"
] | 611 |
2021-09-27T18:19:16.000Z
|
2022-03-31T11:36:01.000Z
|
test/test_remote_io.py
|
pytorch/data
|
32e447f1890437fa9707bc2e517cfa8b3e19dfca
|
[
"BSD-3-Clause"
] | 271 |
2021-09-27T19:07:00.000Z
|
2022-03-30T19:55:14.000Z
|
test/test_remote_io.py
|
pytorch/data
|
32e447f1890437fa9707bc2e517cfa8b3e19dfca
|
[
"BSD-3-Clause"
] | 36 |
2021-09-27T19:22:32.000Z
|
2022-03-29T12:49:06.000Z
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import io
import os
import unittest
import warnings
import expecttest
import torchdata
from _utils._common_utils_for_test import check_hash_fn, create_temp_dir, IS_WINDOWS
from torch.utils.data import DataLoader
from torchdata.datapipes.iter import (
EndOnDiskCacheHolder,
FileOpener,
FSSpecFileLister,
FSSpecFileOpener,
HttpReader,
IterableWrapper,
OnDiskCacheHolder,
S3FileLister,
S3FileLoader,
)
try:
import fsspec
import s3fs
HAS_FSSPEC_S3 = True
except ImportError:
HAS_FSSPEC_S3 = False
skipIfNoFSSpecS3 = unittest.skipIf(not HAS_FSSPEC_S3, "no FSSpec with S3fs")
try:
from torchdata._torchdata import S3Handler
HAS_AWS = True
except ImportError:
HAS_AWS = False
skipIfAWS = unittest.skipIf(HAS_AWS, "AWSSDK Enabled")
skipIfNoAWS = unittest.skipIf(not HAS_AWS, "No AWSSDK Enabled")
class TestDataPipeRemoteIO(expecttest.TestCase):
def setUp(self):
self.temp_dir = create_temp_dir()
def tearDown(self):
try:
self.temp_dir.cleanup()
except Exception as e:
warnings.warn(f"TestDataPipeRemoteIO was not able to cleanup temp dir due to {e}")
def test_http_reader_iterdatapipe(self):
file_url = "https://raw.githubusercontent.com/pytorch/data/main/LICENSE"
expected_file_name = "LICENSE"
expected_MD5_hash = "bb9675028dd39d2dd2bf71002b93e66c"
query_params = {"auth": ("fake_username", "fake_password"), "allow_redirects": True}
timeout = 120
http_reader_dp = HttpReader(IterableWrapper([file_url]), timeout=timeout, **query_params)
# Functional Test: test if the Http Reader can download and read properly
reader_dp = http_reader_dp.readlines()
it = iter(reader_dp)
path, line = next(it)
self.assertEqual(expected_file_name, os.path.basename(path))
self.assertTrue(b"BSD" in line)
# Reset Test: http_reader_dp has been read, but we reset when calling check_hash()
check_cache_dp = http_reader_dp.check_hash({file_url: expected_MD5_hash}, "md5", rewind=False)
it = iter(check_cache_dp)
path, stream = next(it)
self.assertEqual(expected_file_name, os.path.basename(path))
self.assertTrue(io.BufferedReader, type(stream))
# __len__ Test: returns the length of source DataPipe
self.assertEqual(1, len(http_reader_dp))
def test_on_disk_cache_holder_iterdatapipe(self):
tar_file_url = "https://raw.githubusercontent.com/pytorch/data/main/test/_fakedata/csv.tar.gz"
expected_file_name = os.path.join(self.temp_dir.name, "csv.tar.gz")
expected_MD5_hash = "42cd45e588dbcf64c65751fbf0228af9"
tar_hash_dict = {expected_file_name: expected_MD5_hash}
tar_file_dp = IterableWrapper([tar_file_url])
with self.assertRaisesRegex(RuntimeError, "Expected `OnDiskCacheHolder` existing"):
_ = tar_file_dp.end_caching()
def _filepath_fn(url):
filename = os.path.basename(url)
return os.path.join(self.temp_dir.name, filename)
tar_cache_dp = tar_file_dp.on_disk_cache(
filepath_fn=_filepath_fn,
hash_dict=tar_hash_dict,
hash_type="md5",
)
# DataPipe Constructor
tar_cache_dp = HttpReader(tar_cache_dp)
# Start iteration without `end_caching`
with self.assertRaisesRegex(RuntimeError, "Please call"):
_ = list(tar_cache_dp)
# Both filepath_fn and same_filepath_fn are set
with self.assertRaisesRegex(ValueError, "`filepath_fn` is mutually"):
_ = tar_cache_dp.end_caching(mode="wb", filepath_fn=_filepath_fn, same_filepath_fn=True)
tar_cache_dp = tar_cache_dp.end_caching(mode="wb", same_filepath_fn=True)
# File doesn't exist on disk
self.assertFalse(os.path.exists(expected_file_name))
path = list(tar_cache_dp)[0]
# File is cached to disk
self.assertTrue(os.path.exists(expected_file_name))
self.assertEqual(expected_file_name, path)
self.assertTrue(check_hash_fn(expected_file_name, expected_MD5_hash))
# Modify the downloaded file to trigger downloading again
with open(expected_file_name, "w") as f:
f.write("0123456789abcdef")
self.assertFalse(check_hash_fn(expected_file_name, expected_MD5_hash))
path = list(tar_cache_dp)[0]
self.assertTrue(check_hash_fn(expected_file_name, expected_MD5_hash))
# Call `end_caching` again
with self.assertRaisesRegex(RuntimeError, "`end_caching` can only be invoked once"):
_ = tar_cache_dp.end_caching()
# Multiple filepaths
def _gen_filepath_fn(tar_path):
for i in range(3):
yield os.path.join(os.path.dirname(tar_path), "csv", f"{i}.csv")
# DataPipe Constructor
file_cache_dp = OnDiskCacheHolder(tar_cache_dp, filepath_fn=_gen_filepath_fn)
file_cache_dp = FileOpener(file_cache_dp, mode="rb")
# Functional API
file_cache_dp = file_cache_dp.load_from_tar()
def _csv_filepath_fn(csv_path):
return os.path.join(self.temp_dir.name, "csv", os.path.basename(csv_path))
# Read and decode
def _read_and_decode(x):
return x.read().decode()
file_cache_dp = file_cache_dp.map(fn=_read_and_decode, input_col=1)
file_cache_dp = EndOnDiskCacheHolder(file_cache_dp, mode="w", filepath_fn=_csv_filepath_fn, skip_read=True)
cached_it = iter(file_cache_dp)
for expected_csv_path in _gen_filepath_fn(expected_file_name):
            # Check disabled due to some elements of prefetching inside of on_disk_cache
# self.assertFalse(os.path.exists(expected_csv_path))
csv_path = next(cached_it)
# File is cached to disk
self.assertTrue(os.path.exists(expected_csv_path))
self.assertEqual(expected_csv_path, csv_path)
# Cache decompressed archive but only check root directory
root_dir = "temp"
file_cache_dp = OnDiskCacheHolder(
tar_cache_dp, filepath_fn=lambda tar_path: os.path.join(os.path.dirname(tar_path), root_dir)
)
file_cache_dp = FileOpener(file_cache_dp, mode="rb").load_from_tar()
file_cache_dp = file_cache_dp.end_caching(
mode="wb",
filepath_fn=lambda file_path: os.path.join(self.temp_dir.name, root_dir, os.path.basename(file_path)),
)
cached_it = iter(file_cache_dp)
for i in range(3):
expected_csv_path = os.path.join(self.temp_dir.name, root_dir, f"{i}.csv")
# File doesn't exist on disk
            # Check disabled due to some elements of prefetching inside of on_disk_cache
# self.assertFalse(os.path.exists(expected_csv_path))
csv_path = next(cached_it)
# File is cached to disk
self.assertTrue(os.path.exists(expected_csv_path))
self.assertEqual(expected_csv_path, csv_path)
if not IS_WINDOWS:
dl = DataLoader(file_cache_dp, num_workers=3, multiprocessing_context="fork", batch_size=1)
expected = [[os.path.join(self.temp_dir.name, root_dir, f"{i}.csv")] for i in range(3)] * 3
res = list(dl)
self.assertEqual(sorted(expected), sorted(res))
@skipIfNoFSSpecS3
def test_fsspec_io_iterdatapipe(self):
input_list = [
(["s3://ai2-public-datasets"], 39), # bucket without '/'
(["s3://ai2-public-datasets/charades/"], 18), # bucket with '/'
(
[
"s3://ai2-public-datasets/charades/Charades_v1.zip",
"s3://ai2-public-datasets/charades/Charades_v1_flow.tar",
"s3://ai2-public-datasets/charades/Charades_v1_rgb.tar",
"s3://ai2-public-datasets/charades/Charades_v1_480.zip",
],
4,
), # multiple files
]
for urls, num in input_list:
fsspec_lister_dp = FSSpecFileLister(IterableWrapper(urls), anon=True)
self.assertEqual(sum(1 for _ in fsspec_lister_dp), num, f"{urls} failed")
url = "s3://ai2-public-datasets/charades/"
fsspec_loader_dp = FSSpecFileOpener(FSSpecFileLister(IterableWrapper([url]), anon=True), anon=True)
res = list(fsspec_loader_dp)
        self.assertEqual(len(res), 18, f"{url} failed")
@skipIfAWS
def test_disabled_s3_io_iterdatapipe(self):
file_urls = ["s3://ai2-public-datasets"]
with self.assertRaisesRegex(ModuleNotFoundError, "TorchData must be built with"):
_ = S3FileLister(IterableWrapper(file_urls))
with self.assertRaisesRegex(ModuleNotFoundError, "TorchData must be built with"):
_ = S3FileLoader(IterableWrapper(file_urls))
@skipIfNoAWS
def test_s3_io_iterdatapipe(self):
# S3FileLister: different inputs
input_list = [
[["s3://ai2-public-datasets"], 77], # bucket without '/'
[["s3://ai2-public-datasets/"], 77], # bucket with '/'
[["s3://ai2-public-datasets/charades"], 18], # folder without '/'
[["s3://ai2-public-datasets/charades/"], 18], # folder without '/'
[["s3://ai2-public-datasets/charad"], 18], # prefix
[
[
"s3://ai2-public-datasets/charades/Charades_v1",
"s3://ai2-public-datasets/charades/Charades_vu17",
],
12,
], # prefixes
[["s3://ai2-public-datasets/charades/Charades_v1.zip"], 1], # single file
[
[
"s3://ai2-public-datasets/charades/Charades_v1.zip",
"s3://ai2-public-datasets/charades/Charades_v1_flow.tar",
"s3://ai2-public-datasets/charades/Charades_v1_rgb.tar",
"s3://ai2-public-datasets/charades/Charades_v1_480.zip",
],
4,
], # multiple files
[
[
"s3://ai2-public-datasets/charades/Charades_v1.zip",
"s3://ai2-public-datasets/charades/Charades_v1_flow.tar",
"s3://ai2-public-datasets/charades/Charades_v1_rgb.tar",
"s3://ai2-public-datasets/charades/Charades_v1_480.zip",
"s3://ai2-public-datasets/charades/Charades_vu17",
],
10,
], # files + prefixes
]
for input in input_list:
s3_lister_dp = S3FileLister(IterableWrapper(input[0]), region="us-west-2")
self.assertEqual(sum(1 for _ in s3_lister_dp), input[1], f"{input[0]} failed")
# S3FileLister: prefixes + different region
file_urls = [
"s3://aft-vbi-pds/bin-images/111",
"s3://aft-vbi-pds/bin-images/222",
]
s3_lister_dp = S3FileLister(IterableWrapper(file_urls), region="us-east-1")
        self.assertEqual(sum(1 for _ in s3_lister_dp), 2212, f"{file_urls} failed")
# S3FileLister: incorrect inputs
input_list = [
[""],
["ai2-public-datasets"],
["s3://"],
["s3:///bin-images"],
]
for input in input_list:
with self.assertRaises(ValueError, msg=f"{input} should raise ValueError."):
s3_lister_dp = S3FileLister(IterableWrapper(input), region="us-east-1")
for _ in s3_lister_dp:
pass
# S3FileLoader: loader
input = [
"s3://charades-tar-shards/charades-video-0.tar",
"s3://charades-tar-shards/charades-video-1.tar",
] # multiple files
s3_loader_dp = S3FileLoader(input, region="us-west-2")
self.assertEqual(sum(1 for _ in s3_loader_dp), 2, f"{input} failed")
input = [["s3://aft-vbi-pds/bin-images/100730.jpg"], 1]
s3_loader_dp = S3FileLoader(input[0], region="us-east-1")
self.assertEqual(sum(1 for _ in s3_loader_dp), input[1], f"{input[0]} failed")
# S3FileLoader: incorrect inputs
input_list = [
[""],
["ai2-public-datasets"],
["s3://"],
["s3:///bin-images"],
["s3://ai2-public-datasets/bin-image"],
]
for input in input_list:
with self.assertRaises(ValueError, msg=f"{input} should raise ValueError."):
s3_loader_dp = S3FileLoader(input, region="us-east-1")
for _ in s3_loader_dp:
pass
# integration test
input = [["s3://charades-tar-shards/"], 10]
s3_lister_dp = S3FileLister(IterableWrapper(input[0]), region="us-west-2")
s3_loader_dp = S3FileLoader(s3_lister_dp, region="us-west-2")
self.assertEqual(sum(1 for _ in s3_loader_dp), input[1], f"{input[0]} failed")
if __name__ == "__main__":
unittest.main()
| 39.37574 | 115 | 0.624314 |
19ffc8d38b23842ce67873c05660d0019a108f82
| 4,156 |
py
|
Python
|
pontoon/checks/tests/test_libraries.py
|
udacity/pontoon
|
e15a03a0c987615385b2a8c537bb18c99567f77e
|
[
"BSD-3-Clause"
] | null | null | null |
pontoon/checks/tests/test_libraries.py
|
udacity/pontoon
|
e15a03a0c987615385b2a8c537bb18c99567f77e
|
[
"BSD-3-Clause"
] | 1 |
2020-10-14T16:39:53.000Z
|
2020-10-14T16:39:53.000Z
|
pontoon/checks/tests/test_libraries.py
|
udacity/pontoon
|
e15a03a0c987615385b2a8c537bb18c99567f77e
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import pytest
from textwrap import dedent
from mock import patch, MagicMock, ANY
from pontoon.checks.libraries import run_checks
@pytest.yield_fixture
def run_tt_checks_mock():
with patch('pontoon.checks.libraries.translate_toolkit.run_checks') as mock:
yield mock
@pytest.yield_fixture()
def entity_properties_mock():
"""
Mock of entity from a .properties file.
"""
mock = MagicMock()
mock.resource.path = 'file.properties'
mock.resource.format = 'properties'
mock.resource.all.return_value = []
mock.string = 'Example string'
mock.comment = ''
yield mock
@pytest.yield_fixture()
def entity_dtd_mock():
"""
Mock of entity from a .dtd file.
"""
mock = MagicMock()
mock.resource.path = 'file.dtd'
mock.resource.format = 'dtd'
mock.resource.all.return_value = []
mock.string = 'Example string'
mock.key = 'entity_dtd'
mock.comment = ''
yield mock
@pytest.yield_fixture()
def entity_properties_plurals_mock():
"""
    Mock of entity from a .properties file with a plural localization comment.
"""
mock = MagicMock()
mock.resource.path = 'file.properties'
mock.resource.format = 'properties'
mock.resource.all.return_value = []
mock.string = 'Example string'
mock.comment = 'Localization_and_Plurals'
yield mock
@pytest.yield_fixture()
def entity_invalid_resource_mock():
"""
Mock of entity from a resource with unsupported filetype.
"""
mock = MagicMock()
mock.resource.path = 'file.invalid'
mock.resource.format = 'invalid'
mock.resource.all.return_value = []
mock.string = 'Example string'
mock.comment = ''
yield mock
@pytest.yield_fixture()
def entity_ftl_mock():
"""
    Mock of entity from a .ftl file.
"""
mock = MagicMock()
mock.resource.path = 'file.ftl'
mock.resource.format = 'ftl'
mock.resource.all.return_value = []
mock.string = dedent("""
windowTitle = Untranslated string
.pontoon = is cool
""")
mock.comment = ''
yield mock
def test_ignore_warnings(
entity_properties_plurals_mock,
):
"""
Check if logic of ignore_warnings works when there are errors.
"""
assert run_checks(
entity_properties_plurals_mock,
'en-US',
entity_properties_plurals_mock.string,
'plural1;plural2;plural3;plural4;plural5',
True,
) == {
'clWarnings': ['expecting 2 plurals, found 5'],
'ttWarnings': ['Simple capitalization', 'Starting capitalization']
}
# Warnings can be ignored for Translate Toolkit if user decides to do so
assert run_checks(
entity_properties_plurals_mock,
'en-US',
entity_properties_plurals_mock.string,
'plural1;plural2;plural3;plural4;plural5',
False,
) == {
'clWarnings': ['expecting 2 plurals, found 5'],
}
def test_invalid_resource_compare_locales(
entity_invalid_resource_mock,
):
"""
Unsupported resource shouldn't raise an error.
"""
assert run_checks(
entity_invalid_resource_mock,
'en-US',
entity_invalid_resource_mock.string,
'Translation',
False
) == {}
def test_tt_disabled_checks(
entity_properties_mock,
entity_dtd_mock,
run_tt_checks_mock,
):
"""
Check if overlapping checks are disabled in Translate Toolkit.
"""
assert run_checks(
entity_properties_mock,
'en-US',
entity_properties_mock.string,
'invalid translation \q',
True,
) == {
'clWarnings': [
'unknown escape sequence, \q'
]
}
run_tt_checks_mock.assert_called_with(
ANY,
ANY,
ANY,
{'escapes', 'acronyms', 'printf', 'gconf', 'kdecomments', 'nplurals'},
)
assert run_checks(
entity_dtd_mock,
'en-US',
entity_properties_mock.string,
'Translated string',
True,
) == {}
    run_tt_checks_mock.assert_called_with(
ANY,
ANY,
ANY,
{'acronyms', 'gconf', 'kdecomments'}
)
| 23.348315 | 80 | 0.63282 |
086087b195ccf5ce9d8f5cc94df16fd626d8681c
| 15,315 |
py
|
Python
|
movement_validation/statistics/specs.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
movement_validation/statistics/specs.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
movement_validation/statistics/specs.py
|
eulerkaku/movement_validation
|
af939a42a97c1de889cf13bad0c22a2824d60947
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Instances of these classes define how a feature should be quantized into a
histogram as well as some additional information (see csv files and class definitions)
The raw information is actually located in csv files in:
movement_validation/statistics/feature_metadata
These classes instantiate each row of these files as instances.
This is the Python port of:
https://github.com/JimHokanson/SegwormMatlabClasses/blob/master/%2Bseg_worm/%2Bstats/specs.m
and its subclasses:
https://github.com/JimHokanson/SegwormMatlabClasses/blob/master/%2Bseg_worm/%2Bstats/movement_specs.m
https://github.com/JimHokanson/SegwormMatlabClasses/blob/master/%2Bseg_worm/%2Bstats/simple_specs.m
https://github.com/JimHokanson/SegwormMatlabClasses/blob/master/%2Bseg_worm/%2Bstats/event_specs.m
This module defines the following classes:
Specs
MovementSpecs(Specs)
SimpleSpecs(Specs)
EventSpecs(Specs)
"""
import os
import csv
import numpy as np
from .. import utils
class Specs(object):
"""
Attributes
----------
long_field : string
Notes
-----
Formerly seg_worm.stats.specs in SegwormMatlabClasses
"""
def __init__(self):
"""
This initialization method does nothing. To instantiate, you need
to call the static factory method Specs.specs_factory
"""
pass
def __repr__(self):
return utils.print_object(self)
@property
def long_field(self):
"""
Give the "long" version of the instance's name.
Returns
-------
string
A '.' delimited concatenation of feature_field and sub_field.
"""
value = self.feature_field
if hasattr(self, 'sub_field') and \
            self.sub_field is not None and self.sub_field != '':
value += '.' + self.sub_field
return value
def getData(self, worm_features):
"""
Drill down into the nested data structure of worm_features to obtain
the numpy array with the data specific to this specification.
Parameters
----------
worm_features: A WormFeatures instance
All the feature data calculated for a single worm video.
            Arranged hierarchically into categories: posture, morphology,
path, locomotion, in an h5py group.
Returns
-------
A numpy array
"""
data = worm_features
# Call getattr as many times as is necessary, to dynamically
# access a potentially nested field.
# e.g. if self.feature_field = 'posture.coils', we'll need to call
# getattr twice, first on 'posture', and second on 'coils'.
for cur_feature_field in self.feature_field.split('.'):
if not hasattr(data, cur_feature_field):
                raise Exception("The WormFeatures instance passed does " +
"not have the feature: " + cur_feature_field +
". Its full name is " + self.long_field)
data = getattr(data, cur_feature_field)
return data
@staticmethod
def specs_factory(csv_path, class_function_handle):
"""
Factory for creating Specs subclasses for every extended feature
in a CSV file
Parameters
----------
csv_path: string
The path to a CSV file that has a list of extended features
class_function_handle: A class inheriting from Stats
Returns
-------
list
A list of instances of the Stats subclass provided by
class_function_handle, with each item in the list corresponding
to a row in the CSV file at the provided csv_path.
Notes
-----
Formerly function objs = seg_worm.stats.specs.getObjectsHelper( ...
csv_path,class_function_handle,prop_names,prop_types)
The inherited objects can give relatively simple
instructions on how their properties should be interpreted
from their CSV specification file.
TODO:
It would be nice to do the reading and object construction in
here but Matlab is awkward for dynamic object creation
- @JimHokanson
"""
stats_instances = []
        # See the comment below explaining the data_types mapping
data_types = {1: str, 2: float, 3: int, 4: bool}
with open(csv_path) as feature_metadata_file:
feature_metadata = csv.DictReader(feature_metadata_file)
# The first row of the CSV file contains the field names.
# The second row of the CSV file contains information about
# what kind of data is held in each column:
# 1 = str
# 2 = float
# 3 = int
# 4 = bool
# (this mapping was recorded above in data_types)
field_data_types = next(feature_metadata)
# The third to last rows of the CSV file contain the feature
# metadata. Let's now create a stats_instance for each
# of these rows, initializing them with the row's metadata.
for row in feature_metadata:
# Dynamically create an instance of the right kind
# of class
stats_instance = class_function_handle()
for field in row:
# Blank values are given the value None
value = None
if(row[field] != ''):
# Here we are dynamically casting the element
# to the correct data type of the field,
                        # which was recorded in the data_types dictionary.
data_type = data_types[int(field_data_types[field])]
if data_type == bool:
# We must handle bool as a separate case because
# bool('0') = True. To correct this, we must
# first cast to int: e.g. bool(int('0')) = False
value = bool(int(row[field]))
else:
value = data_type(row[field])
# Dynamically assign the field's value to the
# member data element of the same name in the object
setattr(stats_instance, field, value)
# Only append this row to our list if there is
# actually a name. If not it's likely just a blank row.
if stats_instance.feature_field:
stats_instances.append(stats_instance)
return stats_instances
class SimpleSpecs(Specs):
"""
%
% Class:
% seg_worm.stats.simple_specs
%
"""
def __init__(self):
pass
@staticmethod
def getSpecs():
"""
Formerly function objs = getSpecs()
%
%
% s_specs = seg_worm.stats.simple_specs.getSpecs();
%
%
"""
csv_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'feature_metadata',
'simple_features.csv')
# Return a list of SimpleSpecs instances, one instance for each
# row in the csv_path CSV file. Each row represents a feature.
return Specs.specs_factory(csv_path, SimpleSpecs)
class MovementSpecs(Specs):
"""
This class specifies how to treat each movement related feature when doing
histogram processing.
Attributes
----------
feature_field :
old_feature_field :
index :
feature_category :
is_time_series :
bin_width :
is_zero_bin :
is_signed :
name :
short_name :
units :
Created via static method, getSpecs()
%From Matlab comments:
%
% TODO:
% - might need to incorporate seg_worm.w.stats.wormStatsInfo
% - remove is_time_series entry ...
"""
def __init__(self):
pass
def getData(self, worm_features):
"""
Parameters
----------
worm_features : movement_validation.features.WormFeatures
All the feature data calculated for a single worm video.
            Arranged hierarchically into categories: posture, morphology,
path, locomotion.
Notes
-----------------------
Formerly data = getData(obj,feature_obj)
"""
data = super(MovementSpecs,self).getData(worm_features)
# NOTE: We can't filter data here because the data is
# filtered according to the value of the data, not
# according to the velocity of the midbody
        if self.index is not None and data is not None:
# This is for eigenprojections, i.e. for instances when
# self.feature_field = 'posture.eigen_projection'
            # In these cases the data is stored as a [6, num_frames] numpy
# array. We use self.index to identify which of the 6 eigenworms
# we are looking for projections of, for this particular feature.
data = data[self.index,:]
# So now our data has shape num_frames instead of [6, num_frames]
return data
@staticmethod
def getSpecs():
"""
Formerly objs = getSpecs()
%seg_worm.stats.movement_specs.getSpecs();
Returns
---------------------
"""
csv_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'feature_metadata',
'movement_features.csv')
# Return a list of MovementSpecs instances, one instance for each
# row in the csv_path CSV file. Each row represents a feature.
return Specs.specs_factory(csv_path, MovementSpecs)
class EventSpecs(Specs):
"""
Notes
--------------------------
Formerly seg_worm.stats.event_specs
"""
def __init__(self):
pass
@staticmethod
def getSpecs():
"""
Formerly function objs = getSpecs()
%
%
% s_specs = seg_worm.stats.event_specs.getSpecs();
%
%
"""
csv_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'feature_metadata',
'event_features.csv')
# Return a list of MovementSpecs instances, one instance for each
# row in the csv_path CSV file. Each row represents a feature.
return Specs.specs_factory(csv_path, EventSpecs)
def getData(self, worm_features, num_samples):
"""
Parameters
---------------------
worm_features: A WormFeatures instance
All the feature data calculated for a single worm video.
Arranged hierarchically into categories:
- posture
- morphology,
- path
- locomotion, in an h5py group.
num_samples: int
Number of samples (i.e. number of frames in the video)
Returns
---------------------
#https://github.com/JimHokanson/SegwormMatlabClasses/blob/master/%2Bseg_worm/%2Bstats/event_specs.m#L55
Notes
---------------------
Formerly SegwormMatlabClasses / +seg_worm / +stats / event_specs.m
function data = getData(obj,feature_obj,n_samples)
"""
# For example, self.feature_field might be
# locomotion.motion_events.forward,
# and self.sub_field might be
# time_between_events or distance_between_events, etc.
# Basically we want to drill to the bottom of the nested
# heirarchy of data in worm_features.
#JAH: This will fail in Python 2.7
#???? super(Specs).getData(worm_features)
parent_data = super(EventSpecs,self).getData(worm_features)
#JAH: The Matlab code would use an empty structure.
#Rather than just having an empty class, all events have a property 'is_null' which
#indicates if the event class is fully populated or if there are no events for the video.
if parent_data is None or parent_data.is_null:
return None
if self.sub_field is not None:
data = getattr(parent_data, self.sub_field)
if self.is_signed:
negate_mask = getattr(parent_data, self.signed_field)
if len(negate_mask) == 1 and negate_mask == True:
# Handle the case where data is just one value,
# a scalar, rather than a numpy array
data *= -1
elif len(negate_mask) == len(data):
# Our mask size perfectly matches the data size
# e.g. for event_durations
data[negate_mask] *= -1
elif len(negate_mask) == len(data) + 1:
# Our mask is one larger than the data size
# e.g. for time_between_events
# DEBUG: Are we masking the right entry here?
# should we perhaps be using
# negate_mask[:-1] instead?
data[negate_mask[1:]] *= -1
else:
raise Exception("For the signed_field " +
self.signed_field + " and the data " +
self.long_field + ", " +
"len(negate_mask) is not the same " +
"size or one smaller as len(data), " +
"as required.")
if self.remove_partial_events:
# Remove the starting and ending event if it's right
# up against the edge of the data, since we can't be
# sure that the video captured the full extent of the
# event
start_frames = parent_data.start_frames
end_frames = parent_data.end_frames
                remove_mask = np.zeros(len(data), dtype=bool)
if start_frames[0] == 0:
remove_mask[:end_frames[0]+1] = True
if end_frames[-1] == num_samples:
remove_mask[start_frames[-1]:] = True
# Remove all entries corresponding to True
# in the remove_mask
                data = data[~remove_mask]
else:
            raise Exception("The WormFeature contains no data for " + self.long_field)
if data.size == 0 and self.make_zero_if_empty:
data = 0
return data
| 33.882743 | 111 | 0.55253 |
970dc4ed84f6566050349e7b58a9e7f03f2daac5
| 1,072 |
py
|
Python
|
napari/layers/surface/_surface_constants.py
|
MaksHess/napari
|
64a144607342c02177fc62fa83a3442ace0a98e7
|
[
"BSD-3-Clause"
] | 1,345 |
2019-03-03T21:14:14.000Z
|
2022-03-31T19:46:39.000Z
|
napari/layers/surface/_surface_constants.py
|
MaksHess/napari
|
64a144607342c02177fc62fa83a3442ace0a98e7
|
[
"BSD-3-Clause"
] | 3,904 |
2019-03-02T01:30:24.000Z
|
2022-03-31T20:17:27.000Z
|
napari/layers/surface/_surface_constants.py
|
MaksHess/napari
|
64a144607342c02177fc62fa83a3442ace0a98e7
|
[
"BSD-3-Clause"
] | 306 |
2019-03-29T17:09:10.000Z
|
2022-03-30T09:54:11.000Z
|
from enum import auto
from ...utils.misc import StringEnum
from ...utils.translations import trans
class Shading(StringEnum):
"""Shading: Shading mode for the surface.
Selects a preset shading mode in vispy that determines how
color is computed in the scene.
See also: https://www.khronos.org/registry/OpenGL-Refpages/gl2.1/xhtml/glShadeModel.xml
Shading.NONE
Computed color is interpreted as input color, unaffected by
lighting. Corresponds to shading='none'.
Shading.FLAT
        Computed colors are the color at a specific vertex for each
primitive in the mesh. Corresponds to shading='flat'.
Shading.SMOOTH
Computed colors are interpolated between vertices for each
primitive in the mesh. Corresponds to shading='smooth'
"""
NONE = auto()
FLAT = auto()
SMOOTH = auto()
SHADING_TRANSLATION = {
trans._("none"): Shading.NONE,
trans._("flat"): Shading.FLAT,
trans._("smooth"): Shading.SMOOTH,
}
| 31.529412 | 91 | 0.646455 |
7bfb701e7c536a1e82ef06617409e3afc5890947
| 113 |
py
|
Python
|
iseg/runner/amp.py
|
TaikiInoue/iSegmentation
|
25b2ce6a766e22cefca44b5dd21edadb0870df23
|
[
"MIT"
] | null | null | null |
iseg/runner/amp.py
|
TaikiInoue/iSegmentation
|
25b2ce6a766e22cefca44b5dd21edadb0870df23
|
[
"MIT"
] | null | null | null |
iseg/runner/amp.py
|
TaikiInoue/iSegmentation
|
25b2ce6a766e22cefca44b5dd21edadb0870df23
|
[
"MIT"
] | null | null | null |
from torch.cuda.amp import GradScaler
class RunnerAMP:
def init_scaler(self):
return GradScaler()
| 14.125 | 37 | 0.707965 |
1a093f7569c5b3be86a1411161a46111294fee74
| 3,963 |
py
|
Python
|
tests/unit/extraction_rules/test_audit_policy.py
|
karolkieglerski/dynatrace-gcp-function
|
3da9abf969aebe63cecbc7817f033123637dd12e
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/extraction_rules/test_audit_policy.py
|
karolkieglerski/dynatrace-gcp-function
|
3da9abf969aebe63cecbc7817f033123637dd12e
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/extraction_rules/test_audit_policy.py
|
karolkieglerski/dynatrace-gcp-function
|
3da9abf969aebe63cecbc7817f033123637dd12e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Dynatrace LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from datetime import datetime
from queue import Queue
from typing import NewType, Any
from lib.context import LogsContext
from lib.logs import logs_processor
from lib.logs.metadata_engine import ATTRIBUTE_GCP_PROJECT_ID, ATTRIBUTE_GCP_RESOURCE_TYPE, ATTRIBUTE_SEVERITY, \
ATTRIBUTE_CLOUD_PROVIDER, ATTRIBUTE_CLOUD_REGION, ATTRIBUTE_GCP_REGION, ATTRIBUTE_CONTENT, ATTRIBUTE_TIMESTAMP, \
ATTRIBUTE_DT_LOGPATH, ATTRIBUTE_AUDIT_IDENTITY, ATTRIBUTE_AUDIT_ACTION, ATTRIBUTE_AUDIT_RESULT
MonkeyPatchFixture = NewType("MonkeyPatchFixture", Any)
timestamp = datetime.utcnow().isoformat() + "Z"
# From https://cloud.google.com/vpc-service-controls/docs/troubleshooting
record = {
"insertId": "222lvajc6f7",
"logName": "projects/dynatrace-gcp-extension/logs/cloudaudit.googleapis.com%2Fpolicy",
"protoPayload": {
"@type": "type.googleapis.com/google.cloud.audit.AuditLog",
"authenticationInfo": {
"principalEmail": "[email protected]"
},
"metadata": {
"@type": "type.googleapis.com/google.cloud.audit.VpcServiceControlAuditMetadata",
"resourceNames": [
"projects/_"
],
"violationReason": "NO_MATCHING_ACCESS_LEVEL"
},
"methodName": "google.storage.NoBillingOk",
"requestMetadata": {
"callerIp": "x.x.x.x",
"destinationAttributes": {},
"requestAttributes": {}
},
"resourceName": "projects/690885588241",
"serviceName": "storage.googleapis.com",
"status": {
"code": 7,
"details": [
{
"@type": "type.googleapis.com/google.rpc.PreconditionFailure",
"violations": [
{
"type": "VPC_SERVICE_CONTROLS"
}
]
}
],
"message": "Request is prohibited by organization's policy"
}
},
"receiveTimestamp": "2018-11-27T21:40:43.823209571Z",
"resource": {
"labels": {
"method": "google.storage.NoBillingOk",
"project_id": "dynatrace-gcp-extension",
"service": "storage.googleapis.com"
},
"type": "audited_resource"
},
"severity": "ERROR",
"timestamp": timestamp
}
expected_output_list = [
{
ATTRIBUTE_CLOUD_PROVIDER: 'gcp',
ATTRIBUTE_GCP_PROJECT_ID: 'dynatrace-gcp-extension',
ATTRIBUTE_GCP_RESOURCE_TYPE: 'audited_resource',
ATTRIBUTE_TIMESTAMP: timestamp,
ATTRIBUTE_CONTENT: json.dumps(record),
ATTRIBUTE_DT_LOGPATH: 'projects/dynatrace-gcp-extension/logs/cloudaudit.googleapis.com%2Fpolicy',
ATTRIBUTE_AUDIT_IDENTITY: '[email protected]',
ATTRIBUTE_AUDIT_ACTION: 'google.storage.NoBillingOk',
ATTRIBUTE_AUDIT_RESULT: 'Failed.PermissionDenied',
ATTRIBUTE_SEVERITY: 'ERROR',
}
]
logs_context = LogsContext(
project_id_owner="",
dynatrace_api_key="",
dynatrace_url="",
scheduled_execution_id="",
sfm_queue=Queue()
)
def test_extraction():
for entry in expected_output_list:
actual_output = logs_processor._create_dt_log_payload(logs_context, entry[ATTRIBUTE_CONTENT])
assert actual_output == entry
| 36.027273 | 117 | 0.642947 |
69883ae1a0de22f61630de150c92fd9e198b58fe
| 3,680 |
py
|
Python
|
os_collect_config/local.py
|
mail2nsrajesh/os-collect-config
|
5be5fe782f4e5802c4250f4a1f7fe3b6e4f9221a
|
[
"Apache-2.0"
] | null | null | null |
os_collect_config/local.py
|
mail2nsrajesh/os-collect-config
|
5be5fe782f4e5802c4250f4a1f7fe3b6e4f9221a
|
[
"Apache-2.0"
] | null | null | null |
os_collect_config/local.py
|
mail2nsrajesh/os-collect-config
|
5be5fe782f4e5802c4250f4a1f7fe3b6e4f9221a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import locale
import os
import stat
from oslo_config import cfg
from oslo_log import log
from os_collect_config import exc
LOCAL_DEFAULT_PATHS = ['/var/lib/os-collect-config/local-data']
CONF = cfg.CONF
opts = [
cfg.MultiStrOpt('path',
default=LOCAL_DEFAULT_PATHS,
help='Local directory to scan for Metadata files.')
]
name = 'local'
logger = log.getLogger(__name__)
def _dest_looks_insecure(local_path):
'''We allow group writable so owner can let others write.'''
looks_insecure = False
uid = os.getuid()
st = os.stat(local_path)
if uid != st[stat.ST_UID]:
logger.error('%s is owned by another user. This is a'
' security risk.' % local_path)
looks_insecure = True
if st.st_mode & stat.S_IWOTH:
logger.error('%s is world writable. This is a security risk.'
% local_path)
looks_insecure = True
return looks_insecure
class Collector(object):
def __init__(self, requests_impl=None):
pass
def collect(self):
if len(cfg.CONF.local.path) == 0:
raise exc.LocalMetadataNotAvailable
final_content = []
for local_path in cfg.CONF.local.path:
try:
os.stat(local_path)
except OSError:
logger.warn("%s not found. Skipping", local_path)
continue
if _dest_looks_insecure(local_path):
raise exc.LocalMetadataNotAvailable
for data_file in os.listdir(local_path):
if data_file.startswith('.'):
continue
data_file = os.path.join(local_path, data_file)
if os.path.isdir(data_file):
continue
st = os.stat(data_file)
if st.st_mode & stat.S_IWOTH:
logger.error(
'%s is world writable. This is a security risk.' %
data_file)
raise exc.LocalMetadataNotAvailable
with open(data_file) as metadata:
try:
value = json.loads(metadata.read())
except ValueError as e:
logger.error(
'%s is not valid JSON (%s)' % (data_file, e))
raise exc.LocalMetadataNotAvailable
basename = os.path.basename(data_file)
final_content.append((basename, value))
if not final_content:
logger.warn('No local metadata found (%s)' %
cfg.CONF.local.path)
# Now sort specifically by C locale
def locale_aware_by_first_item(data):
return locale.strxfrm(data[0])
save_locale = locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, 'C')
sorted_content = sorted(final_content, key=locale_aware_by_first_item)
locale.setlocale(locale.LC_ALL, save_locale)
return sorted_content
| 35.728155 | 78 | 0.599457 |
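The collector above rejects metadata paths that are owned by another user or world writable, and sorts the collected entries under the C locale. Here is a small sketch of the world-writable check in isolation; the example paths are only illustrative.
import os
import stat
def world_writable(path):
    # True if 'other' users have write permission on the path
    return bool(os.stat(path).st_mode & stat.S_IWOTH)
# /tmp is typically world writable (with the sticky bit), /etc normally is not
print(world_writable("/tmp"), world_writable("/etc"))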
746982639fe8d25471cb578846e2f0baff812ea0
| 2,142 |
py
|
Python
|
Data/initialize_database.py
|
GregHilston/Score-Board
|
c204412434cc395bafd5212fd454b903d93837ea
|
[
"MIT"
] | null | null | null |
Data/initialize_database.py
|
GregHilston/Score-Board
|
c204412434cc395bafd5212fd454b903d93837ea
|
[
"MIT"
] | 2 |
2017-03-13T18:58:21.000Z
|
2017-03-13T18:58:51.000Z
|
Data/initialize_database.py
|
GregHilston/Score-Board
|
c204412434cc395bafd5212fd454b903d93837ea
|
[
"MIT"
] | null | null | null |
import sys, sqlite3, csv, logging
class DatabaseInitializer():
def __init__(self, logger, sqlite):
self._logger = logger
self._sqlite = sqlite
def start(self):
"""
Starts the initialize and populate process
"""
GAMES_TABLE_NAME = "games"
PLAYERS_TABLE_NAME = "players"
RECORDS_TABLE_NAME = "records"
try:
# create our tables
self._sqlite.execute("CREATE TABLE {} (id INTEGER PRIMARY KEY AUTOINCREMENT, name char(100) UNIQUE NOT NULL)".format(GAMES_TABLE_NAME))
self._sqlite.execute("CREATE TABLE {} (id INTEGER PRIMARY KEY AUTOINCREMENT, name char(100) UNIQUE NOT NULL)".format(PLAYERS_TABLE_NAME))
self._sqlite.execute("CREATE TABLE {} (date char(100) NOT NULL, time char(100) NOT NULL, game INTEGER NOT NULL, winner INTEGER NOT NULL, loser INTEGER NOT NULL, ip char(100) NOT NULL)".format(RECORDS_TABLE_NAME))
self._sqlite.commit()
# populate our tables
self.populate_table(GAMES_TABLE_NAME, "Data/games.csv")
self.populate_table(PLAYERS_TABLE_NAME, "Data/players.csv")
self._logger.info("Database does not already exist. Creating it")
except sqlite3.OperationalError:
self._logger.warning("Database already exists, not overwriting it")
def populate_table(self, TABLE_NAME, CSV_FILE_NAME):
"""
Populates a table with CSV data
"""
with open(CSV_FILE_NAME) as f:
csv_file = csv.reader(f)
self._logger.debug("Populating contents of {}".format(csv_file))
for row in csv_file:
val = row[0] # Only using the 0th column
self._logger.debug("\t {} of type {}".format(val, type(val,)))
self._sqlite.execute("INSERT into {} (name) VALUES (?)".format(TABLE_NAME), (val,))
self._sqlite.commit()
def main():
"""
Creates and kicks off our DatabaseInitializer
"""
    # DatabaseInitializer requires a logger and an sqlite3 connection;
    # the database filename below is assumed for illustration.
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    connection = sqlite3.connect("scoreboard.db")
    database_initializer = DatabaseInitializer(logger, connection)
database_initializer.start()
if __name__ == "__main__":
main()
| 35.114754 | 224 | 0.627918 |
4f43cd887abece6a371266464abcbf9c5ac44692
| 4,265 |
py
|
Python
|
benchmark/startQiskit_Class2672.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_Class2672.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_Class2672.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=4
# total number=40
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=19
prog.y(input_qubit[2]) # number=36
prog.cz(input_qubit[0],input_qubit[3]) # number=20
prog.h(input_qubit[3]) # number=21
prog.cx(input_qubit[0],input_qubit[3]) # number=14
prog.cx(input_qubit[0],input_qubit[3]) # number=25
prog.cx(input_qubit[0],input_qubit[3]) # number=28
prog.x(input_qubit[3]) # number=29
prog.cx(input_qubit[0],input_qubit[3]) # number=30
prog.cx(input_qubit[3],input_qubit[1]) # number=35
prog.y(input_qubit[2]) # number=34
prog.cx(input_qubit[0],input_qubit[3]) # number=27
prog.h(input_qubit[3]) # number=22
prog.cz(input_qubit[0],input_qubit[3]) # number=23
prog.h(input_qubit[3]) # number=24
prog.cx(input_qubit[0],input_qubit[3]) # number=13
prog.h(input_qubit[3]) # number=18
prog.z(input_qubit[3]) # number=10
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.cx(input_qubit[3],input_qubit[0]) # number=31
prog.cx(input_qubit[3],input_qubit[0]) # number=37
prog.z(input_qubit[3]) # number=38
prog.cx(input_qubit[3],input_qubit[0]) # number=39
prog.cx(input_qubit[3],input_qubit[0]) # number=33
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
# circuit end
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
sample_shot =8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class2672.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 34.959016 | 140 | 0.649238 |
04686b2c0fc14d29505f65e333fdeb486ec3fa9f
| 2,997 |
py
|
Python
|
test_piui.py
|
sanfilippomike/piui-master
|
5c5b6f4f63077f4e370ee05c66e7cf044d191420
|
[
"BSD-2-Clause",
"Unlicense"
] | 244 |
2015-01-03T00:40:29.000Z
|
2022-03-12T13:20:21.000Z
|
test_piui.py
|
bojiunchen/piui
|
2a2311255d11a109f2f21a8a8a21066d55fbf6f5
|
[
"BSD-2-Clause",
"Unlicense"
] | 17 |
2015-01-05T21:06:22.000Z
|
2015-12-07T20:45:44.000Z
|
test_piui.py
|
bojiunchen/piui
|
2a2311255d11a109f2f21a8a8a21066d55fbf6f5
|
[
"BSD-2-Clause",
"Unlicense"
] | 80 |
2015-01-07T14:36:01.000Z
|
2021-01-04T17:51:48.000Z
|
import json
import unittest
import urllib2
from piui import PiUi
class PiUiTestCase(unittest.TestCase):
def setUp(self):
self._ui = PiUi("Test", timeout=1)
def tearDown(self):
print "tearDown"
self._ui.exit()
def http_get(self, rel_url):
handler = urllib2.urlopen('http://localhost:9999/' + rel_url)
return handler.getcode(), handler.read()
def click(self):
self._clicked = True
def test_menu(self):
self.page = self._ui.new_ui_page(title="PiUi")
self.list = self.page.add_list()
self.list.add_item("Static Content", chevron=True, onclick=self.click)
self.list.add_item("Buttons", chevron=True, onclick=self.click)
self.list.add_item("Input", chevron=True, onclick=self.click)
self.list.add_item("Images", chevron=True, onclick=self.click)
self.list.add_item("Toggles", chevron=True, onclick=self.click)
self.list.add_item("Console!", chevron=True, onclick=self.click)
resp = self.http_get('/')
assert "initPiUi();" in resp[1]
resp = self.http_get('/init')
assert "ok" in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "newpage"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "pagepost"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addul"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addli"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "timeout"' in resp[1]
def test_clicks(self):
self._clicked = False
self.page = self._ui.new_ui_page(title="PiUi")
self.title = self.page.add_textbox("Buttons!", "h1")
plus = self.page.add_button("Test Button", self.click)
resp = self.http_get('/')
assert "initPiUi();" in resp[1]
resp = self.http_get('/init')
assert "ok" in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "newpage"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "pagepost"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addelement"' in resp[1]
resp = self.http_get('/poll')
assert '"cmd": "addbutton"' in resp[1]
btn_cmd = resp[1]
resp = self.http_get('/poll')
assert '"cmd": "timeout"' in resp[1]
decoder = json.JSONDecoder()
cmd = decoder.decode(btn_cmd)
self.http_get('/click?eid=' + cmd['eid'])
assert self._clicked
if __name__ == '__main__':
unittest.main()
| 35.678571 | 78 | 0.576243 |
aad9130ebc33059f974d2e537f4e7ea044b0adb2
| 521 |
py
|
Python
|
leads/migrations/0005_lead_organisation.py
|
tmbyers1102/int-djcrm
|
c820953e1693075a8ed302c4e67d73da7cc79bd0
|
[
"MIT"
] | 250 |
2021-01-13T23:32:57.000Z
|
2022-03-28T03:01:45.000Z
|
leads/migrations/0005_lead_organisation.py
|
ashrafali46/getting-started-with-django
|
a58f624f813dd266796ee25bdd34298af3be6084
|
[
"MIT"
] | 8 |
2021-03-17T09:24:08.000Z
|
2022-02-18T10:24:15.000Z
|
leads/migrations/0005_lead_organisation.py
|
ashrafali46/getting-started-with-django
|
a58f624f813dd266796ee25bdd34298af3be6084
|
[
"MIT"
] | 228 |
2021-01-14T00:10:10.000Z
|
2022-03-28T11:05:28.000Z
|
# Generated by Django 3.1.4 on 2020-12-16 10:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('leads', '0004_auto_20201216_1046'),
]
operations = [
migrations.AddField(
model_name='lead',
name='organisation',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='leads.userprofile'),
preserve_default=False,
),
]
| 24.809524 | 116 | 0.639155 |
76ede487401b1c8d59e9151128998462b8b6c5cd
| 4,654 |
py
|
Python
|
mistral_tempest_tests/tests/scenario/engine/actions/v2/test_openstack_actions.py
|
openstack/mistral-tempest-plugin
|
091926bbb405bf8d0249156f11f090dc455077dc
|
[
"Apache-2.0"
] | 6 |
2018-06-27T11:19:36.000Z
|
2019-07-18T10:32:55.000Z
|
mistral_tempest_tests/tests/scenario/engine/actions/v2/test_openstack_actions.py
|
openstack/mistral-tempest-plugin
|
091926bbb405bf8d0249156f11f090dc455077dc
|
[
"Apache-2.0"
] | null | null | null |
mistral_tempest_tests/tests/scenario/engine/actions/v2/test_openstack_actions.py
|
openstack/mistral-tempest-plugin
|
091926bbb405bf8d0249156f11f090dc455077dc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from tempest.lib import decorators
from mistral_tempest_tests.tests import base
class OpenStackActionsTestsV2(base.TestCase):
_service = 'workflowv2'
# TODO(akuznetsova): add checks for task result after task_output
# TODO(akuznetsova): refactoring will be finished
def setUp(self):
super(OpenStackActionsTestsV2, self).setUp()
_, self.wb = self.client.create_workbook(
'openstack/action_collection_wb.yaml')
@decorators.attr(type='openstack')
@decorators.idempotent_id('9a999fc2-a089-4375-bc69-e1ed85b17a82')
def test_nova_actions(self):
wf_name = self.wb['name'] + '.nova'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_tasks = self.client.get_wf_tasks(wf_name)
for executed_task in executed_tasks:
self.assertEqual('SUCCESS', executed_task['state'])
if executed_task['name'] == 'versions_get_current':
published = jsonutils.loads(executed_task['published'])
# NOTE(kiennt): By default, microversion is 2.1.
# But now Mistral supports OpenStack project
# dynamic version, so the version should be
# the max possible version.
self.assertNotEqual('2.1', published['result']['version'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('81bdc1c9-cd9a-4c97-b8ce-e44f5211eace')
def test_keystone_actions(self):
wf_name = self.wb['name'] + '.keystone'
_, execution = self.admin_client.create_execution(wf_name)
self.admin_client.wait_execution_success(execution)
executed_task = self.admin_client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('fde681b8-3e1b-4172-a4b8-2fcac1f070d9')
def test_heat_actions(self):
wf_name = self.wb['name'] + '.heat'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_task = self.client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('5981360d-f336-45ca-9d38-799c7a8ade26')
def test_glance_actions(self):
wf_name = self.wb['name'] + '.glance'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_task = self.client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('a1f71a72-3681-4d32-aad9-117068717b33')
def test_cinder_actions(self):
wf_name = self.wb['name'] + '.cinder'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_task = self.client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('586dd973-fc65-40e2-9a85-31418b22473a')
def test_neutron_actions(self):
wf_name = self.wb['name'] + '.neutron'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_task = self.client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
@decorators.attr(type='openstack')
@decorators.idempotent_id('985c1051-cf2e-4fd0-8ceb-a9b8110597a1')
def test_swift_actions(self):
wf_name = self.wb['name'] + '.swift'
_, execution = self.client.create_execution(wf_name)
self.client.wait_execution_success(execution)
executed_task = self.client.get_wf_tasks(wf_name)[-1]
self.assertEqual('SUCCESS', executed_task['state'])
| 42.309091 | 77 | 0.686936 |
b2ad6a6175623c51cc32cbe34eba7a7eadbc1a13
| 3,413 |
py
|
Python
|
app/app/settings.py
|
andrewtdunn/recipe-app-api
|
f46775563b32399d792fb2f93801e9432ef0a71a
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
andrewtdunn/recipe-app-api
|
f46775563b32399d792fb2f93801e9432ef0a71a
|
[
"MIT"
] | null | null | null |
app/app/settings.py
|
andrewtdunn/recipe-app-api
|
f46775563b32399d792fb2f93801e9432ef0a71a
|
[
"MIT"
] | null | null | null |
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'odp&j=gx-(@xmhm*tg+78i^4q2@h&#us69ebm%0ux#=vqlmi6&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'core',
'user',
'recipe',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'
AUTH_USER_MODEL = 'core.User'
| 25.281481 | 91 | 0.685028 |
cf3f3da11754b5ccca4d0cf3f0030fb2860e0b16
| 1,200 |
py
|
Python
|
qiskit/circuit/library/template_circuits/toffoli/template_9c_4.py
|
TanveshT/qiskit-terra
|
dc3a2a667b8dc22512ca409ecae347d8dbdd944c
|
[
"Apache-2.0"
] | 1 |
2021-07-11T18:17:38.000Z
|
2021-07-11T18:17:38.000Z
|
qiskit/circuit/library/template_circuits/toffoli/template_9c_4.py
|
TanveshT/qiskit-terra
|
dc3a2a667b8dc22512ca409ecae347d8dbdd944c
|
[
"Apache-2.0"
] | null | null | null |
qiskit/circuit/library/template_circuits/toffoli/template_9c_4.py
|
TanveshT/qiskit-terra
|
dc3a2a667b8dc22512ca409ecae347d8dbdd944c
|
[
"Apache-2.0"
] | 1 |
2021-01-31T02:24:55.000Z
|
2021-01-31T02:24:55.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Template 9c_4:
.. parsed-literal::
q_0: ──■────■─────────■──────────────■────────────
┌─┴─┐ │ ┌───┐┌─┴─┐ ┌───┐ │ ┌───┐
q_1: ┤ X ├──■──┤ X ├┤ X ├─────┤ X ├──■───────┤ X ├
└─┬─┘┌─┴─┐└───┘└─┬─┘┌───┐└─┬─┘┌─┴─┐┌───┐└─┬─┘
q_2: ──■──┤ X ├───────■──┤ X ├──■──┤ X ├┤ X ├──■──
└───┘ └───┘ └───┘└───┘
"""
from qiskit.circuit.quantumcircuit import QuantumCircuit
def template_9c_4():
"""
Returns:
QuantumCircuit: template as a quantum circuit.
"""
qc = QuantumCircuit(3)
qc.ccx(0, 2, 1)
qc.ccx(0, 1, 2)
qc.x(1)
qc.ccx(0, 2, 1)
qc.x(2)
qc.cx(2, 1)
qc.ccx(0, 1, 2)
qc.x(2)
qc.cx(2, 1)
return qc
| 27.906977 | 77 | 0.499167 |
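template_9c_4 above returns a fixed 3-qubit circuit for Qiskit's template-matching optimization; such templates are built so the whole gate sequence composes to the identity. A short usage sketch, assuming the function above is importable; the identity check reflects that general design rather than anything asserted by the file itself.
import numpy as np
from qiskit.quantum_info import Operator
qc = template_9c_4()
print(qc.draw())          # same layout as the docstring diagram
print(qc.count_ops())     # 9 gates in total: 4 ccx, 2 cx, 3 x
print(np.allclose(Operator(qc).data, np.eye(8)))  # templates are designed to equal the identity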
2c1aa6f35d13f19d1f23bf743b3db9aa1b114a51
| 83,273 |
py
|
Python
|
bot.py
|
mii-10/program-team
|
e8ff65c31a309b2c2aa56ffb143a582a1fa22aff
|
[
"MIT"
] | 12 |
2020-01-06T11:49:53.000Z
|
2021-07-22T14:59:00.000Z
|
bot.py
|
mii-10/program-team
|
e8ff65c31a309b2c2aa56ffb143a582a1fa22aff
|
[
"MIT"
] | 30 |
2020-01-04T13:40:49.000Z
|
2022-01-17T11:49:52.000Z
|
bot.py
|
mii-10/program-team
|
e8ff65c31a309b2c2aa56ffb143a582a1fa22aff
|
[
"MIT"
] | 31 |
2020-01-04T13:22:19.000Z
|
2021-02-06T09:07:01.000Z
|
# -*- coding: utf-8 -*- #
from cogs import apple_invite
from cogs import apple_foc
import discord
from discord.ext import commands, tasks
import json
import random
import wikipedia
import wikidata.client
from PIL import Image, ImageDraw, ImageFont
import time
import asyncio
import datetime
import pickle
import sys
import platform
import re
from twitter import *
from dateutil.relativedelta import relativedelta as rdelta
import traceback
import os
import shutil
import pytz
import sqlite3
import aiohttp
# from discord_slash import SlashCommand
from my_module import dpy_interaction as dpyui
# textto etc
import m10s_util as ut
from apple_util import AppleUtil
from l10n import TranslateHandler, LocalizedContext
from checker import MaliciousInput, content_checker
# tokens
import config
"""import logging
logging.basicConfig(level=logging.DEBUG)"""
intents:discord.Intents = discord.Intents.default()
intents.members = True
intents.presences = True
bot = commands.Bot(command_prefix="s-", status=discord.Status.invisible,
allowed_mentions=discord.AllowedMentions(everyone=False),
intents=intents)
bot.owner_id = 404243934210949120
# slash = SlashCommand(bot,sync_commands=True,sync_on_cog_reload=True)
bot.dpyui = dpyui.interaction_actions(bot)
bot.team_sina = config.team_sina
# tokens
bot.DROP_TOKEN = config.DROP_TOKEN
bot.BOT_TEST_TOKEN = config.BOT_TEST_TOKEN
bot.BOT_TOKEN = config.BOT_TOKEN
bot.NAPI_TOKEN = config.NAPI_TOKEN
bot.GAPI_TOKEN = config.GAPI_TOKEN
bot.T_API_key = config.T_API_key
bot.T_API_SKey = config.T_API_SKey
bot.T_Acs_Token = config.T_Acs_Token
bot.T_Acs_SToken = config.T_Acs_SToken
# test
postcount = {}
sqlite3.register_converter('pickle', pickle.loads)
sqlite3.register_converter('json', json.loads)
sqlite3.register_adapter(dict, json.dumps)
sqlite3.register_adapter(list, pickle.dumps)
db = sqlite3.connect(
"sina_datas.db", detect_types=sqlite3.PARSE_DECLTYPES, isolation_level=None)
db.row_factory = sqlite3.Row
bot.cursor = db.cursor()
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS users(id integer PRIMARY KEY NOT NULL,prefix pickle,gpoint integer,memo json,levcard text,onnotif pickle,lang text,accounts pickle,sinapartner integer,gban integer,gnick text,gcolor integer,gmod integer,gstar integer,galpha integer,gbanhist text)")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS guilds(id integer PRIMARY KEY NOT NULL,levels json,commands json,hash pickle,levelupsendto integer,reward json,jltasks json,lockcom pickle,sendlog integer,prefix pickle,lang text)")
""" old gchat
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS globalchs(name text PRIMARY KEY NOT NULL,ids pickle)")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS globaldates(id integer PRIMARY KEY NOT NULL,content text,allid pickle,aid integer,gid integer,timestamp text)")
"""
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS invites(id text PRIMARY KEY NOT NULL, guild_id int NOT NULL, uses integer, inviter_id integer NOT NULL);")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS gban_settings(id integer PRIMARY KEY NOT NULL,chid integer);")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS gban_dates(id integer PRIMARY KEY NOT NULL,reason text NOT NULL,gban_by id NOT NULL);")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS welcome_auth(id integer PRIMARY KEY NOT NULL,category integer,use integer NOT NULL,can_view pickle NOT NULL,next_reaction NOT NULL,au_w pickle NOT NULL,give_role integer NOT NULL);")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS role_panels(id integer PRIMARY KEY NOT NULL,roles json NOT NULL);")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS activity_roles(guild_id integer NOT NULL,activity_type integer NOT NULL,role_id integer NOT NULL , PRIMARY KEY(guild_id,activity_type) );")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS actrole_optin(id integer PRIMARY KEY NOT NULL, is_enable integer NOT NULL default 0);")
bot.cursor.execute("create table if not exists remaind(\
id integer primary key not null,\
stext text not null,\
mention_role integer,\
time real not null,\
chid integer not null)")
# re_gchat
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS gchat_clist(name text PRIMARY KEY NOT NULL,pass text)")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS gchat_cinfo(id integer PRIMARY KEY NOT NULL,connected_to text NOT NULL, wh_id integer NOT NULL)")
bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS gchat_pinfo(id integer PRIMARY KEY NOT NULL,content pickle,allids pickle,author_id integer,guild_id integer,timestamp pickle)")
try:
bot.cursor.execute("ALTER TABLE users ADD COLUMN online_agreed integer;")
except:
pass
try:
bot.cursor.execute("ALTER TABLE guilds ADD COLUMN verified integer NOT NULL default 0;")
except:
pass
bot.session = aiohttp.ClientSession(loop=bot.loop)
bot._default_close = bot.close
async def close_handler():
await bot._default_close()
await bot.session.close()
try:
db.commit()
except sqlite3.ProgrammingError:
pass
else:
db.close()
bot.close = close_handler
bot.translate_handler = TranslateHandler(bot, ["en", "ja"])
bot._get_context = bot.get_context
async def get_context(msg, cls=LocalizedContext):
ctx = await bot._get_context(msg, cls=cls)
ctx.context_at = datetime.datetime.utcnow().timestamp()
return ctx
bot.get_context = get_context
bot._ = bot.translate_handler.get_translation_for
bot.l10n_guild = bot.translate_handler.get_guild_translation_for
bot.l10n_any = bot.translate_handler.get_any_translation
bot.l10n_raw = bot.translate_handler.get_raw_translation
"""
au_w:[
{
"reactions":["str" or "id"(0-19)],
"give_role"[None or id(int)],
"text":str""
},...
]
"""
DoServercmd = False
gprofilever = "v1.0.1"
wikipedia.set_lang('ja')
bot.mwc = wikidata.client.Client()
rpcct = 0
rpcs = [
"ヘルプ:s-help",
"アイコン:しおさばきゅーさん",
"サーバー数:{0}",
"ユーザー数:{1}",
"作成:チーム☆思惟奈ちゃん",
"制作リーダー:mii-10#3110",
"help:s-help",
"icon:しおさばきゅー",
"{0}guilds",
"{1}users",
"created by mii-10#3110"
]
"""db = dropbox.Dropbox(DROP_TOKEN)
db.users_get_current_account()"""
bot.twi = Twitter(auth=OAuth(
bot.T_Acs_Token, bot.T_Acs_SToken, bot.T_API_key, bot.T_API_SKey))
bot.ec = 0x42bcf4
Donotif = False
bot.StartTime = datetime.datetime.now()
aglch = None
bot.features = config.sp_features
bot.apple_util = AppleUtil(bot)
def shares_guild(user_id_a, user_id_b):
return not not [
guild
for guild
in bot.guilds
if set([user_id_a, user_id_b]).issubset(frozenset(guild._members.keys()))
]
bot.shares_guild = shares_guild
def can_use_online(user):
enabled = bot.cursor.execute(
"SELECT online_agreed FROM users WHERE id = ?", (user.id,)).fetchone()
return enabled and enabled["online_agreed"]
bot.can_use_online = can_use_online
# initial load
"""db.files_download_to_file( "guildsetting.json" , "/guildsetting.json" )
db.files_download_to_file( "profiles.json" , "/profiles.json" )
db.files_download_to_file( "gp.json" , "/gp.json" )
db.files_download_to_file( "globaldatas.json" , "/globaldatas.json" )
db.files_download_to_file( "gchatchs.json" , "/gchatchs.json" )"""
bot.tl = " ゔ 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈〉《》「」『』【】+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓∈∋⊆⊇⊂⊃∪∩∧∨¬⇒⇔∀∃∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬ʼn♯♭♪†‡¶◯01234567891234567890abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz-^\=~|@[]:;\/.,<>?_+*}{`!\"#$%&'()ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをんァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρστυφχψωАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдеёжзийклмнопрстуфхцчшщъыьэюя─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕弌丐丕个丱丶丼丿乂
乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙"
bot.gguide = """思惟奈ちゃんのグローバルチャット利用規約 最終更新 2020/02/22
1.思惟奈ちゃんグローバルチャットで発言を行った時点で、この規約に同意したものとする。
2.規約違反者の扱い
運営での話し合いのうえ、処罰内容は変動するものとする。(行った行為の重大さ等によって判断される。)
3.グローバルチャットに、以下のようなテキスト、画像、そのようなコンテンツにつながるURLを投稿することを禁止する。
ただし、グローバルチャット作成者、およびグローバルモデレーターは、管理運営に必要な場合は、投稿してもよいとする。
・年齢制限の必要なもの
・閲覧に金銭や個人情報が必要なもの(ただし、これによって投稿などにログインが必要なサイトのリンク投稿を制限するものではない)
・Discordのサーバー招待。ただし新機能のテストのために、「思惟奈ちゃん更新関係サーバー」に誘導する場合など、一部の例外を除く。これによって他のグローバルチャットのグローバルチャンネル名の送信を禁止するものではない。
・意味のない文字列の羅列。ただし、接続テストの場合を除く。
・その他法律、Discord利用規約に違反するもの
このうちいくつかの項目に関しては、自動的に送信がブロックされるものもある。
4.グローバルチャット製作者および、グローバルモデレーターは、利用者のできることに加えて、次の行為を行うことができる。
・利用者の使用禁止状態の切り替え
・オリジナルメッセージサーバーにいない状態での投稿の削除
5.グローバルチャットにて、ほかのサーバーに送信される項目は、以下のとおりである。
・メッセージ内容、付属するembed、添付されたファイル、返信元の投稿内容、一部スタンプ
・ユーザーのid、投稿時にオフラインでないデバイス(PC,moblie,webの三通り)
・送信したサーバーの名前、アイコン、id
・投稿時間
また、送信された内容に関して次のような行為が加わった際にはそれが反映される。
・メッセージ内容の変更
・オリジナルメッセージの削除
6.この規約は`s-globalguide`でいつでも見ることができる。
7.改定
・制作者が予告なしに改定することがある。改定後は、グローバルチャットにて報告される。
・予告して改定した場合も、同じように改定後に報告する。
"""
bot.load_extension("cogs.apple_misc")
bot.load_extension("cogs.apple_onlinenotif")
@tasks.loop(minutes=20.0)
async def cRPC():
global rpcct
if rpcct == 7:
rpcct = 0
else:
rpcct = rpcct+1
await bot.change_presence(status=discord.Status.online, activity=discord.Game(name=rpcs[rpcct].format(len(bot.guilds), len(bot.users))))
async def repomsg(msg, rs, should_ban=False):
ch = bot.get_channel(628929788421210144)
e = discord.Embed(title="グローバルメッセージブロック履歴",
description=f"メッセージ内容:{msg.clean_content}", color=bot.ec)
e.set_author(name=f"{msg.author}(id:{msg.author.id})",
icon_url=msg.author.avatar_url_as(static_format="png"))
e.set_footer(text=f"サーバー:{msg.guild.name}(id:{msg.guild.id})",
icon_url=msg.guild.icon_url_as(static_format="png"))
e.timestamp = msg.created_at
e.add_field(name="ブロック理由", value=rs or "なし")
await ch.send(embed=e)
if should_ban:
bot.cursor.execute(
"UPDATE users SET gban = ? WHERE id = ?", (1, msg.author.id))
bot.cursor.execute("UPDATE users SET gbanhist = ? WHERE id = ?",
("予防グローバルチャットBAN: {}".format(rs), msg.author.id))
async def gsended(message, ch, embed):
try:
tmp = await ch.send(embed=embed)
if not message.embeds[0] is None:
await ch.send(embed=message.embeds[0])
return tmp.id
except:
pass
async def gsendwh(message, wch, spicon, pf, ed, fls):
try:
for wh in await wch.webhooks():
if wh.name == "sina_global":
if not fls == []:
sdfl = []
for at in fls:
sdfl.append(discord.File(
f"globalsends/{at.filename}", filename=at.filename, spoiler=at.is_spoiler()))
tmp = await wh.send(content=message.clean_content, wait=True, username=f"[{spicon}]{pf['gnick']}", avatar_url=message.author.avatar_url_as(static_format='png'), embeds=ed, files=sdfl)
else:
tmp = await wh.send(content=message.clean_content, wait=True, username=f"[{spicon}]{pf['gnick']}", avatar_url=message.author.avatar_url_as(static_format='png'), embeds=ed)
return tmp.id
except:
pass
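# globalSend relays a message posted in a registered global-chat channel to every
# other channel connected to the same global channel name: it looks the channel up
# in the globalchs table, blocks banned users and accounts younger than seven days,
# builds the per-user embed (activity, badges, guild footer), fans the message out
# through the "sina_global" webhooks, and records the sent message ids in globaldates.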
async def globalSend(message):
try:
if message.content.startswith("//"):
return
if message.author.id == bot.user.id:
return
if message.is_system():
return
bot.cursor.execute("select * from globalchs")
gchs = bot.cursor.fetchall()
gchn = None
for sgch in gchs:
if message.channel.id in sgch["ids"]:
gchn = sgch["name"]
gchs = sgch["ids"]
break
if gchn is None:
return
try:
content_checker(bot, message)
except MaliciousInput as err:
await repomsg(message, err.reason, err.should_ban)
return
bot.cursor.execute("select * from users where id=?",
(message.author.id,))
upf = bot.cursor.fetchone()
bot.cursor.execute("select * from guilds where id=?",
(message.guild.id,))
gpf = bot.cursor.fetchone()
if (datetime.datetime.now() - rdelta(hours=9) - rdelta(days=7) >= message.author.created_at) or upf["gmod"] or upf["gstar"] or gchn=="mido_sync_a":
if upf["gban"] == 1:
if not (gchn == "sync_rsp_main_chat" or gchn=="mido_sync_a"):
dc = await ut.opendm(message.author)
await dc.send(bot._(message.author, "global-banned", message.author.mention))
await repomsg(message, "思惟奈ちゃんグローバルチャットの使用禁止")
await message.add_reaction("❌")
await asyncio.sleep(5)
await message.remove_reaction("❌", bot.user)
else:
try:
if upf["sinapartner"] and message.author.activity:
if message.author.activity.type == discord.ActivityType.playing:
ne = discord.Embed(
title="", description=f"{message.author.activity.name}をプレイしています。", color=upf["gcolor"])
elif message.author.activity.type == discord.ActivityType.watching:
ne = discord.Embed(
title="", description=f"{message.author.activity.name}を視聴しています。", color=upf["gcolor"])
elif message.author.activity.type == discord.ActivityType.listening:
if message.author.activity.name == "Spotify":
ne = discord.Embed(
title="", description=f"Spotifyで[{message.author.activity.title}](https://open.spotify.com/track/{message.author.activity.track_id})を聞いています。", color=upf["gcolor"])
else:
ne = discord.Embed(
title="", description=f"{message.author.activity.name}を聞いています。", color=upf["gcolor"])
elif message.author.activity.type == discord.ActivityType.streaming:
ne = discord.Embed(
title="", description=f"{message.author.activity.name}を配信しています。", color=upf["gcolor"])
elif message.author.activity.type == discord.ActivityType.custom:
ne = discord.Embed(
title="", description=f"{message.author.activity.name}", color=upf["gcolor"])
else:
ne = discord.Embed(
title="", description="", color=upf["gcolor"])
else:
ne = discord.Embed(
title="", description="", color=upf["gcolor"])
ne.set_author(
name=f"{ut.ondevicon(message.author)},({str(message.author.id)})")
if gpf["verified"]:
ne.set_footer(text=f"✅:{message.guild.name}(id:{message.guild.id})", icon_url=message.guild.icon_url_as(
static_format="png"))
else:
ne.set_footer(text=f"{message.guild.name}(id:{message.guild.id})",
icon_url=message.guild.icon_url_as(static_format="png"))
ne.timestamp = datetime.datetime.now() - rdelta(hours=9)
embed = discord.Embed(
title="本文", description=message.content, color=upf["gcolor"])
embed.set_footer(text=f"{message.guild.name}(id:{message.guild.id})",
icon_url=message.guild.icon_url_as(static_format="png"))
if message.application is not None:
embed.add_field(
name=message.application["name"]+"へのRPC招待", value="RPC招待はグローバル送信できません。")
if message.type == discord.MessageType.default and message.reference:
ref = message.reference
if ref.cached_message:
m = ref.cached_message
else:
try:
m = await bot.get_channel(ref.channel_id).fetch_message(ref.message_id)
except:
m = None
if m:
ne.add_field(name=f"{m.author.display_name}のメッセージへの返信",value=f"{m.clean_content}")
embed.add_field(name=f"{m.author.display_name}のメッセージへの返信",value=f"{m.clean_content}")
else:
ne.add_field(name="メッセージへの返信",value="(このメッセージは削除されている等の理由で取得できません。)")
embed.add_field(name="メッセージへの返信",value="(このメッセージは削除されている等の理由で取得できません。)")
spicon = ""
if message.author.id == 404243934210949120: # みぃてん☆
spicon = spicon + "🌈"
if message.author.id in bot.team_sina: # チーム☆思惟奈ちゃん
spicon = spicon + "🌠"
if message.author.bot:
spicon = spicon + "⚙"
if upf["sinapartner"]:
spicon = spicon + "💠" # 認証済みアカウント
if message.author.id in config.partner_ids:
spicon = spicon + "🔗"
if upf["gmod"]:
spicon = spicon + "🔧"
if upf["galpha"]:
spicon = spicon + "🔔"
if upf["gstar"]:
spicon = spicon + "🌟"
if spicon == "":
spicon = "👤"
embed.set_author(name=f"{upf['gnick']}({spicon}):{str(message.author.id)}",
icon_url=message.author.avatar_url_as(static_format="png"))
if not message.attachments == []:
embed.set_image(url=message.attachments[0].url)
for atc in message.attachments:
temp = f"{atc.url}\n"
embed.add_field(name="添付ファイルのURL一覧", value=temp)
except:
traceback.print_exc(0)
await message.add_reaction("❌")
await asyncio.sleep(5)
await message.remove_reaction("❌", bot.user)
return
try:
if not (gchn == "sync_rsp_main_chat" or gchn=="mido_sync_a"):
await message.add_reaction(bot.get_emoji(653161518346534912))
except:
pass
if gchn.startswith("ed-"):
tasks = []
for cid in gchs:
ch = bot.get_channel(cid)
tasks.append(asyncio.ensure_future(
gsended(message, ch, embed)))
bot.cursor.execute(
"select * from globalchs where name=?", (gchn.replace("ed-", ""),))
nch = bot.cursor.fetchone()
try:
if nch["ids"]:
for cid in nch["ids"]:
try:
if not cid == message.channel.id:
wch = bot.get_channel(cid)
tasks.append(asyncio.ensure_future(
gsendwh(message, wch, spicon, upf, ne, [])))
except:
pass
if message.attachments == []:
await message.delete()
except:
pass
mids = await asyncio.gather(*tasks)
try:
await message.remove_reaction(bot.get_emoji(653161518346534912), bot.user)
except:
pass
else:
try:
sfs = False
fls = []
ed = []
#sticker
try:
if message.stickers:
sticker = message.stickers[0]
sembed = discord.Embed(title=f"スタンプ:{sticker.name}",)
if sticker.format == discord.StickerType.png:
sembed.set_image(url=sticker.image_url)
elif sticker.format == discord.StickerType.apng:
sembed.set_image(url=f"https://dsticker.herokuapp.com/convert.gif?url={sticker.image_url}")
elif sticker.format == discord.StickerType.lottie:
# メモ: https://cdn.discordapp.com/stickers/{id}/{hash}.json?size=1024
sembed.description = "画像取得非対応のスタンプです。"
ed.append(sembed)
except:
traceback.print_exc(0)
await message.add_reaction("❌")
await asyncio.sleep(5)
await message.remove_reaction("❌", bot.user)
return
if not message.attachments == []:
os.makedirs('globalsends/', exist_ok=True)
for at in message.attachments:
await at.save(f"globalsends/{at.filename}")
fls.append(at)
if not gchn == "sync_rsp_main_chat":
ed = ed + message.embeds + [ne]
else:
if not gchn == "sync_rsp_main_chat":
ed = ed + message.embeds + [ne]
except:
traceback.print_exc(0)
await message.add_reaction("❌")
await asyncio.sleep(5)
await message.remove_reaction("❌", bot.user)
return
try:
if not (gchn == "sync_rsp_main_chat" or gchn=="mido_sync_a"):
await message.add_reaction(bot.get_emoji(653161518346534912))
except:
pass
tasks = []
for cid in gchs:
try:
if not cid == message.channel.id:
wch = bot.get_channel(cid)
tasks.append(asyncio.ensure_future(
gsendwh(message, wch, spicon, upf, ed, fls)))
except:
pass
bot.cursor.execute(
"select * from globalchs where name=?", (f"ed-{gchn}",))
och = bot.cursor.fetchone()
try:
                    if och["ids"]:
for cid in och["ids"]:
ch = bot.get_channel(cid)
tasks.append(asyncio.ensure_future(
gsended(message, ch, embed)))
except:
pass
mids = await asyncio.gather(*tasks)
if not fls == []:
shutil.rmtree("globalsends/")
try:
if not (gchn == "sync_rsp_main_chat" or gchn=="mido_sync_a"):
await message.remove_reaction(bot.get_emoji(653161518346534912), bot.user)
except:
pass
bot.cursor.execute("INSERT INTO globaldates(id,content,allid,aid,gid,timestamp) VALUES(?,?,?,?,?,?,?)", (message.id, message.clean_content,
mids+[message.id], message.author.id, message.guild.id, str(message.created_at.strftime('%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}').format(*'年月日時分秒'))))
if not (gchn == "sync_rsp_main_chat" or gchn=="mido_sync_a"):
await message.add_reaction(bot.get_emoji(653161518195539975))
await asyncio.sleep(5)
await message.remove_reaction(bot.get_emoji(653161518195539975), bot.user)
else:
await repomsg(message, "作成後7日に満たないアカウント")
except Exception as e:
traceback.print_exc()
@bot.event
async def on_member_update(b, a):
global Donotif
# serverlog
try:
e = discord.Embed(
title="メンバーの更新", description=f"変更メンバー:{str(a)}", color=bot.ec)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
if not b.nick == a.nick:
e.add_field(name="変更内容", value="ニックネーム")
if b.nick:
bnick = b.nick
else:
bnick = b.name
if a.nick:
anick = a.nick
else:
anick = a.name
e.add_field(name="変更前", value=bnick.replace("\\", "\\\\").replace("*", "\*").replace(
"_", "\_").replace("|", "\|").replace("~", "\~").replace("`", "\`").replace(">", "\>"))
e.add_field(name="変更後", value=anick.replace("\\", "\\\\").replace("*", "\*").replace(
"_", "\_").replace("|", "\|").replace("~", "\~").replace("`", "\`").replace(">", "\>"))
bot.cursor.execute(
"select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
elif not b.pending == a.pending:
e.add_field(name="メンバースクリーニングの状態変更",value=f"メンバースクリーニング{'が再度要求されます。' if a.pending else 'を完了しました。'}")
bot.cursor.execute(
"select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
elif not b.roles == a.roles:
if len(b.roles) > len(a.roles):
e.add_field(name="変更内容", value="役職除去")
e.add_field(name="役職", value=list(
set(b.roles)-set(a.roles))[0])
else:
e.add_field(name="変更内容", value="役職付与")
e.add_field(name="役職", value=list(
set(a.roles)-set(b.roles))[0])
bot.cursor.execute(
"select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
except:
pass
# online notif are now handled in apple_onlinenotif
async def nga(m, r):
    # legacy code from an earlier version
ch = m.guild.get_channel(631875590307446814)
admins = m.guild.get_role(574494236951707668)
tmpadmins = m.guild.get_role(583952666317684756)
giverole = m.guild.get_role(620911942889897984)
tch = await ch.create_text_channel(f"認証待ち-{m.name}", overwrites={
m: discord.PermissionOverwrite(read_messages=True, send_messages=True),
m.guild.default_role: discord.PermissionOverwrite(read_messages=False),
admins: discord.PermissionOverwrite(read_messages=True, send_messages=True),
tmpadmins: discord.PermissionOverwrite(read_messages=True, send_messages=True),
giverole: discord.PermissionOverwrite(
read_messages=True, send_messages=True)
}, topic=str(m.id))
await tch.send(f"""{m.mention}さん!みぃてん☆のわいがや広場にようこそ!
あなたは{r}が理由で、思惟奈ちゃんによる自動認証が行われませんでした。
思惟奈ちゃんに関するお問い合わせ等の方は`思惟奈ちゃん`カテゴリー内のチャンネルをご利用ください。
不明点等ございましたら、このチャンネルをご利用ください。
その他のチャンネルを使う際には、メンバー役職が必要です。
まずはルールを確認してください!
<#574500456471199746> このチャンネルにルールがあります。
その後、そのことを報告してください。
みぃてん☆
""")
@bot.event
async def on_member_join(member):
try:
bot.cursor.execute(
"select * from guilds where id=?", (member.guild.id,))
gpf = bot.cursor.fetchone()
ctt = gpf["jltasks"]
if not ctt.get("welcome") is None:
if ctt["welcome"]["sendto"] == "sysch":
await member.guild.system_channel.send(ctt["welcome"]["content"].format(member.mention))
else:
dc = await ut.opendm(member)
await dc.send(ctt["welcome"]["content"].format(member.mention))
except:
pass
e = discord.Embed(
title="メンバーの参加", description=f"{len(member.guild.members)}人目のメンバー", color=bot.ec)
e.add_field(name="参加メンバー", value=member.mention)
e.add_field(name="そのユーザーのid", value=member.id)
e.set_footer(
text=f"アカウント作成日時(そのままの値:{(member.created_at + rdelta(hours=9)).strftime('%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}').format(*'年月日時分秒')},タイムスタンプ化:")
e.timestamp = member.created_at
bot.cursor.execute("select * from guilds where id=?", (member.guild.id,))
gpf = bot.cursor.fetchone()
try:
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == member.guild.id:
await ch.send(embed=e)
except:
pass
    # notification of bans on other servers
isgban = False
bot.cursor.execute("select * from users where id=?", (member.id,))
upf = bot.cursor.fetchone()
bunotif = 0
if member.id in bot.team_sina:
for ch in member.guild.channels:
if ch.name == "sina-user-check":
await ch.send(embed=discord.Embed(title=f"{member}の安全性評価", description=f"そのユーザーは、チーム☆思惟奈ちゃんのメンバーです。"))
elif upf and upf["gban"] == 1:
for ch in member.guild.channels:
if ch.name == "sina-user-check":
await ch.send(embed=discord.Embed(title=f"{member}の安全性評価", description=f"そのユーザーは、思惟奈ちゃんグローバルチャットbanを受けています。\n何らかの事情があってこうなっていますので十分に注意してください。"))
else:
for g in bot.guilds:
try:
tmp = await g.bans()
except:
continue
banulist = [i.user.id for i in tmp]
if member.id in banulist:
bunotif = bunotif + 1
if bunotif == 0:
for ch in member.guild.channels:
if ch.name == "sina-user-check":
await ch.send(embed=discord.Embed(title=f"{member}の安全性評価", description=f"そのユーザーは、思惟奈ちゃんのいるサーバーでは、banされていません。"))
else:
for ch in member.guild.channels:
if ch.name == "sina-user-check":
await ch.send(embed=discord.Embed(title=f"{member}の安全性評価", description=f"そのユーザーは、思惟奈ちゃんのいる{bunotif}のサーバーでbanされています。注意してください。"))
@bot.event
async def on_member_remove(member):
try:
bot.cursor.execute(
"select * from guilds where id=?", (member.guild.id,))
gpf = bot.cursor.fetchone()
ctt = gpf["jltasks"]
if not ctt.get("cu") is None:
if ctt["cu"]["sendto"] == "sysch":
await member.guild.system_channel.send(ctt["cu"]["content"].format(str(member)))
else:
dc = await ut.opendm(member)
await dc.send(ctt["cu"]["content"].format(str(member)))
except:
pass
e = discord.Embed(title="メンバーの退出", color=bot.ec)
e.add_field(name="退出メンバー", value=str(member))
e.add_field(name="役職", value=[i.name for i in member.roles])
# e.set_footer(text=f"{member.guild.name}/{member.guild.id}")
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (member.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == member.guild.id:
await ch.send(embed=e)
"""if member.guild.id == 611445741902364672:
c = bot.get_channel(613629308166209549)
await c.send(embed=e)"""
@bot.event
async def on_webhooks_update(channel):
e = discord.Embed(title="Webhooksの更新", color=bot.ec)
e.add_field(name="チャンネル", value=channel.mention)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (channel.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == channel.guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_role_create(role):
e = discord.Embed(title="役職の作成", color=bot.ec)
e.add_field(name="役職名", value=role.name)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (role.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == role.guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_role_delete(role):
e = discord.Embed(title="役職の削除", color=bot.ec)
e.add_field(name="役職名", value=role.name)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (role.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == role.guild.id:
await ch.send(embed=e)
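# on_message_edit: when the disaster-information feed channel (hard-coded ID) gains
# embeds on edit, relays them to every channel connected to the 防災情報 global chat
# through the "sina_global" webhook; ordinary content edits are logged to the
# guild's log channel.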
@bot.event
async def on_message_edit(before, after):
if after.channel.id == 611117238464020490:
if after.embeds and before.content == after.content:
bot.cursor.execute(
"select * from globalchs where name=?", ("防災情報",))
chs = bot.cursor.fetchone()
es = after.embeds
sed = []
for e in es:
e.color = bot.ec
e.title = f'💠{str(e.title).replace("Embed.Empty","防災情報")}'
sed.append(e)
for chid in chs["ids"]:
try:
ch = bot.get_channel(chid)
for wh in await ch.webhooks():
try:
if wh.name == "sina_global":
await wh.send(embeds=sed)
await asyncio.sleep(0.2)
break
except:
continue
except:
pass
# server log
if before.content != after.content:
e = discord.Embed(title="メッセージの編集", color=bot.ec)
e.add_field(name="編集前", value=before.content)
e.add_field(name="編集後", value=after.content)
e.add_field(name="メッセージ送信者", value=after.author.mention)
e.add_field(name="メッセージチャンネル", value=after.channel.mention)
e.add_field(name="メッセージのURL", value=after.jump_url)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute(
"select * from guilds where id=?", (after.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == after.guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_channel_delete(channel):
# bl = await channel.guild.audit_logs(limit=1, action=discord.AuditLogAction.channel_delete).flatten()
e = discord.Embed(title="チャンネル削除", color=bot.ec)
e.add_field(name="チャンネル名", value=channel.name)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (channel.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == channel.guild.id:
await ch.send(embed=e)
@bot.event
async def on_reaction_clear(message, reactions):
e = discord.Embed(title="リアクションの一斉除去", color=bot.ec)
e.add_field(name="リアクション", value=[str(i) for i in reactions])
e.add_field(name="除去されたメッセージ", value=message.content or "(本文なし)")
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (message.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == message.guild.id:
await ch.send(embed=e)
@bot.event
async def on_message_delete(message):
if not message.author.bot:
e = discord.Embed(title="メッセージ削除", color=bot.ec)
e.add_field(name="メッセージ", value=message.content)
e.add_field(name="メッセージ送信者", value=message.author.mention)
e.add_field(name="メッセージチャンネル", value=message.channel.mention)
e.add_field(name="メッセージのid", value=message.id)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?",
(message.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == message.guild.id:
await ch.send(embed=e)
@bot.event
async def on_bulk_message_delete(messages):
logs = ["一括削除ログ\n",f"チャンネル:{messages[0].channel}({messages[0].channel.id})\n","------\n"]
for m in messages:
logs.append(f"author(送信者):{m.author.display_name}({m.author}/{m.author.id})\n")
logs.append(f"content(メッセージ内容):{m.system_content}\n")
logs.append(f"message id(メッセージid):{m.id}\n")
c_at = (m.created_at + rdelta(hours=9)).strftime("%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}").format(*"年月日時分秒")
logs.append(f"created_at(送信日時):{c_at}\n")
if m.type == discord.MessageType.default and m.reference:
rfm = m.reference
if rfm.cached_message:
logs.append(f"返信メッセージ:(送信者)-{rfm.cached_message.author.display_name}({rfm.cached_message.author}/{rfm.cached_message.author.id})\n")
logs.append(f"返信メッセージ:(メッセージ内容)-{rfm.cached_message.system_content}\n")
logs.append(f"返信メッセージ:(メッセージid)-{rfm.cached_message.id}\n")
c_at = (rfm.cached_message.created_at + rdelta(hours=9)).strftime("%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}").format(*"年月日時分秒")
logs.append(f"created_at(送信日時):{c_at}\n")
else:
logs.append(f"返信メッセージ:(guild_id/channel_id/message_id)-{rfm.guild_id}/{rfm.channel_id}/{rfm.message_id}\n")
logs.append("------\n")
with open("bulk_message_delete.txt",mode="w",encoding="utf_8") as f:
f.writelines(logs)
e = discord.Embed(title="メッセージ一括削除", color=bot.ec)
e.add_field(name="件数", value=len(messages))
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?",
(messages[0].guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == messages[0].guild.id:
await ch.send(embed=e,file=discord.File(fp="bulk_message_delete.txt"))
@bot.event
async def on_guild_channel_create(channel):
e = discord.Embed(title="チャンネル作成", color=bot.ec)
e.add_field(name="チャンネル名", value=channel.mention)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (channel.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == channel.guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_channel_update(b, a):
e = discord.Embed(title="チャンネル更新", description=a.mention, color=bot.ec)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
if not b.name == a.name:
if not a.guild.id == 461789442743468073:
e.add_field(name="変更内容", value="チャンネル名")
e.add_field(name="変更前", value=b.name)
e.add_field(name="変更後", value=a.name)
bot.cursor.execute(
"select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
elif not b.changed_roles == a.changed_roles:
e.add_field(name="変更内容", value="権限の上書き")
e.add_field(name="確認:", value="チャンネル設定を見てください。")
bot.cursor.execute("select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
elif isinstance(b, discord.TextChannel):
if not b.topic == a.topic:
e.add_field(name="変更内容", value="チャンネルトピック")
e.add_field(name="変更前", value=b.topic)
e.add_field(name="変更後", value=a.topic)
bot.cursor.execute(
"select * from guilds where id=?", (a.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_update(b, a):
e = discord.Embed(title="サーバーの更新", color=bot.ec)
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
if b.name != a.name:
e.add_field(name="変更内容", value="サーバー名")
e.add_field(name="変更前", value=b.name)
e.add_field(name="変更後", value=a.name)
bot.cursor.execute("select * from guilds where id=?", (a.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.id:
await ch.send(embed=e)
elif b.icon != a.icon:
e.add_field(name="変更内容", value="サーバーアイコン")
bot.cursor.execute("select * from guilds where id=?", (a.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.id:
await ch.send(embed=e)
elif b.owner.id != a.owner.id:
e.add_field(name="変更内容", value="サーバー所有者の変更")
e.add_field(name="変更前", value=b.owner)
e.add_field(name="変更後", value=a.owner)
bot.cursor.execute("select * from guilds where id=?", (a.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == a.id:
await ch.send(embed=e)
@bot.event
async def on_member_ban(g, user):
guild = bot.get_guild(g.id)
bl = await guild.audit_logs(limit=1, action=discord.AuditLogAction.ban).flatten()
e = discord.Embed(title="ユーザーのban", color=bot.ec)
e.add_field(name="ユーザー名", value=str(user))
# e.add_field(name="実行者", value=str(bl[0].user))
# e.set_footer(text=f"{g.name}/{g.id}")
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (g.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == g.id:
await ch.send(embed=e)
@bot.event
async def on_member_unban(guild, user):
e = discord.Embed(title="ユーザーのban解除", color=bot.ec)
e.add_field(name="ユーザー名", value=str(user))
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == guild.id:
await ch.send(embed=e)
@bot.event
async def on_guild_join(guild):
e = discord.Embed(
title=f"思惟奈ちゃんが{guild.name}に参加したよ!({len(bot.guilds)}サーバー)", description=f"id:{guild.id}", color=bot.ec)
e.add_field(name="サーバー作成日時",
value=f"{(guild.created_at+ rdelta(hours=9)).strftime('%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}').format(*'年月日時分秒')}")
e.add_field(
name="メンバー数", value=f"{len([i for i in guild.members if not i.bot])}ユーザー、{len([i for i in guild.members if i.bot])}bot")
e.add_field(
name="チャンネル数", value=f"テキスト:{len(guild.text_channels)}\nボイス:{len(guild.voice_channels)}\nカテゴリー{len(guild.categories)}")
e.add_field(name="サーバーオーナー",value=f"{guild.owner.mention}({guild.owner}({guild.owner.id}))")
ch = bot.get_channel(693048937304555529)
await ch.send(embed=e)
e=discord.Embed(title="思惟奈ちゃんの導入ありがとうございます!",description="ここでは、思惟奈ちゃんの機能を「少しだけ」ご紹介させていただきます。",color=bot.ec)
e.add_field(name="コマンドの利用制限",value="`s-comlock`コマンドで、使用してほしくないコマンドや、動いてほしくない一部機能の制限ができます。\n詳しくは`s-help comlock`でご確認ください!")
e.add_field(name="グローバルチャット",value="`s-gconnect`コマンドで、実行チャンネルをグローバルチャットに接続できます(Webhooks管理権限が必要)。\n詳しくは`s-help gconnect`でご確認ください!")
e.add_field(name="ハッシュタグチャンネル",value="`s-hash`コマンドで、実行チャンネルをハッシュタグチャンネルとして登録できます。登録されたチャンネルにメンションすることで、メッセージの複製を送信し、ハッシュタグのようにあとで一覧確認ができるようになります。詳しくは`s-help hash`でご確認ください。")
e.add_field(name="音楽再生機能",value="`s-play [URL/検索ワード]`でボイスチャット内で音楽を再生できます。その他のコマンドはヘルプのページ目で一覧確認できます。詳細は`s-help [コマンド名]`で確認できます。")
e.add_field(name="グローバルBANとその申請",value="`s-gbanlogto [チャンネルID]`でグローバルBANログの送信先を指定することで、グローバルBAN機能が有効化されます(BAN権限が必要)。\n一般のユーザーの方は`s-report`コマンドで申請ができます。詳しくは`s-help gbanlogto`ならびに`s-help report`をご覧ください!")
e.add_field(name="サーバー/ユーザーの設定変更に関して",value="`s-settings`コマンドで設定できる内容を見て、直接該当コマンドを呼び出すことができます。また、該当コマンドを直接呼び出しても設定を変えることもできます。使いやすい方を使ってください。")
e.add_field(name="思惟奈ちゃんのお知らせを受け取ろう!",value="`s-rnotify`コマンドで、そのチャンネルに思惟奈ちゃんのお知らせを受け取れるようになります。ぜひ!受信設定をお願いします。")
e.add_field(name="その他",value="このほかにもたくさんの機能を備えています。helpの1ページ目にリンクがある「みぃてんのわいがや広場」では、サポートも行っておりますのでお困りの方は一度足を運んでみてください。あなたのサーバーに少しでも役に立てるように頑張りますので思惟奈ちゃんをよろしくお願いします!")
try:
await guild.system_channel.send(embed=e)
except:
for ch in guild.text_channels:
try:
await ch.send(embed=e)
return
except:
continue
@bot.event
async def on_guild_remove(guild):
try:
e = discord.Embed(
title=f"思惟奈ちゃんが{guild.name}から退出しました。({len(bot.guilds)}サーバー)", description=f"原因としてサーバーからのkick/banまたはサーバーの削除などの可能性があります。\nid:{guild.id}", color=bot.ec)
e.add_field(name="サーバー作成日時",
value=f"{(guild.created_at+ rdelta(hours=9)).strftime('%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}').format(*'年月日時分秒')}")
try:
e.add_field(name="サーバー参加日時",
value=f"{(guild.me.joined_at+ rdelta(hours=9)).strftime('%Y{0}%m{1}%d{2} %H{3}%M{4}%S{5}').format(*'年月日時分秒')}")
except:
pass
e.add_field(
name="メンバー数", value=f"{len([i for i in guild.members if not i.bot])}ユーザー、{len([i for i in guild.members if i.bot])}bot")
e.add_field(
name="チャンネル数", value=f"テキスト:{len(guild.text_channels)}\nボイス:{len(guild.voice_channels)}\nカテゴリー{len(guild.categories)}")
e.add_field(name="サーバーオーナー",value=f"{guild.owner.mention}({guild.owner}({guild.owner.id}))")
except:
e=discord.Embed(title="退出通知",description=f"以下のエラーにより正常に生成できていないため、一部情報が断片的な情報を送ります。\n```py\n{traceback.format_exc(3)}```")
e.add_field(name="サーバー名/id",value=f"{guild.name}({guild.id})")
ch = bot.get_channel(693048937304555529)
await ch.send(embed=e)
@bot.event
async def on_invite_create(invite):
e = discord.Embed(title="サーバー招待の作成", color=bot.ec)
e.add_field(name="作成ユーザー", value=str(invite.inviter))
e.add_field(name="使用可能回数", value=str(invite.max_uses))
e.add_field(name="使用可能時間", value=str(invite.max_age))
e.add_field(name="チャンネル", value=str(invite.channel.mention))
e.add_field(name="コード", value=str(invite.code))
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (invite.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == invite.guild.id:
await ch.send(embed=e)
@bot.event
async def on_invite_delete(invite):
e = discord.Embed(title="サーバー招待の削除", color=bot.ec)
e.add_field(name="作成ユーザー", value=str(invite.inviter))
e.add_field(name="チャンネル", value=str(invite.channel.mention))
e.add_field(name="コード", value=str(invite.code))
e.timestamp = datetime.datetime.now() - rdelta(hours=9)
bot.cursor.execute("select * from guilds where id=?", (invite.guild.id,))
gpf = bot.cursor.fetchone()
if gpf["sendlog"]:
ch = bot.get_channel(gpf["sendlog"])
if ch.guild.id == invite.guild.id:
await ch.send(embed=e)
# discord.abc.Messageable
@bot.event
async def on_ready():
global aglch
print('ログインしました。')
print(bot.user.name)
print(bot.user.id)
print('------------------')
aglch = bot.get_channel(659706303521751072)
pmsgc = bot.get_channel(676371380111015946)
cRPC.start()
"""invite_tweet.start()
now_sina_tweet.start()"""
bot.application_id = (await bot.application_info()).id
bot.load_extension("jishaku")
files = [
"m10s_music", "m10s_info", "m10s_owner", "m10s_settings", "m10s_manage", "m10s_levels",
"m10s_tests", "m10s_gcoms", "m10s_other", "m10s_search", "m10s_games", "P143_jyanken",
"nekok500_mee6", "pf9_symmetry", "syouma", "m10s_gban", "m10s_bmail", "m10s_auth_wiz",
"m10s_chinfo_rewrite", "m10s_role_panel", "m10s_messageinfo", "m10s_setting_command",
"m10s_partners", "m10s_remainder", "m10s_level_edit", "m10s_set_activity_roles", "m10s_re_gchat",
# "_m10s_slash_testing","_m10s_music_slash",
"_m10s_api",
"_m10s_ctx_menu"
]
embed = discord.Embed(title="読み込みに失敗したCog", color=bot.ec)
txt = ""
for file in files:
try:
bot.load_extension(f"cogs.{file}")
except:
print(f"Extension {file} Load Failed.")
txt += f"`{file}`, "
else:
print(f"Extension {file} Load.")
embed.description = txt
try:
ch = bot.get_channel(595526013031546890)
e=discord.Embed(title="起動時インフォメーション",description=f"認識ユーザー数:{len(bot.users)}\n認識サーバー数:{len(bot.guilds)}\n認識チャンネル数:{len([c for c in bot.get_all_channels()])}\ndiscord.py ver_{discord.__version__}",color=bot.ec)
await ch.send(f"{bot.get_emoji(653161518531215390)}on_ready!",embed=e)
if txt:
await ch.send(embed=embed)
except:
pass
@bot.event
async def on_message(message):
if "cu:on_msg" in bot.features.get(message.author.id, []):
return
if isinstance(message.channel, discord.DMChannel):
return
if message.webhook_id:
return
if message.author.id == bot.user.id:
return
if postcount.get(str(message.guild.id), None) is None:
postcount[str(message.guild.id)] = 1
else:
postcount[str(message.guild.id)] += 1
# db.files_download_to_file( "guildsetting.json" , "/guildsetting.json" )
# db.files_download_to_file( "profiles.json" , "/profiles.json" )
tks = [
domsg(message)
# globalSend(message),  global chat has evolved! -> cogs.m10s_re_gchat
]
await asyncio.gather(*tks)
# await domsg(message)
# await globalSend(message)
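# domsg: per-message pipeline — creates the guild / user / actrole_optin rows on
# first sight, then runs leveling, hashtag relay and custom server commands in
# parallel, resolves the effective prefix list and finally dispatches normal commands.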
async def domsg(message):
global DoServercmd
bot.cursor.execute("select * from guilds where id=?", (message.guild.id,))
gs = bot.cursor.fetchone()
if not gs:
guild_lang = bot.translate_handler.get_lang_by_guild(
message.guild, False)
bot.cursor.execute("INSERT INTO guilds(id,levels,commands,hash,levelupsendto,reward,jltasks,lockcom,sendlog,prefix,lang,verified) VALUES(?,?,?,?,?,?,?,?,?,?,?,?)",
(message.guild.id, {}, {}, [], None, {}, {}, [], None, [], guild_lang,0))
try:
await message.channel.send(f"{bot.get_emoji(653161518153596950)}このサーバーの思惟奈ちゃんサーバープロファイルを作成しました!いくつかの項目はコマンドを使って書き換えることができます。詳しくはヘルプ(`s-help`)をご覧ください。\nまた、不具合や疑問点などがありましたら`mii-10#3110`にお願いします。\n思惟奈ちゃんのお知らせは`s-rnotify [チャンネルid(省略可能)]`で、コマンド等の豆知識は`s-rtopic [チャンネルid(省略可能)]`で受信する設定にできます。(Webhook管理権限が必要です。)\nこのメッセージを見たことがある?\n 長期のメンテナンスによりデータが失われてしまった可能性があります。お手数をおかけしますが、再度設定をお願いします。")
except:
pass
bot.cursor.execute("select * from guilds where id=?",
(message.guild.id,))
gs = bot.cursor.fetchone()
bot.cursor.execute("select * from users where id=?", (message.author.id,))
pf = bot.cursor.fetchone()
if not pf:
if message.is_system():
return
bot.cursor.execute("INSERT INTO users(id,prefix,gpoint,memo,levcard,onnotif,lang,accounts,sinapartner,gban,gnick,gcolor,gmod,gstar,galpha,gbanhist) VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
(message.author.id, [], 0, {}, "m@ji☆", [], "ja", [], 0, 0, message.author.name, 0, 0, 0, 0, "なし"))
try:
#if "disable_profile_msg" in gs["lockcom"]:
await message.add_reaction(bot.get_emoji(653161518153596950))
#else:
#try:
#await message.reply(f"> {bot.get_emoji(653161518153596950)} あなたの思惟奈ちゃんユーザープロファイルを作成しました!いくつかの項目はコマンドを使って書き換えることができます。詳しくはヘルプ(`s-help`)をご覧ください。\n> なんでこのメッセージが来たの?\n 思惟奈ちゃんのいるサーバーで発言したことにより、プロファイルが作成されました。プロファイルの削除を希望する場合は`mii-10#3110`のDMにご連絡ください。なお、プロファイルを削除後は思惟奈ちゃんをご利用できなくなります。(レベル機能などサーバープロファイルに依存するものを含む)")
#except:
#await message.send(f"> {bot.get_emoji(653161518153596950)} {message.author.mention}さん!あなたの思惟奈ちゃんユーザープロファイルを作成しました!いくつかの項目はコマンドを使って書き換えることができます。詳しくはヘルプ(`s-help`)をご覧ください。\n> なんでこのメッセージが来たの?\n 思惟奈ちゃんのいるサーバーで発言したことにより、プロファイルが作成されました。プロファイルの削除を希望する場合は`mii-10#3110`のDMにご連絡ください。なお、プロファイルを削除後は思惟奈ちゃんをご利用できなくなります。(レベル機能などサーバープロファイルに依存するものを含む)")
except:
pass
bot.cursor.execute("select * from users where id=?",
(message.author.id,))
pf = bot.cursor.fetchone()
"""bot.cursor.execute(
"CREATE TABLE IF NOT EXISTS actrole_optin(id integer PRIMARY KEY NOT NULL, is_enable integer NOT NULL default 0);")"""
bot.cursor.execute("select * from actrole_optin where id=?", (message.author.id,))
if not bot.cursor.fetchone():
if message.is_system():
return
bot.cursor.execute("INSERT INTO actrole_optin(id,is_enable) VALUES(?,?)",
(message.author.id, 0))
bot.cursor.execute("select * from actrole_optin where id=?",
(message.author.id,))
tks = [asyncio.ensure_future(dlevel(message, gs)), asyncio.ensure_future(
gahash(message, gs)), asyncio.ensure_future(runsercmd(message, gs, pf))]
await asyncio.gather(*tks)
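# Effective prefixes = user prefixes + guild prefixes, plus the default "s-" unless
# the guild has opted out via "disable_defprefix".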
tpf = pf["prefix"]+gs["prefix"]
if not "disable_defprefix" in gs["lockcom"]:
tpf.insert(0,"s-")
bot.command_prefix = tpf
ctx = await bot.get_context(message)
try:
if ctx.command:
if ctx.command.name in gs["lockcom"] and not ctx.author.guild_permissions.administrator and ctx.author.id != 404243934210949120:
await ctx.send(ctx._("comlock-locked"))
else:
if ctx.command.name in bot.features[0]:
await ctx.send("> command locked by admins\n このコマンドはメンテナンスなどのために一時的な使用制限がかかっています。\n 問題点が解消され次第、再度利用が可能となりますので今しばらくお待ちください。")
else:
await bot.process_commands(message)
except SystemExit:
sys.exit()
except Exception:
print(traceback.format_exc(0))
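# runsercmd: custom per-guild commands stored in the guild profile ("commands");
# each entry replies with a random choice, a fixed reply, or toggles a role on the
# invoking member, and can be disabled per guild via the "scom" lock.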
async def runsercmd(message, gs, pf):
# servercmd
if "scom" not in gs["lockcom"]:
if not message.author.id == bot.user.id and message.webhook_id is None:
tpf = pf["prefix"]+gs["prefix"]
tpf.append("s-")
try:
if not gs["commands"] is None:
cmds = gs["commands"]
ctts = message.content.split(" ")
for k, v in cmds.items():
for px in tpf:
if px+k == ctts[0]:
DoServercmd = True
if v["mode"] == "random":
await message.channel.send(random.choice(v["rep"]))
elif v["mode"] == "one":
await message.channel.send(v["rep"])
elif v["mode"] == "role":
try:
role = message.guild.get_role(v["rep"])
except:
await message.channel.send(bot._(message.author, "scmd-notfound-role"))
return
if role < message.author.top_role:
if role in message.author.roles:
await message.author.remove_roles(role)
await message.channel.send(bot._(message.author, "scmd-delrole"))
else:
await message.author.add_roles(role)
await message.channel.send(bot._(message.author, "scmd-addrole"))
else:
await message.channel.send(bot._(message.author, "scmd-notrole"))
break
except:
pass
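# gahash: hashtag-channel relay — when a message mentions a registered hashtag
# channel, a copy is cross-posted there as an embed (the body is masked for NSFW
# source channels, and channels with "s-noHashSend" in their topic are skipped).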
async def gahash(message, gs):
# hash
if "s-noHashSend" in (message.channel.topic or ""):
return
if "shash" not in gs["lockcom"]:
ch = gs["hash"]
if ch:
menchan = message.channel_mentions
for sch in menchan:
if sch.id in ch:
if message.channel.is_nsfw():
embed = discord.Embed(title="", description=bot.l10n_guild(
message.guild, "hash-nsfw"), color=message.author.color)
embed.add_field(name=bot.l10n_guild(message.guild, "hash-from"),
value=f'{bot.l10n_guild(message.guild,"hash-chmention")}:{message.channel.mention}\n{bot.l10n_guild(message.guild,"hash-chname")}:{message.channel.name}')
embed.add_field(name=bot.l10n_guild(
message.guild, "hash-link"), value=message.jump_url)
embed.set_author(name=message.author.display_name, icon_url=message.author.avatar_url_as(
static_format='png'))
else:
embed = discord.Embed(
title="", description=message.content, color=message.author.color)
embed.add_field(name=bot.l10n_guild(message.guild, "hash-from"),
value=f'{bot.l10n_guild(message.guild,"hash-chmention")}:{message.channel.mention}\n{bot.l10n_guild(message.guild,"hash-chname")}:{message.channel.name}')
embed.add_field(name=bot.l10n_guild(
message.guild, "hash-link"), value=message.jump_url)
embed.set_author(name=message.author.display_name, icon_url=message.author.avatar_url_as(
static_format='png'))
if not message.attachments == [] and (not message.attachments[0].is_spoiler()):
embed.set_image(url=message.attachments[0].url)
await sch.send(embed=embed)
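# dlevel: per-guild leveling — grants 5-15 EXP at most once per 60 seconds per
# member, announces level-ups (in the configured channel or in place) and assigns
# any reward role mapped to the new level.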
async def dlevel(message, gs):
if "clevel" in gs["lockcom"]:
return
if message.author.bot:
return
if gs["levels"].get(str(message.author.id), None) is None:
gs["levels"][str(message.author.id)] = {
"level": 0,
"exp": random.randint(5, 15),
"lltime": int(time.time()),
"dlu": True
}
bot.cursor.execute(
"UPDATE guilds SET levels = ? WHERE id = ?", (gs["levels"], message.guild.id))
else:
if gs["levels"][str(message.author.id)]["dlu"]:
if (int(time.time())-gs["levels"][str(message.author.id)]["lltime"]) >= 60:
gs["levels"][str(message.author.id)
]["lltime"] = int(time.time())
gs["levels"][str(message.author.id)
]["exp"] += random.randint(5, 15)
if gs["levels"][str(message.author.id)]["exp"] >= gs["levels"][str(message.author.id)]["level"] ** 3 + 20:
gs["levels"][str(
message.author.id)]["exp"] -= gs["levels"][str(message.author.id)]["level"] ** 3 + 20
gs["levels"][str(message.author.id)]["level"] += 1
aut = str(message.author).replace("\\", "\\\\").replace("*", "\*").replace(
"_", "\_").replace("|", "\|").replace("~", "\~").replace("`", "\`").replace(">", "\>")
if gs["levelupsendto"]:
c = bot.get_channel(gs["levelupsendto"])
try:
m = await c.send(str(bot.get_emoji(653161518212448266))+bot._(message.author, "levelup-notify", aut, gs["levels"][str(message.author.id)]["level"]))
await asyncio.sleep(1)
await m.edit(content=str(bot.get_emoji(653161518212448266))+bot._(message.author, "levelup-notify", message.author.mention, gs["levels"][str(message.author.id)]["level"]))
except:
pass
else:
try:
m = await message.channel.send(str(bot.get_emoji(653161518212448266))+bot._(message.author, "levelup-notify", aut, gs["levels"][str(message.author.id)]["level"]))
await asyncio.sleep(1)
await m.edit(content=str(bot.get_emoji(653161518212448266))+bot._(message.author, "levelup-notify", message.author.mention, gs["levels"][str(message.author.id)]["level"]))
except:
pass
try:
if gs["reward"].get(str(gs["levels"][str(message.author.id)]["level"]), None):
rl = message.guild.get_role(
gs["reward"][str(gs["levels"][str(message.author.id)]["level"])])
await message.author.add_roles(rl)
except:
pass
bot.cursor.execute(
"UPDATE guilds SET levels = ? WHERE id = ?", (gs["levels"], message.guild.id))
@commands.is_owner()
@bot.command()
async def ldb(ctx, name):
bot.cursor.execute(f"select * from {name}")
sddb = bot.cursor.fetchall()
await ctx.send(f"{len(sddb)}")
@commands.is_owner()
@bot.command()
async def mentdb(ctx):
bot.cursor.execute(f"select * from users")
sddb = bot.cursor.fetchall()
async with ctx.channel.typing():
for ctt in sddb:
if not (ctt["id"] in [i.id for i in bot.users]):
bot.cursor.execute(f"delete from users where id = {ctt['id']}")
await ctx.send("完了しました☆")
@bot.command()
async def vpc(ctx):
await ctx.send(embed=ut.getEmbed("post count", str([f"{k}:{v}" for k, v in postcount.items()])))
@bot.command()
@commands.bot_has_permissions(manage_webhooks=True)
@commands.has_permissions(administrator=True)
async def rnotify(ctx, ch: int=None):
if ctx.author.guild_permissions.administrator or ctx.author.id == 404243934210949120:
tchid = ch or ctx.channel.id
tch = bot.get_channel(tchid)
fch = bot.get_channel(667351221106901042)
await fch.follow(destination=tch)
await ctx.send("フォローが完了しました。")
else:
await ctx.send("サーバー管理者である必要があります。")
@bot.command()
@commands.bot_has_permissions(manage_webhooks=True)
@commands.has_permissions(administrator=True)
async def rtopic(ctx, ch: int=None):
if ctx.author.guild_permissions.administrator or ctx.author.id == 404243934210949120:
tchid = ch or ctx.channel.id
tch = bot.get_channel(tchid)
fch = bot.get_channel(677862542298710037)
await fch.follow(destination=tch)
await ctx.send("フォローが完了しました。")
else:
await ctx.send("サーバー管理者である必要があります。")
bot.remove_command('help')
@bot.command()
async def ehelp(ctx, rcmd=None):
# English-language help
if rcmd is None:
page = 1
embed = discord.Embed(title=ctx._("help-1-t"),
description=ctx._("help-1-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
msg = await ctx.send(embed=embed)
await msg.add_reaction(bot.get_emoji(653161518195671041))
await msg.add_reaction(bot.get_emoji(653161518170505216))
await msg.add_reaction("🔍")
while True:
try:
r, u = await bot.wait_for("reaction_add", check=lambda r, u: r.message.id == msg.id and u.id == ctx.message.author.id, timeout=30)
except:
break
try:
await msg.remove_reaction(r, u)
except:
pass
if str(r) == str(bot.get_emoji(653161518170505216)):
if page == 14:
page = 1
else:
page = page + 1
embed = discord.Embed(title=ctx._(
f"help-{page}-t"), description=ctx._(f"help-{page}-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
await msg.edit(embed=embed)
elif str(r) == str(bot.get_emoji(653161518195671041)):
if page == 1:
page = 14
else:
page = page - 1
embed = discord.Embed(title=ctx._(
f"help-{page}-t"), description=ctx._(f"help-{page}-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
await msg.edit(embed=embed)
elif str(r) == "🔍":
await msg.remove_reaction(bot.get_emoji(653161518195671041), bot.user)
await msg.remove_reaction("🔍", bot.user)
await msg.remove_reaction(bot.get_emoji(653161518170505216), bot.user)
qm = await ctx.send(ctx._("help-s-send"))
try:
msg = await bot.wait_for('message', check=lambda m: m.author == ctx.author and m.channel == ctx.channel, timeout=60)
sewd = msg.content
except asyncio.TimeoutError:
pass
else:
try:
await msg.delete()
await qm.delete()
except:
pass
async with ctx.message.channel.typing():
lang = ctx.user_lang() or "ja"
with open(f"lang/{lang}.json", "r", encoding="utf-8") as j:
f = json.load(j)
sre = discord.Embed(title=ctx._(
"help-s-ret-title"), description=ctx._("help-s-ret-desc", sewd), color=bot.ec)
for k, v in f.items():
if k.startswith("h-"):
if sewd in k.replace("h-", "") or sewd in v:
sre.add_field(name=k.replace(
"h-", ""), value=v.replace(sewd, f"**{sewd}**"))
await ctx.send(embed=sre)
try:
await msg.remove_reaction(bot.get_emoji(653161518195671041), bot.user)
await msg.remove_reaction("🔍", bot.user)
await msg.remove_reaction(bot.get_emoji(653161518170505216), bot.user)
except:
pass
else:
embed = discord.Embed(title=str(rcmd), description=ctx._(
f"h-{str(rcmd)}"), color=bot.ec)
if embed.description == "":
await ctx.send(ctx._("h-notfound"))
else:
await ctx.send(embed=embed)
@bot.command()
@commands.bot_has_permissions(embed_links=True, external_emojis=True, add_reactions=True)
async def help(ctx, rcmd=None):
# help contents
if rcmd is None:
page = 1
embed = discord.Embed(title=ctx._("help-1-t"),
description=ctx._("help-1-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
msg = await ctx.send(embed=embed)
await msg.add_reaction(bot.get_emoji(653161518195671041))
await msg.add_reaction(bot.get_emoji(653161518170505216))
await msg.add_reaction("🔍")
while True:
try:
r, u = await bot.wait_for("reaction_add", check=lambda r, u: r.message.id == msg.id and u.id == ctx.message.author.id, timeout=30)
except:
break
try:
await msg.remove_reaction(r, u)
except:
pass
if str(r) == str(bot.get_emoji(653161518170505216)):
if page == 17:
page = 1
else:
page = page + 1
embed = discord.Embed(title=ctx._(
f"help-{page}-t"), description=ctx._(f"help-{page}-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
await msg.edit(embed=embed)
elif str(r) == str(bot.get_emoji(653161518195671041)):
if page == 1:
page = 17
else:
page = page - 1
embed = discord.Embed(title=ctx._(
f"help-{page}-t"), description=ctx._(f"help-{page}-d"), color=bot.ec)
embed.set_footer(text=f"page:{page}")
await msg.edit(embed=embed)
elif str(r) == "🔍":
await msg.remove_reaction(bot.get_emoji(653161518195671041), bot.user)
await msg.remove_reaction("🔍", bot.user)
await msg.remove_reaction(bot.get_emoji(653161518170505216), bot.user)
qm = await ctx.send(ctx._("help-s-send"))
try:
msg = await bot.wait_for('message', check=lambda m: m.author == ctx.author and m.channel == ctx.channel, timeout=60)
sewd = msg.content
except asyncio.TimeoutError:
pass
else:
try:
await msg.delete()
await qm.delete()
except:
pass
async with ctx.message.channel.typing():
lang = ctx.user_lang() or "ja"
with open(f"lang/{lang}.json", "r", encoding="utf-8") as j:
f = json.load(j)
sre = discord.Embed(title=ctx._(
"help-s-ret-title"), description=ctx._("help-s-ret-desc", sewd), color=bot.ec)
for k, v in f.items():
if k.startswith("nh-"):
if sewd in k.replace("nh-", "") or sewd in str(v):
sre.add_field(name=k.replace(
"nh-", ""), value=f"詳細を見るには`s-help {k.replace('nh-','')}`と送信")
await ctx.send(embed=sre)
try:
await msg.remove_reaction(bot.get_emoji(653161518195671041), bot.user)
await msg.remove_reaction("🔍", bot.user)
await msg.remove_reaction(bot.get_emoji(653161518170505216), bot.user)
except:
pass
else:
dcmd = ctx._(f"nh-{str(rcmd)}")
if str(dcmd) == "":
await ctx.send(ctx._("h-notfound"))
else:
embed = ut.getEmbed(dcmd[0], dcmd[1], bot.ec, *dcmd[2:])
await ctx.send(embed=embed)
@bot.event
async def on_command(ctx):
ch = bot.get_channel(693048961107230811)
e = discord.Embed(title=f"{ctx.command.name}の実行",
description=f"実行文:`{ctx.message.clean_content}`", color=bot.ec)
e.set_author(name=f"実行者:{str(ctx.author)}({ctx.author.id})",
icon_url=ctx.author.avatar_url_as(static_format="png"))
e.set_footer(text=f"実行サーバー:{ctx.guild.name}({ctx.guild.id})",
icon_url=ctx.guild.icon_url_as(static_format="png"))
e.add_field(name="実行チャンネル", value=ctx.channel.name)
e.timestamp = ctx.message.created_at
await ch.send(embed=e)
@bot.event
async def on_command_error(ctx, error):
# await ctx.send(f"{error}")
# global DoServercmd
"""if isinstance(error, commands.CommandNotFound):
if not DoServercmd:
embed = discord.Embed(title=ctx._("cmd-error-t"), description=ctx._("cmd-notfound-d"), color=bot.ec)
DoServercmd = False
await ctx.send(embed=embed)
el"""
if isinstance(error, commands.CommandOnCooldown):
# command cooldown
embed = discord.Embed(title=ctx._("cmd-error-t"), description=ctx._(
"cmd-cooldown-d", str(error.retry_after)[:4]), color=bot.ec)
await ctx.send(embed=embed)
elif isinstance(error, commands.NotOwner):
# owner-only command
embed = discord.Embed(title=ctx._("cmd-error-t"),
description=ctx._("only-mii-10"), color=bot.ec)
await ctx.send(embed=embed)
ch = bot.get_channel(652127085598474242)
await ch.send(embed=ut.getEmbed("エラーログ", f"コマンド:`{ctx.command.name}`\n```{str(error)}```", bot.ec, f"サーバー", ctx.guild.name, "実行メンバー", ctx.author.name, "メッセージ内容", ctx.message.content))
elif isinstance(error, commands.MissingRequiredArgument):
# missing required argument
embed = discord.Embed(title=ctx._("cmd-error-t"),
description=ctx._("pls-arg"), color=bot.ec)
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingPermissions):
embed = discord.Embed(title=ctx._("cmd-error-t"),
description=f"このコマンドの実行には、あなたに次の権限が必要です。\n```py\n{error.missing_perms}```", color=bot.ec)
try:
await ctx.send(embed=embed)
except:
await ctx.send(f'> {ctx._("cmd-error-t")}\n このコマンドの実行には、あなたに次の権限が必要です。\n```py\n{error.missing_perms}```')
elif isinstance(error, commands.BotMissingPermissions):
embed = discord.Embed(title=ctx._("cmd-error-t"),
description=f"このコマンドの実行には、Botに次の権限が必要です。\n```py\n{error.missing_perms}```", color=bot.ec)
try:
await ctx.send(embed=embed)
except:
await ctx.send(f'> {ctx._("cmd-error-t")}\n このコマンドの実行には、Botに次の権限が必要です。\n```py\n{error.missing_perms}```')
else:
# other exceptions
ch = bot.get_channel(652127085598474242)
msg = await ch.send(embed=ut.getEmbed("エラーログ", f"コマンド:`{ctx.command.name}`\n```{str(error)}```", bot.ec, f"サーバー", ctx.guild.name, "実行メンバー", ctx.author.name, "メッセージ内容", ctx.message.content))
await ctx.send(embed=ut.getEmbed(ctx._("com-error-t"), ctx._("cmd-other-d", error, bot.ec, "error id", msg.id, "サポートが必要ですか?", "[サポートサーバー](https://discord.gg/vtn2V3v)に参加して、「view-思惟奈ちゃんch」役職をつけて質問してみましょう!")))
"""
@tasks.loop(time=datetime.time(hour=23,minute=0,second=0))
async def invite_tweet():
try:
bot.twi.statuses.update(status=f"[定期投稿]\nみぃてん☆の公開Discordサーバー:https://discord.gg/GbHq7fz\nみぃてん☆制作、多機能Discordbot思惟奈ちゃん:https://discordapp.com/oauth2/authorize?client_id=462885760043843584&permissions=8&scope=bot\n<この投稿は思惟奈ちゃんより行われました。>")
except:
dc=bot.get_user(404243934210949120)
await dc.send(f"have error:```{traceback.format_exc(1)}```")
@tasks.loop(time=datetime.time(hour=8,minute=0,second=0))
async def now_sina_tweet():
try:
bot.twi.statuses.update(status=f"[定期投稿]\n思惟奈ちゃんのいるサーバー数:{len(bot.guilds)}\n思惟奈ちゃんの公式サーバー:https://discord.gg/udA3qgZ\n<この投稿は思惟奈ちゃんより行われました。>")
except:
dc=bot.get_user(404243934210949120)
await dc.send(f"have error:```{traceback.format_exc(1)}```")
"""
apple_invite.setup(bot)
apple_foc.setup(bot)
# production token
bot.run(bot.BOT_TOKEN)
# test token
# bot.run(bot.BOT_TEST_TOKEN)
avg_line_length: 48.190394 | max_line_length: 6,936 | alphanum_fraction: 0.605322

hexsha: 1e16ac60f63aceaa28d3c2050aca01732bb95f2e | size: 40,867 | ext: py | lang: Python
repo path: Lib/asyncio/selector_events.py | repo name: Victor-Savu/cpython-old | repo head hexsha: 87060fc4b043dd0da0a29a0ffb8eb92d8cad8dd3 | licenses: ["PSF-2.0"]
stars / issues / forks: counts and event datetimes are null
"""Event loop using a selector and related classes.
A selector is a "notify-when-ready" multiplexer. For a subclass which
also includes support for signal handling, see the unix_events sub-module.
"""
__all__ = ['BaseSelectorEventLoop']
import collections
import errno
import functools
import socket
import warnings
try:
import ssl
except ImportError: # pragma: no cover
ssl = None
from . import base_events
from . import compat
from . import constants
from . import events
from . import futures
from . import selectors
from . import transports
from . import sslproto
from .coroutines import coroutine
from .log import logger
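# Rough usage sketch (illustrative only — in practice the concrete loop class comes
# from the platform policy in unix_events/windows_events rather than being built here;
# `sock` and `on_readable` below are hypothetical names):
#
#     loop = asyncio.SelectorEventLoop()            # a BaseSelectorEventLoop subclass
#     loop.add_reader(sock.fileno(), on_readable)   # fire callback when sock is readable
#     loop.run_forever()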
def _test_selector_event(selector, fd, event):
# Test if the selector is monitoring 'event' events
# for the file descriptor 'fd'.
try:
key = selector.get_key(fd)
except KeyError:
return False
else:
return bool(key.events & event)
if hasattr(socket, 'TCP_NODELAY'):
def _set_nodelay(sock):
if (sock.family in {socket.AF_INET, socket.AF_INET6} and
sock.type == socket.SOCK_STREAM and
sock.proto == socket.IPPROTO_TCP):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
else:
def _set_nodelay(sock):
pass
class BaseSelectorEventLoop(base_events.BaseEventLoop):
"""Selector event loop.
See events.EventLoop for API specification.
"""
def __init__(self, selector=None):
super().__init__()
if selector is None:
selector = selectors.DefaultSelector()
logger.debug('Using selector: %s', selector.__class__.__name__)
self._selector = selector
self._make_self_pipe()
def _make_socket_transport(self, sock, protocol, waiter=None, *,
extra=None, server=None):
return _SelectorSocketTransport(self, sock, protocol, waiter,
extra, server)
def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter=None,
*, server_side=False, server_hostname=None,
extra=None, server=None):
if not sslproto._is_sslproto_available():
return self._make_legacy_ssl_transport(
rawsock, protocol, sslcontext, waiter,
server_side=server_side, server_hostname=server_hostname,
extra=extra, server=server)
ssl_protocol = sslproto.SSLProtocol(self, protocol, sslcontext, waiter,
server_side, server_hostname)
_SelectorSocketTransport(self, rawsock, ssl_protocol,
extra=extra, server=server)
return ssl_protocol._app_transport
def _make_legacy_ssl_transport(self, rawsock, protocol, sslcontext,
waiter, *,
server_side=False, server_hostname=None,
extra=None, server=None):
# Use the legacy API: SSL_write, SSL_read, etc. The legacy API is used
# on Python 3.4 and older, when ssl.MemoryBIO is not available.
return _SelectorSslTransport(
self, rawsock, protocol, sslcontext, waiter,
server_side, server_hostname, extra, server)
def _make_datagram_transport(self, sock, protocol,
address=None, waiter=None, extra=None):
return _SelectorDatagramTransport(self, sock, protocol,
address, waiter, extra)
def close(self):
if self.is_running():
raise RuntimeError("Cannot close a running event loop")
if self.is_closed():
return
self._close_self_pipe()
super().close()
if self._selector is not None:
self._selector.close()
self._selector = None
def _socketpair(self):
raise NotImplementedError
def _close_self_pipe(self):
self.remove_reader(self._ssock.fileno())
self._ssock.close()
self._ssock = None
self._csock.close()
self._csock = None
self._internal_fds -= 1
def _make_self_pipe(self):
# A self-socket, really. :-)
self._ssock, self._csock = self._socketpair()
self._ssock.setblocking(False)
self._csock.setblocking(False)
self._internal_fds += 1
self.add_reader(self._ssock.fileno(), self._read_from_self)
def _process_self_data(self, data):
pass
def _read_from_self(self):
while True:
try:
data = self._ssock.recv(4096)
if not data:
break
self._process_self_data(data)
except InterruptedError:
continue
except BlockingIOError:
break
def _write_to_self(self):
# This may be called from a different thread, possibly after
# _close_self_pipe() has been called or even while it is
# running. Guard for self._csock being None or closed. When
# a socket is closed, send() raises OSError (with errno set to
# EBADF, but let's not rely on the exact error code).
csock = self._csock
if csock is not None:
try:
csock.send(b'\0')
except OSError:
if self._debug:
logger.debug("Fail to write a null byte into the "
"self-pipe socket",
exc_info=True)
def _start_serving(self, protocol_factory, sock,
sslcontext=None, server=None, backlog=100):
self.add_reader(sock.fileno(), self._accept_connection,
protocol_factory, sock, sslcontext, server, backlog)
def _accept_connection(self, protocol_factory, sock,
sslcontext=None, server=None, backlog=100):
# This method is only called once for each event loop tick where the
# listening socket has triggered an EVENT_READ. There may be multiple
# connections waiting for an .accept() so it is called in a loop.
# See https://bugs.python.org/issue27906 for more details.
for _ in range(backlog):
try:
conn, addr = sock.accept()
if self._debug:
logger.debug("%r got a new connection from %r: %r",
server, addr, conn)
conn.setblocking(False)
except (BlockingIOError, InterruptedError, ConnectionAbortedError):
# Early exit because the socket accept buffer is empty.
return None
except OSError as exc:
# There's nowhere to send the error, so just log it.
if exc.errno in (errno.EMFILE, errno.ENFILE,
errno.ENOBUFS, errno.ENOMEM):
# Some platforms (e.g. Linux) keep reporting the FD as
# ready, so we remove the read handler temporarily.
# We'll try again in a while.
self.call_exception_handler({
'message': 'socket.accept() out of system resource',
'exception': exc,
'socket': sock,
})
self.remove_reader(sock.fileno())
self.call_later(constants.ACCEPT_RETRY_DELAY,
self._start_serving,
protocol_factory, sock, sslcontext, server,
backlog)
else:
raise # The event loop will catch, log and ignore it.
else:
extra = {'peername': addr}
accept = self._accept_connection2(protocol_factory, conn, extra,
sslcontext, server)
self.create_task(accept)
@coroutine
def _accept_connection2(self, protocol_factory, conn, extra,
sslcontext=None, server=None):
protocol = None
transport = None
try:
protocol = protocol_factory()
waiter = self.create_future()
if sslcontext:
transport = self._make_ssl_transport(
conn, protocol, sslcontext, waiter=waiter,
server_side=True, extra=extra, server=server)
else:
transport = self._make_socket_transport(
conn, protocol, waiter=waiter, extra=extra,
server=server)
try:
yield from waiter
except:
transport.close()
raise
# It's now up to the protocol to handle the connection.
except Exception as exc:
if self._debug:
context = {
'message': ('Error on transport creation '
'for incoming connection'),
'exception': exc,
}
if protocol is not None:
context['protocol'] = protocol
if transport is not None:
context['transport'] = transport
self.call_exception_handler(context)
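# Bookkeeping convention for the four methods below: the selector key's `data` slot
# always holds a (reader_handle, writer_handle) pair, so add_/remove_reader and
# add_/remove_writer update one element of the pair while preserving the other.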
def add_reader(self, fd, callback, *args):
"""Add a reader callback."""
self._check_closed()
handle = events.Handle(callback, args, self)
try:
key = self._selector.get_key(fd)
except KeyError:
self._selector.register(fd, selectors.EVENT_READ,
(handle, None))
else:
mask, (reader, writer) = key.events, key.data
self._selector.modify(fd, mask | selectors.EVENT_READ,
(handle, writer))
if reader is not None:
reader.cancel()
def remove_reader(self, fd):
"""Remove a reader callback."""
if self.is_closed():
return False
try:
key = self._selector.get_key(fd)
except KeyError:
return False
else:
mask, (reader, writer) = key.events, key.data
mask &= ~selectors.EVENT_READ
if not mask:
self._selector.unregister(fd)
else:
self._selector.modify(fd, mask, (None, writer))
if reader is not None:
reader.cancel()
return True
else:
return False
def add_writer(self, fd, callback, *args):
"""Add a writer callback.."""
self._check_closed()
handle = events.Handle(callback, args, self)
try:
key = self._selector.get_key(fd)
except KeyError:
self._selector.register(fd, selectors.EVENT_WRITE,
(None, handle))
else:
mask, (reader, writer) = key.events, key.data
self._selector.modify(fd, mask | selectors.EVENT_WRITE,
(reader, handle))
if writer is not None:
writer.cancel()
def remove_writer(self, fd):
"""Remove a writer callback."""
if self.is_closed():
return False
try:
key = self._selector.get_key(fd)
except KeyError:
return False
else:
mask, (reader, writer) = key.events, key.data
# Remove both writer and connector.
mask &= ~selectors.EVENT_WRITE
if not mask:
self._selector.unregister(fd)
else:
self._selector.modify(fd, mask, (reader, None))
if writer is not None:
writer.cancel()
return True
else:
return False
def sock_recv(self, sock, n):
"""Receive data from the socket.
The return value is a bytes object representing the data received.
The maximum amount of data to be received at once is specified by n.
This method is a coroutine.
"""
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
fut = self.create_future()
self._sock_recv(fut, False, sock, n)
return fut
def _sock_recv(self, fut, registered, sock, n):
# _sock_recv() can add itself as an I/O callback if the operation can't
# be done immediately. Don't use it directly, call sock_recv().
fd = sock.fileno()
if registered:
# Remove the callback early. It should be rare that the
# selector says the fd is ready but the call still returns
# EAGAIN, and I am willing to take a hit in that case in
# order to simplify the common case.
self.remove_reader(fd)
if fut.cancelled():
return
try:
data = sock.recv(n)
except (BlockingIOError, InterruptedError):
self.add_reader(fd, self._sock_recv, fut, True, sock, n)
except Exception as exc:
fut.set_exception(exc)
else:
fut.set_result(data)
def sock_sendall(self, sock, data):
"""Send data to the socket.
The socket must be connected to a remote socket. This method continues
to send data from data until either all data has been sent or an
error occurs. None is returned on success. On error, an exception is
raised, and there is no way to determine how much data, if any, was
successfully processed by the receiving end of the connection.
This method is a coroutine.
"""
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
fut = self.create_future()
if data:
self._sock_sendall(fut, False, sock, data)
else:
fut.set_result(None)
return fut
def _sock_sendall(self, fut, registered, sock, data):
fd = sock.fileno()
if registered:
self.remove_writer(fd)
if fut.cancelled():
return
try:
n = sock.send(data)
except (BlockingIOError, InterruptedError):
n = 0
except Exception as exc:
fut.set_exception(exc)
return
if n == len(data):
fut.set_result(None)
else:
if n:
data = data[n:]
self.add_writer(fd, self._sock_sendall, fut, True, sock, data)
@coroutine
def sock_connect(self, sock, address):
"""Connect to a remote socket at address.
This method is a coroutine.
"""
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
if not hasattr(socket, 'AF_UNIX') or sock.family != socket.AF_UNIX:
resolved = base_events._ensure_resolved(
address, family=sock.family, proto=sock.proto, loop=self)
if not resolved.done():
yield from resolved
_, _, _, _, address = resolved.result()[0]
fut = self.create_future()
self._sock_connect(fut, sock, address)
return (yield from fut)
def _sock_connect(self, fut, sock, address):
fd = sock.fileno()
try:
sock.connect(address)
except (BlockingIOError, InterruptedError):
# Issue #23618: When the C function connect() fails with EINTR, the
# connection runs in background. We have to wait until the socket
# becomes writable to be notified when the connection succeed or
# fails.
fut.add_done_callback(
functools.partial(self._sock_connect_done, fd))
self.add_writer(fd, self._sock_connect_cb, fut, sock, address)
except Exception as exc:
fut.set_exception(exc)
else:
fut.set_result(None)
def _sock_connect_done(self, fd, fut):
self.remove_writer(fd)
def _sock_connect_cb(self, fut, sock, address):
if fut.cancelled():
return
try:
err = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
# Jump to any except clause below.
raise OSError(err, 'Connect call failed %s' % (address,))
except (BlockingIOError, InterruptedError):
# socket is still registered, the callback will be retried later
pass
except Exception as exc:
fut.set_exception(exc)
else:
fut.set_result(None)
def sock_accept(self, sock):
"""Accept a connection.
The socket must be bound to an address and listening for connections.
The return value is a pair (conn, address) where conn is a new socket
object usable to send and receive data on the connection, and address
is the address bound to the socket on the other end of the connection.
This method is a coroutine.
"""
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
fut = self.create_future()
self._sock_accept(fut, False, sock)
return fut
def _sock_accept(self, fut, registered, sock):
fd = sock.fileno()
if registered:
self.remove_reader(fd)
if fut.cancelled():
return
try:
conn, address = sock.accept()
conn.setblocking(False)
except (BlockingIOError, InterruptedError):
self.add_reader(fd, self._sock_accept, fut, True, sock)
except Exception as exc:
fut.set_exception(exc)
else:
fut.set_result((conn, address))
def _process_events(self, event_list):
for key, mask in event_list:
fileobj, (reader, writer) = key.fileobj, key.data
if mask & selectors.EVENT_READ and reader is not None:
if reader._cancelled:
self.remove_reader(fileobj)
else:
self._add_callback(reader)
if mask & selectors.EVENT_WRITE and writer is not None:
if writer._cancelled:
self.remove_writer(fileobj)
else:
self._add_callback(writer)
def _stop_serving(self, sock):
self.remove_reader(sock.fileno())
sock.close()
class _SelectorTransport(transports._FlowControlMixin,
transports.Transport):
max_size = 256 * 1024 # Buffer size passed to recv().
_buffer_factory = bytearray # Constructs initial value for self._buffer.
# Attribute used in the destructor: it must be set even if the constructor
# is not called (see _SelectorSslTransport which may start by raising an
# exception)
_sock = None
def __init__(self, loop, sock, protocol, extra=None, server=None):
super().__init__(extra, loop)
self._extra['socket'] = sock
self._extra['sockname'] = sock.getsockname()
if 'peername' not in self._extra:
try:
self._extra['peername'] = sock.getpeername()
except socket.error:
self._extra['peername'] = None
self._sock = sock
self._sock_fd = sock.fileno()
self._protocol = protocol
self._protocol_connected = True
self._server = server
self._buffer = self._buffer_factory()
self._conn_lost = 0 # Set when call to connection_lost scheduled.
self._closing = False # Set when close() called.
if self._server is not None:
self._server._attach()
def __repr__(self):
info = [self.__class__.__name__]
if self._sock is None:
info.append('closed')
elif self._closing:
info.append('closing')
info.append('fd=%s' % self._sock_fd)
# test if the transport was closed
if self._loop is not None and not self._loop.is_closed():
polling = _test_selector_event(self._loop._selector,
self._sock_fd, selectors.EVENT_READ)
if polling:
info.append('read=polling')
else:
info.append('read=idle')
polling = _test_selector_event(self._loop._selector,
self._sock_fd,
selectors.EVENT_WRITE)
if polling:
state = 'polling'
else:
state = 'idle'
bufsize = self.get_write_buffer_size()
info.append('write=<%s, bufsize=%s>' % (state, bufsize))
return '<%s>' % ' '.join(info)
def abort(self):
self._force_close(None)
def set_protocol(self, protocol):
self._protocol = protocol
def get_protocol(self):
return self._protocol
def is_closing(self):
return self._closing
def close(self):
if self._closing:
return
self._closing = True
self._loop.remove_reader(self._sock_fd)
if not self._buffer:
self._conn_lost += 1
self._loop.remove_writer(self._sock_fd)
self._loop.call_soon(self._call_connection_lost, None)
# On Python 3.3 and older, objects with a destructor part of a reference
# cycle are never destroyed. That is no longer the case on Python 3.4,
# thanks to PEP 442.
if compat.PY34:
def __del__(self):
if self._sock is not None:
warnings.warn("unclosed transport %r" % self, ResourceWarning,
source=self)
self._sock.close()
def _fatal_error(self, exc, message='Fatal error on transport'):
# Should be called from exception handler only.
if isinstance(exc, base_events._FATAL_ERROR_IGNORE):
if self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=True)
else:
self._loop.call_exception_handler({
'message': message,
'exception': exc,
'transport': self,
'protocol': self._protocol,
})
self._force_close(exc)
def _force_close(self, exc):
if self._conn_lost:
return
if self._buffer:
self._buffer.clear()
self._loop.remove_writer(self._sock_fd)
if not self._closing:
self._closing = True
self._loop.remove_reader(self._sock_fd)
self._conn_lost += 1
self._loop.call_soon(self._call_connection_lost, exc)
def _call_connection_lost(self, exc):
try:
if self._protocol_connected:
self._protocol.connection_lost(exc)
finally:
self._sock.close()
self._sock = None
self._protocol = None
self._loop = None
server = self._server
if server is not None:
server._detach()
self._server = None
def get_write_buffer_size(self):
return len(self._buffer)
class _SelectorSocketTransport(_SelectorTransport):
def __init__(self, loop, sock, protocol, waiter=None,
extra=None, server=None):
super().__init__(loop, sock, protocol, extra, server)
self._eof = False
self._paused = False
# Disable the Nagle algorithm -- small writes will be
# sent without waiting for the TCP ACK. This generally
# decreases the latency (in some cases significantly).
_set_nodelay(self._sock)
self._loop.call_soon(self._protocol.connection_made, self)
# only start reading when connection_made() has been called
self._loop.call_soon(self._loop.add_reader,
self._sock_fd, self._read_ready)
if waiter is not None:
# only wake up the waiter when connection_made() has been called
self._loop.call_soon(futures._set_result_unless_cancelled,
waiter, None)
def pause_reading(self):
if self._closing:
raise RuntimeError('Cannot pause_reading() when closing')
if self._paused:
raise RuntimeError('Already paused')
self._paused = True
self._loop.remove_reader(self._sock_fd)
if self._loop.get_debug():
logger.debug("%r pauses reading", self)
def resume_reading(self):
if not self._paused:
raise RuntimeError('Not paused')
self._paused = False
if self._closing:
return
self._loop.add_reader(self._sock_fd, self._read_ready)
if self._loop.get_debug():
logger.debug("%r resumes reading", self)
def _read_ready(self):
if self._conn_lost:
return
try:
data = self._sock.recv(self.max_size)
except (BlockingIOError, InterruptedError):
pass
except Exception as exc:
self._fatal_error(exc, 'Fatal read error on socket transport')
else:
if data:
self._protocol.data_received(data)
else:
if self._loop.get_debug():
logger.debug("%r received EOF", self)
keep_open = self._protocol.eof_received()
if keep_open:
# We're keeping the connection open so the
# protocol can write more, but we still can't
# receive more, so remove the reader callback.
self._loop.remove_reader(self._sock_fd)
else:
self.close()
def write(self, data):
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be a bytes-like object, '
'not %r' % type(data).__name__)
if self._eof:
raise RuntimeError('Cannot call write() after write_eof()')
if not data:
return
if self._conn_lost:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
logger.warning('socket.send() raised exception.')
self._conn_lost += 1
return
if not self._buffer:
# Optimization: try to send now.
try:
n = self._sock.send(data)
except (BlockingIOError, InterruptedError):
pass
except Exception as exc:
self._fatal_error(exc, 'Fatal write error on socket transport')
return
else:
data = data[n:]
if not data:
return
# Not all was written; register write handler.
self._loop.add_writer(self._sock_fd, self._write_ready)
# Add it to the buffer.
self._buffer.extend(data)
self._maybe_pause_protocol()
def _write_ready(self):
assert self._buffer, 'Data should not be empty'
if self._conn_lost:
return
try:
n = self._sock.send(self._buffer)
except (BlockingIOError, InterruptedError):
pass
except Exception as exc:
self._loop.remove_writer(self._sock_fd)
self._buffer.clear()
self._fatal_error(exc, 'Fatal write error on socket transport')
else:
if n:
del self._buffer[:n]
self._maybe_resume_protocol() # May append to buffer.
if not self._buffer:
self._loop.remove_writer(self._sock_fd)
if self._closing:
self._call_connection_lost(None)
elif self._eof:
self._sock.shutdown(socket.SHUT_WR)
def write_eof(self):
if self._eof:
return
self._eof = True
if not self._buffer:
self._sock.shutdown(socket.SHUT_WR)
def can_write_eof(self):
return True
class _SelectorSslTransport(_SelectorTransport):
_buffer_factory = bytearray
def __init__(self, loop, rawsock, protocol, sslcontext, waiter=None,
server_side=False, server_hostname=None,
extra=None, server=None):
if ssl is None:
raise RuntimeError('stdlib ssl module not available')
if not sslcontext:
sslcontext = sslproto._create_transport_context(server_side, server_hostname)
wrap_kwargs = {
'server_side': server_side,
'do_handshake_on_connect': False,
}
if server_hostname and not server_side:
wrap_kwargs['server_hostname'] = server_hostname
sslsock = sslcontext.wrap_socket(rawsock, **wrap_kwargs)
super().__init__(loop, sslsock, protocol, extra, server)
# the protocol connection is only made after the SSL handshake
self._protocol_connected = False
self._server_hostname = server_hostname
self._waiter = waiter
self._sslcontext = sslcontext
self._paused = False
# SSL-specific extra info. (peercert is set later)
self._extra.update(sslcontext=sslcontext)
if self._loop.get_debug():
logger.debug("%r starts SSL handshake", self)
start_time = self._loop.time()
else:
start_time = None
self._on_handshake(start_time)
def _wakeup_waiter(self, exc=None):
if self._waiter is None:
return
if not self._waiter.cancelled():
if exc is not None:
self._waiter.set_exception(exc)
else:
self._waiter.set_result(None)
self._waiter = None
def _on_handshake(self, start_time):
try:
self._sock.do_handshake()
except ssl.SSLWantReadError:
self._loop.add_reader(self._sock_fd,
self._on_handshake, start_time)
return
except ssl.SSLWantWriteError:
self._loop.add_writer(self._sock_fd,
self._on_handshake, start_time)
return
except BaseException as exc:
if self._loop.get_debug():
logger.warning("%r: SSL handshake failed",
self, exc_info=True)
self._loop.remove_reader(self._sock_fd)
self._loop.remove_writer(self._sock_fd)
self._sock.close()
self._wakeup_waiter(exc)
if isinstance(exc, Exception):
return
else:
raise
self._loop.remove_reader(self._sock_fd)
self._loop.remove_writer(self._sock_fd)
peercert = self._sock.getpeercert()
if not hasattr(self._sslcontext, 'check_hostname'):
# Verify hostname if requested, Python 3.4+ uses check_hostname
# and checks the hostname in do_handshake()
if (self._server_hostname and
self._sslcontext.verify_mode != ssl.CERT_NONE):
try:
ssl.match_hostname(peercert, self._server_hostname)
except Exception as exc:
if self._loop.get_debug():
logger.warning("%r: SSL handshake failed "
"on matching the hostname",
self, exc_info=True)
self._sock.close()
self._wakeup_waiter(exc)
return
# Add extra info that becomes available after handshake.
self._extra.update(peercert=peercert,
cipher=self._sock.cipher(),
compression=self._sock.compression(),
ssl_object=self._sock,
)
self._read_wants_write = False
self._write_wants_read = False
self._loop.add_reader(self._sock_fd, self._read_ready)
self._protocol_connected = True
self._loop.call_soon(self._protocol.connection_made, self)
# only wake up the waiter when connection_made() has been called
self._loop.call_soon(self._wakeup_waiter)
if self._loop.get_debug():
dt = self._loop.time() - start_time
logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3)
def pause_reading(self):
# XXX This is a bit icky, given the comment at the top of
# _read_ready(). Is it possible to evoke a deadlock? I don't
# know, although it doesn't look like it; write() will still
# accept more data for the buffer and eventually the app will
# call resume_reading() again, and things will flow again.
if self._closing:
raise RuntimeError('Cannot pause_reading() when closing')
if self._paused:
raise RuntimeError('Already paused')
self._paused = True
self._loop.remove_reader(self._sock_fd)
if self._loop.get_debug():
logger.debug("%r pauses reading", self)
def resume_reading(self):
if not self._paused:
raise RuntimeError('Not paused')
self._paused = False
if self._closing:
return
self._loop.add_reader(self._sock_fd, self._read_ready)
if self._loop.get_debug():
logger.debug("%r resumes reading", self)
def _read_ready(self):
if self._conn_lost:
return
if self._write_wants_read:
self._write_wants_read = False
self._write_ready()
if self._buffer:
self._loop.add_writer(self._sock_fd, self._write_ready)
try:
data = self._sock.recv(self.max_size)
except (BlockingIOError, InterruptedError, ssl.SSLWantReadError):
pass
except ssl.SSLWantWriteError:
self._read_wants_write = True
self._loop.remove_reader(self._sock_fd)
self._loop.add_writer(self._sock_fd, self._write_ready)
except Exception as exc:
self._fatal_error(exc, 'Fatal read error on SSL transport')
else:
if data:
self._protocol.data_received(data)
else:
try:
if self._loop.get_debug():
logger.debug("%r received EOF", self)
keep_open = self._protocol.eof_received()
if keep_open:
logger.warning('returning true from eof_received() '
'has no effect when using ssl')
finally:
self.close()
def _write_ready(self):
if self._conn_lost:
return
if self._read_wants_write:
self._read_wants_write = False
self._read_ready()
if not (self._paused or self._closing):
self._loop.add_reader(self._sock_fd, self._read_ready)
if self._buffer:
try:
n = self._sock.send(self._buffer)
except (BlockingIOError, InterruptedError, ssl.SSLWantWriteError):
n = 0
except ssl.SSLWantReadError:
n = 0
self._loop.remove_writer(self._sock_fd)
self._write_wants_read = True
except Exception as exc:
self._loop.remove_writer(self._sock_fd)
self._buffer.clear()
self._fatal_error(exc, 'Fatal write error on SSL transport')
return
if n:
del self._buffer[:n]
self._maybe_resume_protocol() # May append to buffer.
if not self._buffer:
self._loop.remove_writer(self._sock_fd)
if self._closing:
self._call_connection_lost(None)
def write(self, data):
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be a bytes-like object, '
'not %r' % type(data).__name__)
if not data:
return
if self._conn_lost:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
logger.warning('socket.send() raised exception.')
self._conn_lost += 1
return
if not self._buffer:
self._loop.add_writer(self._sock_fd, self._write_ready)
# Add it to the buffer.
self._buffer.extend(data)
self._maybe_pause_protocol()
def can_write_eof(self):
return False
class _SelectorDatagramTransport(_SelectorTransport):
_buffer_factory = collections.deque
def __init__(self, loop, sock, protocol, address=None,
waiter=None, extra=None):
super().__init__(loop, sock, protocol, extra)
self._address = address
self._loop.call_soon(self._protocol.connection_made, self)
# only start reading when connection_made() has been called
self._loop.call_soon(self._loop.add_reader,
self._sock_fd, self._read_ready)
if waiter is not None:
# only wake up the waiter when connection_made() has been called
self._loop.call_soon(futures._set_result_unless_cancelled,
waiter, None)
def get_write_buffer_size(self):
return sum(len(data) for data, _ in self._buffer)
def _read_ready(self):
if self._conn_lost:
return
try:
data, addr = self._sock.recvfrom(self.max_size)
except (BlockingIOError, InterruptedError):
pass
except OSError as exc:
self._protocol.error_received(exc)
except Exception as exc:
self._fatal_error(exc, 'Fatal read error on datagram transport')
else:
self._protocol.datagram_received(data, addr)
def sendto(self, data, addr=None):
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError('data argument must be a bytes-like object, '
'not %r' % type(data).__name__)
if not data:
return
if self._address and addr not in (None, self._address):
raise ValueError('Invalid address: must be None or %s' %
(self._address,))
if self._conn_lost and self._address:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
logger.warning('socket.send() raised exception.')
self._conn_lost += 1
return
if not self._buffer:
# Attempt to send it right away first.
try:
if self._address:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
return
except (BlockingIOError, InterruptedError):
self._loop.add_writer(self._sock_fd, self._sendto_ready)
except OSError as exc:
self._protocol.error_received(exc)
return
except Exception as exc:
self._fatal_error(exc,
'Fatal write error on datagram transport')
return
# Ensure that what we buffer is immutable.
self._buffer.append((bytes(data), addr))
self._maybe_pause_protocol()
def _sendto_ready(self):
while self._buffer:
data, addr = self._buffer.popleft()
try:
if self._address:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
except (BlockingIOError, InterruptedError):
self._buffer.appendleft((data, addr)) # Try again later.
break
except OSError as exc:
self._protocol.error_received(exc)
return
except Exception as exc:
self._fatal_error(exc,
'Fatal write error on datagram transport')
return
self._maybe_resume_protocol() # May append to buffer.
if not self._buffer:
self._loop.remove_writer(self._sock_fd)
if self._closing:
self._call_connection_lost(None)
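# Illustrative sketch (not part of this module): how a user-level protocol
# sees the callbacks driven by the transports above.  Names below are
# assumptions for the example only.
#
#   class EchoProtocol(asyncio.Protocol):
#       def connection_made(self, transport):
#           self.transport = transport      # scheduled via call_soon() above
#       def data_received(self, data):
#           self.transport.write(data)      # buffered and flushed by write()
#       def eof_received(self):
#           return False                    # falsy -> the transport is closed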
| 36.750899 | 89 | 0.562606 |
d294584157e518e479762d8cb465eaf03c3797cf
| 3,297 |
py
|
Python
|
src/loss/loss.py
|
praeclarumjj3/AOT-GAN-Experiments
|
1e66eaa6d393441f08768f4b1962bfc86da0e900
|
[
"MIT"
] | 3 |
2021-05-15T17:32:06.000Z
|
2022-02-07T11:25:20.000Z
|
src/loss/loss.py
|
praeclarumjj3/AOT-GAN-Experiments
|
1e66eaa6d393441f08768f4b1962bfc86da0e900
|
[
"MIT"
] | null | null | null |
src/loss/loss.py
|
praeclarumjj3/AOT-GAN-Experiments
|
1e66eaa6d393441f08768f4b1962bfc86da0e900
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from .common import VGG19, gaussian_blur
class L1():
def __init__(self,):
self.calc = torch.nn.L1Loss()
def __call__(self, x, y):
return self.calc(x, y)
class Perceptual(nn.Module):
def __init__(self, weights=[1.0, 1.0, 1.0, 1.0, 1.0]):
super(Perceptual, self).__init__()
self.vgg = VGG19().cuda()
self.criterion = torch.nn.L1Loss()
self.weights = weights
def __call__(self, x, y):
x_vgg, y_vgg = self.vgg(x), self.vgg(y)
content_loss = 0.0
prefix = [1, 2, 3, 4, 5]
for i in range(5):
content_loss += self.weights[i] * self.criterion(
x_vgg[f'relu{prefix[i]}_1'], y_vgg[f'relu{prefix[i]}_1'])
return content_loss
class Style(nn.Module):
def __init__(self):
super(Style, self).__init__()
self.vgg = VGG19().cuda()
self.criterion = torch.nn.L1Loss()
def compute_gram(self, x):
b, c, h, w = x.size()
f = x.view(b, c, w * h)
f_T = f.transpose(1, 2)
G = f.bmm(f_T) / (h * w * c)
return G
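    # Note: with f of shape (b, c, h*w) and f_T of shape (b, h*w, c), the Gram
    # matrix G has shape (b, c, c), normalized by the feature map size.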
def __call__(self, x, y):
x_vgg, y_vgg = self.vgg(x), self.vgg(y)
style_loss = 0.0
prefix = [2, 3, 4, 5]
posfix = [2, 4, 4, 2]
for pre, pos in list(zip(prefix, posfix)):
style_loss += self.criterion(
self.compute_gram(x_vgg[f'relu{pre}_{pos}']), self.compute_gram(y_vgg[f'relu{pre}_{pos}']))
return style_loss
class nsgan():
def __init__(self, ):
self.loss_fn = torch.nn.Softplus()
def __call__(self, netD, fake, real):
fake_detach = fake.detach()
d_fake = netD(fake_detach)
d_real = netD(real)
dis_loss = self.loss_fn(-d_real).mean() + self.loss_fn(d_fake).mean()
g_fake = netD(fake)
gen_loss = self.loss_fn(-g_fake).mean()
return dis_loss, gen_loss
class smgan():
def __init__(self, ksize=71):
self.ksize = ksize
self.loss_fn = nn.MSELoss()
def __call__(self, netD, fake, real, masks):
fake_detach = fake.detach()
g_fake = netD(fake)
d_fake = netD(fake_detach)
d_real = netD(real)
_, _, h, w = g_fake.size()
b, c, ht, wt = masks.size()
# Handle inconsistent size between outputs and masks
if h != ht or w != wt:
g_fake = F.interpolate(g_fake, size=(ht, wt), mode='bilinear', align_corners=True)
d_fake = F.interpolate(d_fake, size=(ht, wt), mode='bilinear', align_corners=True)
d_real = F.interpolate(d_real, size=(ht, wt), mode='bilinear', align_corners=True)
d_fake_label = gaussian_blur(1 - masks, (self.ksize, self.ksize), (10, 10)).detach().cuda()
d_real_label = torch.ones_like(d_real).cuda()
g_fake_label = torch.ones_like(g_fake).cuda()
dis_loss = self.loss_fn(d_fake, d_fake_label) + self.loss_fn(d_real, d_real_label)
gen_loss = (self.loss_fn(g_fake, g_fake_label) * masks + 1e-7) / (torch.mean(masks) + 1e-7)
return dis_loss.mean(), gen_loss.mean()
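# Hypothetical usage sketch (the names `netD`, `pred`, `target` and `masks`
# are placeholders, not defined in this file):
#
#   rec_loss = L1()(pred, target)
#   adv = smgan(ksize=71)
#   dis_loss, gen_loss = adv(netD, pred, target, masks)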
| 32.643564 | 108 | 0.556263 |
4d0aac44eebba9779e7819c50f80363f0c70344c
| 22,629 |
py
|
Python
|
src/ui/ui_main_dlg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 75 |
2017-03-20T06:33:14.000Z
|
2022-02-15T16:16:45.000Z
|
src/ui/ui_main_dlg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 42 |
2017-10-25T06:34:54.000Z
|
2022-02-10T20:53:46.000Z
|
src/ui/ui_main_dlg.py
|
frosted97/dash-masternode-tool
|
d824740309ab878d745e41d39f274e952111542f
|
[
"MIT"
] | 98 |
2017-03-20T05:27:36.000Z
|
2022-03-20T05:03:08.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file ui_main_dlg.ui
#
# Created by: PyQt5 UI code generator
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1027, 530)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setStyleSheet("")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout.setContentsMargins(-1, -1, -1, 6)
self.verticalLayout.setObjectName("verticalLayout")
self.layMessage = QtWidgets.QHBoxLayout()
self.layMessage.setContentsMargins(0, -1, -1, 0)
self.layMessage.setSpacing(0)
self.layMessage.setObjectName("layMessage")
self.lblMessage = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lblMessage.sizePolicy().hasHeightForWidth())
self.lblMessage.setSizePolicy(sizePolicy)
self.lblMessage.setText("")
self.lblMessage.setWordWrap(True)
self.lblMessage.setOpenExternalLinks(False)
self.lblMessage.setObjectName("lblMessage")
self.layMessage.addWidget(self.lblMessage)
self.verticalLayout.addLayout(self.layMessage)
self.gbMain = QtWidgets.QGroupBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.gbMain.sizePolicy().hasHeightForWidth())
self.gbMain.setSizePolicy(sizePolicy)
self.gbMain.setTitle("")
self.gbMain.setObjectName("gbMain")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.gbMain)
self.verticalLayout_2.setContentsMargins(6, 6, 6, 6)
self.verticalLayout_2.setSpacing(6)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout.addWidget(self.gbMain)
self.verticalLayout.setStretch(1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusBar = QtWidgets.QStatusBar(MainWindow)
self.statusBar.setObjectName("statusBar")
MainWindow.setStatusBar(self.statusBar)
self.menuBar = QtWidgets.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1027, 22))
self.menuBar.setObjectName("menuBar")
self.menuTools = QtWidgets.QMenu(self.menuBar)
self.menuTools.setObjectName("menuTools")
self.menuClear = QtWidgets.QMenu(self.menuTools)
self.menuClear.setObjectName("menuClear")
self.menuFile = QtWidgets.QMenu(self.menuBar)
self.menuFile.setObjectName("menuFile")
self.action_open_recent_files = QtWidgets.QMenu(self.menuFile)
self.action_open_recent_files.setObjectName("action_open_recent_files")
self.menuMasternode = QtWidgets.QMenu(self.menuBar)
self.menuMasternode.setObjectName("menuMasternode")
MainWindow.setMenuBar(self.menuBar)
self.toolBar = QtWidgets.QToolBar(MainWindow)
self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
self.toolBar.setFloatable(False)
self.toolBar.setObjectName("toolBar")
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.action_hw_wallet = QtWidgets.QAction(MainWindow)
self.action_hw_wallet.setObjectName("action_hw_wallet")
self.action_sign_message_with_collateral_addr = QtWidgets.QAction(MainWindow)
self.action_sign_message_with_collateral_addr.setObjectName("action_sign_message_with_collateral_addr")
self.action_load_config_file = QtWidgets.QAction(MainWindow)
self.action_load_config_file.setObjectName("action_load_config_file")
self.action_save_config_file_as = QtWidgets.QAction(MainWindow)
self.action_save_config_file_as.setObjectName("action_save_config_file_as")
self.action_save_config_file = QtWidgets.QAction(MainWindow)
self.action_save_config_file.setObjectName("action_save_config_file")
self.action_check_network_connection = QtWidgets.QAction(MainWindow)
self.action_check_network_connection.setObjectName("action_check_network_connection")
self.action_open_settings_window = QtWidgets.QAction(MainWindow)
self.action_open_settings_window.setMenuRole(QtWidgets.QAction.PreferencesRole)
self.action_open_settings_window.setObjectName("action_open_settings_window")
self.action_open_proposals_window = QtWidgets.QAction(MainWindow)
self.action_open_proposals_window.setObjectName("action_open_proposals_window")
self.action_connect_hw = QtWidgets.QAction(MainWindow)
self.action_connect_hw.setObjectName("action_connect_hw")
self.action_disconnect_hw = QtWidgets.QAction(MainWindow)
self.action_disconnect_hw.setObjectName("action_disconnect_hw")
self.action_hw_tools = QtWidgets.QAction(MainWindow)
self.action_hw_tools.setObjectName("action_hw_tools")
self.action_check_for_updates = QtWidgets.QAction(MainWindow)
self.action_check_for_updates.setObjectName("action_check_for_updates")
self.action_open_log_file = QtWidgets.QAction(MainWindow)
self.action_open_log_file.setObjectName("action_open_log_file")
self.action_about_app = QtWidgets.QAction(MainWindow)
self.action_about_app.setMenuRole(QtWidgets.QAction.AboutRole)
self.action_about_app.setObjectName("action_about_app")
self.action_import_masternode_conf = QtWidgets.QAction(MainWindow)
self.action_import_masternode_conf.setObjectName("action_import_masternode_conf")
self.action_about_qt = QtWidgets.QAction(MainWindow)
self.action_about_qt.setMenuRole(QtWidgets.QAction.AboutQtRole)
self.action_about_qt.setObjectName("action_about_qt")
self.action_gen_mn_priv_key_compressed = QtWidgets.QAction(MainWindow)
self.action_gen_mn_priv_key_compressed.setObjectName("action_gen_mn_priv_key_compressed")
self.action_gen_mn_priv_key_uncompressed = QtWidgets.QAction(MainWindow)
self.action_gen_mn_priv_key_uncompressed.setObjectName("action_gen_mn_priv_key_uncompressed")
self.action_command_console = QtWidgets.QAction(MainWindow)
self.action_command_console.setObjectName("action_command_console")
self.action_run_trezor_emulator = QtWidgets.QAction(MainWindow)
self.action_run_trezor_emulator.setObjectName("action_run_trezor_emulator")
self.action_open_data_folder = QtWidgets.QAction(MainWindow)
self.action_open_data_folder.setObjectName("action_open_data_folder")
self.action_clear_wallet_cache = QtWidgets.QAction(MainWindow)
self.action_clear_wallet_cache.setObjectName("action_clear_wallet_cache")
self.action_clear_proposals_cache = QtWidgets.QAction(MainWindow)
self.action_clear_proposals_cache.setObjectName("action_clear_proposals_cache")
self.action_restore_config_from_backup = QtWidgets.QAction(MainWindow)
self.action_restore_config_from_backup.setObjectName("action_restore_config_from_backup")
self.action_sign_message_with_owner_key = QtWidgets.QAction(MainWindow)
self.action_sign_message_with_owner_key.setObjectName("action_sign_message_with_owner_key")
self.action_sign_message_with_voting_key = QtWidgets.QAction(MainWindow)
self.action_sign_message_with_voting_key.setObjectName("action_sign_message_with_voting_key")
self.action_export_configuration = QtWidgets.QAction(MainWindow)
self.action_export_configuration.setObjectName("action_export_configuration")
self.action_import_configuration = QtWidgets.QAction(MainWindow)
self.action_import_configuration.setObjectName("action_import_configuration")
self.action_wallet_tools = QtWidgets.QAction(MainWindow)
self.action_wallet_tools.setObjectName("action_wallet_tools")
self.action_register_masternode = QtWidgets.QAction(MainWindow)
self.action_register_masternode.setObjectName("action_register_masternode")
self.action_update_masternode_payout_address = QtWidgets.QAction(MainWindow)
self.action_update_masternode_payout_address.setObjectName("action_update_masternode_payout_address")
self.action_update_masternode_operator_key = QtWidgets.QAction(MainWindow)
self.action_update_masternode_operator_key.setObjectName("action_update_masternode_operator_key")
self.action_update_masternode_voting_key = QtWidgets.QAction(MainWindow)
self.action_update_masternode_voting_key.setObjectName("action_update_masternode_voting_key")
self.action_revoke_masternode = QtWidgets.QAction(MainWindow)
self.action_revoke_masternode.setObjectName("action_revoke_masternode")
self.action_new_masternode_entry = QtWidgets.QAction(MainWindow)
self.action_new_masternode_entry.setShortcutVisibleInContextMenu(True)
self.action_new_masternode_entry.setObjectName("action_new_masternode_entry")
self.action_update_masternode_service = QtWidgets.QAction(MainWindow)
self.action_update_masternode_service.setObjectName("action_update_masternode_service")
self.action_show_masternode_details = QtWidgets.QAction(MainWindow)
self.action_show_masternode_details.setObjectName("action_show_masternode_details")
self.action_delete_masternode_entry = QtWidgets.QAction(MainWindow)
self.action_delete_masternode_entry.setShortcutVisibleInContextMenu(True)
self.action_delete_masternode_entry.setObjectName("action_delete_masternode_entry")
self.action_clone_masternode_entry = QtWidgets.QAction(MainWindow)
self.action_clone_masternode_entry.setObjectName("action_clone_masternode_entry")
self.action_new_configuration = QtWidgets.QAction(MainWindow)
self.action_new_configuration.setObjectName("action_new_configuration")
self.menuClear.addAction(self.action_clear_wallet_cache)
self.menuClear.addAction(self.action_clear_proposals_cache)
self.menuTools.addAction(self.action_hw_wallet)
self.menuTools.addAction(self.action_wallet_tools)
self.menuTools.addSeparator()
self.menuTools.addAction(self.action_check_for_updates)
self.menuTools.addAction(self.action_command_console)
self.menuTools.addAction(self.action_open_log_file)
self.menuTools.addAction(self.action_open_data_folder)
self.menuTools.addAction(self.menuClear.menuAction())
self.menuFile.addAction(self.action_new_configuration)
self.menuFile.addAction(self.action_load_config_file)
self.menuFile.addAction(self.action_open_recent_files.menuAction())
self.menuFile.addAction(self.action_restore_config_from_backup)
self.menuFile.addAction(self.action_save_config_file)
self.menuFile.addAction(self.action_save_config_file_as)
self.menuFile.addAction(self.action_export_configuration)
self.menuFile.addAction(self.action_import_configuration)
self.menuFile.addSeparator()
self.menuFile.addAction(self.action_open_settings_window)
self.menuFile.addSeparator()
self.menuFile.addAction(self.action_about_app)
self.menuFile.addAction(self.action_about_qt)
self.menuMasternode.addAction(self.action_show_masternode_details)
self.menuMasternode.addSeparator()
self.menuMasternode.addAction(self.action_new_masternode_entry)
self.menuMasternode.addAction(self.action_clone_masternode_entry)
self.menuMasternode.addAction(self.action_delete_masternode_entry)
self.menuMasternode.addSeparator()
self.menuMasternode.addAction(self.action_register_masternode)
self.menuMasternode.addAction(self.action_update_masternode_payout_address)
self.menuMasternode.addAction(self.action_update_masternode_operator_key)
self.menuMasternode.addAction(self.action_update_masternode_voting_key)
self.menuMasternode.addAction(self.action_update_masternode_service)
self.menuMasternode.addAction(self.action_revoke_masternode)
self.menuMasternode.addSeparator()
self.menuMasternode.addAction(self.action_sign_message_with_collateral_addr)
self.menuMasternode.addAction(self.action_sign_message_with_owner_key)
self.menuMasternode.addAction(self.action_sign_message_with_voting_key)
self.menuBar.addAction(self.menuFile.menuAction())
self.menuBar.addAction(self.menuMasternode.menuAction())
self.menuBar.addAction(self.menuTools.menuAction())
self.toolBar.addAction(self.action_open_settings_window)
self.toolBar.addAction(self.action_save_config_file)
self.toolBar.addSeparator()
self.toolBar.addAction(self.action_check_network_connection)
self.toolBar.addSeparator()
self.toolBar.addAction(self.action_connect_hw)
self.toolBar.addAction(self.action_disconnect_hw)
self.toolBar.addSeparator()
self.toolBar.addAction(self.action_open_proposals_window)
self.toolBar.addSeparator()
self.toolBar.addAction(self.action_hw_wallet)
self.toolBar.addAction(self.action_wallet_tools)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.menuTools.setTitle(_translate("MainWindow", "Tools"))
self.menuClear.setTitle(_translate("MainWindow", "Clear"))
self.menuFile.setTitle(_translate("MainWindow", "File"))
self.action_open_recent_files.setTitle(_translate("MainWindow", "Recent Configuration Files"))
self.menuMasternode.setTitle(_translate("MainWindow", "Masternode"))
self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar"))
self.action_hw_wallet.setText(_translate("MainWindow", "Wallet"))
self.action_sign_message_with_collateral_addr.setText(_translate("MainWindow", "Sign Message with Collateral Address..."))
self.action_sign_message_with_collateral_addr.setToolTip(_translate("MainWindow", "Sign message with the collateral address of the current masternode"))
self.action_load_config_file.setText(_translate("MainWindow", "Open Configuration File..."))
self.action_load_config_file.setToolTip(_translate("MainWindow", "Open Configuration File"))
self.action_load_config_file.setShortcut(_translate("MainWindow", "Ctrl+O"))
self.action_save_config_file_as.setText(_translate("MainWindow", "Save Configuration As..."))
self.action_save_config_file_as.setShortcut(_translate("MainWindow", "Ctrl+Shift+S"))
self.action_save_config_file.setText(_translate("MainWindow", "Save Configuration"))
self.action_save_config_file.setShortcut(_translate("MainWindow", "Ctrl+S"))
self.action_check_network_connection.setText(_translate("MainWindow", "Check Network Connection"))
self.action_check_network_connection.setToolTip(_translate("MainWindow", "Check Dash Network Connection"))
self.action_open_settings_window.setText(_translate("MainWindow", "Settings"))
self.action_open_settings_window.setToolTip(_translate("MainWindow", "Settings"))
self.action_open_proposals_window.setText(_translate("MainWindow", "Proposals"))
self.action_open_proposals_window.setToolTip(_translate("MainWindow", "Proposals"))
self.action_connect_hw.setText(_translate("MainWindow", "Connect Hardware Wallet"))
self.action_disconnect_hw.setText(_translate("MainWindow", "Disconnect Hardware Wallet"))
self.action_hw_tools.setText(_translate("MainWindow", "Hardware Wallet Tools..."))
self.action_check_for_updates.setText(_translate("MainWindow", "Check For Updates"))
self.action_open_log_file.setText(_translate("MainWindow", "Open Log File"))
self.action_open_log_file.setShortcut(_translate("MainWindow", "Meta+Alt+L"))
self.action_about_app.setText(_translate("MainWindow", "About DashMasternodeTool..."))
self.action_import_masternode_conf.setText(_translate("MainWindow", "Import masternodes from the masternode.conf file..."))
self.action_about_qt.setText(_translate("MainWindow", "About Qt..."))
self.action_about_qt.setToolTip(_translate("MainWindow", "About Qt"))
self.action_gen_mn_priv_key_compressed.setText(_translate("MainWindow", "Generate masternode private key (compressed)"))
self.action_gen_mn_priv_key_compressed.setShortcut(_translate("MainWindow", "Ctrl+Alt+C"))
self.action_gen_mn_priv_key_uncompressed.setText(_translate("MainWindow", "Generate masternode private key (uncompressed)"))
self.action_gen_mn_priv_key_uncompressed.setToolTip(_translate("MainWindow", "Generate masternode private key (uncompressed)"))
self.action_gen_mn_priv_key_uncompressed.setShortcut(_translate("MainWindow", "Ctrl+Alt+U"))
self.action_command_console.setText(_translate("MainWindow", "Command Console"))
self.action_command_console.setShortcut(_translate("MainWindow", "Meta+Alt+C"))
self.action_run_trezor_emulator.setText(_translate("MainWindow", "Run Trezor T emulator"))
self.action_run_trezor_emulator.setToolTip(_translate("MainWindow", "Run Trezor T emulator"))
self.action_open_data_folder.setText(_translate("MainWindow", "Open Application Data Folder"))
self.action_clear_wallet_cache.setText(_translate("MainWindow", "Wallet Cache"))
self.action_clear_proposals_cache.setText(_translate("MainWindow", "Proposals Cache"))
self.action_restore_config_from_backup.setText(_translate("MainWindow", "Restore Configuration from Backup..."))
self.action_restore_config_from_backup.setToolTip(_translate("MainWindow", "Restore Configuration from Backup..."))
self.action_restore_config_from_backup.setShortcut(_translate("MainWindow", "Ctrl+R"))
self.action_sign_message_with_owner_key.setText(_translate("MainWindow", "Sign Message with Owner Key..."))
self.action_sign_message_with_owner_key.setToolTip(_translate("MainWindow", "Sign message with the masternode owner key"))
self.action_sign_message_with_voting_key.setText(_translate("MainWindow", "Sign Message with Voting Key..."))
self.action_sign_message_with_voting_key.setToolTip(_translate("MainWindow", "Sign message with the masternode voting key"))
self.action_export_configuration.setText(_translate("MainWindow", "Export Configuration..."))
self.action_export_configuration.setToolTip(_translate("MainWindow", "Export Configuration"))
self.action_import_configuration.setText(_translate("MainWindow", "Import Configuration..."))
self.action_import_configuration.setToolTip(_translate("MainWindow", "Import Configuration"))
self.action_wallet_tools.setText(_translate("MainWindow", "Toolbox"))
self.action_register_masternode.setText(_translate("MainWindow", "Register Masternode..."))
self.action_update_masternode_payout_address.setText(_translate("MainWindow", "Update Payout Address..."))
self.action_update_masternode_payout_address.setToolTip(_translate("MainWindow", "Send a transaction setting up a new payout address"))
self.action_update_masternode_operator_key.setText(_translate("MainWindow", "Update Operator Key..."))
self.action_update_masternode_operator_key.setToolTip(_translate("MainWindow", "Send a transaction setting up a new operator key"))
self.action_update_masternode_voting_key.setText(_translate("MainWindow", "Update Voting Key..."))
self.action_update_masternode_voting_key.setToolTip(_translate("MainWindow", "Send a transaction setting up a new voting key"))
self.action_revoke_masternode.setText(_translate("MainWindow", "Revoke Masternode..."))
self.action_revoke_masternode.setToolTip(_translate("MainWindow", "Send a transaction revoking masternode"))
self.action_new_masternode_entry.setText(_translate("MainWindow", "Add New Masternode Entry..."))
self.action_new_masternode_entry.setToolTip(_translate("MainWindow", "Add a new masternode entry to configuration"))
self.action_new_masternode_entry.setShortcut(_translate("MainWindow", "Ctrl+N"))
self.action_update_masternode_service.setText(_translate("MainWindow", "Update Service (IP/port/operator payout address)..."))
self.action_update_masternode_service.setToolTip(_translate("MainWindow", "Update masternode IP address, TCP port or operator payout address"))
self.action_show_masternode_details.setText(_translate("MainWindow", "Go to Masternode Details"))
self.action_show_masternode_details.setToolTip(_translate("MainWindow", "Show masternode configuration details"))
self.action_delete_masternode_entry.setText(_translate("MainWindow", "Delete Masternode Entry"))
        self.action_delete_masternode_entry.setToolTip(_translate("MainWindow", "Delete selected masternode entry"))
self.action_delete_masternode_entry.setShortcut(_translate("MainWindow", "Ctrl+D"))
self.action_clone_masternode_entry.setText(_translate("MainWindow", "Clone Masternode Entry"))
self.action_clone_masternode_entry.setToolTip(_translate("MainWindow", "Clone masternode configuration entry"))
self.action_new_configuration.setText(_translate("MainWindow", "New Configuration"))
self.action_new_configuration.setToolTip(_translate("MainWindow", "Creates a new configuration file"))
| 73.710098 | 160 | 0.77056 |
139b4b5fdc7e0afbb0fa928c4d1ac2e80aa22cb7
| 1,178 |
py
|
Python
|
accounts/views.py
|
nelsondude/tweetmymouth
|
b7614850189c21c18dc291b9167cc5afedacf386
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
nelsondude/tweetmymouth
|
b7614850189c21c18dc291b9167cc5afedacf386
|
[
"MIT"
] | null | null | null |
accounts/views.py
|
nelsondude/tweetmymouth
|
b7614850189c21c18dc291b9167cc5afedacf386
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404, redirect
from django.http import HttpResponseRedirect
from django.views.generic import DetailView
from django.views import View
from django.contrib.auth import get_user_model
from .models import UserProfile
User = get_user_model()
# Create your views here.
class UserDetailView(DetailView):
template_name = "accounts/user_detail.html"
    queryset = User.objects.all()
def get_object(self):
return get_object_or_404(User, username__iexact=self.kwargs.get("username"))
def get_context_data(self, *args, **kwargs):
context = super(UserDetailView, self).get_context_data(*args, **kwargs)
following = UserProfile.objects.is_following(self.request.user, self.get_object())
context['following'] = following
return context
class UserFollowView(View):
def get(self, request, username, *args, **kwargs):
toggle_user = get_object_or_404(User, username__iexact=username)
if request.user.is_authenticated():
is_following = UserProfile.objects.toggle_follow(request.user, toggle_user)
return redirect("profiles:detail", username=username)
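# The views above assume that UserProfile.objects exposes is_following() and
# toggle_follow(). A minimal manager sketch (hypothetical; it would live in
# accounts/models.py rather than here) might look like:
#
#   class UserProfileManager(models.Manager):
#       def is_following(self, user, followed_user):
#           ...  # return True if `user` currently follows `followed_user`
#       def toggle_follow(self, user, toggle_user):
#           ...  # follow if not yet following, otherwise unfollow; return state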
| 38 | 90 | 0.74618 |
decf855b54451efba5f6a7868fbcf631789f3572
| 10,423 |
py
|
Python
|
benchmark/tensorflow/image/googlenet.py
|
shenchaohua/Paddle
|
9c5942db13308d53cc115708058c1e885f4b57a3
|
[
"Apache-2.0"
] | 3 |
2018-04-16T23:35:32.000Z
|
2019-08-12T01:01:07.000Z
|
benchmark/tensorflow/image/googlenet.py
|
shenchaohua/Paddle
|
9c5942db13308d53cc115708058c1e885f4b57a3
|
[
"Apache-2.0"
] | 9 |
2017-09-13T07:39:31.000Z
|
2017-10-18T05:58:23.000Z
|
benchmark/tensorflow/image/googlenet.py
|
shenchaohua/Paddle
|
9c5942db13308d53cc115708058c1e885f4b57a3
|
[
"Apache-2.0"
] | 2 |
2020-11-04T08:07:46.000Z
|
2020-11-06T08:33:24.000Z
|
from six.moves import xrange
from datetime import datetime
import math
import time
import tensorflow.python.platform
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 128, """Batch size.""")
tf.app.flags.DEFINE_integer('num_batches', 100, """Number of batches to run.""")
tf.app.flags.DEFINE_boolean('forward_only', False,
"""Only run the forward pass.""")
tf.app.flags.DEFINE_boolean('forward_backward_only', False,
"""Only run the forward-forward pass.""")
tf.app.flags.DEFINE_string('data_format', 'NCHW',
"""The data format for Convnet operations.
Can be either NHWC or NCHW.
""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
parameters = []
conv_counter = 1
pool_counter = 1
affine_counter = 1
def _conv(inpOp, nIn, nOut, kH, kW, dH, dW, padType, wd=0.0005):
global conv_counter
global parameters
name = 'conv' + str(conv_counter)
conv_counter += 1
with tf.name_scope(name) as scope:
kernel = tf.Variable(
tf.truncated_normal(
[kH, kW, nIn, nOut], dtype=tf.float32, stddev=1e-1),
name='weights')
if wd is not None and wd > 0:
weight_decay = tf.mul(tf.nn.l2_loss(kernel), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
if FLAGS.data_format == 'NCHW':
strides = [1, 1, dH, dW]
else:
strides = [1, dH, dW, 1]
conv = tf.nn.conv2d(
inpOp,
kernel,
strides,
padding=padType,
data_format=FLAGS.data_format)
biases = tf.Variable(
tf.constant(
0.0, shape=[nOut], dtype=tf.float32),
trainable=True,
name='biases')
bias = tf.reshape(
tf.nn.bias_add(
conv, biases, data_format=FLAGS.data_format),
conv.get_shape())
conv1 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
return conv1
def _affine(inpOp, nIn, nOut, act=True, wd=0.0005):
global affine_counter
global parameters
name = 'affine' + str(affine_counter)
affine_counter += 1
with tf.name_scope(name) as scope:
kernel = tf.Variable(
tf.truncated_normal(
[nIn, nOut], dtype=tf.float32, stddev=1e-1),
name='weights')
if wd is not None and wd > 0:
weight_decay = tf.mul(tf.nn.l2_loss(kernel), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
biases = tf.Variable(
tf.constant(
0.0, shape=[nOut], dtype=tf.float32),
trainable=True,
name='biases')
affine1 = tf.nn.relu_layer(
inpOp, kernel, biases,
name=name) if act else tf.matmul(inpOp, kernel) + biases
parameters += [kernel, biases]
return affine1
def _mpool(inpOp, kH, kW, dH, dW, padding):
global pool_counter
global parameters
name = 'pool' + str(pool_counter)
pool_counter += 1
if FLAGS.data_format == 'NCHW':
ksize = [1, 1, kH, kW]
strides = [1, 1, dH, dW]
else:
ksize = [1, kH, kW, 1]
strides = [1, dH, dW, 1]
return tf.nn.max_pool(
inpOp,
ksize=ksize,
strides=strides,
padding=padding,
data_format=FLAGS.data_format,
name=name)
def _apool(inpOp, kH, kW, dH, dW, padding):
global pool_counter
global parameters
name = 'pool' + str(pool_counter)
pool_counter += 1
if FLAGS.data_format == 'NCHW':
ksize = [1, 1, kH, kW]
strides = [1, 1, dH, dW]
else:
ksize = [1, kH, kW, 1]
strides = [1, dH, dW, 1]
return tf.nn.avg_pool(
inpOp,
ksize=ksize,
strides=strides,
padding=padding,
data_format=FLAGS.data_format,
name=name)
def _inception(inp, inSize, o1s, o2s1, o2s2, o3s1, o3s2, o4s1, o4s2):
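    # Four parallel branches (1x1, 1x1->3x3, 1x1->5x5, maxpool->1x1) are built
    # below and concatenated along the channel axis, so the block outputs
    # o1s + o2s2 + o3s2 + o4s2 channels.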
conv1 = _conv(inp, inSize, o1s, 1, 1, 1, 1, 'VALID')
conv3_ = _conv(inp, inSize, o2s1, 1, 1, 1, 1, 'VALID')
conv3 = _conv(conv3_, o2s1, o2s2, 3, 3, 1, 1, 'SAME')
conv5_ = _conv(inp, inSize, o3s1, 1, 1, 1, 1, 'VALID')
conv5 = _conv(conv5_, o3s1, o3s2, 5, 5, 1, 1, 'SAME')
pool_ = _mpool(inp, o4s1, o4s1, 1, 1, 'SAME')
pool = _conv(pool_, inSize, o4s2, 1, 1, 1, 1, 'VALID')
if FLAGS.data_format == 'NCHW':
channel_dim = 1
else:
channel_dim = 3
incept = tf.concat(channel_dim, [conv1, conv3, conv5, pool])
return incept
def loss(logits, labels):
batch_size = tf.size(labels)
labels = tf.expand_dims(labels, 1)
indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
concated = tf.concat(1, [indices, labels])
onehot_labels = tf.sparse_to_dense(concated,
tf.pack([batch_size, 1000]), 1.0, 0.0)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(
logits, onehot_labels, name='xentropy')
loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
return loss
def inference(images):
# stage 1
conv1 = _conv(images, 3, 64, 7, 7, 2, 2, 'SAME')
pool1 = _mpool(conv1, 3, 3, 2, 2, 'SAME')
# stage 2
conv2 = _conv(pool1, 64, 64, 1, 1, 1, 1, 'VALID')
conv3 = _conv(conv2, 64, 192, 3, 3, 1, 1, 'SAME')
pool3 = _mpool(conv3, 3, 3, 2, 2, 'SAME')
# stage 3
incept3a = _inception(pool3, 192, 64, 96, 128, 16, 32, 3, 32)
incept3b = _inception(incept3a, 256, 128, 128, 192, 32, 96, 3, 64)
pool4 = _mpool(incept3b, 3, 3, 2, 2, 'SAME')
# stage 4
incept4a = _inception(pool4, 480, 192, 96, 208, 16, 48, 3, 64)
incept4b = _inception(incept4a, 512, 160, 112, 224, 24, 64, 3, 64)
incept4c = _inception(incept4b, 512, 128, 128, 256, 24, 64, 3, 64)
incept4d = _inception(incept4c, 512, 112, 144, 288, 32, 64, 3, 64)
incept4e = _inception(incept4d, 528, 256, 160, 320, 32, 128, 3, 128)
pool5 = _mpool(incept4e, 3, 3, 2, 2, 'SAME')
# stage 5
incept5a = _inception(pool5, 832, 256, 160, 320, 32, 128, 3, 128)
incept5b = _inception(incept5a, 832, 384, 192, 384, 48, 128, 3, 128)
pool6 = _apool(incept5b, 7, 7, 1, 1, 'VALID')
# output 1
resh1 = tf.reshape(pool6, [-1, 1024])
drop = tf.nn.dropout(resh1, 0.4)
    affn1 = _affine(drop, 1024, 1000, act=False)  # feed the dropout output
return affn1
def time_tensorflow_run(session, target, info_string):
num_steps_burn_in = 10
total_duration = 0.0
total_duration_squared = 0.0
if not isinstance(target, list):
target = [target]
target_op = tf.group(*target)
for i in range(FLAGS.num_batches + num_steps_burn_in):
start_time = time.time()
_ = session.run(target_op)
duration = time.time() - start_time
        if i >= num_steps_burn_in:
if not i % 10:
print('%s: step %d, duration = %.3f' %
(datetime.now(), i - num_steps_burn_in, duration))
total_duration += duration
total_duration_squared += duration * duration
mn = total_duration / FLAGS.num_batches
vr = total_duration_squared / FLAGS.num_batches - mn * mn
sd = math.sqrt(vr)
print('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
(datetime.now(), info_string, FLAGS.num_batches, mn, sd))
def run_benchmark():
global parameters
with tf.Graph().as_default():
# Generate some dummy images.
image_size = 224
if FLAGS.data_format == 'NCHW':
image_shape = [FLAGS.batch_size, 3, image_size, image_size]
else:
image_shape = [FLAGS.batch_size, image_size, image_size, 3]
images = tf.get_variable(
'image',
image_shape,
initializer=tf.truncated_normal_initializer(
stddev=0.1, dtype=tf.float32),
dtype=tf.float32,
trainable=False)
labels = tf.get_variable(
'label', [FLAGS.batch_size],
initializer=tf.constant_initializer(1),
dtype=tf.int32,
trainable=False)
# Build a Graph that computes the logits predictions from the
# inference model.
last_layer = inference(images)
objective = loss(last_layer, labels)
# Compute gradients.
# opt = tf.train.GradientDescentOptimizer(0.001)
opt = tf.train.MomentumOptimizer(0.001, 0.9)
grads = opt.compute_gradients(objective)
global_step = tf.get_variable(
'global_step', [],
initializer=tf.constant_initializer(
0.0, dtype=tf.float32),
trainable=False,
dtype=tf.float32)
apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)
# Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage(0.9, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables(
))
# Build an initialization operation.
init = tf.initialize_all_variables()
# Start running operations on the Graph.
sess = tf.Session(config=tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=FLAGS.log_device_placement))
sess.run(init)
run_forward = True
run_forward_backward = True
if FLAGS.forward_only and FLAGS.forward_backward_only:
raise ValueError("Cannot specify --forward_only and "
"--forward_backward_only at the same time.")
if FLAGS.forward_only:
run_forward_backward = False
elif FLAGS.forward_backward_only:
run_forward = False
if run_forward:
# Run the forward benchmark.
time_tensorflow_run(sess, last_layer, "Forward")
if run_forward_backward:
with tf.control_dependencies(
[apply_gradient_op, variables_averages_op]):
train_op = tf.no_op(name='train')
time_tensorflow_run(sess, [train_op, objective], "Forward-backward")
def main(_):
run_benchmark()
if __name__ == '__main__':
tf.app.run()
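# Illustrative invocation (exact boolean-flag syntax depends on the
# TensorFlow version in use):
#   python googlenet.py --batch_size=64 --data_format=NHWC --forward_only=True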
| 33.407051 | 80 | 0.587739 |
fa5d4577cc51b3dc3070c2dbae01c14937217ce4
| 1,339 |
py
|
Python
|
Service_Components/wsgi.py
|
mydata-sdk/mydata-sdk-1.x
|
74064d7a42fc0435511eae6e77e49ddc7d9723f3
|
[
"MIT"
] | 4 |
2018-04-21T00:46:40.000Z
|
2019-12-03T13:52:03.000Z
|
Service_Components/wsgi.py
|
markella-n-th/mydata-sdk
|
71aef60c091a0cef81d0f4a611afa795dff52189
|
[
"MIT"
] | 2 |
2018-04-21T04:09:46.000Z
|
2018-04-30T21:54:09.000Z
|
Service_Components/wsgi.py
|
fititnt/mydata-sdk--hiit
|
19d7a2ddbc3b5a05665539fbcc7f461c13793e03
|
[
"MIT"
] | 4 |
2018-04-21T01:12:12.000Z
|
2020-09-24T06:19:29.000Z
|
# -*- coding: utf-8 -*-
"""
wsgi
~~~~
overholt wsgi module
"""
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
import Service_Mgmnt
import Service_Root
import Authorization_Management
import Sink
import Source
import logging
logger = logging.getLogger("sequence")
try:
from restapi_logging_handler import RestApiHandler
restapihandler = RestApiHandler("http://172.18.0.1:9004/")
logger.addHandler(restapihandler)
except Exception as e:
pass
logger.setLevel(logging.INFO)
debug_log = logging.getLogger("debug")
logging.basicConfig()
debug_log.setLevel(logging.INFO)
from instance.settings import SERVICE_ROOT_PATH, SERVICE_CR_PATH, SERVICE_SLR_PATH, IS_SINK, IS_SOURCE
# Common parts.
paths = {
SERVICE_ROOT_PATH+SERVICE_SLR_PATH: Service_Mgmnt.create_app(),
SERVICE_ROOT_PATH+SERVICE_CR_PATH: Authorization_Management.create_app()
}
if IS_SINK:
debug_log.info(SERVICE_ROOT_PATH+"/sink_flow")
paths[SERVICE_ROOT_PATH+"/sink_flow"] = Sink.create_app()
if IS_SOURCE:
paths[SERVICE_ROOT_PATH+"/source_flow"] = Source.create_app()
application = DispatcherMiddleware(Service_Root.create_app(), paths)
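# Routing note: DispatcherMiddleware dispatches by path prefix, so requests
# under SERVICE_ROOT_PATH + SERVICE_SLR_PATH go to the Service_Mgmnt app,
# SERVICE_ROOT_PATH + "/sink_flow" (when IS_SINK) to the Sink app, and any
# unmatched path falls through to the Service_Root app.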
if __name__ == "__main__":
run_simple('0.0.0.0', 7000, application, use_reloader=False, use_debugger=False, threaded=True)
| 24.796296 | 102 | 0.761016 |
840ab62ebed1e4c18af7941b8dd0aeb9232a5852
| 791 |
py
|
Python
|
105/test_slicing.py
|
theconsultant/bitesofpy
|
6b248d28f4b61e8c0963a81515f4a17ad89a182e
|
[
"MIT"
] | null | null | null |
105/test_slicing.py
|
theconsultant/bitesofpy
|
6b248d28f4b61e8c0963a81515f4a17ad89a182e
|
[
"MIT"
] | null | null | null |
105/test_slicing.py
|
theconsultant/bitesofpy
|
6b248d28f4b61e8c0963a81515f4a17ad89a182e
|
[
"MIT"
] | 1 |
2019-09-03T19:53:22.000Z
|
2019-09-03T19:53:22.000Z
|
from slicing import slice_and_dice
another_text = """
Take the block of text provided and strip() off the whitespace at the ends.
Split the whole block up by newline (\n).
if the first character is lowercase, split it into words and add the last word
of that line to the results list.
Strip the trailing dot (.) and exclamation mark (!) from the word first.
finally return the results list!
"""
def test_slice_and_dice_default_text():
expected = ['objects', 'y', 'too', ':)', 'bites']
assert slice_and_dice() == expected
def test_slice_and_dice_other_text():
# each line needs to be stripped, so the line starting with ' if'
# is a match here, hence expected matches 'word' too
expected = ['word', 'list', 'list']
assert slice_and_dice(another_text) == expected
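# A reference sketch of what slicing.slice_and_dice might look like, inferred
# from the tests above (the real default text lives in slicing.py and is not
# reproduced here, so the default argument below is only a stand-in):
def _reference_slice_and_dice(text=another_text):
    results = []
    for line in text.strip().split('\n'):
        line = line.strip()
        if line and line[0].islower():
            results.append(line.split()[-1].rstrip('.!'))
    return results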
| 35.954545 | 79 | 0.718078 |
3f80e9ee91952a3c9c14c20e632d0b4a5a12ac48
| 11,719 |
py
|
Python
|
wavenet_vocoder/student.py
|
dendisuhubdy/parallel_wavenet_vocoder
|
8f2bd7c0bd30cb90cc7ff8438ce78545c409227b
|
[
"MIT"
] | 25 |
2018-09-10T08:01:09.000Z
|
2020-04-07T09:19:14.000Z
|
wavenet_vocoder/student.py
|
dendisuhubdy/parallel_wavenet_vocoder
|
8f2bd7c0bd30cb90cc7ff8438ce78545c409227b
|
[
"MIT"
] | 1 |
2018-10-09T01:38:23.000Z
|
2018-10-09T03:46:05.000Z
|
wavenet_vocoder/student.py
|
dendisuhubdy/parallel_wavenet_vocoder
|
8f2bd7c0bd30cb90cc7ff8438ce78545c409227b
|
[
"MIT"
] | 8 |
2018-09-21T02:31:28.000Z
|
2020-04-07T09:19:15.000Z
|
# coding: utf-8
from __future__ import with_statement, print_function, absolute_import
import math
import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
from .modules import Embedding
from .modules import Conv1d1x1, ResidualConv1dGLU, ConvTranspose2d
from .mixture import sample_from_discretized_mix_logistic, sample_from_gaussian
def _expand_global_features(B, T, g, bct=True):
"""Expand global conditioning features to all time steps
Args:
B (int): Batch size.
T (int): Time length.
g (Tensor): Global features, (B x C) or (B x C x 1).
bct (bool) : returns (B x C x T) if True, otherwise (B x T x C)
Returns:
Tensor: B x C x T or B x T x C or None
"""
if g is None:
return None
g = g.unsqueeze(-1) if g.dim() == 2 else g
if bct:
g_bct = g.expand(B, -1, T)
return g_bct.contiguous()
else:
g_btc = g.expand(B, -1, T).transpose(1, 2)
return g_btc.contiguous()
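# Example: a (B, C) speaker embedding g is unsqueezed to (B, C, 1) and then
# expanded over time, giving (B, C, T) when bct=True or (B, T, C) otherwise.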
def receptive_field_size(total_layers, num_cycles, kernel_size,
dilation=lambda x: 2**x):
"""Compute receptive field size
Args:
total_layers (int): total layers
num_cycles (int): cycles
kernel_size (int): kernel size
dilation (lambda): lambda to compute dilation factor. ``lambda x : 1``
to disable dilated convolution.
Returns:
int: receptive field size in sample
"""
assert total_layers % num_cycles == 0
layers_per_cycle = total_layers // num_cycles
dilations = [dilation(i % layers_per_cycle) for i in range(total_layers)]
return (kernel_size - 1) * sum(dilations) + 1
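# Worked example (hypothetical configuration): 24 layers in 4 cycles with
# kernel_size=3 give per-cycle dilations 1+2+4+8+16+32 = 63, so
# receptive_field_size(24, 4, 3) == (3 - 1) * 4 * 63 + 1 == 505 samples.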
class Student(nn.Module):
"""The WaveNet model that supports local and global conditioning.
Args:
        out_channels (int): Output channels. If the input type is a mu-law
            quantized one-hot vector, this must equal the number of quantize
            channels. Otherwise, num_mixtures x 3 (pi, mu, log_scale).
        iaf_layers (list): Number of layers in each IAF flow.
        iaf_stacks (list): Number of dilation cycles in each IAF flow.
residual_channels (int): Residual input / output channels
gate_channels (int): Gated activation channels.
skip_out_channels (int): Skip connection channels.
kernel_size (int): Kernel size of convolution layers.
dropout (float): Dropout probability.
cin_channels (int): Local conditioning channels. If negative value is
set, local conditioning is disabled.
gin_channels (int): Global conditioning channels. If negative value is
set, global conditioning is disabled.
n_speakers (int): Number of speakers. Used only if global conditioning
is enabled.
weight_normalization (bool): If True, DeepVoice3-style weight
normalization is applied.
        upsample_conditional_features (bool): Whether to upsample local
            conditioning features with transposed convolution layers.
        upsample_scales (list): List of upsample scales.
            ``np.prod(upsample_scales)`` must equal the hop size. Used only if
            upsample_conditional_features is enabled.
freq_axis_kernel_size (int): Freq-axis kernel_size for transposed
convolution layers for upsampling. If you only care about time-axis
upsampling, set this to 1.
scalar_input (Bool): If True, scalar input ([-1, 1]) is expected, otherwise
quantized one-hot vector is expected.
        use_speaker_embedding (Bool): Use speaker embedding or not. Set to False
if you want to disable embedding layer and use external features
directly.
        legacy (bool): Use legacy code or not. Default is True for backward
compatibility.
"""
def __init__(self, out_channels=2, iaf_layers=[10, 10, 10, 10, 10, 10],
iaf_stacks=[1, 1, 1, 1, 1, 1],
residual_channels=128,
gate_channels=128,
skip_out_channels=128,
kernel_size=3, dropout=1 - 0.95,
cin_channels=-1, gin_channels=-1, n_speakers=None,
weight_normalization=True,
upsample_conditional_features=False,
upsample_scales=None,
freq_axis_kernel_size=3,
scalar_input=False,
use_speaker_embedding=True,
legacy=True,
use_gaussian=False,
):
super(Student, self).__init__()
self.scalar_input = scalar_input
self.out_channels = out_channels
self.cin_channels = cin_channels
self.legacy = legacy
self.use_gaussian = use_gaussian
assert len(iaf_layers) == len(iaf_stacks)
iaf_nums = range(len(iaf_layers))
self.iaf_layers = nn.ModuleList([nn.ModuleList() for _ in range(len(iaf_layers))])
for i, layers, stacks in zip(iaf_nums, iaf_layers, iaf_stacks):
layers_per_stack = layers // stacks
if scalar_input:
first_conv = Conv1d1x1(1, residual_channels)
else:
first_conv = Conv1d1x1(out_channels, residual_channels)
self.iaf_layers[i].append(first_conv)
conv_layers = nn.ModuleList()
for layer in range(layers):
dilation = 2**(layer % layers_per_stack)
conv = ResidualConv1dGLU(
residual_channels, gate_channels,
kernel_size=kernel_size,
skip_out_channels=skip_out_channels,
                    bias=True,  # magenta uses bias, but musyoku doesn't
dilation=dilation, dropout=dropout,
cin_channels=cin_channels,
gin_channels=gin_channels,
weight_normalization=weight_normalization)
conv_layers.append(conv)
self.iaf_layers[i].append(conv_layers)
last_conv_layers = nn.ModuleList([
nn.ReLU(inplace=True),
Conv1d1x1(skip_out_channels, skip_out_channels,
weight_normalization=weight_normalization),
nn.ReLU(inplace=True),
Conv1d1x1(skip_out_channels, out_channels,
weight_normalization=weight_normalization),
])
self.iaf_layers[i].append(last_conv_layers)
if gin_channels > 0 and use_speaker_embedding:
assert n_speakers is not None
self.embed_speakers = Embedding(
n_speakers, gin_channels, padding_idx=None, std=0.1)
else:
self.embed_speakers = None
# Upsample conv net
if upsample_conditional_features:
self.upsample_conv = nn.ModuleList()
for s in upsample_scales:
freq_axis_padding = (freq_axis_kernel_size - 1) // 2
convt = ConvTranspose2d(1, 1, (freq_axis_kernel_size, s),
padding=(freq_axis_padding, 0),
dilation=1, stride=(1, s),
weight_normalization=weight_normalization)
self.upsample_conv.append(convt)
# assuming we use [0, 1] scaled features
                # this should avoid negative upsampling output,
                # so we use relu rather than leaky relu
self.upsample_conv.append(nn.ReLU(inplace=True))
else:
self.upsample_conv = None
self.receptive_field = receptive_field_size(sum(iaf_layers), sum(iaf_stacks), kernel_size)
def has_speaker_embedding(self):
return self.embed_speakers is not None
def local_conditioning_enabled(self):
return self.cin_channels > 0
def forward(self, x, c=None, g=None, softmax=False, device='cuda', log_scale_min=-7.0):
"""Forward step
Args:
x (Tensor): Gaussian Noise, shape (B x 1 x T)
c (Tensor): Local conditioning features,
shape (B x cin_channels x T)
g (Tensor): Global conditioning features,
shape (B x gin_channels x 1) or speaker Ids of shape (B x 1).
Note that ``self.use_speaker_embedding`` must be False when you
want to disable embedding layer and use external features
directly (e.g., one-hot vector).
Also type of input tensor must be FloatTensor, not LongTensor
in case of ``self.use_speaker_embedding`` equals False.
            softmax (bool): Whether to apply softmax or not.
            device: the device on which the output tensors are created
Returns:
Tensor: output, shape B x out_channels x T
"""
B, _, T = x.size()
if g is not None:
if self.embed_speakers is not None:
# (B x 1) -> (B x 1 x gin_channels)
g = self.embed_speakers(g.view(B, -1))
# (B x gin_channels x 1)
g = g.transpose(1, 2)
assert g.dim() == 3
# Expand global conditioning features to all time steps
g_bct = _expand_global_features(B, T, g, bct=True)
if c is not None and self.upsample_conv is not None:
# B x 1 x C x T
c = c.unsqueeze(1)
for f in self.upsample_conv:
c = f(c)
# B x C x T
c = c.squeeze(1)
assert c.size(-1) == x.size(-1)
# Feed data to network
mu_tot = torch.zeros((B, 1, T), requires_grad=True).to(device)
scale_tot = torch.ones((B, 1, T), requires_grad=True).to(device)
log_scale_tot = torch.zeros((B, 1, T), requires_grad=True).to(device)
for each_iaf_layer in self.iaf_layers:
# first conv
output = each_iaf_layer[0](x)
# residual layer
skips = None
for residual_layer in each_iaf_layer[1]:
output, h = residual_layer(output, c, g_bct)
if skips is None:
skips = h
else:
skips += h
if self.legacy:
skips *= math.sqrt(0.5)
# last layer
output = skips
for layer in each_iaf_layer[2]:
output = layer(output)
mu, log_scale = torch.unsqueeze(output[:, 0, :], dim=1), torch.unsqueeze(output[:, 1, :], dim=1)
# log_scale = torch.clamp(log_scale, min=log_scale_min)
scale = torch.exp(log_scale)
x = x * scale + mu
mu_tot = mu + mu_tot * scale
scale_tot = scale_tot * scale
log_scale_tot += log_scale
log_scale_tot = torch.clamp(log_scale_tot, min=log_scale_min)
scale_tot = torch.clamp(scale_tot, min=np.exp(log_scale_min))
mu_tot = torch.squeeze(mu_tot, dim=1)
log_scale_tot = torch.squeeze(log_scale_tot, dim=1)
scale_tot = torch.squeeze(scale_tot, dim=1)
x = torch.clamp(x, min=-1.0, max=1.0)
if torch.isnan(log_scale_tot).any():
import pdb; pdb.set_trace()
if torch.isnan(scale_tot).any():
import pdb; pdb.set_trace()
if torch.isnan(mu_tot).any():
import pdb; pdb.set_trace()
return x, mu_tot, scale_tot, log_scale_tot
def make_generation_fast_(self):
def remove_weight_norm(m):
try:
nn.utils.remove_weight_norm(m)
except ValueError: # this module didn't have weight norm
return
self.apply(remove_weight_norm)
| 40.410345 | 108 | 0.591177 |
48d09fd10e30b900f0fe100ac3e1ad26cb6021bb
| 2,771 |
py
|
Python
|
expt-mpii-ca2/make_completion_videos.py
|
qxcv/structuredinference
|
9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb
|
[
"MIT"
] | 6 |
2017-09-13T10:49:13.000Z
|
2022-01-01T10:53:03.000Z
|
expt-mpii-ca2/make_completion_videos.py
|
qxcv/structuredinference
|
9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb
|
[
"MIT"
] | null | null | null |
expt-mpii-ca2/make_completion_videos.py
|
qxcv/structuredinference
|
9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb
|
[
"MIT"
] | 2 |
2017-07-31T04:12:30.000Z
|
2018-11-09T22:34:36.000Z
|
#!/usr/bin/env python3
"""Turn completion files into actual SxS videos of the completion, the ground
truth, etc."""
import argparse
import json
import os
import h5py
import numpy as np
import matplotlib.pyplot as plt
import addpaths # noqa
from plot_2d_seqs import draw_poses
from common_pp.completion_video_common import load_sorted_paths, \
alignment_constant
FRAME_DIR = '/data/home/cherian/MPII/Cheng-MPII-Pose-Action/frames/'
# FRAME_DIR = '/home/sam/sshfs/paloalto' + FRAME_DIR # XXX
# POSE_DIR = '/home/sam/sshfs/paloalto/etc/cpm-keras/mpii-ca2-mat-poses' # XXX
POSE_DIR = '/home/sam/etc/cpm-keras/mpii-ca2-mat-poses'
parser = argparse.ArgumentParser()
parser.add_argument('completion_path', help='path to .json completion file')
parser.add_argument('--vid-dir', type=str, default=None,
help='save videos to this directory instead of showing poses')
if __name__ == '__main__':
args = parser.parse_args()
with open(args.completion_path) as fp:
d = json.load(fp)
vid_name = d['vid_name']
all_frame_fns = load_sorted_paths(os.path.join(FRAME_DIR, vid_name))
frame_paths = [all_frame_fns[i] for i in d['frame_inds']]
pose_seqs = np.stack(
(d['true_poses'], d['prior_poses'], d['posterior_poses']), axis=0)
seq_names = ['True poses', 'Prior prediction', 'Posterior prediction']
all_mat_pose_paths = load_sorted_paths(os.path.join(POSE_DIR, vid_name))
mat_fst_pose_path = all_mat_pose_paths[d['frame_inds'][0]]
with h5py.File(mat_fst_pose_path) as fp:
# gives us 2*14
ref_pose = fp['pose'].value[:, :8].astype('float')
alpha, beta = alignment_constant(pose_seqs[0, 0], ref_pose)
pose_seqs = pose_seqs * alpha + beta[None, None, :, None]
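    # Note (added): assuming pose_seqs has shape (n_seqs, T, 2, n_joints), alpha acts
    # as a scalar scale and beta as an (x, y) offset broadcast over sequences, frames
    # and joints.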
# important not to let return value be gc'd (anims won't run otherwise!)
anim = draw_poses(
'Completed poses in %s' % args.completion_path,
d['parents'],
pose_seqs,
frame_paths=[frame_paths] * 3,
subplot_titles=seq_names,
fps=50 / 9.0,
crossover=d['crossover_time'])
if args.vid_dir is not None:
# save video
print('Saving video')
try:
os.makedirs(args.vid_dir)
except FileExistsError:
pass
bn = os.path.basename(args.completion_path).rsplit('.')[0]
key = d['vid_name'] + '-' + bn
anim.save(os.path.join(args.vid_dir, key + '.mp4'),
writer='avconv',
                  # no idea what bitrate defaults to, but empirically it seems
# to be around 1000 (?)
bitrate=3000,
# dpi defaults to 300
dpi=300,
fps=50/3.0)
else:
print('Showing sequence')
plt.show()
| 33.792683 | 79 | 0.640202 |
7d99c5085274db94bbbd5b3d1b9c40f0dc041209
| 10,238 |
py
|
Python
|
examples/atari/train_ppo_ale.py
|
fratim/pfrl
|
ddcdfbebf7aa55753beb5195edf8d571be7b862c
|
[
"MIT"
] | 824 |
2020-07-29T00:30:14.000Z
|
2022-03-29T13:12:34.000Z
|
examples/atari/train_ppo_ale.py
|
fratim/pfrl
|
ddcdfbebf7aa55753beb5195edf8d571be7b862c
|
[
"MIT"
] | 124 |
2020-07-30T01:53:47.000Z
|
2022-03-15T07:05:38.000Z
|
examples/atari/train_ppo_ale.py
|
fratim/pfrl
|
ddcdfbebf7aa55753beb5195edf8d571be7b862c
|
[
"MIT"
] | 122 |
2020-07-29T04:33:35.000Z
|
2022-03-13T13:29:41.000Z
|
"""An example of training PPO against OpenAI Gym Atari Envs.
This script is an example of training a PPO agent on Atari envs.
To train PPO for 10M timesteps on Breakout, run:
python train_ppo_ale.py
To train PPO using a recurrent model on a flickering Atari env, run:
python train_ppo_ale.py --recurrent --flicker --no-frame-stack
"""
import argparse
import functools
import numpy as np
import torch
from torch import nn
import pfrl
from pfrl import experiments, utils
from pfrl.agents import PPO
from pfrl.policies import SoftmaxCategoricalHead
from pfrl.wrappers import atari_wrappers
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--env", type=str, default="BreakoutNoFrameskip-v4", help="Gym Env ID."
)
parser.add_argument(
"--gpu", type=int, default=0, help="GPU device ID. Set to -1 to use CPUs only."
)
parser.add_argument(
"--num-envs",
type=int,
default=8,
help="Number of env instances run in parallel.",
)
parser.add_argument("--seed", type=int, default=0, help="Random seed [0, 2 ** 32)")
parser.add_argument(
"--outdir",
type=str,
default="results",
help=(
"Directory path to save output files."
" If it does not exist, it will be created."
),
)
parser.add_argument(
"--steps", type=int, default=10 ** 7, help="Total time steps for training."
)
parser.add_argument(
"--max-frames",
type=int,
default=30 * 60 * 60, # 30 minutes with 60 fps
help="Maximum number of frames for each episode.",
)
parser.add_argument("--lr", type=float, default=2.5e-4, help="Learning rate.")
parser.add_argument(
"--eval-interval",
type=int,
default=100000,
help="Interval (in timesteps) between evaluation phases.",
)
parser.add_argument(
"--eval-n-runs",
type=int,
default=10,
help="Number of episodes ran in an evaluation phase.",
)
parser.add_argument(
"--demo",
action="store_true",
default=False,
help="Run demo episodes, not training.",
)
parser.add_argument(
"--load",
type=str,
default="",
help=(
"Directory path to load a saved agent data from"
" if it is a non-empty string."
),
)
parser.add_argument(
"--log-level",
type=int,
default=20,
help="Logging level. 10:DEBUG, 20:INFO etc.",
)
parser.add_argument(
"--render",
action="store_true",
default=False,
help="Render env states in a GUI window.",
)
parser.add_argument(
"--monitor",
action="store_true",
default=False,
help=(
"Monitor env. Videos and additional information are saved as output files."
),
)
parser.add_argument(
"--update-interval",
type=int,
default=128 * 8,
help="Interval (in timesteps) between PPO iterations.",
)
parser.add_argument(
"--batchsize",
type=int,
default=32 * 8,
help="Size of minibatch (in timesteps).",
)
parser.add_argument(
"--epochs",
type=int,
default=4,
help="Number of epochs used for each PPO iteration.",
)
parser.add_argument(
"--log-interval",
type=int,
default=10000,
help="Interval (in timesteps) of printing logs.",
)
parser.add_argument(
"--recurrent",
action="store_true",
default=False,
help="Use a recurrent model. See the code for the model definition.",
)
parser.add_argument(
"--flicker",
action="store_true",
default=False,
help=(
"Use so-called flickering Atari, where each"
" screen is blacked out with probability 0.5."
),
)
parser.add_argument(
"--no-frame-stack",
action="store_true",
default=False,
help=(
"Disable frame stacking so that the agent can only see the current screen."
),
)
parser.add_argument(
"--checkpoint-frequency",
type=int,
default=None,
help="Frequency at which agents are stored.",
)
args = parser.parse_args()
import logging
logging.basicConfig(level=args.log_level)
# Set a random seed used in PFRL.
utils.set_random_seed(args.seed)
# Set different random seeds for different subprocesses.
# If seed=0 and processes=4, subprocess seeds are [0, 1, 2, 3].
# If seed=1 and processes=4, subprocess seeds are [4, 5, 6, 7].
process_seeds = np.arange(args.num_envs) + args.seed * args.num_envs
assert process_seeds.max() < 2 ** 32
args.outdir = experiments.prepare_output_dir(args, args.outdir)
print("Output files are saved in {}".format(args.outdir))
def make_env(idx, test):
# Use different random seeds for train and test envs
process_seed = int(process_seeds[idx])
env_seed = 2 ** 32 - 1 - process_seed if test else process_seed
env = atari_wrappers.wrap_deepmind(
atari_wrappers.make_atari(args.env, max_frames=args.max_frames),
episode_life=not test,
clip_rewards=not test,
flicker=args.flicker,
frame_stack=False,
)
env.seed(env_seed)
if args.monitor:
env = pfrl.wrappers.Monitor(
env, args.outdir, mode="evaluation" if test else "training"
)
if args.render:
env = pfrl.wrappers.Render(env)
return env
def make_batch_env(test):
vec_env = pfrl.envs.MultiprocessVectorEnv(
[
functools.partial(make_env, idx, test)
for idx, env in enumerate(range(args.num_envs))
]
)
if not args.no_frame_stack:
vec_env = pfrl.wrappers.VectorFrameStack(vec_env, 4)
return vec_env
sample_env = make_batch_env(test=False)
print("Observation space", sample_env.observation_space)
print("Action space", sample_env.action_space)
n_actions = sample_env.action_space.n
obs_n_channels = sample_env.observation_space.low.shape[0]
del sample_env
def lecun_init(layer, gain=1):
if isinstance(layer, (nn.Conv2d, nn.Linear)):
pfrl.initializers.init_lecun_normal(layer.weight, gain)
nn.init.zeros_(layer.bias)
else:
pfrl.initializers.init_lecun_normal(layer.weight_ih_l0, gain)
pfrl.initializers.init_lecun_normal(layer.weight_hh_l0, gain)
nn.init.zeros_(layer.bias_ih_l0)
nn.init.zeros_(layer.bias_hh_l0)
return layer
if args.recurrent:
model = pfrl.nn.RecurrentSequential(
lecun_init(nn.Conv2d(obs_n_channels, 32, 8, stride=4)),
nn.ReLU(),
lecun_init(nn.Conv2d(32, 64, 4, stride=2)),
nn.ReLU(),
lecun_init(nn.Conv2d(64, 64, 3, stride=1)),
nn.ReLU(),
nn.Flatten(),
lecun_init(nn.Linear(3136, 512)),
nn.ReLU(),
lecun_init(nn.GRU(num_layers=1, input_size=512, hidden_size=512)),
pfrl.nn.Branched(
nn.Sequential(
lecun_init(nn.Linear(512, n_actions), 1e-2),
SoftmaxCategoricalHead(),
),
lecun_init(nn.Linear(512, 1)),
),
)
else:
model = nn.Sequential(
lecun_init(nn.Conv2d(obs_n_channels, 32, 8, stride=4)),
nn.ReLU(),
lecun_init(nn.Conv2d(32, 64, 4, stride=2)),
nn.ReLU(),
lecun_init(nn.Conv2d(64, 64, 3, stride=1)),
nn.ReLU(),
nn.Flatten(),
lecun_init(nn.Linear(3136, 512)),
nn.ReLU(),
pfrl.nn.Branched(
nn.Sequential(
lecun_init(nn.Linear(512, n_actions), 1e-2),
SoftmaxCategoricalHead(),
),
lecun_init(nn.Linear(512, 1)),
),
)
opt = torch.optim.Adam(model.parameters(), lr=args.lr, eps=1e-5)
def phi(x):
# Feature extractor
return np.asarray(x, dtype=np.float32) / 255
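    # Note (added): phi rescales uint8 Atari frames from [0, 255] to float32 in
    # [0, 1]; e.g. phi(np.uint8([0, 128, 255])) is approximately [0.0, 0.502, 1.0].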
agent = PPO(
model,
opt,
gpu=args.gpu,
phi=phi,
update_interval=args.update_interval,
minibatch_size=args.batchsize,
epochs=args.epochs,
clip_eps=0.1,
clip_eps_vf=None,
standardize_advantages=True,
entropy_coef=1e-2,
recurrent=args.recurrent,
max_grad_norm=0.5,
)
if args.load:
agent.load(args.load)
if args.demo:
eval_stats = experiments.eval_performance(
env=make_batch_env(test=True),
agent=agent,
n_steps=None,
n_episodes=args.eval_n_runs,
)
print(
"n_runs: {} mean: {} median: {} stdev: {}".format(
args.eval_n_runs,
eval_stats["mean"],
eval_stats["median"],
eval_stats["stdev"],
)
)
else:
step_hooks = []
# Linearly decay the learning rate to zero
def lr_setter(env, agent, value):
for param_group in agent.optimizer.param_groups:
param_group["lr"] = value
step_hooks.append(
experiments.LinearInterpolationHook(args.steps, args.lr, 0, lr_setter)
)
experiments.train_agent_batch_with_evaluation(
agent=agent,
env=make_batch_env(False),
eval_env=make_batch_env(True),
outdir=args.outdir,
steps=args.steps,
eval_n_steps=None,
eval_n_episodes=args.eval_n_runs,
checkpoint_freq=args.checkpoint_frequency,
eval_interval=args.eval_interval,
log_interval=args.log_interval,
save_best_so_far_agent=False,
step_hooks=step_hooks,
)
if __name__ == "__main__":
main()
| 30.561194 | 87 | 0.572377 |
71438cde4862f8cc74b536fee699723661daf881
| 517 |
py
|
Python
|
_ext/python/crawlab/cli/actions/config.py
|
crawlab-team/crawlab-python-sdk
|
35f83f8d76046d3ee2700d63e96624ed534c1ca5
|
[
"BSD-3-Clause"
] | null | null | null |
_ext/python/crawlab/cli/actions/config.py
|
crawlab-team/crawlab-python-sdk
|
35f83f8d76046d3ee2700d63e96624ed534c1ca5
|
[
"BSD-3-Clause"
] | null | null | null |
_ext/python/crawlab/cli/actions/config.py
|
crawlab-team/crawlab-python-sdk
|
35f83f8d76046d3ee2700d63e96624ed534c1ca5
|
[
"BSD-3-Clause"
] | null | null | null |
from crawlab.config.config import config
from crawlab.constants.upload import CLI_DEFAULT_CONFIG_KEY_PASSWORD
def cli_config_func(args):
if args.set is not None:
k, v = args.set.split('=')
config.set(k, v)
config.save()
return
if args.unset is not None:
k = args.unset
config.unset(k)
config.save()
return
for k, v in config.data.items():
if k == CLI_DEFAULT_CONFIG_KEY_PASSWORD:
continue
print(f'{k}: {v}')
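# Illustrative invocations (added, not part of the original file), given an
# argparse namespace with ``set``/``unset`` attributes as used above:
#   args.set = 'username=admin'  -> stores the key/value pair and saves the config
#   args.unset = 'username'      -> removes the key and saves the config
#   both None                    -> prints every stored key except the password key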
| 22.478261 | 68 | 0.597679 |
51af18018cd697ec832d8bc8d886dd3745edfb91
| 643 |
py
|
Python
|
python/twoSumLessThanK.py
|
l0latgithub/codediary
|
a0327d2ee1137a542886d0af85129692711cd68a
|
[
"MIT"
] | null | null | null |
python/twoSumLessThanK.py
|
l0latgithub/codediary
|
a0327d2ee1137a542886d0af85129692711cd68a
|
[
"MIT"
] | null | null | null |
python/twoSumLessThanK.py
|
l0latgithub/codediary
|
a0327d2ee1137a542886d0af85129692711cd68a
|
[
"MIT"
] | null | null | null |
class Solution:
def twoSumLessThanK(self, nums: List[int], k: int) -> int:
"""
Given an array nums of integers and integer k,
return the maximum sum such that there exists i < j
with nums[i] + nums[j] = sum and sum < k.
If no i, j exist satisfying this equation, return -1.
"""
maxsum = -1
nums.sort()
lo, hi = 0, len(nums)-1
while lo<hi:
sum2 = nums[lo]+nums[hi]
if sum2<k:
maxsum = max(maxsum, sum2)
lo+=1
else:
hi-=1
return maxsum
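# Illustrative check (added, not part of the original file):
#   Solution().twoSumLessThanK([34, 23, 1, 24, 75, 33, 54, 8], 60) -> 58
# After sorting, the two-pointer scan keeps the largest pair sum below k (34 + 24).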
| 29.227273 | 62 | 0.458787 |
b7c9fb718e0524ffdb53a5ea16276ac8e2237779
| 9,754 |
py
|
Python
|
foolbox/attacks/localsearch.py
|
anonymous-user-commits/foolbox1
|
0f7e3db7c095a1d3dfdcdf3b3e0745d9ebc047e5
|
[
"MIT"
] | 1 |
2020-01-06T02:35:44.000Z
|
2020-01-06T02:35:44.000Z
|
foolbox/attacks/localsearch.py
|
adam-dziedzic/foolbox
|
0d8b5119f0afee9e2b42f563815340b206bbd14c
|
[
"MIT"
] | null | null | null |
foolbox/attacks/localsearch.py
|
adam-dziedzic/foolbox
|
0d8b5119f0afee9e2b42f563815340b206bbd14c
|
[
"MIT"
] | null | null | null |
from __future__ import division
import numpy as np
from .base import Attack
from .base import call_decorator
from ..utils import softmax
from .. import nprng
class SinglePixelAttack(Attack):
"""Perturbs just a single pixel and sets it to the min or max."""
@call_decorator
def __call__(self, input_or_adv, label=None, unpack=True,
max_pixels=1000):
"""Perturbs just a single pixel and sets it to the min or max.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, correctly classified image. If image is a
numpy array, label must be passed as well. If image is
an :class:`Adversarial` instance, label must not be passed.
label : int
The reference label of the original image. Must be passed
if image is a numpy array, must not be passed if image is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial image, otherwise returns
the Adversarial object.
max_pixels : int
Maximum number of pixels to try.
"""
a = input_or_adv
del input_or_adv
del label
del unpack
channel_axis = a.channel_axis(batch=False)
image = a.original_image
axes = [i for i in range(image.ndim) if i != channel_axis]
assert len(axes) == 2
h = image.shape[axes[0]]
w = image.shape[axes[1]]
min_, max_ = a.bounds()
pixels = nprng.permutation(h * w)
pixels = pixels[:max_pixels]
for i, pixel in enumerate(pixels):
x = pixel % w
y = pixel // w
location = [x, y]
location.insert(channel_axis, slice(None))
location = tuple(location)
for value in [min_, max_]:
perturbed = image.copy()
perturbed[location] = value
_, is_adv = a.predictions(perturbed)
if is_adv:
return
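# Illustrative usage sketch (added, not part of the original file), following the
# foolbox v1-style attack interface this module assumes:
#   attack = SinglePixelAttack(fmodel)
#   adversarial = attack(image, label, max_pixels=500)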
class MultiplePixelsAttack(Attack):
"""Perturbs multiple pixels and sets them to the min or max.
Proposed by Adam Dziedzic ([email protected]).
"""
@call_decorator
def __call__(self, input_or_adv, label=None, unpack=True,
num_pixels=1000, iterations=1):
"""Perturbs multiple pixels and sets them to the min or max.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, correctly classified image. If image is a
numpy array, label must be passed as well. If image is
an :class:`Adversarial` instance, label must not be passed.
label : int
The reference label of the original image. Must be passed
if image is a numpy array, must not be passed if image is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial image, otherwise returns
the Adversarial object.
num_pixels : int
Number of pixels that are perturbed in a single trial.
iterations : int
Number of times to try different set of num_pixels until an
adversarial example is found.
"""
a = input_or_adv
del input_or_adv
del label
del unpack
channel_axis = a.channel_axis(batch=False)
image = a.original_image
axes = [i for i in range(image.ndim) if i != channel_axis]
assert len(axes) == 2
h = image.shape[axes[0]]
w = image.shape[axes[1]]
min_, max_ = a.bounds()
for _ in range(iterations):
pixels = nprng.permutation(h * w)
pixels = pixels[:num_pixels]
perturbed = image.copy()
for i, pixel in enumerate(pixels):
x = pixel % w
y = pixel // w
location = [x, y]
location.insert(channel_axis, slice(None))
location = tuple(location)
if np.random.randint(0, 2) == 1:
value = min_
else:
value = max_
perturbed[location] = value
            # check if the current number of perturbed pixels is enough
_, is_adv = a.predictions(perturbed)
if is_adv:
return
class LocalSearchAttack(Attack):
"""A black-box attack based on the idea of greedy local search.
This implementation is based on the algorithm in [1]_.
References
----------
.. [1] Nina Narodytska, Shiva Prasad Kasiviswanathan, "Simple
Black-Box Adversarial Perturbations for Deep Networks",
https://arxiv.org/abs/1612.06299
"""
@call_decorator
def __call__(self, input_or_adv, label=None, unpack=True,
r=1.5, p=10., d=5, t=5, R=150):
"""A black-box attack based on the idea of greedy local search.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, correctly classified image. If image is a
numpy array, label must be passed as well. If image is
an :class:`Adversarial` instance, label must not be passed.
label : int
The reference label of the original image. Must be passed
if image is a numpy array, must not be passed if image is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial image, otherwise returns
the Adversarial object.
r : float
Perturbation parameter that controls the cyclic perturbation;
must be in [0, 2]
p : float
Perturbation parameter that controls the pixel sensitivity
estimation
d : int
The half side length of the neighborhood square
t : int
The number of pixels perturbed at each round
R : int
An upper bound on the number of iterations
"""
a = input_or_adv
del input_or_adv
del label
del unpack
# TODO: incorporate the modifications mentioned in the manuscript
# under "Implementing Algorithm LocSearchAdv"
assert 0 <= r <= 2
if a.target_class() is not None:
# TODO: check if this algorithm can be used as a targeted attack
return
def normalize(im):
min_, max_ = a.bounds()
im = im - (min_ + max_) / 2
im = im / (max_ - min_)
LB = -1 / 2
UB = 1 / 2
return im, LB, UB
def unnormalize(im):
min_, max_ = a.bounds()
im = im * (max_ - min_)
im = im + (min_ + max_) / 2
return im
Im = a.original_image
Im, LB, UB = normalize(Im)
cI = a.original_class
channel_axis = a.channel_axis(batch=False)
axes = [i for i in range(Im.ndim) if i != channel_axis]
assert len(axes) == 2
h = Im.shape[axes[0]]
w = Im.shape[axes[1]]
channels = Im.shape[channel_axis]
def random_locations():
n = int(0.1 * h * w)
n = min(n, 128)
locations = nprng.permutation(h * w)[:n]
p_x = locations % w
p_y = locations // w
pxy = list(zip(p_x, p_y))
pxy = np.array(pxy)
return pxy
def pert(Ii, p, x, y):
Im = Ii.copy()
location = [x, y]
location.insert(channel_axis, slice(None))
location = tuple(location)
Im[location] = p * np.sign(Im[location])
return Im
def cyclic(r, Ibxy):
result = r * Ibxy
if result < LB:
result = result + (UB - LB)
elif result > UB:
result = result - (UB - LB)
assert LB <= result <= UB
return result
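        # Illustrative values (added): with LB = -0.5, UB = 0.5 and r = 1.5,
        # cyclic(1.5, 0.4) computes 0.6, which exceeds UB and wraps to
        # 0.6 - (UB - LB) = -0.4, keeping the perturbed value inside [LB, UB].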
Ii = Im
PxPy = random_locations()
for _ in range(R):
# Computing the function g using the neighborhood
# IMPORTANT: random subset for efficiency
PxPy = PxPy[nprng.permutation(len(PxPy))[:128]]
L = [pert(Ii, p, x, y) for x, y in PxPy]
def score(Its):
Its = np.stack(Its)
Its = unnormalize(Its)
batch_logits, _ = a.batch_predictions(Its, strict=False)
scores = [softmax(logits)[cI] for logits in batch_logits]
return scores
scores = score(L)
indices = np.argsort(scores)[:t]
PxPy_star = PxPy[indices]
# Generation of new perturbed image Ii
for x, y in PxPy_star:
for b in range(channels):
location = [x, y]
location.insert(channel_axis, b)
location = tuple(location)
Ii[location] = cyclic(r, Ii[location])
# Check whether the perturbed image Ii is an adversarial image
_, is_adv = a.predictions(unnormalize(Ii))
if is_adv: # pragma: no cover
return
# Update a neighborhood of pixel locations for the next round
PxPy = [
(x, y)
for _a, _b in PxPy_star
for x in range(_a - d, _a + d + 1)
for y in range(_b - d, _b + d + 1)]
PxPy = [(x, y) for x, y in PxPy if 0 <= x < w and 0 <= y < h]
PxPy = list(set(PxPy))
PxPy = np.array(PxPy)
| 32.191419 | 77 | 0.538241 |
fe0b5f975378c327682c8085707aac3ad8caea25
| 2,198 |
py
|
Python
|
assignments/groupsize/confidence.py
|
LSaldyt/Modeling
|
f47481856bf1ef1227bb92cb86f6f0f639e9cd7c
|
[
"MIT"
] | null | null | null |
assignments/groupsize/confidence.py
|
LSaldyt/Modeling
|
f47481856bf1ef1227bb92cb86f6f0f639e9cd7c
|
[
"MIT"
] | null | null | null |
assignments/groupsize/confidence.py
|
LSaldyt/Modeling
|
f47481856bf1ef1227bb92cb86f6f0f639e9cd7c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from math import sqrt
from generate import read_csv
from statistics import mean
def transpose(data):
return list(map(list, zip(*data)))
def rss(data):
resid = lambda x : x - mean(data)
return sum([resid(x) ** 2 for x in data])
def standard_dev(data):
return sqrt(rss(data) / len(data))
def confidence(data):
zcrit = 1.645
sd = standard_dev(data)
dx = zcrit * (sd / sqrt(len(data)))
return mean(data), dx
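# Illustrative check (added, not part of the original file): for data = [2.0, 4.0, 6.0],
# rss(data) = 8.0 and standard_dev(data) = sqrt(8 / 3) ~= 1.633, so with zcrit = 1.645
# (a 90% two-sided z-interval) confidence(data) ~= (4.0, 1.551), i.e. 4.0 +/- 1.55.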
def pretty_stats(data, labels, filename=None):
csv_table = []
for statset, label in zip(data, labels):
print('\t%s' % label)
average, dx = confidence(statset)
print('\t\tConfidence | %s +/- %s' % (round(average, 3), round(dx, 3)))
print('\t\tStandard Dev | %s' % str(standard_dev(statset)))
print('\t\tRSS | %s' % str(rss(statset)))
if filename is not None: # If we plan to write to a file
csv_table.append([label, average, dx])
if filename is not None:
with open(filename, 'a') as csvfile:
for row in csv_table:
csvfile.write(','.join([str(item) for item in row]) + '\n')
def find_confidence(inputfile, outputfile):
content = [[float(v) for v in row] for row in read_csv(inputfile)]
small = transpose(content[:20])
medium = transpose(content[20:40])
large = transpose(content[40:])
labels = ['Min + Max',
'2 * Mean',
'2 * Median',
'First Quartile + Third Quartile - Min',
'Max - Min',
'(2 * Median) - Min',
'Max + (Standard Deviation / 2) - Min']
print('Small')
pretty_stats(small, labels, outputfile)
print('Medium')
pretty_stats(medium, labels, outputfile)
print('Large')
pretty_stats(large, labels, outputfile)
def main():
print('Normal')
find_confidence('estimates.csv', 'confidence.csv')
print('Shifted')
find_confidence('estimates_shifted.csv', 'confidence_shifted.csv')
print('Skewed')
find_confidence('estimates_skewed.csv', 'confidence_skewed.csv')
if __name__ == '__main__':
main()
# Neal Stephenson
# Cryptonomicon
| 30.109589 | 81 | 0.595996 |
72e0560dc3eae919f040cc6f832e79eb313cf72d
| 1,894 |
py
|
Python
|
pptx/enum/action.py
|
just4jc/python-pptx
|
ec433085d84d48b5539c379e52eb3c279ab2cbc0
|
[
"MIT"
] | 169 |
2016-12-18T16:37:48.000Z
|
2022-03-08T11:37:49.000Z
|
lib-linux_x64/pptx/enum/action.py
|
Element84/lambda-text-extractor
|
6da52d077a2fc571e38bfe29c33ae68f6443cd5a
|
[
"Apache-2.0"
] | 26 |
2017-06-08T01:45:36.000Z
|
2021-09-23T19:13:40.000Z
|
lib-linux_x64/pptx/enum/action.py
|
Element84/lambda-text-extractor
|
6da52d077a2fc571e38bfe29c33ae68f6443cd5a
|
[
"Apache-2.0"
] | 42 |
2016-12-23T03:27:12.000Z
|
2021-10-01T13:46:21.000Z
|
# encoding: utf-8
"""
Enumerations that describe click action settings
"""
from __future__ import absolute_import
from .base import alias, Enumeration, EnumMember
@alias('PP_ACTION')
class PP_ACTION_TYPE(Enumeration):
"""
Specifies the type of a mouse action (click or hover action).
Alias: ``PP_ACTION``
Example::
from pptx.enum.action import PP_ACTION
assert shape.click_action.action == PP_ACTION.HYPERLINK
"""
__ms_name__ = 'PpActionType'
__url__ = (
'https://msdn.microsoft.com/EN-US/library/office/ff744895.aspx'
)
__members__ = (
EnumMember(
'END_SHOW', 6, 'Slide show ends.'
),
EnumMember(
'FIRST_SLIDE', 3, 'Returns to the first slide.'
),
EnumMember(
'HYPERLINK', 7, 'Hyperlink.'
),
EnumMember(
'LAST_SLIDE', 4, 'Moves to the last slide.'
),
EnumMember(
'LAST_SLIDE_VIEWED', 5, 'Moves to the last slide viewed.'
),
EnumMember(
'NAMED_SLIDE', 101, 'Moves to slide specified by slide number.'
),
EnumMember(
'NAMED_SLIDE_SHOW', 10, 'Runs the slideshow.'
),
EnumMember(
'NEXT_SLIDE', 1, 'Moves to the next slide.'
),
EnumMember(
'NONE', 0, 'No action is performed.'
),
EnumMember(
'OPEN_FILE', 102, 'Opens the specified file.'
),
EnumMember(
'OLE_VERB', 11, 'OLE Verb.'
),
EnumMember(
'PLAY', 12, 'Begins the slideshow.'
),
EnumMember(
'PREVIOUS_SLIDE', 2, 'Moves to the previous slide.'
),
EnumMember(
'RUN_MACRO', 8, 'Runs a macro.'
),
EnumMember(
'RUN_PROGRAM', 9, 'Runs a program.'
),
)
| 23.974684 | 75 | 0.530095 |
980785af6065f49e9be7ece50cb1370311972dba
| 507 |
py
|
Python
|
Raka/BruteFmaster/bruteforce.py
|
RakaKuswanto/UICTC-CTF
|
993943beea876fcac78d71057034f88059a3b010
|
[
"MIT"
] | null | null | null |
Raka/BruteFmaster/bruteforce.py
|
RakaKuswanto/UICTC-CTF
|
993943beea876fcac78d71057034f88059a3b010
|
[
"MIT"
] | null | null | null |
Raka/BruteFmaster/bruteforce.py
|
RakaKuswanto/UICTC-CTF
|
993943beea876fcac78d71057034f88059a3b010
|
[
"MIT"
] | 1 |
2020-10-02T05:01:08.000Z
|
2020-10-02T05:01:08.000Z
|
#2020 decenzo-lab Raka Kuswanto
import requests
URL = "http://uictc.herokuapp.com/BruteFmaster/"
# read the password file
file1 = open('password.txt', 'r')
Lines = file1.readlines()
# Strips the newline character
for line in Lines:
print(line.strip())
password= line.strip()
PARAMS= {'password':password}
r = requests.get(url = URL, params = PARAMS)
if (r.text.find("Wrong")) == -1:
print("Menebak dengan kata "+line.strip()+" berhasil, respon : "+r.text)
exit()
| 28.166667 | 80 | 0.646943 |
b01732cc01e9ab1ecdab7325e8f72e1ab976e971
| 18,124 |
py
|
Python
|
discord/ui/view.py
|
alexyy802/discord.io
|
99d6ec71aeb121f6887ff266d36d9d1851abe4db
|
[
"MIT"
] | null | null | null |
discord/ui/view.py
|
alexyy802/discord.io
|
99d6ec71aeb121f6887ff266d36d9d1851abe4db
|
[
"MIT"
] | null | null | null |
discord/ui/view.py
|
alexyy802/discord.io
|
99d6ec71aeb121f6887ff266d36d9d1851abe4db
|
[
"MIT"
] | null | null | null |
"""
The MIT License (MIT)
Copyright (c) 2021-present VincentRPS
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import Any, Callable, ClassVar, Dict, Iterator, List, Optional, Sequence, TYPE_CHECKING, Tuple
from functools import partial
from itertools import groupby
import traceback
import asyncio
import sys
import time
import os
from .item import Item, ItemCallbackType
from ..components import (
Component,
ActionRow as ActionRowComponent,
_component_factory,
Button as ButtonComponent,
SelectMenu as SelectComponent,
)
__all__ = (
'View',
)
if TYPE_CHECKING:
from ..interactions import Interaction
from ..message import Message
from ..types.components import Component as ComponentPayload
from ..state import ConnectionState
def _walk_all_components(components: List[Component]) -> Iterator[Component]:
for item in components:
if isinstance(item, ActionRowComponent):
yield from item.children
else:
yield item
def _component_to_item(component: Component) -> Item:
if isinstance(component, ButtonComponent):
from .button import Button
return Button.from_component(component)
if isinstance(component, SelectComponent):
from .select import Select
return Select.from_component(component)
return Item.from_component(component)
class _ViewWeights:
__slots__ = (
'weights',
)
def __init__(self, children: List[Item]):
self.weights: List[int] = [0, 0, 0, 0, 0]
key = lambda i: sys.maxsize if i.row is None else i.row
children = sorted(children, key=key)
for row, group in groupby(children, key=key):
for item in group:
self.add_item(item)
def find_open_space(self, item: Item) -> int:
for index, weight in enumerate(self.weights):
if weight + item.width <= 5:
return index
raise ValueError('could not find open space for item')
def add_item(self, item: Item) -> None:
if item.row is not None:
total = self.weights[item.row] + item.width
if total > 5:
raise ValueError(f'item would not fit at row {item.row} ({total} > 5 width)')
self.weights[item.row] = total
item._rendered_row = item.row
else:
index = self.find_open_space(item)
self.weights[index] += item.width
item._rendered_row = index
def remove_item(self, item: Item) -> None:
if item._rendered_row is not None:
self.weights[item._rendered_row] -= item.width
item._rendered_row = None
def clear(self) -> None:
self.weights = [0, 0, 0, 0, 0]
class View:
"""Represents a UI view.
This object must be inherited to create a UI within Discord.
.. versionadded:: 2.0
Parameters
-----------
timeout: Optional[:class:`float`]
Timeout in seconds from last interaction with the UI before no longer accepting input.
If ``None`` then there is no timeout.
Attributes
------------
timeout: Optional[:class:`float`]
Timeout from last interaction with the UI before no longer accepting input.
If ``None`` then there is no timeout.
children: List[:class:`Item`]
The list of children attached to this view.
"""
__discord_ui_view__: ClassVar[bool] = True
__view_children_items__: ClassVar[List[ItemCallbackType]] = []
def __init_subclass__(cls) -> None:
children: List[ItemCallbackType] = []
for base in reversed(cls.__mro__):
for member in base.__dict__.values():
if hasattr(member, '__discord_ui_model_type__'):
children.append(member)
if len(children) > 25:
raise TypeError('View cannot have more than 25 children')
cls.__view_children_items__ = children
def __init__(self, *, timeout: Optional[float] = 180.0):
self.timeout = timeout
self.children: List[Item] = []
for func in self.__view_children_items__:
item: Item = func.__discord_ui_model_type__(**func.__discord_ui_model_kwargs__)
item.callback = partial(func, self, item)
item._view = self
setattr(self, func.__name__, item)
self.children.append(item)
self.__weights = _ViewWeights(self.children)
loop = asyncio.get_running_loop()
self.id: str = os.urandom(16).hex()
self.__cancel_callback: Optional[Callable[[View], None]] = None
self.__timeout_expiry: Optional[float] = None
self.__timeout_task: Optional[asyncio.Task[None]] = None
self.__stopped: asyncio.Future[bool] = loop.create_future()
def __repr__(self) -> str:
return f'<{self.__class__.__name__} timeout={self.timeout} children={len(self.children)}>'
async def __timeout_task_impl(self) -> None:
while True:
# Guard just in case someone changes the value of the timeout at runtime
if self.timeout is None:
return
if self.__timeout_expiry is None:
return self._dispatch_timeout()
# Check if we've elapsed our currently set timeout
now = time.monotonic()
if now >= self.__timeout_expiry:
return self._dispatch_timeout()
# Wait N seconds to see if timeout data has been refreshed
await asyncio.sleep(self.__timeout_expiry - now)
def to_components(self) -> List[Dict[str, Any]]:
def key(item: Item) -> int:
return item._rendered_row or 0
children = sorted(self.children, key=key)
components: List[Dict[str, Any]] = []
for _, group in groupby(children, key=key):
children = [item.to_component_dict() for item in group]
if not children:
continue
components.append(
{
'type': 1,
'components': children,
}
)
return components
@classmethod
def from_message(cls, message: Message, /, *, timeout: Optional[float] = 180.0) -> View:
"""Converts a message's components into a :class:`View`.
The :attr:`.Message.components` of a message are read-only
and separate types from those in the ``discord.ui`` namespace.
In order to modify and edit message components they must be
converted into a :class:`View` first.
Parameters
-----------
message: :class:`discord.Message`
The message with components to convert into a view.
timeout: Optional[:class:`float`]
The timeout of the converted view.
Returns
--------
:class:`View`
The converted view. This always returns a :class:`View` and not
one of its subclasses.
"""
view = View(timeout=timeout)
for component in _walk_all_components(message.components):
view.add_item(_component_to_item(component))
return view
@property
def _expires_at(self) -> Optional[float]:
if self.timeout:
return time.monotonic() + self.timeout
return None
def add_item(self, item: Item) -> None:
"""Adds an item to the view.
Parameters
-----------
item: :class:`Item`
The item to add to the view.
Raises
--------
TypeError
An :class:`Item` was not passed.
ValueError
Maximum number of children has been exceeded (25)
or the row the item is trying to be added to is full.
"""
if len(self.children) > 25:
raise ValueError('maximum number of children exceeded')
if not isinstance(item, Item):
raise TypeError(f'expected Item not {item.__class__!r}')
self.__weights.add_item(item)
item._view = self
self.children.append(item)
def remove_item(self, item: Item) -> None:
"""Removes an item from the view.
Parameters
-----------
item: :class:`Item`
The item to remove from the view.
"""
try:
self.children.remove(item)
except ValueError:
pass
else:
self.__weights.remove_item(item)
def clear_items(self) -> None:
"""Removes all items from the view."""
self.children.clear()
self.__weights.clear()
async def interaction_check(self, interaction: Interaction) -> bool:
"""|coro|
A callback that is called when an interaction happens within the view
that checks whether the view should process item callbacks for the interaction.
This is useful to override if, for example, you want to ensure that the
interaction author is a given user.
The default implementation of this returns ``True``.
.. note::
If an exception occurs within the body then the check
is considered a failure and :meth:`on_error` is called.
Parameters
-----------
interaction: :class:`~discord.Interaction`
The interaction that occurred.
Returns
---------
:class:`bool`
Whether the view children's callbacks should be called.
"""
return True
async def on_timeout(self) -> None:
"""|coro|
A callback that is called when a view's timeout elapses without being explicitly stopped.
"""
pass
async def on_error(self, error: Exception, item: Item, interaction: Interaction) -> None:
"""|coro|
A callback that is called when an item's callback or :meth:`interaction_check`
fails with an error.
The default implementation prints the traceback to stderr.
Parameters
-----------
error: :class:`Exception`
The exception that was raised.
item: :class:`Item`
The item that failed the dispatch.
interaction: :class:`~discord.Interaction`
The interaction that led to the failure.
"""
print(f'Ignoring exception in view {self} for item {item}:', file=sys.stderr)
traceback.print_exception(error.__class__, error, error.__traceback__, file=sys.stderr)
async def _scheduled_task(self, item: Item, interaction: Interaction):
try:
if self.timeout:
self.__timeout_expiry = time.monotonic() + self.timeout
allow = await self.interaction_check(interaction)
if not allow:
return
await item.callback(interaction)
if not interaction.response._responded:
await interaction.response.defer()
except Exception as e:
return await self.on_error(e, item, interaction)
def _start_listening_from_store(self, store: ViewStore) -> None:
self.__cancel_callback = partial(store.remove_view)
if self.timeout:
loop = asyncio.get_running_loop()
if self.__timeout_task is not None:
self.__timeout_task.cancel()
self.__timeout_expiry = time.monotonic() + self.timeout
self.__timeout_task = loop.create_task(self.__timeout_task_impl())
def _dispatch_timeout(self):
if self.__stopped.done():
return
self.__stopped.set_result(True)
asyncio.create_task(self.on_timeout(), name=f'discord-ui-view-timeout-{self.id}')
def _dispatch_item(self, item: Item, interaction: Interaction):
if self.__stopped.done():
return
asyncio.create_task(self._scheduled_task(item, interaction), name=f'discord-ui-view-dispatch-{self.id}')
def refresh(self, components: List[Component]):
# This is pretty hacky at the moment
# fmt: off
old_state: Dict[Tuple[int, str], Item] = {
(item.type.value, item.custom_id): item # type: ignore
for item in self.children
if item.is_dispatchable()
}
# fmt: on
children: List[Item] = []
for component in _walk_all_components(components):
try:
older = old_state[(component.type.value, component.custom_id)] # type: ignore
except (KeyError, AttributeError):
children.append(_component_to_item(component))
else:
older.refresh_component(component)
children.append(older)
self.children = children
def stop(self) -> None:
"""Stops listening to interaction events from this view.
This operation cannot be undone.
"""
if not self.__stopped.done():
self.__stopped.set_result(False)
self.__timeout_expiry = None
if self.__timeout_task is not None:
self.__timeout_task.cancel()
self.__timeout_task = None
if self.__cancel_callback:
self.__cancel_callback(self)
self.__cancel_callback = None
def is_finished(self) -> bool:
""":class:`bool`: Whether the view has finished interacting."""
return self.__stopped.done()
def is_dispatching(self) -> bool:
""":class:`bool`: Whether the view has been added for dispatching purposes."""
return self.__cancel_callback is not None
def is_persistent(self) -> bool:
""":class:`bool`: Whether the view is set up as persistent.
A persistent view has all their components with a set ``custom_id`` and
a :attr:`timeout` set to ``None``.
"""
return self.timeout is None and all(item.is_persistent() for item in self.children)
async def wait(self) -> bool:
"""Waits until the view has finished interacting.
A view is considered finished when :meth:`stop` is called
or it times out.
Returns
--------
:class:`bool`
If ``True``, then the view timed out. If ``False`` then
the view finished normally.
"""
return await self.__stopped
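# Illustrative usage sketch (added, not part of the original module), assuming a
# ``button`` decorator with this signature is exposed elsewhere in ``discord.ui``:
#
#     class Confirm(View):
#         @button(label='Confirm')
#         async def confirm(self, button, interaction):
#             await interaction.response.send_message('Confirmed')
#             self.stop()
#
#     view = Confirm(timeout=60)
#     await channel.send('Proceed?', view=view)
#     timed_out = await view.wait()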
class ViewStore:
def __init__(self, state: ConnectionState):
# (component_type, message_id, custom_id): (View, Item)
self._views: Dict[Tuple[int, Optional[int], str], Tuple[View, Item]] = {}
# message_id: View
self._synced_message_views: Dict[int, View] = {}
self._state: ConnectionState = state
@property
def persistent_views(self) -> Sequence[View]:
# fmt: off
views = {
view.id: view
for (_, (view, _)) in self._views.items()
if view.is_persistent()
}
# fmt: on
return list(views.values())
def __verify_integrity(self):
to_remove: List[Tuple[int, Optional[int], str]] = []
for (k, (view, _)) in self._views.items():
if view.is_finished():
to_remove.append(k)
for k in to_remove:
del self._views[k]
def add_view(self, view: View, message_id: Optional[int] = None):
self.__verify_integrity()
view._start_listening_from_store(self)
for item in view.children:
if item.is_dispatchable():
self._views[(item.type.value, message_id, item.custom_id)] = (view, item) # type: ignore
if message_id is not None:
self._synced_message_views[message_id] = view
def remove_view(self, view: View):
for item in view.children:
if item.is_dispatchable():
self._views.pop((item.type.value, item.custom_id), None) # type: ignore
for key, value in self._synced_message_views.items():
if value.id == view.id:
del self._synced_message_views[key]
break
def dispatch(self, component_type: int, custom_id: str, interaction: Interaction):
self.__verify_integrity()
message_id: Optional[int] = interaction.message and interaction.message.id
key = (component_type, message_id, custom_id)
# Fallback to None message_id searches in case a persistent view
# was added without an associated message_id
value = self._views.get(key) or self._views.get((component_type, None, custom_id))
if value is None:
return
view, item = value
item.refresh_state(interaction)
view._dispatch_item(item, interaction)
def is_message_tracked(self, message_id: int):
return message_id in self._synced_message_views
def remove_message_tracking(self, message_id: int) -> Optional[View]:
return self._synced_message_views.pop(message_id, None)
def update_from_message(self, message_id: int, components: List[ComponentPayload]):
# pre-req: is_message_tracked == true
view = self._synced_message_views[message_id]
view.refresh([_component_factory(d) for d in components])
| 34.196226 | 112 | 0.621552 |
2ca86fbabf035f09c8a323ccad6f9fd2e4c7ce4b
| 2,870 |
py
|
Python
|
integrationtest/vm/installation/upgrade/test_zs_upgd_1.4_latest_on_cos7.py
|
sherry546/zstack-woodpecker
|
54a37459f2d72ce6820974feaa6eb55772c3d2ce
|
[
"Apache-2.0"
] | 1 |
2021-03-21T12:41:11.000Z
|
2021-03-21T12:41:11.000Z
|
integrationtest/vm/installation/upgrade/test_zs_upgd_1.4_latest_on_cos7.py
|
sherry546/zstack-woodpecker
|
54a37459f2d72ce6820974feaa6eb55772c3d2ce
|
[
"Apache-2.0"
] | null | null | null |
integrationtest/vm/installation/upgrade/test_zs_upgd_1.4_latest_on_cos7.py
|
sherry546/zstack-woodpecker
|
54a37459f2d72ce6820974feaa6eb55772c3d2ce
|
[
"Apache-2.0"
] | 1 |
2017-05-19T06:40:40.000Z
|
2017-05-19T06:40:40.000Z
|
'''
@author: MengLai
'''
import os
import tempfile
import uuid
import time
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstacklib.utils.ssh as ssh
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
node_ip = os.environ.get('node1Ip')
def test():
test_util.test_dsc('Create test vm to test zstack upgrade by -u.')
if os.path.exists('/home/installation-package/zstack'):
image_name = os.environ.get('imageName_i_c7_z_1.4')
update_file = "/home/%s/zstack-woodpecker/integrationtest/vm/installation/zstack_update_iso.sh" % node_ip
elif os.path.exists('/home/installation-package/mevoco'):
image_name = os.environ.get('imageName_i_c7_m_1.4')
update_file = "/home/%s/zstack-woodpecker/integrationtest/vm/installation/mevoco_update_iso.sh" % node_ip
vm = test_stub.create_vlan_vm(image_name)
test_obj_dict.add_vm(vm)
if os.environ.get('zstackManagementIp') == None:
vm.check()
else:
time.sleep(60)
vm_inv = vm.get_vm()
vm_ip = vm_inv.vmNics[0].ip
ssh_cmd = 'ssh -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s' % vm_ip
ssh.make_ssh_no_password(vm_ip, test_lib.lib_get_vm_username(vm_inv), \
test_lib.lib_get_vm_password(vm_inv))
test_stub.copy_id_dsa(vm_inv, ssh_cmd, tmp_file)
test_stub.copy_id_dsa_pub(vm_inv)
test_util.test_dsc('Update MN IP')
cmd = '%s "zstack-ctl change_ip --ip="%s ' % (ssh_cmd, vm_ip)
process_result = test_stub.execute_shell_in_process(cmd, tmp_file)
cmd = '%s "zstack-ctl start"' % ssh_cmd
process_result = test_stub.execute_shell_in_process(cmd, tmp_file)
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
test_util.test_dsc('Upgrade zstack to latest')
test_stub.update_iso(ssh_cmd, tmp_file, vm_inv, update_file)
upgrade_target_file = '/root/zstack-upgrade-all-in-one.tgz'
test_stub.prepare_test_env(vm_inv, upgrade_target_file)
test_stub.upgrade_zstack(ssh_cmd, upgrade_target_file, tmp_file)
zstack_latest_version = os.environ.get('zstackLatestVersion')
test_stub.check_zstack_version(ssh_cmd, tmp_file, vm_inv, zstack_latest_version)
cmd = '%s "zstack-ctl start"' % ssh_cmd
process_result = test_stub.execute_shell_in_process(cmd, tmp_file)
test_stub.check_installation(ssh_cmd, tmp_file, vm_inv)
os.system('rm -f %s' % tmp_file)
vm.destroy()
test_util.test_pass('ZStack upgrade Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
os.system('rm -f %s' % tmp_file)
test_lib.lib_error_cleanup(test_obj_dict)
| 39.315068 | 114 | 0.718815 |
c0d3a9d8ae04760fa6d293936db2ee8e695d60db
| 1,536 |
py
|
Python
|
scd/spaceless_except_pre.py
|
felipetomm/POX-Django
|
6060a9e2a999dc56b63826d0ec3498b11b03adce
|
[
"Apache-2.0"
] | 1 |
2019-10-20T00:05:34.000Z
|
2019-10-20T00:05:34.000Z
|
scd/spaceless_except_pre.py
|
felipetomm/POX-Django
|
6060a9e2a999dc56b63826d0ec3498b11b03adce
|
[
"Apache-2.0"
] | null | null | null |
scd/spaceless_except_pre.py
|
felipetomm/POX-Django
|
6060a9e2a999dc56b63826d0ec3498b11b03adce
|
[
"Apache-2.0"
] | null | null | null |
"""Copyright (c) 2013-2014 Stephan Groß, under MIT license."""
from __future__ import unicode_literals
import re
from django import template
from django.template import Node
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import allow_lazy
register = template.Library()
def strip_spaces_between_tags_except_pre(value):
def replacement(count, matches, match):
matches.append(match.group(0)[1:-1]) # save the whole match without leading "<" and trailing ">"
count[0] += 1
        return '<{{{}}}>'.format(count[0])  # add "<" and ">" to preserve space stripping
count = [-1]
matches = []
value = re.sub(r'<pre(\s.*)?>(.*?)</pre>', lambda match: replacement(count, matches, match), force_text(value), flags=re.S | re.M | re.I)
value = re.sub(r'>\s+<', '><', force_text(value))
return value.format(*matches)
strip_spaces_between_tags_except_pre = allow_lazy(strip_spaces_between_tags_except_pre, six.text_type)
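# Illustrative behaviour (added, not part of the original file):
#   strip_spaces_between_tags_except_pre('<div>\n  <pre> keep  this </pre>\n</div>')
# returns '<div><pre> keep  this </pre></div>': whitespace between tags is removed
# while the <pre> contents are preserved verbatim.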
class SpacelessExceptPreNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
return strip_spaces_between_tags_except_pre(self.nodelist.render(context).strip())
@register.tag
def spaceless_except_pre(parser, token):
"""Remove whitespace between HTML tags, including tab and newline characters except content between <pre>"""
nodelist = parser.parse(('endspaceless_except_pre',))
parser.delete_first_token()
return SpacelessExceptPreNode(nodelist)
| 35.72093 | 141 | 0.721354 |
58792ff3ae89bd6e75c67450344d5c80bbdf14e4
| 1,120 |
py
|
Python
|
spacy/tests/vocab/test_add_vectors.py
|
algteam/spacy_zh_model
|
0b0cba1a3964aa426e5f96087849c90e69e2a89d
|
[
"MIT"
] | 5 |
2019-04-19T06:27:29.000Z
|
2019-12-02T13:30:47.000Z
|
spacy/tests/vocab/test_add_vectors.py
|
algteam/spacy_zh_model
|
0b0cba1a3964aa426e5f96087849c90e69e2a89d
|
[
"MIT"
] | null | null | null |
spacy/tests/vocab/test_add_vectors.py
|
algteam/spacy_zh_model
|
0b0cba1a3964aa426e5f96087849c90e69e2a89d
|
[
"MIT"
] | 2 |
2019-04-19T06:27:18.000Z
|
2019-10-04T12:39:15.000Z
|
# coding: utf-8
from __future__ import unicode_literals
import numpy
from numpy.testing import assert_allclose
from ...vocab import Vocab
from ..._ml import cosine
def test_vocab_add_vector():
vocab = Vocab()
data = numpy.ndarray((5,3), dtype='f')
data[0] = 1.
data[1] = 2.
vocab.set_vector(u'cat', data[0])
vocab.set_vector(u'dog', data[1])
cat = vocab[u'cat']
assert list(cat.vector) == [1., 1., 1.]
dog = vocab[u'dog']
assert list(dog.vector) == [2., 2., 2.]
def test_vocab_prune_vectors():
vocab = Vocab()
_ = vocab[u'cat']
_ = vocab[u'dog']
_ = vocab[u'kitten']
data = numpy.ndarray((5,3), dtype='f')
data[0] = 1.
data[1] = 2.
data[2] = 1.1
vocab.set_vector(u'cat', data[0])
vocab.set_vector(u'dog', data[1])
vocab.set_vector(u'kitten', data[2])
remap = vocab.prune_vectors(2)
assert list(remap.keys()) == [u'kitten']
neighbour, similarity = list(remap.values())[0]
assert neighbour == u'cat', remap
assert_allclose(similarity, cosine(data[0], data[2]), atol=1e-6)
| 27.317073 | 69 | 0.592857 |
22e312af5ee0b2e45ec819416c4f315ce597273f
| 815 |
py
|
Python
|
toontown/compiler/clashdna/dna/components/DNAProp.py
|
chrisd149/OpenLevelEditor
|
2527f84910b198a38de140b533d07d65fe30c6c9
|
[
"MIT"
] | 25 |
2020-11-23T13:55:42.000Z
|
2022-03-26T06:17:18.000Z
|
toontown/compiler/clashdna/dna/components/DNAProp.py
|
chrisd149/OpenLevelEditor
|
2527f84910b198a38de140b533d07d65fe30c6c9
|
[
"MIT"
] | 15 |
2020-11-25T14:47:24.000Z
|
2021-04-21T23:40:57.000Z
|
toontown/compiler/clashdna/dna/components/DNAProp.py
|
chrisd149/OpenLevelEditor
|
2527f84910b198a38de140b533d07d65fe30c6c9
|
[
"MIT"
] | 11 |
2020-11-25T13:57:02.000Z
|
2022-02-23T14:25:15.000Z
|
from toontown.compiler.clashdna.dna.components.DNANode import DNANode
from toontown.compiler.clashdna.dna.base.DNAPacker import *
class DNAProp(DNANode):
COMPONENT_CODE = 4
def __init__(self, name):
DNANode.__init__(self, name)
self.code = ''
self.color = (1, 1, 1, 1)
def setCode(self, code):
self.code = code
def setColor(self, color):
self.color = color
def traverse(self, recursive=True, verbose=False):
packer = DNANode.traverse(self, recursive=False, verbose=verbose)
packer.name = 'DNAProp' # Override the name for debugging.
packer.pack('code', self.code, STRING)
packer.packColor('color', *self.color)
if recursive:
packer += self.traverseChildren(verbose=verbose)
return packer
| 30.185185 | 73 | 0.64908 |
c14777d1dfb0de3a8d8ddef9e55f91d8254df3fd
| 8,377 |
py
|
Python
|
applications/sentiment_analysis/predict.py
|
dancingalone/PaddleNLP
|
a93a047b0274d8bc71e18040eb69e1fb91e6733f
|
[
"Apache-2.0"
] | 1 |
2021-12-27T06:46:00.000Z
|
2021-12-27T06:46:00.000Z
|
applications/sentiment_analysis/predict.py
|
TmengT/PaddleNLP
|
806ff6f9984afa56012270b666a7954ea31be952
|
[
"Apache-2.0"
] | null | null | null |
applications/sentiment_analysis/predict.py
|
TmengT/PaddleNLP
|
806ff6f9984afa56012270b666a7954ea31be952
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import copy
import json
import argparse
from functools import partial
from collections import defaultdict
import paddle
from paddlenlp.data import Pad, Stack, Tuple
from paddlenlp.datasets import load_dataset, MapDataset
from paddlenlp.transformers import SkepModel, SkepTokenizer
from utils import decoding, load_dict, read_test_file
from extraction.data import convert_example_to_feature as convert_example_to_feature_ext
from extraction.model import SkepForTokenClassification
from classification.model import SkepForSequenceClassification
from classification.data import convert_example_to_feature as convert_example_to_feature_cls
def concate_aspect_and_opinion(text, aspect, opinions):
aspect_text = ""
for opinion in opinions:
if text.find(aspect) <= text.find(opinion):
aspect_text += aspect + opinion + ","
else:
aspect_text += opinion + aspect + ","
aspect_text = aspect_text[:-1]
return aspect_text
def predict_ext(ext_model_path, ext_label_path, test_path):
# load dict
model_name = "skep_ernie_1.0_large_ch"
ext_label2id, ext_id2label = load_dict(args.ext_label_path)
tokenizer = SkepTokenizer.from_pretrained(model_name)
ori_test_ds = load_dataset(read_test_file, data_path=test_path, lazy=False)
trans_func = partial(
convert_example_to_feature_ext,
tokenizer=tokenizer,
label2id=ext_label2id,
max_seq_len=args.max_seq_len,
is_test=True)
test_ds = copy.copy(ori_test_ds).map(trans_func, lazy=False)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=tokenizer.pad_token_id),
Pad(axis=0, pad_val=tokenizer.pad_token_type_id),
Stack(dtype="int64"), ): fn(samples)
test_batch_sampler = paddle.io.BatchSampler(
test_ds, batch_size=args.batch_size, shuffle=False)
test_loader = paddle.io.DataLoader(
test_ds, batch_sampler=test_batch_sampler, collate_fn=batchify_fn)
print("test data loaded.")
# load ext model
ext_state_dict = paddle.load(args.ext_model_path)
ext_skep = SkepModel.from_pretrained(model_name)
ext_model = SkepForTokenClassification(
ext_skep, num_classes=len(ext_label2id))
ext_model.load_dict(ext_state_dict)
print("extraction model loaded.")
ext_model.eval()
results = []
for bid, batch_data in enumerate(test_loader):
input_ids, token_type_ids, seq_lens = batch_data
logits = ext_model(input_ids, token_type_ids=token_type_ids)
predictions = logits.argmax(axis=2).numpy()
for eid, (seq_len, prediction) in enumerate(zip(seq_lens, predictions)):
idx = bid * args.batch_size + eid
tag_seq = [ext_id2label[idx] for idx in prediction[:seq_len][1:-1]]
text = ori_test_ds[idx]["text"]
aps = decoding(text, tag_seq)
for aid, ap in enumerate(aps):
aspect, opinions = ap[0], list(set(ap[1:]))
aspect_text = concate_aspect_and_opinion(text, aspect, opinions)
results.append({
"id": str(idx) + "_" + str(aid),
"aspect": aspect,
"opinions": opinions,
"text": text,
"aspect_text": aspect_text
})
return results
def predict_cls(cls_model_path, cls_label_path, ext_results):
# load dict
model_name = "skep_ernie_1.0_large_ch"
cls_label2id, cls_id2label = load_dict(args.cls_label_path)
tokenizer = SkepTokenizer.from_pretrained(model_name)
test_ds = MapDataset(ext_results)
trans_func = partial(
convert_example_to_feature_cls,
tokenizer=tokenizer,
label2id=cls_label2id,
max_seq_len=args.max_seq_len,
is_test=True)
test_ds = test_ds.map(trans_func, lazy=False)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=tokenizer.pad_token_id),
Pad(axis=0, pad_val=tokenizer.pad_token_type_id),
Stack(dtype="int64")): fn(samples)
# set shuffle is False
test_batch_sampler = paddle.io.BatchSampler(
test_ds, batch_size=args.batch_size, shuffle=False)
test_loader = paddle.io.DataLoader(
test_ds, batch_sampler=test_batch_sampler, collate_fn=batchify_fn)
print("test data loaded.")
# load cls model
cls_state_dict = paddle.load(args.cls_model_path)
cls_skep = SkepModel.from_pretrained(model_name)
cls_model = SkepForSequenceClassification(
cls_skep, num_classes=len(cls_label2id))
cls_model.load_dict(cls_state_dict)
print("classification model loaded.")
cls_model.eval()
results = []
for bid, batch_data in enumerate(test_loader):
input_ids, token_type_ids, seq_lens = batch_data
logits = cls_model(input_ids, token_type_ids=token_type_ids)
predictions = logits.argmax(axis=1).numpy().tolist()
results.extend(predictions)
results = [cls_id2label[pred_id] for pred_id in results]
return results
def post_process(ext_results, cls_results):
assert len(ext_results) == len(cls_results)
collect_dict = defaultdict(list)
for ext_result, cls_result in zip(ext_results, cls_results):
ext_result["sentiment_polarity"] = cls_result
eid, _ = ext_result["id"].split("_")
collect_dict[eid].append(ext_result)
sentiment_results = []
for eid in collect_dict.keys():
sentiment_result = {}
ap_list = []
for idx, single_ap in enumerate(collect_dict[eid]):
if idx == 0:
sentiment_result["text"] = single_ap["text"]
ap_list.append({
"aspect": single_ap["aspect"],
"opinions": single_ap["opinions"],
"sentiment_polarity": single_ap["sentiment_polarity"]
})
sentiment_result["ap_list"] = ap_list
sentiment_results.append(sentiment_result)
with open(args.save_path, "w", encoding="utf-8") as f:
for sentiment_result in sentiment_results:
f.write(json.dumps(sentiment_result, ensure_ascii=False) + "\n")
if __name__ == "__main__":
# yapf: disable
parser = argparse.ArgumentParser()
parser.add_argument("--ext_model_path", type=str, default=None, help="The path of extraction model path that you want to load.")
parser.add_argument("--cls_model_path", type=str, default=None, help="The path of classification model path that you want to load.")
parser.add_argument("--ext_label_path", type=str, default=None, help="The path of extraction label dict.")
parser.add_argument("--cls_label_path", type=str, default=None, help="The path of classification label dict.")
parser.add_argument('--test_path', type=str, default=None, help="The path of test set that you want to predict.")
parser.add_argument('--save_path', type=str, required=True, default=None, help="The saving path of predict results.")
parser.add_argument("--batch_size", type=int, default=16, help="Batch size per GPU/CPU for training.")
parser.add_argument("--max_seq_len", type=int, default=512, help="The maximum total input sequence length after tokenization.")
args = parser.parse_args()
    # yapf: enable
# predict with ext model
ext_results = predict_ext(args.ext_model_path, args.ext_label_path, args.test_path)
print("predicting with extraction model done!")
# predict with cls model
cls_results = predict_cls(args.cls_model_path, args.cls_label_path, ext_results)
print("predicting with classification model done!")
# post_process prediction results
post_process(ext_results, cls_results)
print(f"sentiment analysis results has been saved to path: {args.save_path}")
| 40.468599 | 136 | 0.698699 |
c8c5814608e4b57fda44583e712027bad2903366
| 5,932 |
py
|
Python
|
hpOneView/resources/fc_sans/san_managers.py
|
PragadeeswaranS/oneview-python
|
3acc113b8dd30029beb7c228c3bc2bbe67d3485b
|
[
"Apache-2.0"
] | null | null | null |
hpOneView/resources/fc_sans/san_managers.py
|
PragadeeswaranS/oneview-python
|
3acc113b8dd30029beb7c228c3bc2bbe67d3485b
|
[
"Apache-2.0"
] | null | null | null |
hpOneView/resources/fc_sans/san_managers.py
|
PragadeeswaranS/oneview-python
|
3acc113b8dd30029beb7c228c3bc2bbe67d3485b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
###
# (C) Copyright [2019] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
from hpOneView.resources.resource import ResourceClient
class SanManagers(object):
"""
SAN Managers API client.
"""
URI = '/rest/fc-sans/device-managers'
PROVIDER_URI = '/rest/fc-sans/providers'
def __init__(self, con):
self._connection = con
self._client = ResourceClient(con, self.URI)
self._provider_client = ResourceClient(con, self.PROVIDER_URI)
def get_all(self, start=0, count=-1, query='', sort=''):
"""
Retrieves the list of registered SAN Managers.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items. The actual number of items in
                the response may differ from the requested count if the sum of start and count exceeds the total number
of items.
query:
A general query string to narrow the list of resources returned.
The default is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
Returns:
list: A list of SAN managers.
"""
return self._client.get_all(start=start, count=count, query=query, sort=sort)
def get(self, id_or_uri):
"""
Retrieves a single registered SAN Manager by ID or URI.
Args:
id_or_uri: Can be either the SAN Manager resource ID or URI.
Returns:
dict: The SAN Manager resource.
"""
return self._client.get(id_or_uri=id_or_uri)
def update(self, resource, id_or_uri):
"""
Updates a registered Device Manager.
Args:
resource (dict): Object to update.
id_or_uri: Can be either the Device manager ID or URI.
Returns:
dict: The device manager resource.
"""
return self._client.update(resource=resource, uri=id_or_uri)
def add(self, resource, provider_uri_or_id, timeout=-1):
"""
Adds a Device Manager under the specified provider.
Args:
resource (dict): Object to add.
provider_uri_or_id: ID or URI of provider.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
                in OneView; it just stops waiting for its completion.
Returns:
dict: Added SAN Manager.
"""
uri = self._provider_client.build_uri(provider_uri_or_id) + "/device-managers"
return self._client.create(resource=resource, uri=uri, timeout=timeout)
def get_provider_uri(self, provider_display_name):
"""
        Gets the URI for a specific provider.
Args:
provider_display_name: Display name of the provider.
Returns:
            The URI of the matching provider, or None if no provider matches.
"""
providers = self._provider_client.get_by('displayName', provider_display_name)
return providers[0]['uri'] if providers else None
def get_default_connection_info(self, provider_name):
"""
Gets default connection info for a specific provider.
Args:
provider_name: Name of the provider.
Returns:
dict: Default connection information.
"""
provider = self._provider_client.get_by_name(provider_name)
if provider:
return provider['defaultConnectionInfo']
else:
return {}
def remove(self, resource, timeout=-1):
"""
Removes a registered SAN Manager.
Args:
resource (dict): Object to delete.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: Indicates if the resource was successfully removed.
"""
return self._client.delete(resource, timeout=timeout)
def get_by_name(self, name):
"""
Gets a SAN Manager by name.
Args:
name: Name of the SAN Manager
Returns:
dict: SAN Manager.
"""
san_managers = self._client.get_all()
result = [x for x in san_managers if x['name'] == name]
return result[0] if result else None
def get_by_provider_display_name(self, provider_display_name):
"""
Gets a SAN Manager by provider display name.
Args:
            provider_display_name: Provider display name of the SAN Manager.
Returns:
dict: SAN Manager.
"""
san_managers = self._client.get_all()
result = [x for x in san_managers if x['providerDisplayName'] == provider_display_name]
return result[0] if result else None
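# Minimal usage sketch (an assumption, not part of this module: an authenticated
# `connection` object is available and the hostname below is a placeholder):
# from hpOneView.connection import connection
# con = connection('oneview.example.com')
# san_managers = SanManagers(con)
# for manager in san_managers.get_all(count=10, sort='name:ascending'):
#     print(manager['name'])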
| 32.593407 | 118 | 0.627613 |
d9db3f4df3295a1615b63080e920ff3c44fd9e0d
| 1,178 |
py
|
Python
|
UCourse/courses/migrations/0019_auto_20200607_2144.py
|
Natsu1270/UCourse
|
e8c814d91e54f5f51e4a0fa2df177ebb59544dc2
|
[
"MIT"
] | 1 |
2020-08-31T22:40:27.000Z
|
2020-08-31T22:40:27.000Z
|
UCourse/courses/migrations/0019_auto_20200607_2144.py
|
Natsu1270/UCourse
|
e8c814d91e54f5f51e4a0fa2df177ebb59544dc2
|
[
"MIT"
] | 13 |
2020-08-05T16:17:09.000Z
|
2022-03-12T00:18:42.000Z
|
UCourse/courses/migrations/0019_auto_20200607_2144.py
|
Natsu1270/UCourse
|
e8c814d91e54f5f51e4a0fa2df177ebb59544dc2
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-06-07 14:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('courses', '0018_remove_course_teacher'),
]
operations = [
migrations.CreateModel(
name='UserBuyCourse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bought_date', models.DateField(default=django.utils.timezone.now)),
('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='courses.Course')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='course',
name='user_buy',
field=models.ManyToManyField(related_name='buy_courses', through='courses.UserBuyCourse', to=settings.AUTH_USER_MODEL),
),
]
| 36.8125 | 131 | 0.65365 |
255c1d1b31382c01e6d6ca5eb40fe113ee8e9a98
| 52,483 |
py
|
Python
|
run_classifier.py
|
ICLR2020Sesame/SesameBert
|
98e4dbfa50408f0c04bd098831949af7542b70f9
|
[
"Apache-2.0"
] | 4 |
2020-07-16T23:48:38.000Z
|
2021-01-06T14:34:38.000Z
|
run_classifier.py
|
ICLR2020Sesame/SesameBert
|
98e4dbfa50408f0c04bd098831949af7542b70f9
|
[
"Apache-2.0"
] | null | null | null |
run_classifier.py
|
ICLR2020Sesame/SesameBert
|
98e4dbfa50408f0c04bd098831949af7542b70f9
|
[
"Apache-2.0"
] | 2 |
2019-10-24T08:49:49.000Z
|
2020-03-27T10:38:43.000Z
|
# coding=utf-8
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT finetuning runner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import csv
import os
import modeling
import optimization
import tokenization
import numpy as np
import tensorflow as tf
os.environ["CUDA_VISIBLE_DEVICES"]="2,3"
flags = tf.flags
FLAGS = flags.FLAGS
## Required parameters
flags.DEFINE_string(
"data_dir", None,
"The input data dir. Should contain the .tsv files (or other data files) "
"for the task.")
flags.DEFINE_string(
"bert_config_file", None,
"The config json file corresponding to the pre-trained BERT model. "
"This specifies the model architecture.")
flags.DEFINE_string("task_name", None, "The name of the task to train.")
flags.DEFINE_string("vocab_file", None,
"The vocabulary file that the BERT model was trained on.")
flags.DEFINE_string(
"output_dir", None,
"The output directory where the model checkpoints will be written.")
## Other parameters
flags.DEFINE_string(
"init_checkpoint", None,
"Initial checkpoint (usually from a pre-trained BERT model).")
flags.DEFINE_bool(
"do_lower_case", True,
"Whether to lower case the input text. Should be True for uncased "
"models and False for cased models.")
flags.DEFINE_integer(
"max_seq_length", 128,
"The maximum total input sequence length after WordPiece tokenization. "
"Sequences longer than this will be truncated, and sequences shorter "
"than this will be padded.")
flags.DEFINE_bool("do_train", False, "Whether to run training.")
flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")
flags.DEFINE_bool(
"do_predict", False,
"Whether to run the model in inference mode on the test set.")
flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")
flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")
flags.DEFINE_integer("predict_batch_size", 8, "Total batch size for predict.")
flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")
flags.DEFINE_float("num_train_epochs", 3.0,
"Total number of training epochs to perform.")
flags.DEFINE_float(
"warmup_proportion", 0.1,
"Proportion of training to perform linear learning rate warmup for. "
"E.g., 0.1 = 10% of training.")
flags.DEFINE_integer("save_checkpoints_steps", 1000,
"How often to save the model checkpoint.")
flags.DEFINE_integer("iterations_per_loop", 1000,
"How many steps to make in each estimator call.")
flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")
tf.flags.DEFINE_string(
"tpu_name", None,
"The Cloud TPU to use for training. This should be either the name "
"used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
"url.")
tf.flags.DEFINE_string(
"tpu_zone", None,
"[Optional] GCE zone where the Cloud TPU is located in. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string(
"gcp_project", None,
"[Optional] Project name for the Cloud TPU-enabled project. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")
flags.DEFINE_integer(
"num_tpu_cores", 8,
"Only used if `use_tpu` is True. Total number of TPU cores to use.")
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
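# Illustrative instantiation (values are made up, not from any data set):
# example = InputExample(guid="train-1",
#                        text_a="the movie was great",
#                        text_b=None,
#                        label="1")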
class MetricInputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, pairID, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.pairID = pairID
self.text_a = text_a
self.text_b = text_b
self.label = label
class PaddingInputExample(object):
"""Fake example so the num input examples is a multiple of the batch size.
When running eval/predict on the TPU, we need to pad the number of examples
to be a multiple of the batch size, because the TPU requires a fixed batch
size. The alternative is to drop the last batch, which is bad because it means
the entire output data won't be generated.
We use this class instead of `None` because treating `None` as padding
  batches could cause silent errors.
"""
class InputFeatures(object):
"""A single set of features of data."""
def __init__(self,
input_ids,
input_mask,
segment_ids,
label_id,
is_real_example=True):
self.input_ids = input_ids
self.input_mask = input_mask
self.segment_ids = segment_ids
self.label_id = label_id
self.is_real_example = is_real_example
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, data_dir):
"""Gets a collection of `InputExample`s for prediction."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, quotechar=None):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
lines = []
for line in reader:
lines.append(line)
return lines
class XnliProcessor(DataProcessor):
"""Processor for the XNLI data set."""
def __init__(self):
self.language = "zh"
def get_train_examples(self, data_dir):
"""See base class."""
lines = self._read_tsv(
os.path.join(data_dir, "multinli",
"multinli.train.%s.tsv" % self.language))
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "train-%d" % (i)
text_a = tokenization.convert_to_unicode(line[0])
text_b = tokenization.convert_to_unicode(line[1])
label = tokenization.convert_to_unicode(line[2])
if label == tokenization.convert_to_unicode("contradictory"):
label = tokenization.convert_to_unicode("contradiction")
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_dev_examples(self, data_dir):
"""See base class."""
lines = self._read_tsv(os.path.join(data_dir, "xnli.dev.tsv"))
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "dev-%d" % (i)
language = tokenization.convert_to_unicode(line[0])
if language != tokenization.convert_to_unicode(self.language):
continue
text_a = tokenization.convert_to_unicode(line[6])
text_b = tokenization.convert_to_unicode(line[7])
label = tokenization.convert_to_unicode(line[1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
class MnliProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")),
"dev_matched")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[8])
text_b = tokenization.convert_to_unicode(line[9])
if set_type == "test":
label = "contradiction"
else:
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class MnlimmProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev_mismatched.tsv")),
"dev_mismatched")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test_mismatched.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[8])
text_b = tokenization.convert_to_unicode(line[9])
if set_type == "test":
label = "contradiction"
else:
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class AxProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")),
"dev_matched")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "AX.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[-2])
text_b = tokenization.convert_to_unicode(line[-1])
label = "contradiction"
else:
text_a = tokenization.convert_to_unicode(line[8])
text_b = tokenization.convert_to_unicode(line[9])
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class AdvProcessor(DataProcessor):
"""Processor for the MultiNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")),
"dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "heuristics_evaluation_set.txt")), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
# return ["entailment", "non-entailment"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0]))
text_a = tokenization.convert_to_unicode(line[5])
text_b = tokenization.convert_to_unicode(line[6])
if set_type == "test":
label = "entailment"
else:
label = tokenization.convert_to_unicode(line[0])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_metric_examples(self, data_dir):
"""See base class."""
return self._create_metric_examples(
self._read_tsv(os.path.join(data_dir, "heuristics_evaluation_set.txt")), "test")
def _create_metric_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, line[0])
text_a = line[5]
text_b = line[6]
pairID = line[7]
if set_type == "test":
label = "entailment"
else:
label = line[0]
examples.append(
          MetricInputExample(guid=guid, pairID=pairID, text_a=text_a, text_b=text_b, label=label))
return examples
class MrpcProcessor(DataProcessor):
"""Processor for the MRPC data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
text_a = tokenization.convert_to_unicode(line[3])
text_b = tokenization.convert_to_unicode(line[4])
if set_type == "test":
label = "0"
else:
label = tokenization.convert_to_unicode(line[0])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class ColaProcessor(DataProcessor):
"""Processor for the CoLA data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
tf.logging.info("path {}".format(os.path.join(data_dir, "train.tsv")))
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
# Only the test set has a header
if set_type == "test" and i == 0:
continue
guid = "%s-%s" % (set_type, i)
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
label = "0"
else:
text_a = tokenization.convert_to_unicode(line[3])
label = tokenization.convert_to_unicode(line[1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
return examples
class Sst2Processor(DataProcessor):
"""Processor for the SST-2 data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
label = "0"
else:
text_a = tokenization.convert_to_unicode(line[0])
label = tokenization.convert_to_unicode(line[1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
return examples
class QqpProcessor(DataProcessor):
"""Processor for the QQP data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
try:
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = "0"
else:
text_a = tokenization.convert_to_unicode(line[3])
text_b = tokenization.convert_to_unicode(line[4])
label = tokenization.convert_to_unicode(line[5])
except IndexError:
continue
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class QnliProcessor(DataProcessor):
"""Processor for the QNLI data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")),
"dev_matched")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["entailment", "not_entailment"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, 1)
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = "entailment"
else:
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class RteProcessor(DataProcessor):
"""Processor for the RTE data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["entailment", "not_entailment"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = "entailment"
else:
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class WnliProcessor(DataProcessor):
"""Processor for the Qnli data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
# Only the test set has a header
if i == 0:
continue
guid = "%s-%s" % (set_type, i)
if set_type == "test":
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = "0"
else:
text_a = tokenization.convert_to_unicode(line[1])
text_b = tokenization.convert_to_unicode(line[2])
label = tokenization.convert_to_unicode(line[-1])
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def convert_single_example(ex_index, example, label_list, max_seq_length,
tokenizer):
"""Converts a single `InputExample` into a single `InputFeatures`."""
if isinstance(example, PaddingInputExample):
return InputFeatures(
input_ids=[0] * max_seq_length,
input_mask=[0] * max_seq_length,
segment_ids=[0] * max_seq_length,
label_id=0,
is_real_example=False)
label_map = {}
for (i, label) in enumerate(label_list):
label_map[label] = i
tokens_a = tokenizer.tokenize(example.text_a)
tokens_b = None
if example.text_b:
tokens_b = tokenizer.tokenize(example.text_b)
if tokens_b:
# Modifies `tokens_a` and `tokens_b` in place so that the total
# length is less than the specified length.
# Account for [CLS], [SEP], [SEP] with "- 3"
_truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
else:
# Account for [CLS] and [SEP] with "- 2"
if len(tokens_a) > max_seq_length - 2:
tokens_a = tokens_a[0:(max_seq_length - 2)]
# The convention in BERT is:
# (a) For sequence pairs:
# tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
# type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
# (b) For single sequences:
# tokens: [CLS] the dog is hairy . [SEP]
# type_ids: 0 0 0 0 0 0 0
#
# Where "type_ids" are used to indicate whether this is the first
# sequence or the second sequence. The embedding vectors for `type=0` and
# `type=1` were learned during pre-training and are added to the wordpiece
# embedding vector (and position vector). This is not *strictly* necessary
# since the [SEP] token unambiguously separates the sequences, but it makes
# it easier for the model to learn the concept of sequences.
#
# For classification tasks, the first vector (corresponding to [CLS]) is
# used as the "sentence vector". Note that this only makes sense because
# the entire model is fine-tuned.
tokens = []
segment_ids = []
tokens.append("[CLS]")
segment_ids.append(0)
for token in tokens_a:
tokens.append(token)
segment_ids.append(0)
tokens.append("[SEP]")
segment_ids.append(0)
if tokens_b:
for token in tokens_b:
tokens.append(token)
segment_ids.append(1)
tokens.append("[SEP]")
segment_ids.append(1)
input_ids = tokenizer.convert_tokens_to_ids(tokens)
# The mask has 1 for real tokens and 0 for padding tokens. Only real
# tokens are attended to.
input_mask = [1] * len(input_ids)
# Zero-pad up to the sequence length.
while len(input_ids) < max_seq_length:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
assert len(input_ids) == max_seq_length
assert len(input_mask) == max_seq_length
assert len(segment_ids) == max_seq_length
label_id = label_map[example.label]
# if ex_index < 5:
# tf.logging.info("*** Example ***")
# tf.logging.info("guid: %s" % (example.guid))
# tf.logging.info("tokens: %s" % " ".join(
# [tokenization.printable_text(x) for x in tokens]))
# tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
# tf.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask]))
# tf.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids]))
# tf.logging.info("label: %s (id = %d)" % (example.label, label_id))
feature = InputFeatures(
input_ids=input_ids,
input_mask=input_mask,
segment_ids=segment_ids,
label_id=label_id,
is_real_example=True)
return feature
def file_based_convert_examples_to_features(
examples, label_list, max_seq_length, tokenizer, output_file):
"""Convert a set of `InputExample`s to a TFRecord file."""
writer = tf.python_io.TFRecordWriter(output_file)
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
tf.logging.info("Writing example %d of %d" % (ex_index, len(examples)))
feature = convert_single_example(ex_index, example, label_list,
max_seq_length, tokenizer)
def create_int_feature(values):
f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
return f
features = collections.OrderedDict()
features["input_ids"] = create_int_feature(feature.input_ids)
features["input_mask"] = create_int_feature(feature.input_mask)
features["segment_ids"] = create_int_feature(feature.segment_ids)
features["label_ids"] = create_int_feature([feature.label_id])
features["is_real_example"] = create_int_feature(
[int(feature.is_real_example)])
tf_example = tf.train.Example(features=tf.train.Features(feature=features))
writer.write(tf_example.SerializeToString())
writer.close()
def file_based_input_fn_builder(input_file, seq_length, is_training,
drop_remainder):
"""Creates an `input_fn` closure to be passed to TPUEstimator."""
name_to_features = {
"input_ids": tf.FixedLenFeature([seq_length], tf.int64),
"input_mask": tf.FixedLenFeature([seq_length], tf.int64),
"segment_ids": tf.FixedLenFeature([seq_length], tf.int64),
"label_ids": tf.FixedLenFeature([], tf.int64),
"is_real_example": tf.FixedLenFeature([], tf.int64),
}
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(example.keys()):
t = example[name]
if t.dtype == tf.int64:
t = tf.to_int32(t)
example[name] = t
return example
def input_fn(params):
"""The actual input function."""
batch_size = params["batch_size"]
# For training, we want a lot of parallel reading and shuffling.
# For eval, we want no shuffling and parallel reading doesn't matter.
d = tf.data.TFRecordDataset(input_file)
if is_training:
d = d.repeat()
d = d.shuffle(buffer_size=100)
d = d.apply(
tf.contrib.data.map_and_batch(
lambda record: _decode_record(record, name_to_features),
batch_size=batch_size,
drop_remainder=drop_remainder))
return d
return input_fn
def _truncate_seq_pair(tokens_a, tokens_b, max_length):
"""Truncates a sequence pair in place to the maximum length."""
# This is a simple heuristic which will always truncate the longer sequence
# one token at a time. This makes more sense than truncating an equal percent
# of tokens from each, since if one sequence is very short then each token
# that's truncated likely contains more information than a longer sequence.
while True:
total_length = len(tokens_a) + len(tokens_b)
if total_length <= max_length:
break
if len(tokens_a) > len(tokens_b):
tokens_a.pop()
else:
tokens_b.pop()
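# Worked illustration (assumed lengths): with max_length=8, tokens_a of length 6
# and tokens_b of length 5, the loop pops from tokens_a (6->5), then tokens_b
# (5->4), then tokens_a again (5->4), stopping at a total length of 8 -- the
# longer side is trimmed first and the final lengths end up balanced (4 and 4).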
def create_model(bert_config, is_training, input_ids, input_mask, segment_ids,
labels, num_labels, use_one_hot_embeddings):
"""Creates a classification model."""
model = modeling.BertModel(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
# In the demo, we are doing a simple classification task on the entire
# segment.
#
# If you want to use the token-level output, use model.get_sequence_output()
# instead.
output_layer = model.get_pooled_output()
hidden_size = output_layer.shape[-1].value
output_weights = tf.get_variable(
"output_weights", [num_labels, hidden_size],
initializer=tf.truncated_normal_initializer(stddev=0.02))
output_bias = tf.get_variable(
"output_bias", [num_labels], initializer=tf.zeros_initializer())
with tf.variable_scope("loss"):
if is_training:
# I.e., 0.1 dropout
output_layer = tf.nn.dropout(output_layer, keep_prob=0.9)
logits = tf.matmul(output_layer, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
probabilities = tf.nn.softmax(logits, axis=-1)
log_probs = tf.nn.log_softmax(logits, axis=-1)
one_hot_labels = tf.one_hot(labels, depth=num_labels, dtype=tf.float32)
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, per_example_loss, logits, probabilities)
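# Shape sketch (B = batch size, L = max_seq_length, H = bert_config.hidden_size):
# model.get_pooled_output() has shape [B, H] (the transformed [CLS] vector used
# above), while model.get_sequence_output() has shape [B, L, H] and would be the
# starting point for token-level tasks instead.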
def model_fn_builder(bert_config, num_labels, init_checkpoint, learning_rate,
num_train_steps, num_warmup_steps, use_tpu,
use_one_hot_embeddings):
"""Returns `model_fn` closure for TPUEstimator."""
def model_fn(features, labels, mode, params): # pylint: disable=unused-argument
"""The `model_fn` for TPUEstimator."""
tf.logging.info("*** Features ***")
for name in sorted(features.keys()):
tf.logging.info(" name = %s, shape = %s" % (name, features[name].shape))
input_ids = features["input_ids"]
input_mask = features["input_mask"]
segment_ids = features["segment_ids"]
label_ids = features["label_ids"]
is_real_example = None
if "is_real_example" in features:
is_real_example = tf.cast(features["is_real_example"], dtype=tf.float32)
else:
is_real_example = tf.ones(tf.shape(label_ids), dtype=tf.float32)
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
(total_loss, per_example_loss, logits, probabilities) = create_model(
bert_config, is_training, input_ids, input_mask, segment_ids, label_ids,
num_labels, use_one_hot_embeddings)
tvars = tf.trainable_variables()
initialized_variable_names = {}
scaffold_fn = None
if init_checkpoint:
(assignment_map, initialized_variable_names
) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
if use_tpu:
def tpu_scaffold():
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
return tf.train.Scaffold()
scaffold_fn = tpu_scaffold
else:
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
tf.logging.info("**** Trainable Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape,
init_string)
output_spec = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = optimization.create_optimizer(
total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu)
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
loss=total_loss,
train_op=train_op,
scaffold_fn=scaffold_fn)
elif mode == tf.estimator.ModeKeys.EVAL:
def metric_fn(per_example_loss, label_ids, logits, is_real_example):
predictions = tf.argmax(logits, axis=-1, output_type=tf.int32)
accuracy = tf.metrics.accuracy(
labels=label_ids, predictions=predictions, weights=is_real_example)
loss = tf.metrics.mean(values=per_example_loss, weights=is_real_example)
return {
"eval_accuracy": accuracy,
"eval_loss": loss
}
eval_metrics = (metric_fn,
[per_example_loss, label_ids, logits, is_real_example])
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
loss=total_loss,
eval_metrics=eval_metrics,
scaffold_fn=scaffold_fn)
else:
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
predictions={"probabilities": probabilities},
scaffold_fn=scaffold_fn)
return output_spec
return model_fn
# This function is not used by this file but is still used by the Colab and
# people who depend on it.
def input_fn_builder(features, seq_length, is_training, drop_remainder):
"""Creates an `input_fn` closure to be passed to TPUEstimator."""
all_input_ids = []
all_input_mask = []
all_segment_ids = []
all_label_ids = []
for feature in features:
all_input_ids.append(feature.input_ids)
all_input_mask.append(feature.input_mask)
all_segment_ids.append(feature.segment_ids)
all_label_ids.append(feature.label_id)
def input_fn(params):
"""The actual input function."""
batch_size = params["batch_size"]
num_examples = len(features)
# This is for demo purposes and does NOT scale to large data sets. We do
# not use Dataset.from_generator() because that uses tf.py_func which is
# not TPU compatible. The right way to load data is with TFRecordReader.
d = tf.data.Dataset.from_tensor_slices({
"input_ids":
tf.constant(
all_input_ids, shape=[num_examples, seq_length],
dtype=tf.int32),
"input_mask":
tf.constant(
all_input_mask,
shape=[num_examples, seq_length],
dtype=tf.int32),
"segment_ids":
tf.constant(
all_segment_ids,
shape=[num_examples, seq_length],
dtype=tf.int32),
"label_ids":
tf.constant(all_label_ids, shape=[num_examples], dtype=tf.int32),
})
if is_training:
d = d.repeat()
d = d.shuffle(buffer_size=100)
d = d.batch(batch_size=batch_size, drop_remainder=drop_remainder)
return d
return input_fn
# This function is not used by this file but is still used by the Colab and
# people who depend on it.
def convert_examples_to_features(examples, label_list, max_seq_length,
tokenizer):
"""Convert a set of `InputExample`s to a list of `InputFeatures`."""
features = []
for (ex_index, example) in enumerate(examples):
if ex_index % 10000 == 0:
tf.logging.info("Writing example %d of %d" % (ex_index, len(examples)))
feature = convert_single_example(ex_index, example, label_list,
max_seq_length, tokenizer)
features.append(feature)
return features
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
processors = {
"xnli": XnliProcessor,
"mnli-m": MnliProcessor,
"mnli-mm": MnlimmProcessor,
"ax": AxProcessor,
"adv": AdvProcessor,
"mrpc": MrpcProcessor,
"cola": ColaProcessor,
"sst-2": Sst2Processor,
"qqp": QqpProcessor,
"qnli": QnliProcessor,
"rte": RteProcessor,
"wnli": WnliProcessor,
}
tokenization.validate_case_matches_checkpoint(FLAGS.do_lower_case,
FLAGS.init_checkpoint)
if not FLAGS.do_train and not FLAGS.do_eval and not FLAGS.do_predict:
raise ValueError(
"At least one of `do_train`, `do_eval` or `do_predict' must be True.")
bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
  bert_config.gaus_batch_size = FLAGS.train_batch_size
  FLAGS.eval_batch_size = FLAGS.train_batch_size
  FLAGS.predict_batch_size = FLAGS.train_batch_size
# if FLAGS.do_train:
# bert_config.gaus_batch_size=FLAGS.train_batch_size
# elif FLAGS.do_eval:
# bert_config.gaus_batch_size=FLAGS.eval_batch_size
# elif FLAGS.do_predict:
# bert_config.gaus_batch_size=FLAGS.predict_batch_size
# else:
# pass
tf.logging.info(" bert_config = {}".format(bert_config))
if FLAGS.max_seq_length > bert_config.max_position_embeddings:
raise ValueError(
"Cannot use sequence length %d because the BERT model "
"was only trained up to sequence length %d" %
(FLAGS.max_seq_length, bert_config.max_position_embeddings))
tf.gfile.MakeDirs(FLAGS.output_dir)
task_name = FLAGS.task_name.lower()
if task_name not in processors:
raise ValueError("Task not found: %s" % (task_name))
processor = processors[task_name]()
label_list = processor.get_labels()
label_map = {}
for (i, label) in enumerate(label_list):
label_map[i] = label
tf.logging.info(" label_map {}".format(label_map))
tokenizer = tokenization.FullTokenizer(
vocab_file=FLAGS.vocab_file, do_lower_case=FLAGS.do_lower_case)
tpu_cluster_resolver = None
if FLAGS.use_tpu and FLAGS.tpu_name:
tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
is_per_host = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
  gpu_config = tf.ConfigProto()
  gpu_config.gpu_options.allow_growth = True
run_config = tf.contrib.tpu.RunConfig(
cluster=tpu_cluster_resolver,
master=FLAGS.master,
model_dir=FLAGS.output_dir,
save_checkpoints_steps=FLAGS.save_checkpoints_steps,
session_config=gpu_config,
tpu_config=tf.contrib.tpu.TPUConfig(
iterations_per_loop=FLAGS.iterations_per_loop,
num_shards=FLAGS.num_tpu_cores,
per_host_input_for_training=is_per_host))
train_examples = None
num_train_steps = None
num_warmup_steps = None
if FLAGS.do_train:
train_examples = processor.get_train_examples(FLAGS.data_dir)
num_train_steps = int(
len(train_examples) / FLAGS.train_batch_size * FLAGS.num_train_epochs)
num_warmup_steps = int(num_train_steps * FLAGS.warmup_proportion)
model_fn = model_fn_builder(
bert_config=bert_config,
num_labels=len(label_list),
init_checkpoint=FLAGS.init_checkpoint,
learning_rate=FLAGS.learning_rate,
num_train_steps=num_train_steps,
num_warmup_steps=num_warmup_steps,
use_tpu=FLAGS.use_tpu,
use_one_hot_embeddings=FLAGS.use_tpu)
# If TPU is not available, this will fall back to normal Estimator on CPU
# or GPU.
estimator = tf.contrib.tpu.TPUEstimator(
use_tpu=FLAGS.use_tpu,
model_fn=model_fn,
config=run_config,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size,
predict_batch_size=FLAGS.predict_batch_size)
if FLAGS.do_train:
train_file = os.path.join(FLAGS.output_dir, "train.tf_record")
file_based_convert_examples_to_features(
train_examples, label_list, FLAGS.max_seq_length, tokenizer, train_file)
tf.logging.info("***** Running training *****")
tf.logging.info(" Num examples = %d", len(train_examples))
tf.logging.info(" Batch size = %d", FLAGS.train_batch_size)
tf.logging.info(" Num steps = %d", num_train_steps)
train_input_fn = file_based_input_fn_builder(
input_file=train_file,
seq_length=FLAGS.max_seq_length,
is_training=True,
drop_remainder=True)
estimator.train(input_fn=train_input_fn, max_steps=num_train_steps)
if FLAGS.do_eval:
eval_examples = processor.get_dev_examples(FLAGS.data_dir)
num_actual_eval_examples = len(eval_examples)
if FLAGS.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on. These do NOT count towards the metric (all tf.metrics
# support a per-instance weight, and these get a weight of 0.0).
while len(eval_examples) % FLAGS.eval_batch_size != 0:
eval_examples.append(PaddingInputExample())
eval_file = os.path.join(FLAGS.output_dir, "eval.tf_record")
file_based_convert_examples_to_features(
eval_examples, label_list, FLAGS.max_seq_length, tokenizer, eval_file)
tf.logging.info("***** Running evaluation *****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(eval_examples), num_actual_eval_examples,
len(eval_examples) - num_actual_eval_examples)
tf.logging.info(" Batch size = %d", FLAGS.eval_batch_size)
# This tells the estimator to run through the entire set.
eval_steps = None
# However, if running eval on the TPU, you will need to specify the
# number of steps.
if FLAGS.use_tpu:
assert len(eval_examples) % FLAGS.eval_batch_size == 0
eval_steps = int(len(eval_examples) // FLAGS.eval_batch_size)
eval_drop_remainder = True if FLAGS.use_tpu else False
eval_input_fn = file_based_input_fn_builder(
input_file=eval_file,
seq_length=FLAGS.max_seq_length,
is_training=False,
drop_remainder=eval_drop_remainder)
result = estimator.evaluate(input_fn=eval_input_fn, steps=eval_steps)
output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt")
with tf.gfile.GFile(output_eval_file, "w") as writer:
tf.logging.info("***** Eval results *****")
for key in sorted(result.keys()):
tf.logging.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
if FLAGS.do_predict:
predict_examples = processor.get_test_examples(FLAGS.data_dir)
num_actual_predict_examples = len(predict_examples)
if FLAGS.use_tpu:
# TPU requires a fixed batch size for all batches, therefore the number
# of examples must be a multiple of the batch size, or else examples
# will get dropped. So we pad with fake examples which are ignored
# later on.
while len(predict_examples) % FLAGS.predict_batch_size != 0:
predict_examples.append(PaddingInputExample())
predict_file = os.path.join(FLAGS.output_dir, "predict.tf_record")
file_based_convert_examples_to_features(predict_examples, label_list,
FLAGS.max_seq_length, tokenizer,
predict_file)
tf.logging.info("***** Running prediction*****")
tf.logging.info(" Num examples = %d (%d actual, %d padding)",
len(predict_examples), num_actual_predict_examples,
len(predict_examples) - num_actual_predict_examples)
tf.logging.info(" Batch size = %d", FLAGS.predict_batch_size)
predict_drop_remainder = True if FLAGS.use_tpu else False
predict_input_fn = file_based_input_fn_builder(
input_file=predict_file,
seq_length=FLAGS.max_seq_length,
is_training=False,
drop_remainder=predict_drop_remainder)
result = estimator.predict(input_fn=predict_input_fn)
# import pickle
# with open(os.path.join(FLAGS.output_dir, '{}.pickle'.format(FLAGS.task_name.upper())), 'wb') as handle:
# pickle.dump(label_map, handle)
output_predict_file = os.path.join(FLAGS.output_dir, "{}.tsv".format(FLAGS.task_name.upper()))
with tf.gfile.GFile(output_predict_file, "w") as writer:
tf.logging.info("***** Predict results *****")
tf.logging.info("***** {} *****".format(task_name))
if task_name != "adv":
writer.write("index\tprediction\n")
num_written_lines = 0
for (i, prediction) in enumerate(result):
probabilities = prediction["probabilities"]
tf.logging.info("***** probabilities {} *****".format(probabilities))
if i >= num_actual_predict_examples:
break
output_line = "\t".join(
(str(i),
str(label_map[np.argmax(probabilities)]))) + "\n"
writer.write(output_line)
num_written_lines += 1
else:
metric_examples = processor.get_metric_examples(FLAGS.data_dir)
writer.write("pairID,gold_label\n")
num_written_lines = 0
for (i, prediction) in enumerate(result):
probabilities = prediction["probabilities"]
tf.logging.info("***** probabilities {} *****".format(probabilities))
if i >= num_actual_predict_examples:
break
arg_key = label_map[np.argmax(probabilities)]
recalibration_label = "entailment"
if arg_key != "entailment":
recalibration_label = "non-entailment"
output_line = ",".join(
(str(metric_examples[i].pairID),
str(recalibration_label))) + "\n"
writer.write(output_line)
num_written_lines += 1
assert num_written_lines == num_actual_predict_examples
tf.logging.info("***** Done *****")
# output_predict_file = os.path.join(FLAGS.output_dir, "{}.tsv".format(FLAGS.task_name))
# with tf.gfile.GFile(output_predict_file, "w") as writer:
# writer.write("index\tprediction\n")
# num_written_lines = 0
# tf.logging.info("***** Predict results *****")
# for (i, prediction) in enumerate(result):
# tf.logging.info("***** probabilities {} *****".format(probabilities))
# probabilities = prediction["probabilities"]
# tf.logging.info("probabilities {}".format(probabilities))
# if i >= num_actual_predict_examples:
# break
# output_line = "\t".join(
# (str(i),
# str(label_map[np.argmax(probabilities)]))) + "\n"
# writer.write(output_line)
# num_written_lines += 1
# tf.logging.info("***** num_written_lines {} *****".format(num_written_lines))
# tf.logging.info("***** num_actual_predict_examples {} *****".format(num_actual_predict_examples))
# assert num_written_lines == num_actual_predict_examples
if __name__ == "__main__":
flags.mark_flag_as_required("data_dir")
flags.mark_flag_as_required("task_name")
flags.mark_flag_as_required("vocab_file")
flags.mark_flag_as_required("bert_config_file")
flags.mark_flag_as_required("output_dir")
tf.app.run()
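# Illustrative invocation (a sketch; directories are placeholders and GLUE_DIR /
# BERT_BASE_DIR are assumed environment variables, not defined in this file):
# python run_classifier.py \
#   --task_name=mrpc --do_train=true --do_eval=true \
#   --data_dir=$GLUE_DIR/MRPC \
#   --vocab_file=$BERT_BASE_DIR/vocab.txt \
#   --bert_config_file=$BERT_BASE_DIR/bert_config.json \
#   --init_checkpoint=$BERT_BASE_DIR/bert_model.ckpt \
#   --max_seq_length=128 --train_batch_size=32 --learning_rate=2e-5 \
#   --num_train_epochs=3.0 --output_dir=/tmp/mrpc_output/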
| 36.471855 | 110 | 0.649544 |
711133c19c1ba8621676589e2a9434ab57a648ef
| 6,141 |
py
|
Python
|
fem/gui/vtk_widget/groups_toolbar/_groups_ui.py
|
mjredmond/FEMApp
|
dd8cc53acf80d0a1bb83ce9c89bcfd51e85c6be8
|
[
"MIT"
] | 1 |
2019-08-03T21:40:26.000Z
|
2019-08-03T21:40:26.000Z
|
fem/gui/vtk_widget/groups_toolbar/_groups_ui.py
|
mjredmond/FEMApp
|
dd8cc53acf80d0a1bb83ce9c89bcfd51e85c6be8
|
[
"MIT"
] | null | null | null |
fem/gui/vtk_widget/groups_toolbar/_groups_ui.py
|
mjredmond/FEMApp
|
dd8cc53acf80d0a1bb83ce9c89bcfd51e85c6be8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'P:\redmond\mrFleet\gui\vtk_widget\groups\_groups_ui.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from qtpy import QtCore, QtGui, QtWidgets
class Ui_DockWidget(object):
def setupUi(self, DockWidget):
DockWidget.setObjectName("DockWidget")
DockWidget.resize(653, 778)
self.dockWidgetContents = QtWidgets.QWidget()
self.dockWidgetContents.setObjectName("dockWidgetContents")
self.gridLayout_3 = QtWidgets.QGridLayout(self.dockWidgetContents)
# self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
self.gridLayout_3.setObjectName("gridLayout_3")
self.splitter_2 = QtWidgets.QSplitter(self.dockWidgetContents)
self.splitter_2.setOrientation(QtCore.Qt.Horizontal)
self.splitter_2.setObjectName("splitter_2")
self.splitter = QtWidgets.QSplitter(self.splitter_2)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.widget = QtWidgets.QWidget(self.splitter)
self.widget.setObjectName("widget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.widget)
# self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
spacerItem = QtWidgets.QSpacerItem(18, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem, 2, 0, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(18, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 2, 2, 1, 1)
self.tableView_groups = QtWidgets.QTableView(self.widget)
self.tableView_groups.setObjectName("tableView_groups")
self.gridLayout_2.addWidget(self.tableView_groups, 1, 0, 1, 3)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.pushButton_add_group = QtWidgets.QPushButton(self.widget)
self.pushButton_add_group.setObjectName("pushButton_add_group")
self.horizontalLayout.addWidget(self.pushButton_add_group)
self.pushButton_remove_group = QtWidgets.QPushButton(self.widget)
self.pushButton_remove_group.setObjectName("pushButton_remove_group")
self.horizontalLayout.addWidget(self.pushButton_remove_group)
self.gridLayout_2.addLayout(self.horizontalLayout, 2, 1, 1, 1)
self.label_3 = QtWidgets.QLabel(self.widget)
self.label_3.setObjectName("label_3")
self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 2)
self.frame = QtWidgets.QFrame(self.splitter)
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.widget1 = QtWidgets.QWidget(self.splitter_2)
self.widget1.setObjectName("widget1")
self.gridLayout = QtWidgets.QGridLayout(self.widget1)
# self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem2, 4, 2, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem3, 4, 0, 1, 1)
self.plainTextEdit_members = QtWidgets.QPlainTextEdit(self.widget1)
self.plainTextEdit_members.setObjectName("plainTextEdit_members")
self.gridLayout.addWidget(self.plainTextEdit_members, 1, 0, 1, 3)
self.label_2 = QtWidgets.QLabel(self.widget1)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 2, 0, 1, 2)
self.lineEdit_selection = QtWidgets.QLineEdit(self.widget1)
self.lineEdit_selection.setObjectName("lineEdit_selection")
self.gridLayout.addWidget(self.lineEdit_selection, 3, 0, 1, 3)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.pushButton_add_member = QtWidgets.QPushButton(self.widget1)
self.pushButton_add_member.setObjectName("pushButton_add_member")
self.horizontalLayout_2.addWidget(self.pushButton_add_member)
self.pushButton_remove_member = QtWidgets.QPushButton(self.widget1)
self.pushButton_remove_member.setObjectName("pushButton_remove_member")
self.horizontalLayout_2.addWidget(self.pushButton_remove_member)
self.gridLayout.addLayout(self.horizontalLayout_2, 4, 1, 1, 1)
self.label = QtWidgets.QLabel(self.widget1)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 2)
self.gridLayout_3.addWidget(self.splitter_2, 0, 0, 1, 1)
self.tableView_groups.raise_()
self.label_3.raise_()
self.label_2.raise_()
self.lineEdit_selection.raise_()
self.label.raise_()
self.plainTextEdit_members.raise_()
self.label.raise_()
self.label_3.raise_()
self.frame.raise_()
DockWidget.setWidget(self.dockWidgetContents)
self.retranslateUi(DockWidget)
QtCore.QMetaObject.connectSlotsByName(DockWidget)
def retranslateUi(self, DockWidget):
_translate = QtCore.QCoreApplication.translate
DockWidget.setWindowTitle(_translate("DockWidget", "DockWidget"))
self.pushButton_add_group.setText(_translate("DockWidget", "Add"))
self.pushButton_remove_group.setText(_translate("DockWidget", "Remove"))
self.label_3.setText(_translate("DockWidget", "Groups"))
self.label_2.setText(_translate("DockWidget", "Member List to Add/Remove"))
self.pushButton_add_member.setText(_translate("DockWidget", "Add"))
self.pushButton_remove_member.setText(_translate("DockWidget", "Remove"))
self.label.setText(_translate("DockWidget", "Member List"))
| 55.827273 | 115 | 0.722684 |
63ac50c5ea9aaa9e69275f1c34170cb0f5a9ad9e
| 2,759 |
py
|
Python
|
src/parser.py
|
lime-lang/lime
|
18afcba5bd8aec612670b8ad5a8a995bdfdf3978
|
[
"0BSD"
] | 3 |
2020-04-15T05:51:24.000Z
|
2020-04-15T19:22:50.000Z
|
src/parser.py
|
lime-lang/lime
|
18afcba5bd8aec612670b8ad5a8a995bdfdf3978
|
[
"0BSD"
] | null | null | null |
src/parser.py
|
lime-lang/lime
|
18afcba5bd8aec612670b8ad5a8a995bdfdf3978
|
[
"0BSD"
] | null | null | null |
from error import RuntimeError
import fractions
class AST:
pass
class BinaryOperation(AST):
def __init__(self, left, operation, right):
self.left = left
self.token = operation
self.operation = operation
self.right = right
class Equality(AST):
def __init__(self, left, operation, right):
self.left = left
self.token = operation
self.operation = operation
self.right = right
class Number(AST):
def __init__(self, token):
self.token = token
self.value = token.value
class Variable(AST):
def __init__(self, token):
self.token = token
self.value = token.value
class Parser:
def __init__(self, lexer):
self.lexer = lexer
self.pos = 0
self.current = self.lexer[self.pos]
def error(self):
raise RuntimeError(f"Invalid syntax: '{self.current}'")
def compare(self, token_type):
if self.current.type == token_type:
self.pos += 1
self.current = self.lexer[self.pos]
else:
self.error()
def factor(self):
token = self.current
if token.type == "NUMBER":
self.compare("NUMBER")
return Number(token)
elif token.type == "VAR":
self.compare("VAR")
return Variable(token)
elif token.type == "LPAREN":
self.compare("LPAREN")
node = self.expr()
self.compare("RPAREN")
            return node
        else:
            self.error()
def exp(self):
node = self.factor()
while self.current.type == "EXP":
token = self.current
self.compare(token.type)
node = BinaryOperation(node, token, self.exp())
return node
def term(self):
node = self.exp()
while self.current.type in ("MUL", "DIV"):
token = self.current
self.compare(token.type)
node = BinaryOperation(node, token, self.exp())
return node
def expr(self):
node = self.term()
while self.current.type in ("PLUS", "MINUS"):
token = self.current
self.compare(token.type)
node = BinaryOperation(node, token, self.term())
return node
def equality(self):
node = self.expr()
while self.current.type in ("EQ"):
token = self.current
self.compare(token.type)
node = Equality(node, token, self.expr())
return node
def parse(self):
ast = []
while self.pos < len(self.lexer):
            self.current = self.lexer[self.pos]
ast.append(self.equality())
self.pos += 1
return ast
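# --- Hedged usage sketch (not part of the original module) ---
# The grammar above is roughly: equality -> expr (EQ expr)*, expr -> term ((PLUS|MINUS) term)*,
# term -> exp ((MUL|DIV) exp)*, exp -> factor (EXP exp)*, factor -> NUMBER | VAR | LPAREN expr RPAREN.
# The demo assumes the lexer yields objects with .type/.value attributes and ends the stream with
# an inert marker token (here "EOF") so compare() never indexes past the end; the Token namedtuple
# and the "EOF" marker are illustrative assumptions, not the repo's real lexer API.
if __name__ == "__main__":
    from collections import namedtuple
    Token = namedtuple("Token", ["type", "value"])
    tokens = [Token("NUMBER", 1), Token("PLUS", "+"), Token("NUMBER", 2), Token("EOF", None)]
    tree = Parser(tokens).parse()
    node = tree[0]
    print(type(node).__name__, node.operation.value)  # -> BinaryOperation +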
| 21.896825 | 63 | 0.543675 |
977557b8bbec7c7e119a349c5b76aea2b828bc2a
| 492 |
py
|
Python
|
editdns/urls.py
|
jrlevine/editdns
|
58a66b8e90b1365593e3a91e59e5a556bff55288
|
[
"Unlicense"
] | null | null | null |
editdns/urls.py
|
jrlevine/editdns
|
58a66b8e90b1365593e3a91e59e5a556bff55288
|
[
"Unlicense"
] | null | null | null |
editdns/urls.py
|
jrlevine/editdns
|
58a66b8e90b1365593e3a91e59e5a556bff55288
|
[
"Unlicense"
] | null | null | null |
"""editdns URL Configuration
"""
from django.conf.urls import url, include
from django.contrib import admin
from editapp.views import indexview
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('^', include('django.contrib.auth.urls')), # various login and logout URLs
    url(r'^edit/', include('editapp.urls'), name='edit'),   # actual stuff in the edit app
url(r'^edit$', indexview), # default to index page
url(r'^$', indexview) # start in DNS app
]
| 32.8 | 83 | 0.648374 |
cb9a9f938d04eef36e03407a81078a1c33754d05
| 1,025 |
py
|
Python
|
flask01/app.py
|
guoziyuan/lflask
|
54f7a084b933a9d948086e7e2ec00081fb52e5f2
|
[
"Apache-2.0"
] | null | null | null |
flask01/app.py
|
guoziyuan/lflask
|
54f7a084b933a9d948086e7e2ec00081fb52e5f2
|
[
"Apache-2.0"
] | null | null | null |
flask01/app.py
|
guoziyuan/lflask
|
54f7a084b933a9d948086e7e2ec00081fb52e5f2
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask
from markupsafe import escape
from flask import url_for
from flask import request
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello World!'
# By default a route only accepts GET requests; other HTTP methods can be enabled
# via the methods argument of the route decorator
@app.route('/method', methods=['GET', 'POST'])
def method():
if request.method == 'GET':
return "get method"
else:
return 'post method!'
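# Hedged illustration (not in the original file): exercising the route above with Flask's
# built-in test client; both lines are assumed usage, not part of the app itself.
#   app.test_client().get('/method').data   -> b'get method'
#   app.test_client().post('/method').data  -> b'post method!'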
@app.route('/test')
def test():
return 'this is test'
# <value> in the rule passes a URL parameter to the view; the default converter is string
@app.route('/test/<username>')
def hello_name(username):
return 'hello %s' % escape(username)
# <value> captures a URL parameter (string by default); other converters can be
# specified: int, float, path, uuid
# Note: there must be no space between the converter name and the parameter name
"""
Converter types:
string : accepts any text without a slash
int    : accepts positive integers
float  : accepts positive floating point numbers
path   : accepts text that may contain slashes
uuid   : accepts UUID strings
"""
@app.route('/test/<int:id>')
def get_id(id):
return 'id is %s' % id
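# Hedged illustration (not in the original file): a route using the path converter
# described above; the '/files/<path:subpath>' rule and view name are assumed examples.
@app.route('/files/<path:subpath>')
def show_subpath(subpath):
    # the path converter accepts strings that may contain slashes
    return 'Subpath: %s' % escape(subpath)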
if __name__ == '__main__':
app.run()
with app.test_request_context():
    print(url_for('hello_world'))
    print(url_for('test'))
    print(url_for('hello_name', username='liming'))
    print(url_for('get_id', id=100))
| 17.083333 | 46 | 0.665366 |
6babd5963cc17fbc010a32c8e66dbdf3cfb32903
| 29,316 |
py
|
Python
|
synapse/state/__init__.py
|
ankur12-1610/synapse
|
e2e1d90a5e4030616a3de242cde26c0cfff4a6b5
|
[
"Apache-2.0"
] | null | null | null |
synapse/state/__init__.py
|
ankur12-1610/synapse
|
e2e1d90a5e4030616a3de242cde26c0cfff4a6b5
|
[
"Apache-2.0"
] | 2 |
2022-03-01T08:22:45.000Z
|
2022-03-11T08:13:55.000Z
|
synapse/state/__init__.py
|
ankur12-1610/synapse
|
e2e1d90a5e4030616a3de242cde26c0cfff4a6b5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import heapq
import logging
from collections import defaultdict
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Collection,
DefaultDict,
Dict,
FrozenSet,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Union,
overload,
)
import attr
from frozendict import frozendict
from prometheus_client import Counter, Histogram
from typing_extensions import Literal
from synapse.api.constants import EventTypes
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.logging.context import ContextResourceUsage
from synapse.state import v1, v2
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.roommember import ProfileInfo
from synapse.types import StateMap
from synapse.util.async_helpers import Linearizer
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.metrics import Measure, measure_func
if TYPE_CHECKING:
from synapse.server import HomeServer
from synapse.storage.databases.main import DataStore
logger = logging.getLogger(__name__)
metrics_logger = logging.getLogger("synapse.state.metrics")
# Metrics for number of state groups involved in a resolution.
state_groups_histogram = Histogram(
"synapse_state_number_state_groups_in_resolution",
"Number of state groups used when performing a state resolution",
buckets=(1, 2, 3, 5, 7, 10, 15, 20, 50, 100, 200, 500, "+Inf"),
)
EVICTION_TIMEOUT_SECONDS = 60 * 60
_NEXT_STATE_ID = 1
POWER_KEY = (EventTypes.PowerLevels, "")
def _gen_state_id() -> str:
global _NEXT_STATE_ID
s = "X%d" % (_NEXT_STATE_ID,)
_NEXT_STATE_ID += 1
return s
class _StateCacheEntry:
__slots__ = ["state", "state_group", "state_id", "prev_group", "delta_ids"]
def __init__(
self,
state: StateMap[str],
state_group: Optional[int],
prev_group: Optional[int] = None,
delta_ids: Optional[StateMap[str]] = None,
):
# A map from (type, state_key) to event_id.
self.state = frozendict(state)
        # the ID of a state group if one and only one is involved;
        # otherwise, None.
self.state_group = state_group
self.prev_group = prev_group
self.delta_ids = frozendict(delta_ids) if delta_ids is not None else None
# The `state_id` is a unique ID we generate that can be used as ID for
# this collection of state. Usually this would be the same as the
# state group, but on worker instances we can't generate a new state
# group each time we resolve state, so we generate a separate one that
# isn't persisted and is used solely for caches.
# `state_id` is either a state_group (and so an int) or a string. This
        # ensures we don't accidentally persist a state_id as a state_group
if state_group:
self.state_id: Union[str, int] = state_group
else:
self.state_id = _gen_state_id()
def __len__(self) -> int:
return len(self.state)
class StateHandler:
"""Fetches bits of state from the stores, and does state resolution
where necessary
"""
def __init__(self, hs: "HomeServer"):
self.clock = hs.get_clock()
self.store = hs.get_datastores().main
self.state_store = hs.get_storage().state
self.hs = hs
self._state_resolution_handler = hs.get_state_resolution_handler()
@overload
async def get_current_state(
self,
room_id: str,
event_type: Literal[None] = None,
state_key: str = "",
latest_event_ids: Optional[List[str]] = None,
) -> StateMap[EventBase]:
...
@overload
async def get_current_state(
self,
room_id: str,
event_type: str,
state_key: str = "",
latest_event_ids: Optional[List[str]] = None,
) -> Optional[EventBase]:
...
async def get_current_state(
self,
room_id: str,
event_type: Optional[str] = None,
state_key: str = "",
latest_event_ids: Optional[List[str]] = None,
) -> Union[Optional[EventBase], StateMap[EventBase]]:
"""Retrieves the current state for the room. This is done by
calling `get_latest_events_in_room` to get the leading edges of the
event graph and then resolving any of the state conflicts.
        This is equivalent to the state that a hypothetical event sent into the
        room right now would see, before any new events are received.
Returns:
If `event_type` is specified, then the method returns only the one
event (or None) with that `event_type` and `state_key`.
Otherwise, a map from (type, state_key) to event.
"""
if not latest_event_ids:
latest_event_ids = await self.store.get_latest_event_ids_in_room(room_id)
assert latest_event_ids is not None
logger.debug("calling resolve_state_groups from get_current_state")
ret = await self.resolve_state_groups_for_events(room_id, latest_event_ids)
state = ret.state
if event_type:
event_id = state.get((event_type, state_key))
event = None
if event_id:
event = await self.store.get_event(event_id, allow_none=True)
return event
state_map = await self.store.get_events(
list(state.values()), get_prev_content=False
)
return {
key: state_map[e_id] for key, e_id in state.items() if e_id in state_map
}
async def get_current_state_ids(
self, room_id: str, latest_event_ids: Optional[Iterable[str]] = None
) -> StateMap[str]:
"""Get the current state, or the state at a set of events, for a room
Args:
room_id:
latest_event_ids: if given, the forward extremities to resolve. If
None, we look them up from the database (via a cache).
Returns:
the state dict, mapping from (event_type, state_key) -> event_id
"""
if not latest_event_ids:
latest_event_ids = await self.store.get_latest_event_ids_in_room(room_id)
assert latest_event_ids is not None
logger.debug("calling resolve_state_groups from get_current_state_ids")
ret = await self.resolve_state_groups_for_events(room_id, latest_event_ids)
return ret.state
async def get_current_users_in_room(
self, room_id: str, latest_event_ids: List[str]
) -> Dict[str, ProfileInfo]:
"""
Get the users who are currently in a room.
Note: This is much slower than using the equivalent method
`DataStore.get_users_in_room` or `DataStore.get_users_in_room_with_profiles`,
so this should only be used when wanting the users at a particular point
in the room.
Args:
room_id: The ID of the room.
            latest_event_ids: Precomputed list of latest event IDs; must not be None.
Returns:
Dictionary of user IDs to their profileinfo.
"""
assert latest_event_ids is not None
logger.debug("calling resolve_state_groups from get_current_users_in_room")
entry = await self.resolve_state_groups_for_events(room_id, latest_event_ids)
return await self.store.get_joined_users_from_state(room_id, entry)
async def get_current_hosts_in_room(self, room_id: str) -> Set[str]:
event_ids = await self.store.get_latest_event_ids_in_room(room_id)
return await self.get_hosts_in_room_at_events(room_id, event_ids)
async def get_hosts_in_room_at_events(
self, room_id: str, event_ids: Iterable[str]
) -> Set[str]:
"""Get the hosts that were in a room at the given event ids
Args:
room_id:
event_ids:
Returns:
The hosts in the room at the given events
"""
entry = await self.resolve_state_groups_for_events(room_id, event_ids)
return await self.store.get_joined_hosts(room_id, entry)
async def compute_event_context(
self,
event: EventBase,
old_state: Optional[Iterable[EventBase]] = None,
partial_state: bool = False,
) -> EventContext:
"""Build an EventContext structure for a non-outlier event.
(for an outlier, call EventContext.for_outlier directly)
This works out what the current state should be for the event, and
generates a new state group if necessary.
Args:
event:
old_state: The state at the event if it can't be
calculated from existing events. This is normally only specified
                when receiving an event over federation for which we don't have
                the prev events, e.g. when backfilling.
partial_state: True if `old_state` is partial and omits non-critical
membership events
Returns:
The event context.
"""
assert not event.internal_metadata.is_outlier()
#
# first of all, figure out the state before the event
#
if old_state:
# if we're given the state before the event, then we use that
state_ids_before_event: StateMap[str] = {
(s.type, s.state_key): s.event_id for s in old_state
}
state_group_before_event = None
state_group_before_event_prev_group = None
deltas_to_state_group_before_event = None
entry = None
else:
# otherwise, we'll need to resolve the state across the prev_events.
# partial_state should not be set explicitly in this case:
# we work it out dynamically
assert not partial_state
# if any of the prev-events have partial state, so do we.
# (This is slightly racy - the prev-events might get fixed up before we use
# their states - but I don't think that really matters; it just means we
# might redundantly recalculate the state for this event later.)
prev_event_ids = event.prev_event_ids()
incomplete_prev_events = await self.store.get_partial_state_events(
prev_event_ids
)
if any(incomplete_prev_events.values()):
logger.debug(
"New/incoming event %s refers to prev_events %s with partial state",
event.event_id,
[k for (k, v) in incomplete_prev_events.items() if v],
)
partial_state = True
logger.debug("calling resolve_state_groups from compute_event_context")
entry = await self.resolve_state_groups_for_events(
event.room_id, event.prev_event_ids()
)
state_ids_before_event = entry.state
state_group_before_event = entry.state_group
state_group_before_event_prev_group = entry.prev_group
deltas_to_state_group_before_event = entry.delta_ids
#
# make sure that we have a state group at that point. If it's not a state event,
# that will be the state group for the new event. If it *is* a state event,
# it might get rejected (in which case we'll need to persist it with the
# previous state group)
#
if not state_group_before_event:
state_group_before_event = await self.state_store.store_state_group(
event.event_id,
event.room_id,
prev_group=state_group_before_event_prev_group,
delta_ids=deltas_to_state_group_before_event,
current_state_ids=state_ids_before_event,
)
# Assign the new state group to the cached state entry.
#
# Note that this can race in that we could generate multiple state
# groups for the same state entry, but that is just inefficient
# rather than dangerous.
if entry and entry.state_group is None:
entry.state_group = state_group_before_event
#
# now if it's not a state event, we're done
#
if not event.is_state():
return EventContext.with_state(
state_group_before_event=state_group_before_event,
state_group=state_group_before_event,
current_state_ids=state_ids_before_event,
prev_state_ids=state_ids_before_event,
prev_group=state_group_before_event_prev_group,
delta_ids=deltas_to_state_group_before_event,
partial_state=partial_state,
)
#
# otherwise, we'll need to create a new state group for after the event
#
key = (event.type, event.state_key)
if key in state_ids_before_event:
replaces = state_ids_before_event[key]
if replaces != event.event_id:
event.unsigned["replaces_state"] = replaces
state_ids_after_event = dict(state_ids_before_event)
state_ids_after_event[key] = event.event_id
delta_ids = {key: event.event_id}
state_group_after_event = await self.state_store.store_state_group(
event.event_id,
event.room_id,
prev_group=state_group_before_event,
delta_ids=delta_ids,
current_state_ids=state_ids_after_event,
)
return EventContext.with_state(
state_group=state_group_after_event,
state_group_before_event=state_group_before_event,
current_state_ids=state_ids_after_event,
prev_state_ids=state_ids_before_event,
prev_group=state_group_before_event,
delta_ids=delta_ids,
partial_state=partial_state,
)
@measure_func()
async def resolve_state_groups_for_events(
self, room_id: str, event_ids: Iterable[str]
) -> _StateCacheEntry:
"""Given a list of event_ids this method fetches the state at each
event, resolves conflicts between them and returns them.
Args:
room_id
event_ids
Returns:
The resolved state
"""
logger.debug("resolve_state_groups event_ids %s", event_ids)
# map from state group id to the state in that state group (where
# 'state' is a map from state key to event id)
# dict[int, dict[(str, str), str]]
state_groups_ids = await self.state_store.get_state_groups_ids(
room_id, event_ids
)
if len(state_groups_ids) == 0:
return _StateCacheEntry(state={}, state_group=None)
elif len(state_groups_ids) == 1:
name, state_list = list(state_groups_ids.items()).pop()
prev_group, delta_ids = await self.state_store.get_state_group_delta(name)
return _StateCacheEntry(
state=state_list,
state_group=name,
prev_group=prev_group,
delta_ids=delta_ids,
)
room_version = await self.store.get_room_version_id(room_id)
result = await self._state_resolution_handler.resolve_state_groups(
room_id,
room_version,
state_groups_ids,
None,
state_res_store=StateResolutionStore(self.store),
)
return result
async def resolve_events(
self,
room_version: str,
state_sets: Collection[Iterable[EventBase]],
event: EventBase,
) -> StateMap[EventBase]:
logger.info(
"Resolving state for %s with %d groups", event.room_id, len(state_sets)
)
state_set_ids = [
{(ev.type, ev.state_key): ev.event_id for ev in st} for st in state_sets
]
state_map = {ev.event_id: ev for st in state_sets for ev in st}
new_state = await self._state_resolution_handler.resolve_events_with_store(
event.room_id,
room_version,
state_set_ids,
event_map=state_map,
state_res_store=StateResolutionStore(self.store),
)
return {key: state_map[ev_id] for key, ev_id in new_state.items()}
@attr.s(slots=True, auto_attribs=True)
class _StateResMetrics:
"""Keeps track of some usage metrics about state res."""
# System and User CPU time, in seconds
cpu_time: float = 0.0
# time spent on database transactions (excluding scheduling time). This roughly
# corresponds to the amount of work done on the db server, excluding event fetches.
db_time: float = 0.0
# number of events fetched from the db.
db_events: int = 0
_biggest_room_by_cpu_counter = Counter(
"synapse_state_res_cpu_for_biggest_room_seconds",
"CPU time spent performing state resolution for the single most expensive "
"room for state resolution",
)
_biggest_room_by_db_counter = Counter(
"synapse_state_res_db_for_biggest_room_seconds",
"Database time spent performing state resolution for the single most "
"expensive room for state resolution",
)
class StateResolutionHandler:
"""Responsible for doing state conflict resolution.
Note that the storage layer depends on this handler, so all functions must
be storage-independent.
"""
def __init__(self, hs: "HomeServer"):
self.clock = hs.get_clock()
self.resolve_linearizer = Linearizer(name="state_resolve_lock")
# dict of set of event_ids -> _StateCacheEntry.
self._state_cache: ExpiringCache[
FrozenSet[int], _StateCacheEntry
] = ExpiringCache(
cache_name="state_cache",
clock=self.clock,
max_len=100000,
expiry_ms=EVICTION_TIMEOUT_SECONDS * 1000,
iterable=True,
reset_expiry_on_get=True,
)
#
# stuff for tracking time spent on state-res by room
#
# tracks the amount of work done on state res per room
self._state_res_metrics: DefaultDict[str, _StateResMetrics] = defaultdict(
_StateResMetrics
)
self.clock.looping_call(self._report_metrics, 120 * 1000)
async def resolve_state_groups(
self,
room_id: str,
room_version: str,
state_groups_ids: Mapping[int, StateMap[str]],
event_map: Optional[Dict[str, EventBase]],
state_res_store: "StateResolutionStore",
) -> _StateCacheEntry:
"""Resolves conflicts between a set of state groups
Always generates a new state group (unless we hit the cache), so should
not be called for a single state group
Args:
room_id: room we are resolving for (used for logging and sanity checks)
room_version: version of the room
state_groups_ids:
A map from state group id to the state in that state group
(where 'state' is a map from state key to event id)
event_map:
a dict from event_id to event, for any events that we happen to
have in flight (eg, those currently being persisted). This will be
used as a starting point for finding the state we need; any missing
events will be requested via state_res_store.
If None, all events will be fetched via state_res_store.
state_res_store
Returns:
The resolved state
"""
group_names = frozenset(state_groups_ids.keys())
with (await self.resolve_linearizer.queue(group_names)):
cache = self._state_cache.get(group_names, None)
if cache:
return cache
logger.info(
"Resolving state for %s with groups %s",
room_id,
list(group_names),
)
state_groups_histogram.observe(len(state_groups_ids))
new_state = await self.resolve_events_with_store(
room_id,
room_version,
list(state_groups_ids.values()),
event_map=event_map,
state_res_store=state_res_store,
)
# if the new state matches any of the input state groups, we can
# use that state group again. Otherwise we will generate a state_id
# which will be used as a cache key for future resolutions, but
# not get persisted.
with Measure(self.clock, "state.create_group_ids"):
cache = _make_state_cache_entry(new_state, state_groups_ids)
self._state_cache[group_names] = cache
return cache
async def resolve_events_with_store(
self,
room_id: str,
room_version: str,
state_sets: Sequence[StateMap[str]],
event_map: Optional[Dict[str, EventBase]],
state_res_store: "StateResolutionStore",
) -> StateMap[str]:
"""
Args:
room_id: the room we are working in
room_version: Version of the room
state_sets: List of dicts of (type, state_key) -> event_id,
which are the different state groups to resolve.
event_map:
a dict from event_id to event, for any events that we happen to
have in flight (eg, those currently being persisted). This will be
used as a starting point for finding the state we need; any missing
events will be requested via state_map_factory.
If None, all events will be fetched via state_res_store.
state_res_store: a place to fetch events from
Returns:
a map from (type, state_key) to event_id.
"""
try:
with Measure(self.clock, "state._resolve_events") as m:
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
if room_version_obj.state_res == StateResolutionVersions.V1:
return await v1.resolve_events_with_store(
room_id,
room_version_obj,
state_sets,
event_map,
state_res_store.get_events,
)
else:
return await v2.resolve_events_with_store(
self.clock,
room_id,
room_version_obj,
state_sets,
event_map,
state_res_store,
)
finally:
self._record_state_res_metrics(room_id, m.get_resource_usage())
def _record_state_res_metrics(
self, room_id: str, rusage: ContextResourceUsage
) -> None:
room_metrics = self._state_res_metrics[room_id]
room_metrics.cpu_time += rusage.ru_utime + rusage.ru_stime
room_metrics.db_time += rusage.db_txn_duration_sec
room_metrics.db_events += rusage.evt_db_fetch_count
def _report_metrics(self) -> None:
if not self._state_res_metrics:
# no state res has happened since the last iteration: don't bother logging.
return
self._report_biggest(
lambda i: i.cpu_time,
"CPU time",
_biggest_room_by_cpu_counter,
)
self._report_biggest(
lambda i: i.db_time,
"DB time",
_biggest_room_by_db_counter,
)
self._state_res_metrics.clear()
def _report_biggest(
self,
extract_key: Callable[[_StateResMetrics], Any],
metric_name: str,
prometheus_counter_metric: Counter,
) -> None:
"""Report metrics on the biggest rooms for state res
Args:
extract_key: a callable which, given a _StateResMetrics, extracts a single
metric to sort by.
metric_name: the name of the metric we have extracted, for the log line
            prometheus_counter_metric: a prometheus metric recording the sum of
                the extracted metric
"""
n_to_log = 10
if not metrics_logger.isEnabledFor(logging.DEBUG):
# only need the most expensive if we don't have debug logging, which
# allows nlargest() to degrade to max()
n_to_log = 1
items = self._state_res_metrics.items()
# log the N biggest rooms
biggest: List[Tuple[str, _StateResMetrics]] = heapq.nlargest(
n_to_log, items, key=lambda i: extract_key(i[1])
)
metrics_logger.debug(
"%i biggest rooms for state-res by %s: %s",
len(biggest),
metric_name,
["%s (%gs)" % (r, extract_key(m)) for (r, m) in biggest],
)
# report info on the single biggest to prometheus
_, biggest_metrics = biggest[0]
prometheus_counter_metric.inc(extract_key(biggest_metrics))
def _make_state_cache_entry(
new_state: StateMap[str], state_groups_ids: Mapping[int, StateMap[str]]
) -> _StateCacheEntry:
"""Given a resolved state, and a set of input state groups, pick one to base
a new state group on (if any), and return an appropriately-constructed
_StateCacheEntry.
Args:
new_state: resolved state map (mapping from (type, state_key) to event_id)
state_groups_ids:
map from state group id to the state in that state group (where
'state' is a map from state key to event id)
Returns:
The cache entry.
"""
# if the new state matches any of the input state groups, we can
# use that state group again. Otherwise we will generate a state_id
# which will be used as a cache key for future resolutions, but
# not get persisted.
# first look for exact matches
new_state_event_ids = set(new_state.values())
for sg, state in state_groups_ids.items():
if len(new_state_event_ids) != len(state):
continue
old_state_event_ids = set(state.values())
if new_state_event_ids == old_state_event_ids:
# got an exact match.
return _StateCacheEntry(state=new_state, state_group=sg)
# TODO: We want to create a state group for this set of events, to
# increase cache hits, but we need to make sure that it doesn't
# end up as a prev_group without being added to the database
# failing that, look for the closest match.
prev_group = None
delta_ids: Optional[StateMap[str]] = None
for old_group, old_state in state_groups_ids.items():
n_delta_ids = {k: v for k, v in new_state.items() if old_state.get(k) != v}
if not delta_ids or len(n_delta_ids) < len(delta_ids):
prev_group = old_group
delta_ids = n_delta_ids
return _StateCacheEntry(
state=new_state, state_group=None, prev_group=prev_group, delta_ids=delta_ids
)
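# Hedged illustration (not part of the original module): if the resolved state differs from
# one of the input groups only in the power-levels event, the entry built above carries
# prev_group set to that group's id and delta_ids roughly equal to {POWER_KEY: <new event id>},
# so only the single-event delta needs persisting rather than the full state map.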
@attr.s(slots=True, auto_attribs=True)
class StateResolutionStore:
"""Interface that allows state resolution algorithms to access the database
in well defined way.
"""
store: "DataStore"
def get_events(
self, event_ids: Collection[str], allow_rejected: bool = False
) -> Awaitable[Dict[str, EventBase]]:
"""Get events from the database
Args:
event_ids: The event_ids of the events to fetch
allow_rejected: If True return rejected events.
Returns:
An awaitable which resolves to a dict from event_id to event.
"""
return self.store.get_events(
event_ids,
redact_behaviour=EventRedactBehaviour.AS_IS,
get_prev_content=False,
allow_rejected=allow_rejected,
)
def get_auth_chain_difference(
self, room_id: str, state_sets: List[Set[str]]
) -> Awaitable[Set[str]]:
"""Given sets of state events figure out the auth chain difference (as
per state res v2 algorithm).
        This is equivalent to fetching the full auth chain for each set of state
and returning the events that don't appear in each and every auth
chain.
Returns:
An awaitable that resolves to a set of event IDs.
"""
return self.store.get_auth_chain_difference(room_id, state_sets)
| 35.620899 | 93 | 0.633204 |
653fc8606786b6b093e9cf2d436e1a0a2eb3417f
| 725 |
py
|
Python
|
docs/source/conf.py
|
rlckd159/deep-graph-matching-consensus
|
1656cdae27c705a0aa6d2912a24e566b8b86e1b0
|
[
"MIT"
] | 194 |
2020-01-17T08:59:09.000Z
|
2022-03-29T10:16:30.000Z
|
docs/source/conf.py
|
rlckd159/deep-graph-matching-consensus
|
1656cdae27c705a0aa6d2912a24e566b8b86e1b0
|
[
"MIT"
] | 17 |
2020-01-31T10:58:06.000Z
|
2021-10-05T14:48:49.000Z
|
docs/source/conf.py
|
rlckd159/deep-graph-matching-consensus
|
1656cdae27c705a0aa6d2912a24e566b8b86e1b0
|
[
"MIT"
] | 38 |
2020-01-19T01:23:24.000Z
|
2022-03-23T21:56:24.000Z
|
import datetime
import sphinx_rtd_theme
import doctest
import dgmc
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
source_suffix = '.rst'
master_doc = 'index'
author = 'Matthias Fey'
project = 'deep-graph-matching-consensus'
copyright = '{}, {}'.format(datetime.datetime.now().year, author)
version = dgmc.__version__
release = dgmc.__version__
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
doctest_default_flags = doctest.NORMALIZE_WHITESPACE
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
| 23.387097 | 68 | 0.732414 |
bebe4fe867eb05f0ddfe4190db807c3792d85759
| 1,047 |
py
|
Python
|
Profilecode.py
|
aditya67-gonella/KarthikMothiki
|
18269cd8de5522e7bcb38807ba4a6afb6d3f4202
|
[
"MIT"
] | null | null | null |
Profilecode.py
|
aditya67-gonella/KarthikMothiki
|
18269cd8de5522e7bcb38807ba4a6afb6d3f4202
|
[
"MIT"
] | null | null | null |
Profilecode.py
|
aditya67-gonella/KarthikMothiki
|
18269cd8de5522e7bcb38807ba4a6afb6d3f4202
|
[
"MIT"
] | null | null | null |
# Profile Code
dev_name = "Gonella Venkata Rama Aditya Ganesh"
def developer_details():
print("Developer Name : ", dev_name)
def status():
learning = ["Python", "Java", "Iot"]
Interests = ["Robotics", "Artificial Intelligence", "ROS", "IoT"]
print("Learning = ", learning)
print("Interests = ", Interests)
def skills():
languages = ["Python", "Kotlin", "C", "C++", "Java"]
ide = ["Vs Code", "Visual Studio", "PyCharm", "Atom"]
OS = ["Windows", "Linux", "Ubuntu", "Kali Linux", "Android"]
Cloud = ["AWS"]
print("Languages Using = ", languages)
print("OS Used = ", OS)
print("Cloud = ", Cloud)
def experience():
internships = {"AI Tech Web" : "Technical Content Writer", "Indian Robotics Community" : "Campus Ambassador"}
Community = {"TechnoPhiles" : "Discord Community"}
print("Internships = ", internships)
print("Founder at = ", Community)
if __name__ == "__main__":
developer_details()
status()
skills()
experience()
| 29.083333 | 114 | 0.592168 |
739df75dd8ed287fc33c34f1b2e592e200395329
| 7,659 |
py
|
Python
|
contrib/devtools/update-translations.py
|
goodthebest/saros
|
8e1dd0142c8d26db2c614d2066fcf9a485e5899b
|
[
"MIT"
] | 6 |
2018-01-15T00:25:35.000Z
|
2020-11-17T17:33:05.000Z
|
contrib/devtools/update-translations.py
|
goodthebest/saros
|
8e1dd0142c8d26db2c614d2066fcf9a485e5899b
|
[
"MIT"
] | 1 |
2018-02-08T15:06:02.000Z
|
2018-06-28T04:14:56.000Z
|
contrib/devtools/update-translations.py
|
goodthebest/saros
|
8e1dd0142c8d26db2c614d2066fcf9a485e5899b
|
[
"MIT"
] | 21 |
2018-01-09T15:13:52.000Z
|
2021-06-15T19:56:13.000Z
|
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
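# Hedged example invocation (not in the original script): from the repository root, with the
# Transifex client configured for this project, one would run something like
#   python contrib/devtools/update-translations.py
# which fetches, sanitizes and rewrites the .ts files under src/qt/locale.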
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'saros_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
try:
specifiers.append(s[percent+1])
        except IndexError:
print('Failed to get specifier')
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
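# Hedged illustration (not in the original script): for a source message such as
# "%1 of %2 blocks (%s)", the helpers above behave roughly like
#   find_format_specifiers("%1 of %2 blocks (%s)")  -> ['1', '2', 's']
#   split_format_specifiers(['1', '2', 's'])        -> ({'1', '2'}, ['s'])
# i.e. Qt-style numbered placeholders may be reordered by translators, while
# strprintf-style ones must keep their order.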
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
#assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison; disabled by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
# fetch_all_translations()
postprocess_translations()
| 37.544118 | 124 | 0.629325 |