# ======================================================================
# File: tests/test_derivatives.py  (whalenpt/rkstiff, MIT)
# ======================================================================
from rkstiff.grids import construct_x_kx_rfft, construct_x_kx_fft
from rkstiff.grids import construct_x_Dx_cheb
from rkstiff.derivatives import dx_rfft, dx_fft
import numpy as np
def test_periodic_dx_rfft():
N = 100
a, b = 0, 2*np.pi
x,kx = construct_x_kx_rfft(N,a,b)
u = np.sin(x)
ux_exact = np.cos(x)
ux_approx = dx_rfft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_zeroboundaries_dx_rfft():
N = 400
a, b = -30., 30.
x,kx = construct_x_kx_rfft(N,a,b)
u = 1./np.cosh(x)
ux_exact = -np.tanh(x)/np.cosh(x)
ux_approx = dx_rfft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_gauss_dx_rfft():
N = 128
a,b = -10,10
x,kx = construct_x_kx_rfft(N,a,b)
u = np.exp(-x**2)
ux_exact = -2*x*np.exp(-x**2)
ux_approx = dx_rfft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_manydx_rfft():
N = 128
a, b = 0, 2*np.pi
x,kx = construct_x_kx_rfft(N,a,b)
u = np.sin(x)
ux_exact = np.sin(x)
ux_approx = u.copy()
for _ in range(4):
ux_approx = dx_rfft(kx,ux_approx)
rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact)
assert rel_err < 1e-6
ux_approx = u.copy()
ux_approx = dx_rfft(kx,ux_approx,8)
rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact)
assert rel_err < 0.1
def test_manydx_fft():
N = 128
a, b = 0, 2*np.pi
x,kx = construct_x_kx_fft(N,a,b)
u = np.sin(x)
ux_exact = np.sin(x)
ux_approx = u.copy()
for _ in range(4):
ux_approx = dx_fft(kx,ux_approx)
rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact)
assert rel_err < 1e-6
ux_approx = u.copy()
ux_approx = dx_fft(kx,ux_approx,8)
rel_err = np.linalg.norm(ux_exact-ux_approx)/np.linalg.norm(ux_exact)
assert rel_err < 0.1
def test_periodic_dx_fft():
N = 100
a, b = 0, 2*np.pi
x,kx = construct_x_kx_fft(N,a,b)
u = np.sin(x)
ux_exact = np.cos(x)
ux_approx = dx_fft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_zeroboundaries_dx_fft():
N = 400
a, b = -30., 30.
x,kx = construct_x_kx_fft(N,a,b)
u = 1./np.cosh(x)
ux_exact = -np.tanh(x)/np.cosh(x)
ux_approx = dx_fft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_gauss_dx_fft():
N = 128
a,b = -10,10
x,kx = construct_x_kx_fft(N,a,b)
u = np.exp(-x**2)
ux_exact = -2*x*np.exp(-x**2)
ux_approx = dx_fft(kx,u)
assert np.allclose(ux_exact,ux_approx)
def test_exp_trig_x_Dx_cheb():
# standard interval [-1,1]
N = 20; a = -1; b = 1
    x,Dx = construct_x_Dx_cheb(N,a,b)
u = np.exp(x)*np.sin(5*x)
Du_exact = np.exp(x)*(np.sin(5*x)+5*np.cos(5*x))
Du_approx = Dx.dot(u)
error = Du_exact - Du_approx
assert np.linalg.norm(error)/np.linalg.norm(Du_exact) < 1e-8
# non-standard interval [-3,3]
N = 30; a = -3; b = 3
x,Dx = construct_x_Dx_cheb(N,a,b)
u = np.exp(x)*np.sin(5*x)
Du_exact = np.exp(x)*(np.sin(5*x)+5*np.cos(5*x))
Du_approx = Dx.dot(u)
error = Du_exact - Du_approx
assert np.linalg.norm(error)/np.linalg.norm(Du_exact) < 1e-7
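# Editor's note: the assertions above all rest on the spectral-derivative
# identity u'(x) = irfft(1j * kx * rfft(u)) for periodic u. Below is a
# minimal, self-contained sketch of what dx_rfft presumably computes; this is
# an assumption for illustration only -- rkstiff's actual implementation may
# differ (e.g. in Nyquist-mode handling or how repeated derivatives loop).
def _dx_rfft_sketch(kx, u, n=1):
    """Sketch: n-th derivative of a real periodic sample u via the rfft."""
    return np.fft.irfft((1j * kx) ** n * np.fft.rfft(u), n=u.size)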
# ======================================================================
# File: python/jinja2_template.py  (bismog/leetcode, MIT)
# ======================================================================
#!/usr/bin/env python3
import os
from jinja2 import Environment, FileSystemLoader
PATH = os.path.dirname(os.path.abspath(__file__))
env = Environment(loader=FileSystemLoader(os.path.join(PATH, 'templates')))
mac_addr = "01:23:45:67:89:01"
PXE_ROOT_DIR = "/data/tftpboot"
pxe_options = {
'os_distribution': 'centos7',
'path_to_vmlinuz': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'vmlinuz'),
'path_to_initrd': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'initrd.img'),
'path_to_kickstart_cfg': os.path.join(PXE_ROOT_DIR, 'node', mac_addr, 'ks.cfg'),
'pxe_server_ip': '128.0.0.1',
'protocol': 'nfs'
}
def build_pxe_config(ctxt, template):
"""Build the PXE boot configuration file.
This method builds the PXE boot configuration file by rendering the
template with the given parameters.
    :param ctxt: A dict of template context values used to render the file.
    :param template: Path to the PXE configuration template.
:returns: A formatted string with the file content.
"""
tmpl_path, tmpl_file = os.path.split(template)
env = Environment(loader=FileSystemLoader(tmpl_path))
template = env.get_template(tmpl_file)
return template.render(ctxt)
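# Editor's addition: a hypothetical call to build_pxe_config(), for
# illustration only. The template path below is made up; it should point at a
# Jinja2 template that references the keys of the context dict
# (e.g. {{ pxe_opts.os_distribution }}):
#
#   config_text = build_pxe_config({'pxe_opts': pxe_options},
#                                  os.path.join(PXE_ROOT_DIR, 'template',
#                                               '01-xx-xx-xx-xx-xx-xx.template'))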
def get_pxe_mac_path(mac, delimiter=None):
"""Convert a MAC address into a PXE config file name.
:param mac: A MAC address string in the format xx:xx:xx:xx:xx:xx.
:param delimiter: The MAC address delimiter. Defaults to dash ('-').
:returns: the path to the config file.
"""
if delimiter is None:
delimiter = '-'
mac_file_name = mac.replace(':', delimiter).lower()
mac_file_name = '01-' + mac_file_name
return os.path.join(PXE_ROOT_DIR, 'pxelinux.cfg', mac_file_name)
def get_template_path():
    """Return the path to the PXE config template file."""
    return os.path.join(PXE_ROOT_DIR, 'template', '01-xx-xx-xx-xx-xx-xx.template')
#def render_template(template_filename, context):
# return env.get_template(template_filename).render(context)
def create_pxe_config_file(pxe_options):
# fname = "output.html"
cname = get_pxe_mac_path(mac_addr)
    tname = get_template_path()
context = {
'pxe_opts': pxe_options
}
with open(cname, 'w') as f:
config = build_pxe_config(context, tname)
f.write(config)
########################################
if __name__ == "__main__":
create_pxe_config_file(pxe_options)
# ======================================================================
# File: school_system/users/admin.py  (SanyaDeath/BIA-school-system, BSD-3-Clause)
# ======================================================================
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from .models import Student, User
admin.site.site_header = 'BIA SCHOOL SYSTEM'
class UserAdmin(DjangoUserAdmin):
model = User
fieldsets = DjangoUserAdmin.fieldsets + ((None, {
'fields': ('role', 'middle_name',
'birth_date')}),)
list_display = ('role', 'last_name', 'first_name',
'middle_name', 'birth_date')
def save_model(self, request, obj, form, change):
if request.user.is_teacher:
obj.is_staff = True
obj.save()
admin.site.register(User, UserAdmin)
class StudentUser(UserAdmin):
model = Student
fieldsets = UserAdmin.fieldsets + ((None, {
'fields': ('entry_year', 'klass')}),)
list_display = ('role', 'last_name', 'first_name',
'middle_name', 'birth_date',
'entry_year', 'klass')
search_fields = ('last_name', 'first_name',
'middle_name', 'entry_year', 'klass')
admin.site.register(Student, StudentUser)
# ======================================================================
# File: converter.py  (ownerofworld/TDroidDesk, MIT)
# ======================================================================
# coding: utf-8
"""Converter module."""
import util
THEME = 'theme'
BACKGROUND = 'background'
class ThemeConverter(object):
"""Object that converts themes using given map file."""
def __init__(self, theme_map, transp_map):
"""Constructor."""
self.theme_map = theme_map
self.transp_map = transp_map
def convert(self, source_theme):
"""Create object that describes desktop theme.
Arguments:
source_theme - theme object
"""
target_theme = util.get_empty_theme()
for desktop_key, att_key in self.theme_map.items():
if att_key not in source_theme[THEME]:
# print('Missing {0} key in source theme'.format(att_key))
continue
color = source_theme[THEME][att_key]
if desktop_key in self.transp_map:
alpha = self.transp_map[desktop_key]
color = util.apply_transparency(color, alpha)
target_theme[THEME][desktop_key] = color
target_theme[BACKGROUND] = source_theme[BACKGROUND]
return target_theme
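# Editor's addition: an illustrative use of ThemeConverter. The map contents
# below are invented for the example; in the project they come from the map
# files mentioned in the class docstring, and `source_theme` is a dict with
# 'theme' and 'background' keys as read by convert() above.
#
#   theme_map = {'window_bg': 'colorPrimary'}   # desktop key -> source attr key
#   transp_map = {'window_bg': 0.85}            # desktop key -> alpha value
#   converter = ThemeConverter(theme_map, transp_map)
#   desktop_theme = converter.convert(source_theme)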
# ======================================================================
# File: tools/serve/test_serve.py  (mtrzos/wpt, BSD-3-Clause)
# ======================================================================
import pickle
import platform
import os
import pytest
import localpaths
from . import serve
from .serve import Config
@pytest.mark.skipif(platform.uname()[0] == "Windows",
reason="Expected contents are platform-dependent")
def test_make_hosts_file_nix():
c = Config(browser_host="foo.bar", alternate_hosts={"alt": "foo2.bar"})
hosts = serve.make_hosts_file(c, "192.168.42.42")
lines = hosts.split("\n")
assert set(lines) == {"",
"192.168.42.42\tfoo.bar",
"192.168.42.42\tfoo2.bar",
"192.168.42.42\twww.foo.bar",
"192.168.42.42\twww.foo2.bar",
"192.168.42.42\twww1.foo.bar",
"192.168.42.42\twww1.foo2.bar",
"192.168.42.42\twww2.foo.bar",
"192.168.42.42\twww2.foo2.bar",
"192.168.42.42\txn--lve-6lad.foo.bar",
"192.168.42.42\txn--lve-6lad.foo2.bar",
"192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo.bar",
"192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo2.bar"}
assert lines[-1] == ""
@pytest.mark.skipif(platform.uname()[0] != "Windows",
reason="Expected contents are platform-dependent")
def test_make_hosts_file_windows():
c = Config(browser_host="foo.bar", alternate_hosts={"alt": "foo2.bar"})
hosts = serve.make_hosts_file(c, "192.168.42.42")
lines = hosts.split("\n")
assert set(lines) == {"",
"0.0.0.0\tnonexistent.foo.bar",
"0.0.0.0\tnonexistent.foo2.bar",
"192.168.42.42\tfoo.bar",
"192.168.42.42\tfoo2.bar",
"192.168.42.42\twww.foo.bar",
"192.168.42.42\twww.foo2.bar",
"192.168.42.42\twww1.foo.bar",
"192.168.42.42\twww1.foo2.bar",
"192.168.42.42\twww2.foo.bar",
"192.168.42.42\twww2.foo2.bar",
"192.168.42.42\txn--lve-6lad.foo.bar",
"192.168.42.42\txn--lve-6lad.foo2.bar",
"192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo.bar",
"192.168.42.42\txn--n8j6ds53lwwkrqhv28a.foo2.bar"}
assert lines[-1] == ""
def test_ws_doc_root_default():
c = Config()
assert c.ws_doc_root == os.path.join(localpaths.repo_root, "websockets", "handlers")
def test_init_ws_doc_root():
c = Config(ws_doc_root="/")
assert c.doc_root == localpaths.repo_root # check this hasn't changed
assert c._ws_doc_root == "/"
assert c.ws_doc_root == "/"
def test_set_ws_doc_root():
c = Config()
c.ws_doc_root = "/"
assert c.doc_root == localpaths.repo_root # check this hasn't changed
assert c._ws_doc_root == "/"
assert c.ws_doc_root == "/"
def test_pickle():
# Ensure that the config object can be pickled
pickle.dumps(Config())
# ======================================================================
# File: webshell/preprocess.py  (radish608/graduationProject_DL4WebSecurity, MIT)
# ======================================================================
# -*- coding: utf-8 -*-
import os
import re
from sklearn.feature_extraction.text import CountVectorizer
import sys
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfTransformer
import commands
import tflearn
import pickle
max_features=10000
max_document_length=100
min_opcode_count=2
webshell_dir="../Datasets/dataset_webshell/webshell/PHP/"
whitefile_dir="../Datasets/dataset_webshell/normal/php/"
white_count=0
black_count=0
php_bin="/usr/bin/php"
def load_files_re(dir):
files_list = []
g = os.walk(dir)
for path, d, filelist in g:
#print d;
for filename in filelist:
#print os.path.join(path, filename)
if filename.endswith('.php') or filename.endswith('.txt'):
fulepath = os.path.join(path, filename)
print "Load %s" % fulepath
t = load_file(fulepath)
print len(t)
files_list.append(t)
return files_list
def load_files_opcode_re(dir):
global min_opcode_count
files_list = []
g = os.walk(dir)
for path, d, filelist in g:
#print d;
for filename in filelist:
#print os.path.join(path, filename)
if filename.endswith('.php') :
fulepath = os.path.join(path, filename)
print "Load %s opcode" % fulepath
t = load_file_opcode(fulepath)
print len(t)
if len(t) > min_opcode_count:
files_list.append(t)
else:
print "Load %s opcode failed" % fulepath
#print "Add opcode %s" % t
return files_list
def load_file(file_path):
t=""
with open(file_path) as f:
for line in f:
line=line.strip('\n')
t+=line
return t
def load_file_opcode(file_path):
global php_bin
t=""
cmd=php_bin+" -dvld.active=1 -dvld.execute=0 "+file_path
#print "exec "+cmd
status,output=commands.getstatusoutput(cmd)
t=output
#print t
tokens=re.findall(r'\s(\b[A-Z_]+\b)\s',output)
t=" ".join(tokens)
print "opcode count %d" % len(t)
return t
def load_files(path):
files_list=[]
for r, d, files in os.walk(path):
for file in files:
if file.endswith('.php'):
file_path=path+file
print "Load %s" % file_path
t=load_file(file_path)
files_list.append(t)
return files_list
#php N-Gram + TF-IDF
def get_feature_by_ngram():
global white_count
global black_count
global max_features
print "max_features=%d" % max_features
x=[]
y=[]
webshell_files_list = load_files_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
y=y1+y2
CV = CountVectorizer(ngram_range=(2, 2), decode_error="ignore",max_features=max_features,
token_pattern = r'\b\w+\b',min_df=1, max_df=1.0)
x=CV.fit_transform(x).toarray()
transformer = TfidfTransformer(smooth_idf=False)
#x_tfidf = transformer.fit_transform(x)
#x = x_tfidf.toarray()
return x,y
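# Editor's note: a tiny illustration of the 2-gram features the vectorizer
# above extracts (token_pattern r'\b\w+\b' tokenizes on word boundaries):
#
#   >>> cv = CountVectorizer(ngram_range=(2, 2), token_pattern=r'\b\w+\b')
#   >>> cv.fit_transform(["eval base64_decode data"]).toarray()
#   array([[1, 1]])   # features: 'base64_decode data', 'eval base64_decode'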
#opcode N-Gram
def get_feature_by_opcode_ngram():
global white_count
global black_count
global max_features
print "max_features=%d" % max_features
x=[]
y=[]
data_file = "./Model/Data/opcode_ngram_tf.data"
if os.path.exists(data_file):
f = open(data_file, 'rb')
x, y = pickle.loads(f.read())
f.close()
return x, y
webshell_files_list = load_files_opcode_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_opcode_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
y=y1+y2
CV = CountVectorizer(ngram_range=(2, 4), decode_error="ignore",max_features=max_features,
token_pattern = r'\b\w+\b',min_df=1, max_df=1.0)
x=CV.fit_transform(x).toarray()
transformer = TfidfTransformer(smooth_idf=False)
#x_tfidf = transformer.fit_transform(x)
#x = x_tfidf.toarray()
data = pickle.dumps((x, y))
    with open(data_file, 'w') as f:
        f.write(data)
return x,y
# opcode vocabulary features
def get_feature_by_opcode_vt():
global white_count
global black_count
x=[]
y=[]
data_file = "./Model/Data/opcode_vt.data"
if os.path.exists(data_file):
f = open(data_file, 'rb')
x, y = pickle.loads(f.read())
f.close()
else:
webshell_files_list = load_files_opcode_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_opcode_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
#print x
y=y1+y2
vp=tflearn.data_utils.VocabularyProcessor(max_document_length=100,
min_frequency=0,
vocabulary=None,
tokenizer_fn=None)
x=vp.fit_transform(x, unused_y=None)
x=np.array(list(x))
f = open(data_file, 'wb')
data = pickle.dumps((x, y))
f.write(data)
f.close()
#print x
#print y
return x,y
# PHP vocabulary features
def get_feature_by_vt():
global white_count
global black_count
x=[]
y=[]
webshell_files_list = load_files_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
y=y1+y2
vp=tflearn.data_utils.VocabularyProcessor(max_document_length=100,
min_frequency=0,
vocabulary=None,
tokenizer_fn=None)
x=vp.fit_transform(x, unused_y=None)
x=np.array(list(x))
return x,y
# PHP sequence features
def get_feature_by_php():
global white_count
global black_count
global max_features
global webshell_dir
global whitefile_dir
print "max_features=%d webshell_dir=%s whitefile_dir=%s" % (max_features,webshell_dir,whitefile_dir)
x=[]
y=[]
webshell_files_list = load_files_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
#print x
y=y1+y2
CV = CountVectorizer(ngram_range=(3000, 3000), decode_error="ignore",max_features=max_features,
token_pattern = r'\b\w+\b',min_df=1, max_df=1.0)
x=CV.fit_transform(x).toarray()
return x,y
# opcode sequence features
def get_feature_by_opcode():
global white_count
global black_count
global max_features
global webshell_dir
global whitefile_dir
print "max_features=%d webshell_dir=%s whitefile_dir=%s" % (max_features,webshell_dir,whitefile_dir)
x=[]
y=[]
data_file = "./Model/Data/opcodelist.data"
if os.path.exists(data_file):
f = open(data_file, 'rb')
x, y = pickle.loads(f.read())
f.close()
return x, y
webshell_files_list = load_files_opcode_re(webshell_dir)
y1=[1]*len(webshell_files_list)
black_count=len(webshell_files_list)
wp_files_list =load_files_opcode_re(whitefile_dir)
y2=[0]*len(wp_files_list)
white_count=len(wp_files_list)
x=webshell_files_list+wp_files_list
#print x
y=y1+y2
CV = CountVectorizer(ngram_range=(3000, 3000), decode_error="ignore",max_features=max_features,
token_pattern = r'\b\w+\b',min_df=1, max_df=1.0)
x=CV.fit_transform(x).toarray()
f = open(data_file, 'wb')
data = pickle.dumps((x, y))
f.write(data)
f.close()
return x,y
# ======================================================================
# File: py/DREAM/Settings/Equations/RunawayElectronDistribution.py
#       (chalmersplasmatheory/DREAM, MIT)
# ======================================================================
import numpy as np
from DREAM.Settings.Equations.EquationException import EquationException
from . import DistributionFunction as DistFunc
from . DistributionFunction import DistributionFunction
from .. TransportSettings import TransportSettings
INIT_FORWARD = 1
INIT_XI_NEGATIVE = 2
INIT_XI_POSITIVE = 3
INIT_ISOTROPIC = 4
class RunawayElectronDistribution(DistributionFunction):
def __init__(self, settings,
fre=[0.0], initr=[0.0], initp=[0.0], initxi=[0.0],
initppar=None, initpperp=None,
rn0=None, n0=None, rT0=None, T0=None, bc=DistFunc.BC_PHI_CONST,
ad_int_r=DistFunc.AD_INTERP_CENTRED,
ad_int_p1=DistFunc.AD_INTERP_CENTRED,
ad_int_p2=DistFunc.AD_INTERP_CENTRED,
ad_jac_r=DistFunc.AD_INTERP_JACOBIAN_LINEAR,
ad_jac_p1=DistFunc.AD_INTERP_JACOBIAN_LINEAR,
ad_jac_p2=DistFunc.AD_INTERP_JACOBIAN_LINEAR,
fluxlimiterdamping=1.0):
"""
Constructor.
"""
super().__init__(settings=settings, name='f_re', grid=settings.runawaygrid,
f=fre, initr=initr, initp=initp, initxi=initxi, initppar=initppar,
initpperp=initpperp, rn0=rn0, n0=n0, rT0=rT0, T0=T0,
bc=bc, ad_int_r=ad_int_r, ad_int_p1=ad_int_p1,
ad_int_p2=ad_int_p2, fluxlimiterdamping=fluxlimiterdamping)
self.inittype = INIT_FORWARD
def setInitType(self, inittype):
"""
Specifies how the runaway electron distribution function f_re should be
initialized from the runaway density n_re.
:param int inittype: Flag indicating how to initialize f_re.
"""
self.inittype = int(inittype)
def fromdict(self, data):
"""
Load data for this object from the given dictionary.
"""
super().fromdict(data)
def scal(v):
if type(v) == np.ndarray: return v[0]
else: return v
if 'inittype' in data:
self.inittype = int(scal(data['inittype']))
def todict(self):
"""
Returns a Python dictionary containing all settings of
this RunawayElectronDistribution object.
"""
d = super().todict()
d['inittype'] = self.inittype
return d
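# Editor's addition: illustrative use of the init-type flags defined at the
# top of this module. `settings` stands for a DREAM settings object with a
# `runawaygrid` attribute, as assumed by the constructor; it is not defined
# here, so this is a sketch rather than a runnable snippet.
#
#   fre = RunawayElectronDistribution(settings)
#   fre.setInitType(INIT_ISOTROPIC)   # initialize f_re isotropically from n_re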
# ======================================================================
# File: chevah/compat/tests/normal/testing/test_assertion.py
#       (chevah/compat, BSD-3-Clause)
# ======================================================================
# Copyright (c) 2015 Adi Roiban.
# See LICENSE for details.
"""
Tests for the assertion helpers.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import os
from chevah.compat.exceptions import CompatError
from chevah.compat.testing import ChevahTestCase, mk
class TestAssertionMixin(ChevahTestCase):
"""
Test for assertions.
ChevahTestCase is inheriting the assertion mixin and we can test it.
"""
def check_assertWorkingFolderIsClean(self, content):
"""
Common tests for assertWorkingFolderIsClean.
"""
with self.assertRaises(AssertionError) as context:
self.assertWorkingFolderIsClean()
message = context.exception.args[0].decode('utf-8')
for member in content:
self.assertContains(member, message)
# Calling it again will not raise any error since the folder is clean.
self.assertWorkingFolderIsClean()
def test_assertTempIsClean_clean_temp(self):
"""
No error is raised if temp folder is clean.
"""
self.assertTempIsClean()
def test_assertTempIsClean_dirty(self):
"""
If temp is not clean an error is raised and then temp folders
is cleaned.
"""
temp_segments = mk.fs.createFileInTemp()
with self.assertRaises(AssertionError) as context:
self.assertTempIsClean()
message = context.exception.args[0].decode('utf-8')
self.assertStartsWith(u'Temporary folder is not clean.', message)
self.assertContains(temp_segments[-1], message)
self.assertFalse(mk.fs.exists(temp_segments))
def test_assertWorkingFolderIsClean_with_folder(self):
"""
An error is raised if current working folder contains a temporary
folder and folder is cleaned.
"""
# Our compat filesystem API does not support creating files in
# current working directory so we use direct API call to OS.
name = mk.string()
os.mkdir(mk.fs.getEncodedPath(name))
self.check_assertWorkingFolderIsClean([name])
def test_assertWorkingFolderIsClean_with_file(self):
"""
An error is raised if current working folder contains a temporary
file and file is cleaned.
"""
name = mk.string()
open(mk.fs.getEncodedPath(name), 'a').close()
self.check_assertWorkingFolderIsClean([name])
def test_assertWorkingFolderIsClean_with_file_and_folder(self):
"""
An error is raised if current working folder contains a temporary
folder and file, and folder and folder is cleaned.
"""
file_name = mk.string()
folder_name = mk.string()
open(mk.fs.getEncodedPath(file_name), 'a').close()
os.mkdir(mk.fs.getEncodedPath(folder_name))
self.check_assertWorkingFolderIsClean([file_name, folder_name])
def test_assertIsEmpty(self):
"""
Raise an exception when not empty and otherwise does nothing.
"""
self.assertIsEmpty(())
self.assertIsEmpty([])
self.assertIsEmpty('')
self.assertIsEmpty(set())
with self.assertRaises(AssertionError) as context:
self.assertIsEmpty((1, 2))
self.assertEqual(
'Iterable is not empty.\n(1, 2).', context.exception.args[0])
def test_assertCompatError_no_CompatError(self):
"""
Will show the details if error is not an CompatError.
"""
exception = self.assertRaises(
AssertionError,
self.assertCompatError,
u'123-id',
Exception('generic-error')
)
self.assertEqual(
"Error generic-error not CompatError but "
"<type 'exceptions.Exception'>",
exception.args[0],
)
def test_assertCompatError_bad_id(self):
"""
Will show the details if error is not an CompatError.
"""
exception = self.assertRaises(
AssertionError,
self.assertCompatError,
u'123-id',
CompatError(u'456', u'Some details.')
)
self.assertEqual(
'Error id for CompatError 456 - Some details. is not 123-id, '
'but 456.',
exception.args[0],
)
def test_assertIteratorItemsEqual_no_iterable(self):
"""
Raise an exception if the actual value is not iterable.
"""
sut = [1, 3]
exception = self.assertRaises(
AssertionError,
self.assertIteratorItemsEqual,
[],
sut,
)
self.assertEqual(
'Value is not iterable.',
exception.args[0],
)
def test_assertIteratorItemsEqual_ok(self):
"""
Is equal even if elements are in a different order.
"""
iterator = iter([2])
value = [1, b'3', u'a', iterator]
sut = iter(value)
self.assertIteratorItemsEqual([b'3', 1, u'a', iterator], sut)
def test_assertIteratorItemsEqual_less(self):
"""
It fails if the values are not equal.
"""
value = [1, b'3', u'a']
sut = iter(value)
exception = self.assertRaises(
AssertionError,
self.assertIteratorItemsEqual,
[1],
sut,
)
# The check here is more complicated since the message relies on the
# assertEqual implementation.
self.assertStartsWith(
"Element counts were not equal:",
exception.args[0],
)
def test_assertEqual_unicode_vs_bytestring_in_list(self):
"""
Fails with AssertionError when asserting that lists containing
a Unicode string vs. a bytestring are equal.
"""
unicode_list = [u'text']
bytes_list = [b'text']
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_list, bytes_list)
self.assertEqual('First is unicode while second is str for "text".',
context.exception.message)
def test_assertEqual_unicode_vs_bytestring_in_nested_list(self):
"""
Fails with AssertionError when asserting that nested lists containing
a Unicode string vs. a bytestring are equal.
"""
unicode_list = [[u'text']]
bytes_list = [[b'text']]
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_list, bytes_list)
self.assertEqual('First is unicode while second is str for "text".',
context.exception.message)
def test_assertEqual_unicode_vs_bytestring_in_tuple(self):
"""
Fails with AssertionError when asserting that tuples containing
a Unicode string vs. a bytestring are equal.
"""
unicode_tuple = (u'text',)
bytes_tuple = (b'text',)
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_tuple, bytes_tuple)
self.assertEqual('First is unicode while second is str for "text".',
context.exception.message)
def test_assertEqual_unicode_vs_bytestring_in_set(self):
"""
Fails with AssertionError when asserting that sets containing
a Unicode string vs. a bytestring are equal.
"""
unicode_set = set([u'text'])
bytes_set = set([b'text'])
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_set, bytes_set)
self.assertEqual('First is unicode while second is str for "text".',
context.exception.message)
def test_assertEqual_unicode_vs_bytestring_in_dict_keys(self):
"""
Fails with AssertionError when asserting that lists containing
a Unicode string vs. a bytestring are equal.
"""
unicode_dict = {u'key': 'value'}
bytes_dict = {b'key': 'value'}
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_dict, bytes_dict)
self.assertEqual('First is unicode while second is str for "key".',
context.exception.message)
def test_assertEqual_unicode_vs_bytestring_in_dict_values(self):
"""
Fails with AssertionError when asserting that lists containing
a Unicode string vs. a bytestring are equal.
"""
unicode_dict = {'key': u'value'}
bytes_dict = {'key': b'value'}
with self.assertRaises(AssertionError) as context:
self.assertEqual(unicode_dict, bytes_dict)
self.assertEqual('First is unicode while second is str for "value".',
context.exception.message)
# ======================================================================
# File: tests/basic_test.py  (patpizio/conformal_predictors, MIT)
# ======================================================================
import unittest
import sys
sys.path.insert(0, '../src/')
from conformal_predictors.icp import ConformalPredictor
from conformal_predictors.nc_measures import *
import conformal_predictors.calibrutils as cu
from sklearn.datasets import *
import numpy as np
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsRegressor, KNeighborsClassifier
from sklearn.base import clone
from sklearn.metrics import classification_report
from nonconformist.cp import IcpClassifier
from nonconformist.nc import NcFactory, InverseProbabilityErrFunc, MarginErrFunc
class TestPValues(unittest.TestCase):
def test_iris(self):
is_smoothed = False
        # iris = load_iris()
        iris = load_breast_cancer()  # note: keeps the variable name `iris` although breast_cancer data is loaded
model = KNeighborsClassifier(n_neighbors=11)
test_model = clone(model)
idx = np.random.permutation(iris.target.size)
idx_train, idx_cal, idx_test = idx[:50], idx[50:100], idx[100:]
## Nonconformist
nc = NcFactory.create_nc(
model,
InverseProbabilityErrFunc()
# MarginErrFunc()
)
icp = IcpClassifier(nc, smoothing=is_smoothed) # Create an inductive conformal classifier
# Fit the ICP using the proper training set
icp.fit(iris.data[idx_train, :], iris.target[idx_train])
# Calibrate the ICP using the calibration set
icp.calibrate(iris.data[idx_cal, :], iris.target[idx_cal])
nonconformist_p_values = icp.predict(iris.data[idx_test, :])
## Test model
y_cal = iris.target[idx_cal]
y_test = iris.target[idx_test]
test_model.fit(iris.data[idx_train, :], iris.target[idx_train])
y_cal_proba = test_model.predict_proba(iris.data[idx_cal, :])
y_test_proba = test_model.predict_proba(iris.data[idx_test, :])
icp = ConformalPredictor(y_cal_proba, y_cal, y_test_proba, y_test, smoothed=is_smoothed, mondrian=False)
icp.fit(negative_logit)
# icp.fit(margin_error_func)
self.assertEqual(np.round(np.sum(nonconformist_p_values - icp.p_values), 12), 0)
def test_breast_cancer(self):
pass
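# Editor's note: the p-value comparison in test_iris() relies on both
# libraries implementing the same inductive conformal p-value. For a test
# nonconformity score a_new and calibration scores a_1..a_n, the unsmoothed
# p-value (smoothing is disabled above) is assumed to be
#
#   p = (|{i : a_i >= a_new}| + 1) / (n + 1)
#
# so the two implementations should agree to floating-point precision.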
if __name__ == '__main__':
    unittest.main()
# ======================================================================
# File: data/unaligned_dataset.py  (basicskywards/cyclegan-yolo, BSD-3-Clause)
# ======================================================================
import os.path
import torchvision.transforms as transforms
from data.base_dataset import BaseDataset, get_transform
from data.image_folder import make_dataset
from PIL import Image
import PIL
from pdb import set_trace as st
import torch
import numpy as np
#from yolo.utils.datasets import pad
#import torchvision.transforms as transforms
from yolo.utils.datasets import pad_to_square, resize, pad_to_square2
class UnalignedDataset(BaseDataset):  # I/O for the hybrid YOLOv3 + CycleGAN pipeline; batched YOLOv3 targets require the collate_fn below
def initialize(self, opt, normalized_labels = True):
self.opt = opt
self.root = opt.dataroot
self.normalized_labels = normalized_labels
# self.dir_A = os.path.join(opt.dataroot, opt.phase + 'A')
# self.dir_B = os.path.join(opt.dataroot, opt.phase + 'B')
self.dir_A = os.path.join(opt.dataroot, 'A_train.txt') # A.txt contains a list of path/to/img1.jpg
self.dir_B = os.path.join(opt.dataroot, 'B_train.txt')
self.A_paths = make_dataset(self.dir_A)
self.B_paths = make_dataset(self.dir_B)
self.A_paths = sorted(self.A_paths)
self.B_paths = sorted(self.B_paths)
self.A_size = len(self.A_paths)
self.B_size = len(self.B_paths)
self.transform = get_transform(opt) # transform for cyclegan
# prepare targets for yolo
self.A_label_files = [
path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt")
for path in self.A_paths
]
# self.A_label_files = [
# path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt").replace("rainy/", "").replace("cloudy1000/", "").replace("sunny/", "").replace("night_or_night_and_rainy/", "")
# for path in self.A_paths
# ]
self.B_label_files = [
path.replace("images", "labels").replace(".png", ".txt").replace(".jpg", ".txt").replace("rainy/", "").replace("cloudy1000/", "").replace("sunny/", "").replace("night_or_night_and_rainy/", "")
for path in self.B_paths
]
def __getitem__(self, index):
A_path = self.A_paths[index % self.A_size]
B_path = self.B_paths[index % self.B_size]
A_path = A_path.strip('\n')
B_path = B_path.strip('\n')
#print('A_path = ', A_path)
A_img = Image.open(A_path).convert('RGB')
B_img = Image.open(B_path).convert('RGB')
#img = transforms.ToTensor()(Image.open(img_path).convert('RGB'))
tmp_A = transforms.ToTensor()(A_img)
#print('\n**************************************************A_img.shape = ', tmp_A.shape)
_, h, w = tmp_A.shape
h_factor, w_factor = (h, w) if self.normalized_labels else (1, 1)
# Pad to square resolution
tmp_A, pad = pad_to_square2(tmp_A, 0)
_, padded_h, padded_w = tmp_A.shape
tmp_B = transforms.ToTensor()(B_img)
#print('\n**************************************************A_img.shape = ', tmp_A.shape)
_, hB, wB = tmp_B.shape
h_factorB, w_factorB = (hB, wB) if self.normalized_labels else (1, 1)
# Pad to square resolution
tmp_B, padB = pad_to_square2(tmp_B, 0)
_, padded_hB, padded_wB = tmp_B.shape
A_img = self.transform(A_img)
B_img = self.transform(B_img)
# ---------
# Label
# ---------
def label_path2bboxes(label_path, pad, h_factor, w_factor, padded_h, padded_w):
tmp_targets = None
if os.path.exists(label_path):
boxes = torch.from_numpy(np.loadtxt(label_path).reshape(-1, 5))
# Extract coordinates for unpadded + unscaled image
x1 = w_factor * (boxes[:, 1] - boxes[:, 3] / 2)
y1 = h_factor * (boxes[:, 2] - boxes[:, 4] / 2)
x2 = w_factor * (boxes[:, 1] + boxes[:, 3] / 2)
y2 = h_factor * (boxes[:, 2] + boxes[:, 4] / 2)
# Adjust for added padding
x1 += pad[0]
y1 += pad[2]
x2 += pad[1]
y2 += pad[3]
# Returns (x, y, w, h) in scale [0, 1]
boxes[:, 1] = ((x1 + x2) / 2) / padded_w
boxes[:, 2] = ((y1 + y2) / 2) / padded_h
boxes[:, 3] *= w_factor / padded_w
boxes[:, 4] *= h_factor / padded_h
#print('\nboxes x y w h: ', boxes)
tmp_targets = torch.zeros((len(boxes), 6))
tmp_targets[:, 1:] = boxes
return tmp_targets
label_path = self.A_label_files[index % len(self.A_paths)].rstrip()
A_targets = label_path2bboxes(label_path, pad, h_factor, w_factor, padded_h, padded_w)
label_path_B = self.B_label_files[index % len(self.B_paths)].rstrip()
B_targets = label_path2bboxes(label_path_B, padB, h_factorB, w_factorB, padded_hB, padded_wB)
#print('targets = ', targets)
#targets = generate_YOLO_targets(self.bbox) # A_path = A_annotation
# return {'A': A_img, 'B': B_img,
# 'A_paths': A_path, 'B_paths': B_path,
# 'targets': targets}
return {'A': A_img, 'B': B_img,
'A_paths': A_path, 'B_paths': B_path,
'A_targets': A_targets, 'B_targets': B_targets} # add B_bbox, A_bbox
def collate_fn(self, batch):
# input images will be resized to 416
# this collate_fn to suport batchSize >= 2
#print('collate fn: ', zip(*batch))
tmp = list(batch)
#print('tmp = ', len(tmp))
target_As = [data['A_targets'] for data in tmp if data['A_targets'] is not None]
#print('targets_As = ', target_As)
for i, boxes in enumerate(target_As):
boxes[:, 0] = i
target_As = torch.cat(target_As, 0) # BUG
#print('target_As: ', target_As.shape)
#print('target_As cat = ', target_As)
target_Bs = [data['B_targets'] for data in tmp if data['B_targets'] is not None]
for i, boxes in enumerate(target_Bs):
boxes[:, 0] = i
#print('\ntarget_Bs: ', target_Bs)
#target_Bs = torch.cat(target_Bs, 0) # BUG
As = torch.stack([data['A'] for data in tmp])
Bs = torch.stack([data['B'] for data in tmp])
path_As = [data['A_paths'] for data in tmp]
#path_As = torch.cat(path_As, 0)
path_Bs = [data['B_paths'] for data in tmp]
#path_Bs = torch.cat(path_Bs, 0)
# paths, imgs, targets = list(zip(*batch))
# # Remove empty placeholder targets
# targets = [boxes for boxes in targets if boxes is not None]
# # Add sample index to targets
# for i, boxes in enumerate(targets):
# boxes[:, 0] = i
# targets = torch.cat(targets, 0)
# # Selects new image size every tenth batch
# if self.multiscale and self.batch_count % 10 == 0:
# self.img_size = random.choice(range(self.min_size, self.max_size + 1, 32))
# # Resize images to input shape
# imgs = torch.stack([resize(img, self.img_size) for img in imgs])
# self.batch_count += 1
return {'A': As, 'B': Bs,
'A_paths': path_As, 'B_paths': path_Bs,
'A_targets': target_As, 'B_targets': target_Bs}
def __len__(self):
return max(self.A_size, self.B_size)
def name(self):
return 'UnalignedDataset'
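# Editor's addition: an illustrative way to consume this dataset. `opt` is a
# hypothetical options object carrying the fields initialize() reads
# (dataroot, transform settings, ...), so this is a sketch rather than the
# project's actual training entry point.
#
#   from torch.utils.data import DataLoader
#   dataset = UnalignedDataset()
#   dataset.initialize(opt)   # opt.dataroot must contain A_train.txt / B_train.txt
#   loader = DataLoader(dataset, batch_size=4, shuffle=True,
#                       collate_fn=dataset.collate_fn)
#   for batch in loader:
#       real_A, targets_A = batch['A'], batch['A_targets']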
# ======================================================================
# File: primare_control/primare_interface.py  (ZenithDK/primare-control, Apache-2.0)
# ======================================================================
"""Interface to Primare amplifiers using Twisted SerialPort.
This module allows you to control your Primare I22 and I32 amplifier from the
command line using Primare's binary protocol via the RS232 port on the
amplifier.
"""
import logging
import click
from contextlib import closing
from primare_control import PrimareController
# from twisted.logger import (
# FilteringLogObserver,
# globalLogBeginner,
# Logger,
# LogLevel,
# LogLevelFilterPredicate,
# textFileLogObserver
# )
# log = Logger()
# globalLogBeginner.beginLoggingTo([
# FilteringLogObserver(
# textFileLogObserver(sys.stdout),
# [LogLevelFilterPredicate(LogLevel.debug)]
# )
# ])
# Setup logging so that is available
FORMAT = '%(asctime)-15s %(name)s %(levelname)-8s %(message)s'
logging.basicConfig(level=logging.DEBUG, format=FORMAT)
logger = logging.getLogger(__name__)
class DefaultCmdGroup(click.Group):
"""Custom implementation for handling Primare methods in a unified way."""
def list_commands(self, ctx):
"""List Primare Control methods."""
rv = [method for method in dir(PrimareController)
if not method.startswith('_')]
rv.append('interactive')
rv.sort()
return rv
def get_command(self, ctx, name):
"""Return click command."""
@click.pass_context
def subcommand(*args, **kwargs):
#logger.debug("subcommand args: {}".format(args))
#logger.debug("subcommand kwargs: {}".format(kwargs))
ctx = args[0]
params = ctx.obj['parameters']
ctx.obj['p_ctrl'] = PrimareController(port=params['port'],
baudrate=params['baudrate'],
source=None,
volume=None,
debug=params['debug'])
with closing(ctx.obj['p_ctrl']):
try:
if ctx.obj['parameters']['amp_info']:
ctx.obj['p_ctrl'].setup()
method = getattr(PrimareController, name)
if len(kwargs):
method(ctx.obj['p_ctrl'], int(kwargs['value']))
else:
method(ctx.obj['p_ctrl'])
except KeyboardInterrupt:
logger.info("User aborted")
except TypeError as e:
logger.error(e)
if name == "interactive":
cmd = click.Group.get_command(self, ctx, 'interactive')
else:
if name in [method for method in dir(PrimareController)
if not method.startswith('_')]:
# attach doc from original callable so it will appear in CLI
# output
subcommand.__doc__ = getattr(PrimareController, name).__doc__
if getattr(PrimareController,
name).__func__.__code__.co_argcount > 1:
params_arg = [click.Argument(("value",))]
else:
params_arg = None
cmd = click.Command(name,
params=params_arg,
callback=subcommand)
else:
#logger.debug("get_command no_such_cmd")
cmd = None
return cmd
@click.command(cls=DefaultCmdGroup)
@click.pass_context
@click.option("--amp-info",
default=False,
is_flag=True,
help="Retrieve and print amplifier information")
@click.option("--baudrate",
default='4800',
type=click.Choice(['300',
'1200',
'2400',
'4800',
'9600',
'19200',
'57600',
'115200']),
help="Serial port baudrate. For I22 it _must_ be 4800.")
@click.option("--debug",
"-d",
default=False,
is_flag=True,
help="Enable debug output.")
@click.option("--port",
"-p",
default="/dev/ttyUSB0",
help="Serial port to use (e.g. 3 for a COM port on Windows, "
"/dev/ttyATH0 for Arduino Yun, /dev/ttyACM0 for Serial-over-USB "
"on RaspberryPi.")
def cli(ctx, amp_info, baudrate, debug, port):
"""Prototype command."""
try:
# on Windows, we need port to be an integer
port = int(port)
except ValueError:
pass
ctx.obj = {}
ctx.obj['p_ctrl'] = None
ctx.obj['parameters'] = {
'amp_info': amp_info,
'baudrate': baudrate,
'debug': debug,
'port': port,
}
@cli.command()
@click.pass_context
def interactive(ctx):
"""Start interactive shell for controlling a Primare amplifier.
Press enter (blank line), 'q' or 'quit' to exit.
For a list of available commands, type 'help'
"""
method_list = [
(method,
getattr(PrimareController, method).__doc__) for
method in dir(PrimareController) if not method.startswith('_')]
help_string = """To exit, press enter (blank line) or type 'q' or 'quit'.\n
Available commands are:
{}""".format('\n'.join(" {} {}".format(method.ljust(25), doc.splitlines()[0])
for method, doc in method_list))
try:
params = ctx.obj['parameters']
ctx.obj['p_ctrl'] = PrimareController(port=params['port'],
baudrate=params['baudrate'],
source=None,
volume=None,
debug=params['debug'])
if ctx.obj['parameters']['amp_info']:
ctx.obj['p_ctrl'].setup()
logger.info(help_string)
nb = ''
while True:
nb = raw_input('Cmd: ').strip()
if not nb or nb == 'q' or nb == 'quit':
logger.debug("Quit: '{}'".format(nb))
break
elif nb.startswith('help'):
if len(nb.split()) == 2:
help_method = nb.split()[1]
matches = [item for item in method_list
if item[0].startswith(help_method)]
if len(matches):
logger.info("\n".join("\n== {}\n{}".format(
method.ljust(25), doc_string) for
method, doc_string in matches))
else:
logger.info(
"Help requested on unknown method: {}".format(
help_method))
else:
logger.info(help_string)
else:
parsed_cmd = nb.split()
command = getattr(ctx.obj['p_ctrl'], parsed_cmd[0], None)
if command:
try:
if len(parsed_cmd) > 1:
if parsed_cmd[1].lower() == "true":
parsed_cmd[1] = True
elif parsed_cmd[1].lower() == "false":
parsed_cmd[1] = False
elif parsed_cmd[0] == "remote_cmd":
pass
parsed_cmd[1] = '{}'.format(parsed_cmd[1])
else:
parsed_cmd[1] = int(parsed_cmd[1])
command(parsed_cmd[1])
else:
command()
except TypeError as e:
logger.warn("You called a method with an incorrect" +
"number of parameters: {}".format(e))
else:
logger.info("No such function - try again")
except KeyboardInterrupt:
logger.info("User aborted")
# in a non-main thread:
ctx.obj['p_ctrl'].close()
del ctx.obj['p_ctrl']
ctx.obj['p_ctrl'] = None
if __name__ == '__main__':
cli()
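# Editor's addition: example invocations, assuming the script is run directly
# (command names are generated at runtime from PrimareController's public
# methods by DefaultCmdGroup, so `volume_up` below is only a stand-in for
# whichever methods that class actually exposes):
#
#   python primare_interface.py --port /dev/ttyUSB0 --amp-info interactive
#   python primare_interface.py -p /dev/ttyUSB0 volume_up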
# ======================================================================
# File: sjfxjc/foundations-for-analytics-with-python-master/csv/
#       2csv_reader_parsing_and_write.py  (SaronZhou/python, MIT)
# ======================================================================
#!/usr/bin/env python3
import csv
import sys
input_file = sys.argv[1]
output_file = sys.argv[2]
with open(input_file, 'r', newline='') as csv_in_file:
with open(output_file, 'w', newline='') as csv_out_file:
filereader = csv.reader(csv_in_file, delimiter=',')
filewriter = csv.writer(csv_out_file, delimiter=',')
for row_list in filereader:
            filewriter.writerow(row_list)
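# Editor's note: example invocation (file names are placeholders); the script
# reads sys.argv[1] and writes the parsed rows to sys.argv[2]:
#   python 2csv_reader_parsing_and_write.py input.csv output.csv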
# ======================================================================
# File: challenges/fifo-animal-shelter/pets.py
#       (jeremyCtown/data-structures-and-algorithms, MIT)
# ======================================================================
class Dog:
"""
Creates dog object
"""
def __init__(self):
self.val = 'dog'
def __repr__(self):
return self.val
class Cat:
"""
Creates cat object
"""
def __init__(self):
self.val = 'cat'
def __repr__(self):
return self.val
# ======================================================================
# File: provdbconnector/tests/db_adapters/neo4j/test_neo4jadapter.py
#       (Ama-Gi/prov-neo4j-covid19-track, Apache-2.0)
# ======================================================================
import unittest
from provdbconnector.exceptions.database import InvalidOptionsException, AuthException
from provdbconnector import Neo4jAdapter, NEO4J_USER, NEO4J_PASS, NEO4J_HOST, NEO4J_BOLT_PORT
from provdbconnector.prov_db import ProvDb
from provdbconnector.tests import AdapterTestTemplate
from provdbconnector.tests import ProvDbTestTemplate
class Neo4jAdapterTests(AdapterTestTemplate):
"""
This test extends from AdapterTestTemplate and provide a common set for the neo4j adapter
"""
def setUp(self):
"""
Setup the test
"""
self.instance = Neo4jAdapter()
auth_info = {"user_name": NEO4J_USER,
"user_password": NEO4J_PASS,
"host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
}
self.instance.connect(auth_info)
session = self.instance._create_session()
session.run("MATCH (x) DETACH DELETE x")
@unittest.skip(
"Skipped because the server configuration currently is set to 'no password', so the authentication will never fail")
def test_connect_fails(self):
"""
Try to connect with the wrong password
"""
auth_info = {"user_name": NEO4J_USER,
"user_password": 'xxxxxx',
"host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
}
self.instance.connect(auth_info)
with self.assertRaises(AuthException):
self.instance.connect(auth_info)
def test_connect_invalid_options(self):
"""
Try to connect with some invalid arguments
"""
auth_info = {"u": NEO4J_USER,
"p": 'xxxxxx',
"h": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
}
with self.assertRaises(InvalidOptionsException):
self.instance.connect(auth_info)
def tearDown(self):
"""
Delete all data on the database
:return:
"""
session = self.instance._create_session()
session.run("MATCH (x) DETACH DELETE x")
del self.instance
class Neo4jAdapterProvDbTests(ProvDbTestTemplate):
"""
High level api test for the neo4j adapter
"""
def setUp(self):
self.auth_info = {"user_name": NEO4J_USER,
"user_password": NEO4J_PASS,
"host": NEO4J_HOST + ":" + NEO4J_BOLT_PORT
}
self.provapi = ProvDb(api_id=1, adapter=Neo4jAdapter, auth_info=self.auth_info)
def clear_database(self):
"""
This function get called before each test starts
"""
session = self.provapi._adapter._create_session()
session.run("MATCH (x) DETACH DELETE x")
def tearDown(self):
"""
Delete all data in the database
"""
session = self.provapi._adapter._create_session()
session.run("MATCH (x) DETACH DELETE x")
del self.provapi
# ======================================================================
# File: Python3-ThirdPartyLibrary/Chapter06_psutil.py
#       (anliven/Reading-Code-Learning-Python, Apache-2.0)
# ======================================================================
# -*- coding: utf-8 -*-
import psutil
# CPU
print("CPU: ", psutil.cpu_count()) # CPU逻辑数量
print("CPU: ", psutil.cpu_count(logical=False)) # CPU物理核心
print("CPU: ", psutil.cpu_times()) # 统计CPU的用户/系统/空闲时间
# for x in range(3):
# print(psutil.cpu_percent(interval=1, percpu=True)) # 每秒刷新一次CPU使用率
# 内存
print("memory", psutil.virtual_memory()) # 物理内存信息, 以整数字节为单位显示
print("memory", psutil.swap_memory()) # 交换内存信息
# 磁盘
print("disk: ", psutil.disk_partitions()) # 磁盘分区信息
print("disk: ", psutil.disk_usage('/')) # 磁盘使用情况
print("disk: ", psutil.disk_io_counters()) # 磁盘IO
# 网络
print("network: ", psutil.net_io_counters()) # 网络读写字节/包的个数
print("network: ", psutil.net_if_addrs()) # 网络接口信息
print("network: ", psutil.net_if_stats()) # 网络接口状态
print("network: ", psutil.net_connections()) # 当前网络连接信息
# 进程
print("process: ", psutil.pids()) # 所有进程ID
p = psutil.Process(12052) # 获取指定进程
print("process: ", p.name(), # 进程名称
"\nprocess: ", p.status(), # 进程状态
"\nprocess: ", p.exe(), # 进程exe路径
"\nprocess: ", p.cwd(), # 进程工作目录
"\nprocess: ", p.create_time(), # 进程创建时间
"\nprocess: ", p.cmdline(), # 进程启动的命令行
"\nprocess: ", p.ppid(), # 父进程ID
"\nprocess: ", p.parent(), # 父进程
"\nprocess: ", p.children(), # 子进程列表
"\nprocess: ", p.username(), # 进程用户名
"\nprocess: ", p.cpu_times(), # 进程使用的CPU时间
"\nprocess: ", p.memory_info(), # 进程使用的内存
"\nprocess: ", p.num_threads(), # 进程的线程数量
"\nprocess: ", p.threads(), # 所有线程信息
"\nprocess: ", p.environ(), # 进程环境变量
"\nprocess: ", p.open_files(), # 进程打开的文件
"\nprocess: ", p.connections() # 进程相关网络连接
)
# p.terminate() # 结束进程
psutil.test() # test()函数可模拟出ps命令的效果
# ### psutil
# - Cross-platform lib for process and system monitoring in Python.
# - Home Page: https://github.com/giampaolo/psutil
# - Documentation: http://psutil.readthedocs.io/en/latest/
# ======================================================================
# File: ocrDA.py  (it-pebune/ani-research-data-extraction, MIT)
# ======================================================================
import json
from NewDeclarationInQueue.formular_converter import FormularConverter
from NewDeclarationInQueue.preprocess_one_step import PreprocessOneStep
from NewDeclarationInQueue.preprocess_two_steps import PreProcessTwoSteps
from NewDeclarationInQueue.processfiles.customprocess.search_text_line_parameter import SearchTextLineParameter
from NewDeclarationInQueue.processfiles.customprocess.table_config_detail import TableConfigDetail
from NewDeclarationInQueue.processfiles.customprocess.text_with_special_ch import TextWithSpecialCharacters
from NewDeclarationInQueue.processfiles.ocr_worker import OcrWorker
from NewDeclarationInQueue.processfiles.process_messages import ProcessMessages
def process_only_second_steps(input_file_path: str):
second_step = PreprocessOneStep()
#second_step.process_step_two(input_file_path)
second_step.process_custom_model_step_two(input_file_path)
def get_input(input_file: str):
node = []
with open(input_file) as json_data:
node = json.load(json_data)
json_data.close()
return node
def process_two_steps(sfile: str):
str_msg_id = 'abc'
dict_input = get_input(sfile)
two_steps = PreProcessTwoSteps()
process_messages = ProcessMessages('OCR Process', str_msg_id)
one_step = PreprocessOneStep()
ocr_constants = one_step.get_env()
ocr_file, process_messages = two_steps.get_file_info(dict_input, process_messages)
formular_converter = FormularConverter()
ocr_formular = formular_converter.get_formular_info(ocr_constants, ocr_file)
#process_messages_json = two_steps.process_document(ocr_file, ocr_constants, ocr_formular, process_messages)
process_messages = two_steps.process_document_with_custom_model(ocr_file, ocr_constants, process_messages)
#two_steps.save_in_output_queue(process_messages_json)
#process_only_second_steps(r"test_url.json")
process_two_steps(r"test_url.json")
# ======================================================================
# File: backend/model/migrate/versions/18632a2d5fc_.py  (deti/boss, Apache-2.0)
# ======================================================================
"""Extend event column in account history
Revision ID: 18632a2d5fc
Revises: 3e19c50e864
Create Date: 2015-06-05 17:49:12.757269
"""
# revision identifiers, used by Alembic.
revision = '18632a2d5fc'
down_revision = '3e19c50e864'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_account():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('tariff_history', 'event',
existing_type=mysql.VARCHAR(length=8),
type_=sa.String(length=16),
existing_nullable=True)
### end Alembic commands ###
def downgrade_account():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('tariff_history', 'event',
existing_type=sa.String(length=16),
type_=mysql.VARCHAR(length=8),
existing_nullable=True)
### end Alembic commands ###
def upgrade_fitter():
pass
def downgrade_fitter():
pass
| 21.581818 | 63 | 0.670598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 445 | 0.374895 |
b522cbe5a92fd1c03c4cc0d0200215ca4a546a8f | 8,018 | py | Python | grid_user/models.py | topd333/Xlab | 28d89b3b18717957229ca52cb2cbbbc20bd31eae | [
"Unlicense"
]
| null | null | null | grid_user/models.py | topd333/Xlab | 28d89b3b18717957229ca52cb2cbbbc20bd31eae | [
"Unlicense"
]
| null | null | null | grid_user/models.py | topd333/Xlab | 28d89b3b18717957229ca52cb2cbbbc20bd31eae | [
"Unlicense"
]
| null | null | null | import random
import datetime
from django.db import models
from django.contrib.auth.models import (
BaseUserManager, AbstractBaseUser
)
from django.utils import timezone
SECURTYQUESTION = (
('1', "What city were you born in?"),
('2', "What is your mother's maiden name?"),
('3', "What street did you grow up on?"),
('4', "What is the title of your favorite book?"),
('5', "What is your favorite vacation spot?"),
('6', "What is your pet's name?"),
)
class UserManager(BaseUserManager):
def create_user(self, email, first, last, principal, username, dob, securtyq,
securtya, avatarname, password=None):
"""
Creates and saves a User with the given email, date of
birth and password.
"""
if not email:
raise ValueError('Users must have an email address')
if not principal:
raise ValueError('Users must have a principal id')
if not username:
raise ValueError('Users must have a username')
user = self.model(
email=UserManager.normalize_email(email),
firstname=first,
lastname=last,
principal_id=principal,
username=username,
securtyq=securtyq,
securtya=securtya,
dob=dob,
avatarname=avatarname
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, username, email, firstname, lastname, principal_id, password):
"""
Creates and saves a superuser with the given parameters
"""
# securtyq = '1'
# securtya = 'india'
# dob = '2014-08-18'#datetime.date.today
# avatarname = 'people-pic1.png'
# user = self.create_user(email,
# firstname, lastname, principal_id, username, dob, securtyq, securtya, avatarname, password,
# )
user = self.model(
username=username,
email=UserManager.normalize_email(email),
firstname=firstname,
lastname=lastname,
principal_id=principal_id
)
user.set_password(password)
user.is_admin = True
user.is_superuser = True
user.is_staff = True
user.save(using=self._db)
return user
class User(AbstractBaseUser):
email = models.EmailField(
verbose_name='email address',
max_length=255,
unique=True,
db_index=True,
)
username = models.CharField(max_length=255, unique=True)
dob = models.DateField(default=datetime.date.today)
firstname = models.CharField(max_length=64)
lastname = models.CharField(max_length=64)
principal_id = models.CharField(max_length=36)
scope_id = models.CharField(
max_length=36,
default='00000000-0000-0000-0000-000000000000'
)
securtyq = models.CharField(
max_length=255, choices=SECURTYQUESTION, default='1')
securtya = models.CharField(max_length=255, default='india')
user_level = models.IntegerField(default=0)
is_active = models.BooleanField(default=True)
is_admin = models.BooleanField(default=False)
is_superuser = models.BooleanField(default=False)
avatarname = models.CharField(max_length=250, blank=True, null=True)
date_joined = models.DateTimeField(default=timezone.now)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', 'firstname', 'lastname', 'principal_id']
def get_full_name(self):
self.email
def get_short_name(self):
self.email
def __unicode__(self):
return '%s ** %s %s' % (self.email, self.firstname, self.lastname)
def has_perms(self, perm, obj=None):
"Does the user have a specific permission?"
# Simplest possible answer: Yes, always
return True
def has_perm(self, perm, obj=None):
"Does the user have a specific permission?"
# Simplest possible answer: Yes, always
return True
def has_module_perms(self, app_label):
"Does the user have permissions to view the app `app_label`?"
# Simplest possible answer: Yes, always
return True
def get_firstname_lastname(self):
return '%s %s' % (self.firstname, self.lastname)
# @property
# def is_staff(self):
# "Is the user a member of staff?"
# Simplest possible answer: All admins are staff
# return self.is_admin
class TempUserManager(models.Manager):
def create_temp_user(self, email, firstname, lastname, key, username, dob, securtyq, securtya, password=None):
if not email:
raise ValueError('Users must have an email address')
temp_user = self.model(
email=UserManager.normalize_email(email),
username=username,
firstname=firstname,
lastname=lastname,
securtyq=securtyq,
securtya=securtya,
activation_key=key,
password=password
)
temp_user.save(using=self._db)
return temp_user
class TempUser(models.Model):
username = models.CharField(max_length=255, unique=True)
email = models.EmailField(max_length=95, unique=True)
firstname = models.CharField(max_length=32)
lastname = models.CharField(max_length=32)
password = models.CharField(max_length=20)
dob = models.DateField()
securtyq = models.CharField(max_length=255, choices=SECURTYQUESTION)
securtya = models.CharField(max_length=255)
created = models.DateTimeField(auto_now=True)
activation_key = models.CharField(max_length=64)
avatarname = models.CharField(max_length=250, blank=True, null=True)
accounttype = models.CharField(
max_length=64, blank=True, default='basic membership')
objects = TempUserManager()
def __unicode__(self):
return '%s ** %s %s ** %s' % (self.email,
self.firstname,
self.lastname,
self.created)
class ChangeEmailManager(models.Manager):
def create_temp_email(self, email, key):
if not email:
raise ValueError('Users must have an email address')
temp_email = self.model(
email=ChangeEmailManager.normalize_email(email),
activation_key=key
)
temp_email.save(using=self._db)
return temp_email
class ChangeEmail(models.Model):
email = models.EmailField(max_length=95, unique=True)
created = models.DateTimeField(auto_now=True)
activation_key = models.CharField(max_length=64)
class ChangePasswordManager(models.Manager):
def create_confirmation(self, password, key, cuser_id):
temp_password = self.model(
password=password,
activation_key=key,
user_id=cuser_id,
)
temp_password.save(using=self._db)
return temp_password
class ChangePassword(models.Model):
password = models.CharField(max_length=20)
created = models.DateTimeField(auto_now=True)
activation_key = models.CharField(max_length=64)
user_id = models.CharField(max_length=50, blank=True, null=True)
objects = ChangePasswordManager()
class SyncUser(models.Model):
email = models.EmailField(
verbose_name='email address',
max_length=255,
unique=True,
db_index=True,
)
firstname = models.CharField(max_length=64)
lastname = models.CharField(max_length=64)
principal_id = models.CharField(max_length=36)
scope_id = models.CharField(
max_length=36,
default='00000000-0000-0000-0000-000000000000'
)
user_level = models.IntegerField(default=0)
def __unicode__(self):
return '%s ** %s %s' % (self.firstname,
self.lastname,
self.email)
| 31.077519 | 114 | 0.634697 | 7,512 | 0.936892 | 0 | 0 | 0 | 0 | 0 | 0 | 1,451 | 0.180968 |
b523c2ff097c63e33e8bee17d44fcc56243d89de | 1,729 | py | Python | keyboards/inline/in_processing/confirm_keyboard.py | itcosplay/cryptobot | 6890cfde64a631bf0e4db55f6873a2217212d801 | [
"MIT"
]
| null | null | null | keyboards/inline/in_processing/confirm_keyboard.py | itcosplay/cryptobot | 6890cfde64a631bf0e4db55f6873a2217212d801 | [
"MIT"
]
| null | null | null | keyboards/inline/in_processing/confirm_keyboard.py | itcosplay/cryptobot | 6890cfde64a631bf0e4db55f6873a2217212d801 | [
"MIT"
]
| null | null | null | from emoji import emojize
from data import all_emoji
from aiogram.types import InlineKeyboardMarkup
from aiogram.types import InlineKeyboardButton
from aiogram.utils.callback_data import CallbackData
cb_confirm_close = CallbackData('cb_cc', 'type_btn')
def create_kb_confirm_close():
emo_snail = all_emoji['back__main_menu']
keyboard = InlineKeyboardMarkup()
keyboard.add (
InlineKeyboardButton (
text = 'подтверждаю!',
callback_data = cb_confirm_close.new(type_btn='confirm')
)
)
keyboard.add (
InlineKeyboardButton (
text = 'добавить сообщение',
callback_data = cb_confirm_close.new(type_btn='add_message')
)
)
keyboard.add (
InlineKeyboardButton (
text = 'вернуться к заявке',
callback_data = cb_confirm_close.new(type_btn='back_to_request')
)
)
keyboard.add (
InlineKeyboardButton (
text = f'назад {emo_snail} главное меню',
callback_data = cb_confirm_close.new(type_btn='back__main_menu')
)
)
return keyboard
def create_kb_confirm_cancel_request():
emo_snail = all_emoji['back__main_menu']
keyboard = InlineKeyboardMarkup()
keyboard.add (
InlineKeyboardButton (
text = 'отменить заявку',
callback_data = 'cancel'
)
)
keyboard.add (
InlineKeyboardButton (
text = 'вернуться к заявке',
callback_data = 'back_to_request'
)
)
keyboard.add (
InlineKeyboardButton (
text = f'назад {emo_snail} главное меню',
callback_data = 'back__main_menu'
)
)
return keyboard
| 26.6 | 76 | 0.625795 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 412 | 0.224523 |
b524a997831cceef37fb6ffcb9a5a1813e885500 | 9,076 | py | Python | utils/HCA/a_star.py | proroklab/magat_pathplanning | a2cab3b11abc46904bc45be1762a780becb1e8c7 | [
"MIT"
]
| 40 | 2021-07-01T03:14:20.000Z | 2022-03-23T23:45:22.000Z | utils/HCA/a_star.py | QingbiaoLi/magat_pathplanning | f28429b1a2ab7866c3001b82e6ae9ca3f072c106 | [
"MIT"
]
| null | null | null | utils/HCA/a_star.py | QingbiaoLi/magat_pathplanning | f28429b1a2ab7866c3001b82e6ae9ca3f072c106 | [
"MIT"
]
| 13 | 2021-07-14T07:57:16.000Z | 2022-03-03T10:43:25.000Z | '''
This file contains utility of AStarSearch.
Thanks to Binyu Wang for providing the codes.
'''
from random import randint
import numpy as np
class SearchEntry():
def __init__(self, x, y, g_cost, f_cost=0, pre_entry=None):
self.x = x
self.y = y
# cost move form start entry to this entry
self.g_cost = g_cost
self.f_cost = f_cost
self.pre_entry = pre_entry
def getPos(self):
return (self.x, self.y)
def AStarSearch(img, source, dest):
def getNewPosition(img, location, offset):
x, y = (location.x + offset[0], location.y + offset[1])
if x < 0 or x >= img.shape[0] or y < 0 or y >= img.shape[1] or img[x, y] == 1 or img[x, y] == 3:
return None
return (x, y)
def getPositions(img, location):
# use four ways or eight ways to move
offsets = [(-1, 0), (0, -1), (1, 0), (0, 1)]
# offsets = [(-1,0), (0, -1), (1, 0), (0, 1), (-1,-1), (1, -1), (-1, 1), (1, 1)]
poslist = []
for offset in offsets:
pos = getNewPosition(img, location, offset)
if pos is not None:
poslist.append(pos)
return poslist
# imporve the heuristic distance more precisely in future
def calHeuristic(pos, dest):
return abs(dest.x - pos[0]) + abs(dest.y - pos[1])
def getMoveCost(location, pos):
if location.x != pos[0] and location.y != pos[1]:
return 1.4
else:
return 1
# check if the position is in list
def isInList(list, pos):
if pos in list:
return list[pos]
return None
# add available adjacent positions
def addAdjacentPositions(img, location, dest, openlist, closedlist):
poslist = getPositions(img, location)
for pos in poslist:
# if position is already in closedlist, do nothing
if isInList(closedlist, pos) is None:
findEntry = isInList(openlist, pos)
h_cost = calHeuristic(pos, dest)
g_cost = location.g_cost + getMoveCost(location, pos)
if findEntry is None:
# if position is not in openlist, add it to openlist
openlist[pos] = SearchEntry(pos[0], pos[1], g_cost, g_cost + h_cost, location)
elif findEntry.g_cost > g_cost:
# if position is in openlist and cost is larger than current one,
# then update cost and previous position
findEntry.g_cost = g_cost
findEntry.f_cost = g_cost + h_cost
findEntry.pre_entry = location
# find a least cost position in openlist, return None if openlist is empty
def getFastPosition(openlist):
fast = None
for entry in openlist.values():
if fast is None:
fast = entry
elif fast.f_cost > entry.f_cost:
fast = entry
return fast
all_path = []
openlist = {}
closedlist = {}
location = SearchEntry(source[0], source[1], 0.0)
dest = SearchEntry(dest[0], dest[1], 0.0)
openlist[source] = location
while True:
location = getFastPosition(openlist)
if location is None:
# not found valid path
# print("can't find valid path")
return ([source])
if location.x == dest.x and location.y == dest.y:
break
closedlist[location.getPos()] = location
openlist.pop(location.getPos())
addAdjacentPositions(img, location, dest, openlist, closedlist)
while location is not None:
all_path.append([location.x, location.y])
# img[location.x][location.y] = 2
location = location.pre_entry
return all_path[::-1]
def hca(img, all_start, all_end, steps=100):
all_path = []
robot_loc = np.where(img == 3)
for i in range(img.shape[0]):
for j in range(img.shape[1]):
if img[i, j] == 3:
img[i, j] = 0
res_imgs = np.expand_dims(img, axis=0).repeat(steps, axis=0)
for i in range(len(robot_loc[0])):
res_imgs[0, robot_loc[0][i], robot_loc[1][i]] = 3
for i in range(len(all_start)):
robot_path = AStarTime(res_imgs, (all_start[i][0], all_start[i][1]), (all_end[i][0], all_end[i][1]))
# print(i)
if len(robot_path) == 1:
new_path = []
for j in range(steps - 1):
res_imgs[j, all_start[i][0], all_start[i][1]] = 3
new_path.append([all_start[i][0], all_start[i][1], j])
all_path.append(new_path)
continue
else:
for loc in robot_path:
res_imgs[loc[2], loc[0], loc[1]] = 3
all_path.append(robot_path)
return all_path
class SearchEntryTime():
def __init__(self, x, y, z, g_cost, f_cost=0, pre_entry=None):
self.x = x
self.y = y
self.z = z
# cost move form start entry to this entry
self.g_cost = g_cost
self.f_cost = f_cost
self.pre_entry = pre_entry
def getPos(self):
return (self.x, self.y, self.z)
def AStarTime(imgs, source, dest, total_steps=80):
def getNewPosition(img, location, offset, step=0):
x, y = (location.x + offset[0], location.y + offset[1])
if x < 0 or x >= img.shape[0] or y < 0 or y >= img.shape[1] or img[x, y] == 1 or img[x, y] == 3:
return None
return (x, y, step)
def getPositions(img, location, step=0):
# use four ways or eight ways to move
offsets = [(-1, 0), (0, -1), (1, 0), (0, 1)]
# offsets = [(-1,0), (0, -1), (1, 0), (0, 1), (-1,-1), (1, -1), (-1, 1), (1, 1)]
poslist = []
for offset in offsets:
pos = getNewPosition(img, location, offset, step)
if pos is not None:
poslist.append(pos)
return poslist
# imporve the heuristic distance more precisely in future
def calHeuristic(pos, dest):
return abs(dest.x - pos[0]) + abs(dest.y - pos[1])
def getMoveCost(location, pos):
if location.x != pos[0] and location.y != pos[1]:
return 1.4
else:
return 1
# check if the position is in list
def isInList(list, pos):
if pos in list:
return list[pos]
return None
# add available adjacent positions
def addAdjacentPositions(imgs, location, dest, openlist, closedlist, steps):
img = imgs[int(steps + 1), :, :]
poslist = getPositions(img, location, steps)
for pos in poslist:
# if position is already in closedlist, do nothing
if isInList(closedlist, pos) is None:
findEntry = isInList(openlist, pos)
h_cost = calHeuristic(pos, dest)
g_cost = location.g_cost + getMoveCost(location, pos)
if findEntry is None:
# if position is not in openlist, add it to openlist
steps = int(g_cost)
openlist[(pos[0], pos[1], steps)] = SearchEntryTime(pos[0], pos[1], steps, g_cost, g_cost + h_cost,
location)
elif findEntry.g_cost > g_cost:
# if position is in openlist and cost is larger than current one,
# then update cost and previous position
findEntry.g_cost = g_cost
findEntry.f_cost = g_cost + h_cost
findEntry.z = int(g_cost)
findEntry.pre_entry = location
# find a least cost position in openlist, return None if openlist is empty
def getFastPosition(openlist):
fast = None
for entry in openlist.values():
if fast is None:
fast = entry
elif fast.f_cost > entry.f_cost:
fast = entry
return fast
all_path = []
openlist = {}
closedlist = {}
location = SearchEntryTime(source[0], source[1], 0, 0.0)
dest = SearchEntryTime(dest[0], dest[1], 0, 0.0)
openlist[(source[0], source[1], 0)] = location
steps = 0
while steps < total_steps:
location = getFastPosition(openlist)
if location is None:
# not found valid path
# print("can't find valid path")
return ([source])
if location.x == dest.x and location.y == dest.y:
break
closedlist[location.getPos()] = location
openlist.pop(location.getPos())
steps = int(location.g_cost)
addAdjacentPositions(imgs, location, dest, openlist, closedlist, steps)
while location is not None:
all_path.append([location.x, location.y, location.z])
# img[location.x][location.y] = 2
location = location.pre_entry
return all_path[::-1]
# img = np.zeros((20,20))
# source = (0,0)
# dest = (img.shape[0]-1, img.shape[1]-1)
# path = AStarSearch(img, source, dest)
| 35.592157 | 119 | 0.552336 | 676 | 0.074482 | 0 | 0 | 0 | 0 | 0 | 0 | 1,537 | 0.169348 |
b525a442d992316233f044f50e799f9a075c90fa | 1,270 | py | Python | app/users/tasks.py | atulmishra-one/dairy_management_portal | a07320dc0f4419d4c78f7d2453c63b1c9544aba8 | [
"MIT"
]
| 2 | 2020-08-02T10:06:19.000Z | 2022-03-29T06:10:57.000Z | app/users/tasks.py | atulmishra-one/dairy_management_portal | a07320dc0f4419d4c78f7d2453c63b1c9544aba8 | [
"MIT"
]
| null | null | null | app/users/tasks.py | atulmishra-one/dairy_management_portal | a07320dc0f4419d4c78f7d2453c63b1c9544aba8 | [
"MIT"
]
| 2 | 2019-02-03T15:44:02.000Z | 2021-03-09T07:30:28.000Z | import xlrd
from app.services.extension import task_server, sqlalchemy as db
from app.models.core.user import User
from app.application import initialize_app
try:
from app.config.production import ProductionConfig as config_object
except ImportError:
from app.config.local import LocalConfig as config_object
@task_server.task()
def upload_users(file_object):
workbook = xlrd.open_workbook(file_object)
worksheet = workbook.sheet_by_index(0)
offset = 0
rows = []
for i, row in enumerate(range(worksheet.nrows)):
if i <= offset: # (Optionally) skip headers
continue
r = []
for j, col in enumerate(range(worksheet.ncols)):
r.append(worksheet.cell_value(i, j))
rows.append(r)
users = []
for i, row in enumerate(rows):
users.append({
'initial_name': row[0],
'first_name': row[1],
'last_name': row[2],
'username': row[3],
'email': row[4],
'password': row[5],
'active': row[6]
})
app = initialize_app(config_object)
with app.test_request_context():
user_object = User()
user_object.create_or_update(users)
return "OK." | 27.608696 | 71 | 0.607874 | 0 | 0 | 0 | 0 | 949 | 0.747244 | 0 | 0 | 104 | 0.08189 |
b526e227b8af6adb71768eb4900aaf57a69f1acb | 3,444 | py | Python | savenger.py | SlapBot/GodkillerArmor | 27058332cd94c4389b092a621eeedc834d8f5a15 | [
"MIT"
]
| 3 | 2018-07-06T17:06:28.000Z | 2018-09-06T03:31:43.000Z | savenger.py | SlapBot/GodkillerArmor | 27058332cd94c4389b092a621eeedc834d8f5a15 | [
"MIT"
]
| null | null | null | savenger.py | SlapBot/GodkillerArmor | 27058332cd94c4389b092a621eeedc834d8f5a15 | [
"MIT"
]
| 1 | 2018-07-10T00:13:07.000Z | 2018-07-10T00:13:07.000Z | from praw import Reddit
import random
class Savenger:
AVENGERS = ["Iron Man", "Doctor Strange", "Star-Lord", "Black Widow", "Thor",
"Spider-Man", "Captain America", "Wanda Maximoff", "Bucky Barnes",
"Loki", "Hulk", "Black Panther", "Vision", "Gamora", "Drax", "Nebula",
"Sam Wilson", "Mantis", "Okoye", "Shuri", "Groot", "Rocket", "Heimdall"]
def __init__(self):
self.Reddit = Reddit
def get_superhero(self):
return random.choice(self.AVENGERS)
def authenticate(self, username, password, client_id, client_secret, user_agent):
print("Authenticating...")
try:
self.reddit = self.Reddit(user_agent=user_agent, client_id=client_id,
client_secret=client_secret, username=username,
password=password)
self.user = self.reddit.user.me()
print(f"Authenticated as {self.user}")
return self.reddit
except Exception as e:
print(e)
exit()
def save(self, subreddit):
try:
print("Savengers are on the way, stay hold.")
subreddit = self.reddit.subreddit(subreddit)
print(f"{self.get_superhero()} finding every threatening submission made in {subreddit}")
subreddit_submissions = self.get_user_subreddit_submissions(subreddit)
self.delete_submissions(subreddit_submissions)
print(f"{self.get_superhero()} saved your from dying by the submission's author")
print(f"{self.get_superhero()} finding every forbidding comment made in {subreddit}")
subreddit_comments = self.get_user_subreddit_comments(subreddit)
self.delete_comments(subreddit_comments)
print("Savengers have saved you!")
print("Go visit https://www.reddit.com/r/savengers/ to have a chat with the fellow superheroes")
return True
except Exception as e:
print(e)
exit()
def get_user_subreddit_comments(self, subreddit):
subreddit_comments = []
for comment in self.user.comments.new(limit=None):
if comment.subreddit == subreddit:
if comment.body:
print(f"{self.get_superhero()} found a comment with the body: {comment.body}")
subreddit_comments.append(comment)
return subreddit_comments
def get_user_subreddit_submissions(self, subreddit):
subreddit_submissions = []
for submission in self.user.submissions.new(limit=None):
if submission.subreddit == subreddit:
if submission.title:
print(f"{self.get_superhero()} found a submission with the title: {submission.title}")
subreddit_submissions.append(submission)
return subreddit_submissions
def delete_comments(self, subreddit_comments):
for subreddit_comment in subreddit_comments:
print(f"{self.get_superhero()} successfully eliminated the threatening comment!")
subreddit_comment.delete()
return True
def delete_submissions(self, subreddit_submissions):
for subreddit_submission in subreddit_submissions:
print(f"{self.get_superhero()} successfully eliminated the forbidding post!")
subreddit_submission.delete()
return True
| 44.727273 | 108 | 0.626597 | 3,403 | 0.988095 | 0 | 0 | 0 | 0 | 0 | 0 | 963 | 0.279617 |
b52a4b91de40afb841386437bc92df7dcd61942d | 1,493 | py | Python | python-packages/pyRiemann-0.2.2/pyriemann/channelselection.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
]
| 2 | 2017-08-13T14:09:32.000Z | 2018-07-16T23:39:00.000Z | python-packages/pyRiemann-0.2.2/pyriemann/channelselection.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
]
| null | null | null | python-packages/pyRiemann-0.2.2/pyriemann/channelselection.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
]
| 2 | 2018-04-02T06:45:11.000Z | 2018-07-16T23:39:02.000Z | from .utils.distance import distance
from .classification import MDM
import numpy
from sklearn.base import BaseEstimator, TransformerMixin
##########################################################
class ElectrodeSelection(BaseEstimator, TransformerMixin):
def __init__(self, nelec=16, metric='riemann'):
self.nelec = nelec
self.metric = metric
self.subelec = -1
self.dist = []
def fit(self, X, y=None):
mdm = MDM(metric=self.metric)
mdm.fit(X, y)
self.covmeans = mdm.covmeans
Ne, _ = self.covmeans[0].shape
self.subelec = range(0, Ne, 1)
while (len(self.subelec)) > self.nelec:
di = numpy.zeros((len(self.subelec), 1))
for idx in range(len(self.subelec)):
sub = self.subelec[:]
sub.pop(idx)
di[idx] = 0
for i in range(len(self.covmeans)):
for j in range(i + 1, len(self.covmeans)):
di[idx] += distance(
self.covmeans[i][
:, sub][
sub, :], self.covmeans[j][
:, sub][
sub, :])
# print di
torm = di.argmax()
self.dist.append(di.max())
self.subelec.pop(torm)
return self
def transform(self, X):
return X[:, self.subelec, :][:, :, self.subelec]
| 31.765957 | 62 | 0.464836 | 1,291 | 0.864702 | 0 | 0 | 0 | 0 | 0 | 0 | 77 | 0.051574 |
b52aa43dd118effc265e50061d8175f3814721d2 | 1,897 | py | Python | syncbase/user/urls.py | gkrnours/syncbase | 0e20d400fe83e2157ee6e893a105253d20634808 | [
"MIT"
]
| null | null | null | syncbase/user/urls.py | gkrnours/syncbase | 0e20d400fe83e2157ee6e893a105253d20634808 | [
"MIT"
]
| null | null | null | syncbase/user/urls.py | gkrnours/syncbase | 0e20d400fe83e2157ee6e893a105253d20634808 | [
"MIT"
]
| null | null | null | from django.conf.urls import url, include
from django.contrib.auth import views as auth
from user.forms import NewAccountForm
from user import views
app_name = 'user'
urlpatterns = [
# auth
url(r'^create/$', views.UserCreate.as_view(), name='create'),
url(r'^login/$', auth.login,
{'template_name':'user/login.html'},
name='login'),
url(r'^logout/$', auth.logout,
{'template_name':'user/logout.html'},
name='logout'),
url(r'^password_change/$', auth.password_change,
{'template_name':'user/password_change_form.html',
'post_change_redirect':'user:password_change_done'},
name='password_change'),
url(r'^password_change/done/$', auth.password_change_done,
{'template_name':'user/password_change_done.html'},
name='password_change_done'),
url(r'^password_reset/$', auth.password_reset,
{'post_reset_redirect': 'user:password_reset_done',
'template_name': 'user/password_reset_form.html',
'email_template_name': 'user/password_reset_email.html',
'subject_template_name': 'user/password_reset_subject.txt'},
name='password_reset'),
url(r'^password_reset/done/$', auth.password_reset_done,
{'template_name': 'user/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
auth.password_reset_confirm,
{'post_reset_redirect':'user:password_reset_complete',
'template_name': "user/password_reset_confirm.html"},
name='password_reset_confirm'),
url(r'^reset/done/$', auth.password_reset_complete,
{'template_name': 'user/password_reset_complete.html'},
name='password_reset_complete'),
# profile
url(r'^basic/$', views.BasicInfo.as_view(), name="basic"),
]
| 34.490909 | 95 | 0.655245 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,022 | 0.538745 |
b52daf8a9a6916b3bc3be9fb6b077491427da67f | 1,728 | py | Python | mac_changer.py | xicoder96/luv-sic | 033527b558c3e4d7f254dca1e2f6f0ccf9ff78fe | [
"MIT"
]
| null | null | null | mac_changer.py | xicoder96/luv-sic | 033527b558c3e4d7f254dca1e2f6f0ccf9ff78fe | [
"MIT"
]
| null | null | null | mac_changer.py | xicoder96/luv-sic | 033527b558c3e4d7f254dca1e2f6f0ccf9ff78fe | [
"MIT"
]
| null | null | null | #!/usr/bin/env python3
import subprocess
import re
import argparse
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--interface", dest="interface",
help="interface to change mac address")
parser.add_argument("-m", "--mac", dest="new_mac",
help="value of new mac address")
options = parser.parse_args()
if not options.interface:
parser.error("Please enter interface, use --help for more information")
elif not options.new_mac:
parser.error(
"Please enter new MAC address use --help for more information")
return options
def change_mac(interface, new_mac):
print(f"[+] Changing mac address for {interface} to {new_mac}")
subprocess.call(["sudo", "ifconfig", interface, "down"])
subprocess.call(["sudo", "ifconfig", interface, "hw", "ether", new_mac])
subprocess.call(["sudo", "ifconfig", interface, "up"])
def get_current_mac(interface):
ifconfig_result = str(subprocess.check_output(
["sudo", "ifconfig", interface]))
search_result = re.search(
r"\w\w:\w\w:\w\w:\w\w:\w\w:\w\w", ifconfig_result)
if search_result:
return search_result.group(0)
else:
print("[-] Could not read mac address")
if __name__ == "__main__":
options = get_arguments()
current_mac = get_current_mac(options.interface)
print(f"Current Mac:{current_mac}")
change_mac(options.interface, options.new_mac)
current_mac = get_current_mac(options.interface)
if current_mac == options.new_mac:
print(f"[+] MAC address was successfully changed to {current_mac}")
else:
print("[-] MAC address did not change")
| 33.230769 | 79 | 0.65162 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 583 | 0.337384 |
b53016b4f1a8a22aaafbf177615312636a59d031 | 1,916 | py | Python | training/model.py | J77M/stuffy-nose-recognition | e5d8957e2026e9046e6ffee69a60a11a686bc042 | [
"MIT"
]
| null | null | null | training/model.py | J77M/stuffy-nose-recognition | e5d8957e2026e9046e6ffee69a60a11a686bc042 | [
"MIT"
]
| null | null | null | training/model.py | J77M/stuffy-nose-recognition | e5d8957e2026e9046e6ffee69a60a11a686bc042 | [
"MIT"
]
| null | null | null | import tensorflow as tf
import numpy as np
import time
import utils
path = r'data/'
x, y = utils.reload_data(path)
inp_shape = (x[0].shape[0],1)
x = np.array(x).reshape(-1, 1000, 1)# change 1000 to your sample lenght if you changed frame (= CHUNK ) or RESOLUTION
# prepared for testing and evaluating. try other combinations of architecture
dense_layers = [1]
conv_sizes = [64]
conv_layers = [2]
dense_layer_sizes = [256]
kernel = 10
pool_size = 4
_batchs = 5
_epochs = 10
for dense_layer in dense_layers:
for conv_layer in conv_layers:
for dense_size in dense_layer_sizes:
for conv_size in conv_sizes:
NAME = '{}-conv_layers-{}-dense_layers-{}-conv_size-{}-dense_size-{}-kernel-{}'.format(conv_layer,dense_layer,conv_size, dense_size,kernel, int(time.time()))
model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv1D(conv_size, kernel, activation='relu', input_shape = inp_shape))
model.add(tf.keras.layers.MaxPooling1D(pool_size))
for i in range(conv_layer-1):
model.add(tf.keras.layers.Conv1D(conv_size, kernel, activation='relu'))
model.add(tf.keras.layers.MaxPooling1D(pool_size))
model.add(tf.keras.layers.Flatten())
for _ in range(dense_layer):
model.add(tf.keras.layers.Dense(dense_size, activation='relu'))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
model.compile(loss = 'binary_crossentropy', optimizer='adam', metrics=['accuracy'])
tensorboard = tf.keras.callbacks.TensorBoard(log_dir='model_evaluate/{}'.format(NAME))
print(NAME)
model.fit(x,y, batch_size = _batchs, epochs=_epochs, validation_split = 0.2, callbacks=[tensorboard])
model.save('trained_models/{}.h5'.format(NAME)) | 39.102041 | 173 | 0.641441 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 343 | 0.179019 |
b5317c9553e4578dd1313a39baa5b57770eff21b | 6,163 | py | Python | app/api_service/ice_creams/migrations/0001_initial.py | TheRayOfSeasons/worker-heavy-cicd | fa36e89dd68ee2fd8b37bda55d6bb885f31afaa7 | [
"MIT"
]
| null | null | null | app/api_service/ice_creams/migrations/0001_initial.py | TheRayOfSeasons/worker-heavy-cicd | fa36e89dd68ee2fd8b37bda55d6bb885f31afaa7 | [
"MIT"
]
| null | null | null | app/api_service/ice_creams/migrations/0001_initial.py | TheRayOfSeasons/worker-heavy-cicd | fa36e89dd68ee2fd8b37bda55d6bb885f31afaa7 | [
"MIT"
]
| null | null | null | # Generated by Django 3.1.6 on 2021-02-12 07:47
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Flavor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
('is_active', models.BooleanField(default=True)),
('datetime_deleted', models.DateTimeField(blank=True, null=True)),
('name', models.CharField(max_length=128)),
('description', models.TextField(blank=True, default='')),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_created_set', to=settings.AUTH_USER_MODEL)),
('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_deleted_set', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_flavor_modified_set', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Topping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
('is_active', models.BooleanField(default=True)),
('datetime_deleted', models.DateTimeField(blank=True, null=True)),
('name', models.CharField(max_length=128)),
('description', models.TextField(blank=True, default='')),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_created_set', to=settings.AUTH_USER_MODEL)),
('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_deleted_set', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_topping_modified_set', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='IceCreamServing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
('is_active', models.BooleanField(default=True)),
('datetime_deleted', models.DateTimeField(blank=True, null=True)),
('name', models.CharField(blank=True, default='', max_length=128)),
('description', models.TextField(blank=True, default='')),
('category', models.IntegerField(choices=[(1, 'In Cone'), (2, '500ml Tub'), (3, '1L Tub'), (4, '2L Tub')])),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_created_set', to=settings.AUTH_USER_MODEL)),
('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_deleted_set', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecreamserving_modified_set', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='IceCream',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime_created', models.DateTimeField(auto_now_add=True, null=True)),
('datetime_modified', models.DateTimeField(auto_now=True, null=True)),
('is_active', models.BooleanField(default=True)),
('datetime_deleted', models.DateTimeField(blank=True, null=True)),
('order', models.IntegerField()),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_created_set', to=settings.AUTH_USER_MODEL)),
('deleted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_deleted_set', to=settings.AUTH_USER_MODEL)),
('flavor', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.flavor')),
('modified_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ice_creams_icecream_modified_set', to=settings.AUTH_USER_MODEL)),
('serving', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.icecreamserving')),
('toppings', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ice_creams.topping')),
],
options={
'abstract': False,
},
),
]
| 66.268817 | 204 | 0.646438 | 6,004 | 0.974201 | 0 | 0 | 0 | 0 | 0 | 0 | 1,199 | 0.194548 |
b5325a85e324486debcb82eb330c6fd293cb8cf4 | 1,306 | py | Python | game/game/protocol.py | maosplx/L2py | 5d81b2ea150c0096cfce184706fa226950f7f583 | [
"MIT"
]
| 7 | 2020-09-01T21:52:37.000Z | 2022-02-25T16:00:08.000Z | game/game/protocol.py | maosplx/L2py | 5d81b2ea150c0096cfce184706fa226950f7f583 | [
"MIT"
]
| 4 | 2021-09-10T22:15:09.000Z | 2022-03-25T22:17:43.000Z | game/game/protocol.py | maosplx/L2py | 5d81b2ea150c0096cfce184706fa226950f7f583 | [
"MIT"
]
| 9 | 2020-09-01T21:53:39.000Z | 2022-03-30T12:03:04.000Z | import logging
from common.api_handlers import handle_request
from common.packet import Packet
from common.response import Response
from common.transport.protocol import TCPProtocol
from game.models.world import WORLD
from game.session import GameSession
from game.states import Connected
LOG = logging.getLogger(f"l2py.{__name__}")
class Lineage2GameProtocol(TCPProtocol):
session_cls = GameSession
def connection_made(self, transport):
super().connection_made(transport)
LOG.info(
"New connection from %s:%s",
*self.transport.peer,
)
self.session.set_state(Connected)
@TCPProtocol.make_async
async def data_received(self, data: bytes):
request = self.transport.read(data)
response = await handle_request(request)
if response:
LOG.debug(
"Sending packet to %s:%s",
*self.transport.peer,
)
self.transport.write(response)
for action in response.actions_after:
action_result = await action
if isinstance(action_result, Packet):
self.transport.write(Response(action_result, self.session))
def connection_lost(self, exc) -> None:
self.session.logout_character()
| 30.372093 | 79 | 0.658499 | 968 | 0.741194 | 0 | 0 | 575 | 0.440276 | 547 | 0.418836 | 70 | 0.053599 |
b532e08e69d241104c91b1c89e9d10205dab72ab | 292 | py | Python | day6.py | seblars/AdventOfCode2020 | dc41181caa50fe03645aa36d70fe0ebd76cd6e25 | [
"MIT"
]
| 1 | 2020-12-16T09:37:01.000Z | 2020-12-16T09:37:01.000Z | day6.py | seblars/AdventOfCode2020 | dc41181caa50fe03645aa36d70fe0ebd76cd6e25 | [
"MIT"
]
| null | null | null | day6.py | seblars/AdventOfCode2020 | dc41181caa50fe03645aa36d70fe0ebd76cd6e25 | [
"MIT"
]
| null | null | null | import fileinput
# "day6.txt"
groups = [x.split() for x in ''.join(fileinput.input()).split('\n\n')]
# part 1
print(sum(len(set([j for sub in group for j in sub])) for group in groups))
# part 2
print(sum(len(set.intersection(*[set(list(j)) for j in group])) for group in groups))
| 29.2 | 86 | 0.64726 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.133562 |
b53308bf2a97499e9f1e960c2ded7b7ca3598996 | 370 | py | Python | Conditional/Extras/Everton/02.py | Vitor-ORB/algorithms-and-programming-1-ufms | 10821e9b580b78b7f78c27e740f3ead9c6b9f0bd | [
"MIT"
]
| 7 | 2021-05-25T16:49:20.000Z | 2022-02-17T11:57:32.000Z | Conditional/Extras/Everton/02.py | Vitor-ORB/algorithms-and-programming-1-ufms | 10821e9b580b78b7f78c27e740f3ead9c6b9f0bd | [
"MIT"
]
| null | null | null | Conditional/Extras/Everton/02.py | Vitor-ORB/algorithms-and-programming-1-ufms | 10821e9b580b78b7f78c27e740f3ead9c6b9f0bd | [
"MIT"
]
| 8 | 2021-05-25T16:49:39.000Z | 2021-09-30T18:02:07.000Z | '''Considere o problema de computar o valor absoluto de um número real.
O valor absoluto de um número real x é dado por f(x) = x se x >= 0 ou f(x) = -x se x < 0.
Projete e implemente um programa em Python que lei um número de ponto flutuante x, calcule e imprima o valor absoluto de x.'''
x = float(input())
y = (x**2)**(1/2)
print("|{:.2f}| = {:.2f}".format(x,y))
| 33.636364 | 126 | 0.651351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 311 | 0.831551 |
b536ac94f02abdab43e5ca604aa965f6ad2715d0 | 1,394 | py | Python | pyoptmat/solvers.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
]
| null | null | null | pyoptmat/solvers.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
]
| 1 | 2022-03-30T22:20:38.000Z | 2022-03-31T15:02:22.000Z | pyoptmat/solvers.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
]
| 2 | 2021-11-16T15:13:54.000Z | 2022-01-06T21:35:42.000Z | import torch
import warnings
def newton_raphson(fn, x0, linsolver = "lu", rtol = 1e-6, atol = 1e-10,
miter = 100):
"""
Solve a nonlinear system with Newton's method. Return the
solution and the last Jacobian
Args:
fn: function that returns the residual and Jacobian
x0: starting point
linsolver (optional): method to use to solve the linear system
rtol (optional): nonlinear relative tolerance
atol (optional): nonlinear absolute tolerance
miter (optional): maximum number of nonlinear iterations
"""
x = x0
R, J = fn(x)
nR = torch.norm(R, dim = -1)
nR0 = nR
i = 0
while (i < miter) and torch.any(nR > atol) and torch.any(nR / nR0 > rtol):
x -= solve_linear_system(J, R)
R, J = fn(x)
nR = torch.norm(R, dim = -1)
i += 1
if i == miter:
warnings.warn("Implicit solve did not succeed. Results may be inaccurate...")
return x, J
def solve_linear_system(A, b, method = "lu"):
"""
Solve or iterate on a linear system of equations
Args:
A: block matrix
b: block RHS
method (optional):
"""
if method == "diag":
return b / torch.diagonal(A, dim1=-2, dim2=-1)
elif method == "lu":
return torch.linalg.solve(A, b)
else:
raise ValueError("Unknown solver method!")
| 27.333333 | 82 | 0.585366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 770 | 0.552367 |
b5373a616def2b1d58dca3805f309b56a4c149e0 | 323 | py | Python | Algo and DSA/LeetCode-Solutions-master/Python/number-of-substrings-with-only-1s.py | Sourav692/FAANG-Interview-Preparation | f523e5c94d582328b3edc449ea16ac6ab28cdc81 | [
"Unlicense"
]
| 3,269 | 2018-10-12T01:29:40.000Z | 2022-03-31T17:58:41.000Z | Algo and DSA/LeetCode-Solutions-master/Python/number-of-substrings-with-only-1s.py | Sourav692/FAANG-Interview-Preparation | f523e5c94d582328b3edc449ea16ac6ab28cdc81 | [
"Unlicense"
]
| 53 | 2018-12-16T22:54:20.000Z | 2022-02-25T08:31:20.000Z | Algo and DSA/LeetCode-Solutions-master/Python/number-of-substrings-with-only-1s.py | Sourav692/FAANG-Interview-Preparation | f523e5c94d582328b3edc449ea16ac6ab28cdc81 | [
"Unlicense"
]
| 1,236 | 2018-10-12T02:51:40.000Z | 2022-03-30T13:30:37.000Z | # Time: O(n)
# Space: O(1)
class Solution(object):
def numSub(self, s):
"""
:type s: str
:rtype: int
"""
MOD = 10**9+7
result, count = 0, 0
for c in s:
count = count+1 if c == '1' else 0
result = (result+count)%MOD
return result
| 20.1875 | 46 | 0.436533 | 293 | 0.907121 | 0 | 0 | 0 | 0 | 0 | 0 | 85 | 0.263158 |
b537ff6eac7f94b76cf8db09b3957cee998efb52 | 4,531 | py | Python | usecase-2/monitoring/fleet-seat-info-monitor/src/seat_res_train_monitor.py | edgefarm/edgefarm-demos | 6381d4a2f7f9c1d0632ab8123fed2bd0763d3b34 | [
"MIT"
]
| null | null | null | usecase-2/monitoring/fleet-seat-info-monitor/src/seat_res_train_monitor.py | edgefarm/edgefarm-demos | 6381d4a2f7f9c1d0632ab8123fed2bd0763d3b34 | [
"MIT"
]
| 9 | 2021-04-21T10:37:45.000Z | 2021-07-28T05:56:50.000Z | usecase-2/monitoring/fleet-seat-info-monitor/src/seat_res_train_monitor.py | edgefarm/train-simulation | 6381d4a2f7f9c1d0632ab8123fed2bd0763d3b34 | [
"MIT"
]
| null | null | null | import logging
import datetime
import asyncio
from edgefarm_application.base.application_module import application_module_network_nats
from edgefarm_application.base.avro import schemaless_decode
from run_task import run_task
from state_tracker import StateTracker
from schema_loader import schema_load
_logger = logging.getLogger(__name__)
_state_report_subject = "public.seatres.status"
class SeatResTrainMonitor:
def __init__(self, train_id, q):
self.train_id = train_id
self.edge_report_ts = None
# this is the combined state from the train and the train online state
self.state = StateTracker(
"TrainSeatRes",
{
"UNKNOWN": "unknown",
"OFFLINE": "offline",
"ONLINE-UNKNOWN": "online, unclear state",
"ONLINE-NOK": "online, but not ok",
"ONLINE-OK": "online, ok",
},
)
# this is just the online state of the train
self.state_online = StateTracker(
"Train-Online-Monitor",
{
"UNKNOWN": "train state unknown",
"OFFLINE": "train is offline",
"ONLINE": "train is online",
},
)
self._q = q
self._task = asyncio.create_task(run_task(_logger, q, self._watchdog))
async def start(self):
self.state.update("UNKNOWN")
await self.state_online.update_and_send_event("UNKNOWN", self._send_event)
def stop(self):
self._task.cancel()
async def update_edge_state(self, state):
self.edge_report_ts = datetime.datetime.now()
if state == -1:
up_state = "ONLINE-UNKNOWN"
elif state == 0:
up_state = "ONLINE-NOK"
elif state == 1:
up_state = "ONLINE-OK"
self.state.update(up_state)
await self.state_online.update_and_send_event("ONLINE", self._send_event)
async def _watchdog(self):
while True:
now = datetime.datetime.now()
if self.edge_report_ts is not None:
if (now - self.edge_report_ts).total_seconds() > 10:
self.state.update("OFFLINE")
await self.state_online.update_and_send_event(
"OFFLINE", self._send_event
)
await asyncio.sleep(1)
async def _send_event(self, data):
data["train_id"] = self.train_id
await self._q.put(data)
class TrainStatusCollector:
"""
Collect seat reservation system status of all trains.
The individual trains report their SeatRes state via Nats subject 'public.seatres.status' to
this module.
"""
def __init__(self, q):
self._nc = application_module_network_nats()
self._q = q
self._state_report_codec = schema_load(__file__, "system_status")
self._trains = {}
async def start(self):
self._state_report_subscription_id = await self._nc.subscribe(
_state_report_subject, cb=self._state_report_handler
)
async def stop(self):
await self._nc.unsubscribe(self._state_report_subscription_id)
for v in self._trains.values():
v.stop()
async def add_train(self, train_id):
if train_id not in self._trains.keys():
v = SeatResTrainMonitor(train_id, self._q)
self._trains[train_id] = v
await v.start()
else:
v = self._trains[train_id]
return v
def trains(self):
return self._trains.values()
async def _state_report_handler(self, nats_msg):
"""
Called when a NATS message is received on _state_report_subject
"""
reply_subject = nats_msg.reply
msg = schemaless_decode(nats_msg.data, self._state_report_codec)
_logger.debug(f"state report received msg {msg}")
train_id = msg["data"]["trainId"]
try:
v = self._trains[train_id]
await self._update_edge_state(v, msg)
except KeyError:
_logger.info(f"received state report from new train {train_id}")
v = await self.add_train(train_id)
await self._update_edge_state(v, msg)
await self._nc.publish(reply_subject, b"")
async def _update_edge_state(self, v, msg):
try:
await v.update_edge_state(msg["data"]["status"])
except KeyError:
_logger.error(f"couldn't find [data][status] in {msg}")
| 31.685315 | 96 | 0.608034 | 4,132 | 0.91194 | 0 | 0 | 0 | 0 | 2,598 | 0.573383 | 920 | 0.203046 |
b53809b9629204fc4062a7f81e9f288aeb36cec8 | 302 | py | Python | modulo 03/script_03.py | p-g-krish/CursoSecurityToolsPython | 7b2205a33d23166a37a6b8105b9ca5863855aa85 | [
"Apache-2.0"
]
| 10 | 2020-02-13T03:14:29.000Z | 2021-09-16T04:32:40.000Z | modulo 03/script_03.py | p-g-krish/CursoSecurityToolsPython | 7b2205a33d23166a37a6b8105b9ca5863855aa85 | [
"Apache-2.0"
]
| null | null | null | modulo 03/script_03.py | p-g-krish/CursoSecurityToolsPython | 7b2205a33d23166a37a6b8105b9ca5863855aa85 | [
"Apache-2.0"
]
| 4 | 2020-02-18T23:42:23.000Z | 2021-09-10T05:52:09.000Z | from termcolor import colored, cprint
import sys
text = colored('Hello, World!', 'red', attrs=['reverse', 'blink'])
print(text)
cprint('Hello, World!', 'green', 'on_red')
for i in range(10):
cprint(i, 'magenta', end=' ')
cprint("Attention!",'red', attrs=['bold'], file=sys.stdout)
| 33.555556 | 68 | 0.622517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 101 | 0.334437 |
b538595bde41c89c5a8fbdc33e2ae560a927b953 | 1,597 | py | Python | src/AML/run_training.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
]
| 2 | 2021-07-12T02:37:46.000Z | 2021-12-28T23:03:20.000Z | src/AML/run_training.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
]
| 28 | 2021-12-29T00:51:24.000Z | 2022-03-24T08:03:59.000Z | src/AML/run_training.py | monkeypants/CartridgeOCR | a2cdaa72e3839a881118b85f5ff7b4515579004b | [
"MIT"
]
| 4 | 2021-09-24T16:13:43.000Z | 2022-03-09T17:52:35.000Z | import sys
from azureml.core import Workspace, Experiment, Environment, ScriptRunConfig
from azureml.core.compute import ComputeTarget, AmlCompute
from azureml.core.compute_target import ComputeTargetException
from shutil import copy
ws = Workspace.from_config()
# Choose a name for your CPU cluster
# cpu_cluster_name = "cpucluster"
cpu_cluster_name = "gpucompute"
experiment_name = "main"
src_dir = "model"
script = "train.py"
# Verify that cluster does not exist already
try:
cpu_cluster = ComputeTarget(workspace=ws, name=cpu_cluster_name)
print('Found existing cluster, use it.')
except ComputeTargetException:
compute_config = AmlCompute.provisioning_configuration(vm_size='Standard_DS12_v2',
max_nodes=4)
cpu_cluster = ComputeTarget.create(ws, cpu_cluster_name, compute_config)
cpu_cluster.wait_for_completion(show_output=True)
experiment = Experiment(workspace=ws, name=experiment_name)
copy('./config.json', 'model/config.json')
myenv = Environment.from_pip_requirements(name="myenv",
file_path="requirements.txt")
myenv.environment_variables['PYTHONPATH'] = './model'
myenv.environment_variables['RUNINAZURE'] = 'true'
config = ScriptRunConfig(source_directory=src_dir,
script="./training/train.py",
arguments=sys.argv[1:] if len(sys.argv) > 1 else None,
compute_target=cpu_cluster_name, environment=myenv)
run = experiment.submit(config)
aml_url = run.get_portal_url()
print(aml_url)
| 35.488889 | 86 | 0.708203 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 318 | 0.199123 |
b538fc619dc6adad01e93a8132a517e7cc8b2d80 | 818 | py | Python | tests/conftest.py | cielavenir/pyppmd-py2 | c148b8fbe7cb0c0e9f68fdf9a1c3599325f0e4c8 | [
"BSD-3-Clause"
]
| 3 | 2021-05-04T13:20:39.000Z | 2021-11-03T12:43:02.000Z | tests/conftest.py | cielavenir/pyppmd-py2 | c148b8fbe7cb0c0e9f68fdf9a1c3599325f0e4c8 | [
"BSD-3-Clause"
]
| 39 | 2021-04-16T02:55:28.000Z | 2022-03-30T14:23:50.000Z | tests/conftest.py | cielavenir/pyppmd-py2 | c148b8fbe7cb0c0e9f68fdf9a1c3599325f0e4c8 | [
"BSD-3-Clause"
]
| 3 | 2021-07-07T17:39:30.000Z | 2022-03-30T15:15:44.000Z | import cpuinfo
def pytest_benchmark_update_json(config, benchmarks, output_json):
"""Calculate compression/decompression speed and add as extra_info"""
for benchmark in output_json["benchmarks"]:
if "data_size" in benchmark["extra_info"]:
rate = benchmark["extra_info"].get("data_size", 0.0) / benchmark["stats"]["mean"]
benchmark["extra_info"]["rate"] = rate
def pytest_benchmark_update_machine_info(config, machine_info):
cpu_info = cpuinfo.get_cpu_info()
brand = cpu_info.get("brand_raw", None)
if brand is None:
brand = "{} core(s) {} CPU ".format(cpu_info.get("count", "unknown"), cpu_info.get("arch", "unknown"))
machine_info["cpu"]["brand"] = brand
machine_info["cpu"]["hz_actual_friendly"] = cpu_info.get("hz_actual_friendly", "unknown")
| 43.052632 | 110 | 0.684597 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 286 | 0.349633 |
b53920dd20dbdafabadb24be44f2a512437147fb | 331 | py | Python | examples/test_gcld3.py | lbp0200/EasyNMT | d253e9346996a47aa989bb33aed72e531528dc27 | [
"Apache-2.0"
]
| null | null | null | examples/test_gcld3.py | lbp0200/EasyNMT | d253e9346996a47aa989bb33aed72e531528dc27 | [
"Apache-2.0"
]
| null | null | null | examples/test_gcld3.py | lbp0200/EasyNMT | d253e9346996a47aa989bb33aed72e531528dc27 | [
"Apache-2.0"
]
| null | null | null | import time
import gcld3
detector = gcld3.NNetLanguageIdentifier(min_num_bytes=0,
max_num_bytes=1000)
# text = "This text is written in English"
text = "薄雾"
while True:
result = detector.FindLanguage(text=text)
print(text, result.probability, result.language)
time.sleep(0.01)
| 25.461538 | 59 | 0.65861 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.149254 |
b539e3fd28c31f9e28937feef603fdbd7a3fc98e | 1,593 | py | Python | src/0075下一个排列/index.py | zzh2036/OneDayOneLeetcode | 1198692e68f8f0dbf15555e45969122e1a92840a | [
"MIT"
]
| null | null | null | src/0075下一个排列/index.py | zzh2036/OneDayOneLeetcode | 1198692e68f8f0dbf15555e45969122e1a92840a | [
"MIT"
]
| null | null | null | src/0075下一个排列/index.py | zzh2036/OneDayOneLeetcode | 1198692e68f8f0dbf15555e45969122e1a92840a | [
"MIT"
]
| null | null | null | '''
实现获取 下一个排列 的函数,算法需要将给定数字序列重新排列成字典序中下一个更大的排列。
如果不存在下一个更大的排列,则将数字重新排列成最小的排列(即升序排列)。
必须 原地 修改,只允许使用额外常数空间。
示例 1:
输入:nums = [1,2,3]
输出:[1,3,2]
示例 2:
输入:nums = [3,2,1]
输出:[1,2,3]
示例 3:
输入:nums = [1,1,5]
输出:[1,5,1]
示例 4:
输入:nums = [1]
输出:[1]
提示:
1 <= nums.length <= 100
0 <= nums[i] <= 100
'''
class Solution:
def nextPermutation(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
n = len(nums)
if n <= 1:
return nums
# 从右向左循环数组
i = n - 1
while i > 0:
# 找到相邻的两位元素,右侧的数值大于左侧的数值
if nums[i] > nums[i - 1]:
# 从右向左循环 n - 1到 i区间的数组元素
j = n - 1
while j >= i:
# 找到在此区间内比 i - 1位置的数值大的元素,开始进行换位操作
if nums[j] > nums[i - 1]:
# 移位交换操作
self.exchangeVal(nums, i - 1, j)
# 将 n - 1到 i区间的元素调整为升序,即为最小的数值排列
self.reverseArr(nums, i, n - 1)
return
j -= 1
i -= 1
# 如果是降序数组,则反转数组,称为最小数值的排列
self.reverseArr(nums, 0, n - 1)
def exchangeVal(self, arr, left, right):
arr[left], arr[right] = arr[right], arr[left]
def reverseArr(self, arr, begin, end):
while begin < end:
self.exchangeVal(arr, begin, end)
begin += 1
end -= 1
if __name__ == '__main__':
points = [1, 2, 3]
ins = Solution()
ins.nextPermutation(points)
print(points)
| 22.125 | 61 | 0.468927 | 1,411 | 0.669355 | 0 | 0 | 0 | 0 | 0 | 0 | 1,056 | 0.500949 |
b53a0396d1bab4e9ce336103d0380f331ae41db6 | 7,906 | py | Python | scripts/structural_make_html_report.py | vogelbac/LAB-QA2GO- | be434da7399d396413309f947f4b634d8fae9a17 | [
"BSD-3-Clause"
]
| 14 | 2019-02-07T10:50:58.000Z | 2021-09-03T16:11:00.000Z | scripts/structural_make_html_report.py | vogelbac/LAB-QA2GO- | be434da7399d396413309f947f4b634d8fae9a17 | [
"BSD-3-Clause"
]
| 6 | 2019-01-28T09:19:27.000Z | 2021-09-09T06:56:42.000Z | scripts/structural_make_html_report.py | vogelbac/LAB-QA2GO | be434da7399d396413309f947f4b634d8fae9a17 | [
"BSD-3-Clause"
]
| 4 | 2019-01-28T09:00:58.000Z | 2021-05-25T13:54:40.000Z | # script to generate the overview and individual html report website.
import os
import numpy
def main(result_folder, name, header_comp):
menu_html_file_path = '/home/brain/qa/html/menu_html.html'
menu_html_file = open(menu_html_file_path, 'r')
menu_html = menu_html_file.readlines()
menu_html_file.close()
result_folder_list = os.listdir(result_folder)
    if 'results.html' not in result_folder_list:
        report_file_name = ''
        for i in result_folder_list:
            if i.startswith('report_'):
                report_file_name = i
        if not report_file_name:
            # no report_* file found for this dataset, nothing to render
            return
        report_file = open(result_folder + '/' + report_file_name, 'r')
        html_file = open(result_folder + '/results.html', 'w')
        # read the report file into a flat list of whitespace-separated tokens
        report_file_list = []
        for line in report_file:
            for word in line.split():
                report_file_list.append(word)
        report_file.close()
html_file.writelines(menu_html)
        html_file.write('\t\t<h1 style="margin-top:80px;">Structural results for '+name+'</h1>\n')
html_file.write('\t\t<table>\n\t\t\t<tr bgcolor=#f6f6f6><td><b>pSignal</b></td><td>'+report_file_list[1]+'</td></tr>\n')
html_file.write('\t\t\t<tr bgcolor=#ffffff><td><b>pNoise</b></td><td>'+report_file_list[3]+'</td></tr>\n')
html_file.write('\t\t\t<tr bgcolor=#f6f6f6><td><b>bSNR</b></td><td>'+report_file_list[5]+'</td></tr>\n\t\t</table>\n')
        html_file.write('\t\t<h2>Header comparison</h2>\n')
if not header_comp:
html_file.write('\t\t<p>No differences between headers or no DICOM file to compare available.</p>\n')
else:
            html_file.write('\t\t<table>\n\t\t\t<tr><th colspan="3">DICOM header comparison</th></tr>\n')
html_file.write('\t\t\t<tr><td><b>Field name</b></td><td><b>Reference value</b></td><td><b>Value in data</b></td></tr>\n')
for k in header_comp:
                try:
                    html_file.write('\t\t\t<tr><td><i>'+str(k[0])+'</i></td><td>'+str(k[1])+'</td><td>'+str(k[2])+'</td></tr>\n')
                except (UnicodeDecodeError, UnicodeEncodeError):
                    # fall back to explicit UTF-8 encoding for non-ASCII header values (Python 2)
                    html_file.write('\t\t\t<tr><td><i>'+str(k[0])+'</i></td><td>'+k[1].encode('utf-8')+'</td><td>'+k[2].encode('utf-8')+'</td></tr>\n')
html_file.write('\t\t</table>\n')
        html_file.write('\t\t<h2>Histogram of pNoise</h2>\n\t\t<p><img src="histogram_'+name+'.png" alt="histogram_of_noise"></p>\n')
        html_file.write('\t\t<h2>Histogram of pNoise (intensity > 30)</h2>\n\t\t<p><img src="histogram_upper_values_'+name+'.png" alt="histogram_of_noise"></p>\n')
        html_file.write('\t\t<h2>Background mask slice 0</h2>\n\t\t<p><img src="slice0.png" alt="slice0"></p>\n')
        html_file.write('\t\t<h2>Background mask slice 25%</h2>\n\t\t<p><img src="slice25p.png" alt="slice25p"></p>\n')
        html_file.write('\t\t<h2>Background mask slice 50%</h2>\n\t\t<p><img src="slice50p.png" alt="slice50p"></p>\n')
        html_file.write('\t\t<h2>Background mask slice 75%</h2>\n\t\t<p><img src="slice75p.png" alt="slice75p"></p>\n')
        html_file.write('\t\t<h2>Background mask last slice</h2>\n\t\t<p><img src="sliceend.png" alt="sliceend"></p>\n')
html_file.write('\t</body>\n')
html_file.write('</html>')
html_file.close()
def generate_overview_html(result_folder,human_structural_settings):
menu_html_file_path = '/home/brain/qa/html/menu_html.html'
menu_html_file = open(menu_html_file_path, 'r')
menu_html = menu_html_file.readlines()
menu_html_file.close()
result_file = open(result_folder+'overview.html','w')
result_file.writelines(menu_html)
result_file.write('\t\t<h1 style="margin-top:80px;">Structural Results Overview</h1>\n')
result_file.write('\t\t<h2>Primitive mean intensity of brainmask (pSignal)</h2>\n\t\t<p><img src="pMean.png" alt="Mean intensity of brainmask"></p>\n')
    result_file.write('\t\t<h2>Standard deviation of background (pNoise)</h2>\n\t\t<p><img src="pNoise.png" alt="Std of background"></p>\n')
result_file.write('\t\t<h2>Signal to noise ratio (bSNR)</h2>\n\t\t<p><img src="bSNR.png" alt="Signal to noise ratio"></p>\n')
if human_structural_settings[8] == 0:
automatic_flag = False
else:
automatic_flag = True
std_automatic_multiplier = human_structural_settings[9]
if automatic_flag:
os.chdir('/home/brain/qa/html/results/structural/')
result_folder_list = os.listdir(result_folder)
names = []
mean = []
noise = []
snr = []
for i in result_folder_list:
if os.path.isdir(result_folder+i):
sub_result_folder_list = os.listdir(result_folder+i)
for j in sub_result_folder_list:
sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j)
for k in sub_sub_result_folder_list:
sub_sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j+'/'+k)
for l in sub_sub_sub_result_folder_list:
if l.startswith('report'):
names.append(i+'_'+j+'\n'+k)
report_file = open(result_folder+i+'/'+j+'/'+k+'/'+l,'r')
for data in report_file:
values = data.split()
if values[0].startswith('Mean_'):
mean.append(float(values[1]))
if values[0].startswith('Std_'):
noise.append(float(values[1]))
if values[0].startswith('SNR'):
snr.append(float(values[1]))
report_file.close()
auto_mean_mean = numpy.mean(mean)
auto_mean_std = numpy.std(mean) * float(std_automatic_multiplier)
auto_noise_mean = numpy.mean(noise)
auto_noise_std = numpy.std(noise) * float(std_automatic_multiplier)
auto_snr_mean = numpy.mean(snr)
auto_snr_std = numpy.std(snr) * float(std_automatic_multiplier)
plus_settings_mean = auto_mean_mean + auto_mean_std
minus_settings_mean = auto_mean_mean - auto_mean_std
plus_settings_noise = auto_noise_mean+ auto_noise_std
minus_settings_noise = auto_noise_mean - auto_noise_std
plus_settings_snr = auto_snr_mean + auto_snr_std
minus_settings_snr = auto_snr_mean - auto_snr_std
else:
mean_mean = float(human_structural_settings[1])
range_mean = float(human_structural_settings[2])
noise_mean = float(human_structural_settings[3])
range_noise = float(human_structural_settings[4])
snr_mean = float(human_structural_settings[5])
range_snr = float(human_structural_settings[6])
plus_settings_mean = mean_mean + range_mean
minus_settings_mean = mean_mean - range_mean
plus_settings_noise = noise_mean+ range_noise
minus_settings_noise = noise_mean - range_noise
plus_settings_snr = snr_mean + range_snr
minus_settings_snr = snr_mean - range_snr
result_folder_list = os.listdir(result_folder)
for i in result_folder_list:
if os.path.isdir(result_folder+i):
result_file.write('\t\t<h2>'+i+'</h2>\n')
result_sub_folder_list = os.listdir(result_folder+i)
            result_sub_folder_list.sort()
for j in result_sub_folder_list:
result_file.write('\t\t<h3>'+j+'</h3>\n')
result_sub_sub_folder_list = os.listdir(result_folder+i+'/'+j)
                result_sub_sub_folder_list.sort()
result_file.write('\t\t<ul>\n')
for k in result_sub_sub_folder_list:
sub_sub_sub_result_folder_list = os.listdir(result_folder+i+'/'+j+'/'+k)
mean = 0
noise = 0
snr = 0
for l in sub_sub_sub_result_folder_list:
if l.startswith('report'):
report_file = open(result_folder+i+'/'+j+'/'+k+'/'+l,'r')
for data in report_file:
values = data.split()
if values[0].startswith('Mean_'):
mean = float(values[1])
if values[0].startswith('Std_'):
noise = float(values[1])
if values[0].startswith('SNR'):
snr = float(values[1])
report_file.close()
if(mean > plus_settings_mean) or (mean < minus_settings_mean) or (noise > plus_settings_noise) or (noise < minus_settings_noise) or (snr > plus_settings_snr) or (snr < minus_settings_snr):
result_file.write('\t\t\t<li><img src="/warning.png"><a href="/results/structural/'+i+'/'+j+'/'+k+'/results.html">'+k+'</a></li>\n')
else:
result_file.write('\t\t\t<li><a href="/results/structural/'+i+'/'+j+'/'+k+'/results.html">'+k+'</a></li>\n')
result_file.write('\t\t</ul>\n')
result_file.write('\t</body>\n</html>')
result_file.close()
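# A minimal invocation sketch (illustrative arguments; the surrounding QA
# pipeline normally supplies the result folder, scan name and the DICOM
# header-comparison list):
#
# main('/home/brain/qa/html/results/structural/site/2019/scan01', 'scan01',
#      [('RepetitionTime', '2000', '2100')])
# generate_overview_html('/home/brain/qa/html/results/structural/', settings)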
| 41.610526 | 193 | 0.68214 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,525 | 0.319378 |
b53d20ea8c808f96c712a842e16ae3ddaa8b7e31 | 786 | py | Python | argumenta.py | el3ctron/argumenta | 3d66425b31a36940ba0dccb1361b250a06b02fec | [
"Apache-2.0"
]
| null | null | null | argumenta.py | el3ctron/argumenta | 3d66425b31a36940ba0dccb1361b250a06b02fec | [
"Apache-2.0"
]
| null | null | null | argumenta.py | el3ctron/argumenta | 3d66425b31a36940ba0dccb1361b250a06b02fec | [
"Apache-2.0"
]
| null | null | null | class tabla_de_sesgos :
def __init__(self) :
self.sesgo = None #
self.clase_de_sesgo = None #
pass
class reproductor :
    '''(NULL)'''
    def __init__(self) :
        pass
    def cargar_medio (self) :
        # returns
        pass
class registro_de_tiempos :
    def __init__(self) :
        self.medio = None #
        self.tiempo = None #
        self.espacio = None #
        pass
class medio :
    def __init__(self) :
        self.audio = None #
        self.video = None #
        self.texto = None #
        self.imagen = None #
        pass
# marcador is defined last so that all of its base classes already exist
class marcador (reproductor, tabla_de_sesgos, registro_de_tiempos, medio) :
    def __init__(self) :
        pass
    def cargar_medio (self) :
        # returns
        pass
    def marcar_tiempos (self) :
        # returns
        pass
    def marcar_sesgo (self) :
        # returns
        pass
    def indicar_idea_general (self) :
        # returns
        pass
| 19.170732 | 75 | 0.664122 | 781 | 0.993639 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.101781 |
b53dea5224a5f4701caa31694ad6c985328e3868 | 12,226 | py | Python | pynetdicom3/sop_class.py | mcgregorandrew/pynetdicom3 | 1c798f9b0ad086cf0a8d0619e57f2bc2fbbf13f1 | [
"MIT"
]
| null | null | null | pynetdicom3/sop_class.py | mcgregorandrew/pynetdicom3 | 1c798f9b0ad086cf0a8d0619e57f2bc2fbbf13f1 | [
"MIT"
]
| null | null | null | pynetdicom3/sop_class.py | mcgregorandrew/pynetdicom3 | 1c798f9b0ad086cf0a8d0619e57f2bc2fbbf13f1 | [
"MIT"
]
| 2 | 2020-09-27T06:41:41.000Z | 2021-02-07T06:53:02.000Z | """Generates the supported SOP Classes."""
from collections import namedtuple
import inspect
import logging
import sys
from pydicom.uid import UID
from pynetdicom3.service_class import (
VerificationServiceClass,
StorageServiceClass,
QueryRetrieveServiceClass,
BasicWorklistManagementServiceClass,
)
LOGGER = logging.getLogger('pynetdicom3.sop')
def uid_to_service_class(uid):
"""Return the ServiceClass object corresponding to `uid`.
Parameters
----------
uid : pydicom.uid.UID
The SOP Class UID to find the corresponding Service Class.
Returns
-------
service_class.ServiceClass
The Service Class corresponding to the SOP Class UID.
Raises
------
NotImplementedError
If the Service Class corresponding to the SOP Class `uid` hasn't been
implemented.
"""
if uid in _VERIFICATION_CLASSES.values():
return VerificationServiceClass
elif uid in _STORAGE_CLASSES.values():
return StorageServiceClass
elif uid in _QR_CLASSES.values():
return QueryRetrieveServiceClass
elif uid in _BASIC_WORKLIST_CLASSES.values():
return BasicWorklistManagementServiceClass
else:
raise NotImplementedError(
"The Service Class for the SOP Class with UID '{}' has not "
"been implemented".format(uid)
)
SOPClass = namedtuple("SOPClass", ['uid', 'UID', 'service_class'])
def _generate_sop_classes(sop_class_dict):
"""Generate the SOP Classes."""
for name in sop_class_dict:
globals()[name] = SOPClass(
UID(sop_class_dict[name]),
UID(sop_class_dict[name]),
uid_to_service_class(sop_class_dict[name])
)
# Generate the various SOP classes
_VERIFICATION_CLASSES = {
'VerificationSOPClass' : '1.2.840.10008.1.1',
}
# pylint: disable=line-too-long
_STORAGE_CLASSES = {
'ComputedRadiographyImageStorage' : '1.2.840.10008.5.1.4.1.1.1',
'DigitalXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.1',
'DigitalXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.1.1.1.1',
'DigitalMammographyXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.2',
'DigitalMammographyXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.1.2.1',
'DigitalIntraOralXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.1.3',
    'DigitalIntraOralXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.1.3.1',
'CTImageStorage' : '1.2.840.10008.5.1.4.1.1.2',
'EnhancedCTImageStorage' : '1.2.840.10008.5.1.4.1.1.2.1',
'LegacyConvertedEnhancedCTImageStorage' : '1.2.840.10008.5.1.4.1.1.2.2',
'UltrasoundMultiframeImageStorage' : '1.2.840.10008.5.1.4.1.1.3.1',
'MRImageStorage' : '1.2.840.10008.5.1.4.1.1.4',
'EnhancedMRImageStorage' : '1.2.840.10008.5.1.4.1.1.4.1',
'MRSpectroscopyStorage' : '1.2.840.10008.5.1.4.1.1.4.2',
'EnhancedMRColorImageStorage' : '1.2.840.10008.5.1.4.1.1.4.3',
'LegacyConvertedEnhancedMRImageStorage' : '1.2.840.10008.5.1.4.1.1.4.4',
'UltrasoundImageStorage' : '1.2.840.10008.5.1.4.1.1.6.1',
'EnhancedUSVolumeStorage' : '1.2.840.10008.5.1.4.1.1.6.2',
'SecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7',
'MultiframeSingleBitSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.1',
'MultiframeGrayscaleByteSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.2',
'MultiframeGrayscaleWordSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.3',
'MultiframeTrueColorSecondaryCaptureImageStorage' : '1.2.840.10008.5.1.4.1.1.7.4',
'TwelveLeadECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.1',
'GeneralECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.2',
'AmbulatoryECGWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.1.3',
'HemodynamicWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.2.1',
'CardiacElectrophysiologyWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.3.1',
'BasicVoiceAudioWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.4.1',
'GeneralAudioWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.4.2',
'ArterialPulseWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.5.1',
'RespiratoryWaveformStorage' : '1.2.840.10008.5.1.4.1.1.9.6.1',
'GrayscaleSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.1',
'ColorSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.2',
'PseudocolorSoftcopyPresentationStageStorage' : '1.2.840.10008.5.1.4.1.1.11.3',
'BlendingSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.4',
'XAXRFGrayscaleSoftcopyPresentationStateStorage' : '1.2.840.10008.5.1.4.1.1.11.5',
'XRayAngiographicImageStorage' : '1.2.840.10008.5.1.4.1.1.12.1',
'EnhancedXAImageStorage' : '1.2.840.10008.5.1.4.1.1.12.1.1',
'XRayRadiofluoroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.12.2',
'EnhancedXRFImageStorage' : '1.2.840.10008.5.1.4.1.1.12.2.1',
'XRay3DAngiographicImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.1',
'XRay3DCraniofacialImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.2',
'BreastTomosynthesisImageStorage' : '1.2.840.10008.5.1.4.1.1.13.1.3',
'BreastProjectionXRayImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.13.1.4',
'BreastProjectionXRayImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.13.1.5',
'IntravascularOpticalCoherenceTomographyImagePresentationStorage' : '1.2.840.10008.5.1.4.1.1.14.1',
'IntravascularOpticalCoherenceTomographyImageProcessingStorage' : '1.2.840.10008.5.1.4.1.1.14.2',
'NuclearMedicineImageStorage' : '1.2.840.10008.5.1.4.1.1.20',
'ParametricMapStorage' : '1.2.840.10008.5.1.4.1.1.30',
'RawDataStorage' : '1.2.840.10008.5.1.4.1.1.66',
'SpatialRegistrationStorage' : '1.2.840.10008.5.1.4.1.1.66.1',
'SpatialFiducialsStorage' : '1.2.840.10008.5.1.4.1.1.66.2',
'DeformableSpatialRegistrationStorage' : '1.2.840.10008.5.1.4.1.1.66.3',
'SegmentationStorage' : '1.2.840.10008.5.1.4.1.1.66.4',
'SurfaceSegmentationStorage' : '1.2.840.10008.5.1.4.1.1.66.5',
'RealWorldValueMappingStorage' : '1.2.840.10008.5.1.4.1.1.67',
'SurfaceScanMeshStorage' : '1.2.840.10008.5.1.4.1.1.68.1',
'SurfaceScanPointCloudStorage' : '1.2.840.10008.5.1.4.1.1.68.2',
'VLEndoscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.1',
'VideoEndoscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.1.1',
'VLMicroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.2',
'VideoMicroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.2.1',
'VLSlideCoordinatesMicroscopicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.3',
'VLPhotographicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.4',
'VideoPhotographicImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.4.1',
'OphthalmicPhotography8BitImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.1',
'OphthalmicPhotography16BitImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.2',
'StereometricRelationshipStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.3',
'OpthalmicTomographyImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.4',
'WideFieldOpthalmicPhotographyStereographicProjectionImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.5',
'WideFieldOpthalmicPhotography3DCoordinatesImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.5.6',
'VLWholeSlideMicroscopyImageStorage' : '1.2.840.10008.5.1.4.1.1.77.1.6',
'LensometryMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.1',
'AutorefractionMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.2',
'KeratometryMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.3',
'SubjectiveRefractionMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.4',
'VisualAcuityMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.5',
'SpectaclePrescriptionReportStorage' : '1.2.840.10008.5.1.4.1.1.78.6',
'OpthalmicAxialMeasurementsStorage' : '1.2.840.10008.5.1.4.1.1.78.7',
'IntraocularLensCalculationsStorage' : '1.2.840.10008.5.1.4.1.1.78.8',
'MacularGridThicknessAndVolumeReport' : '1.2.840.10008.5.1.4.1.1.79.1',
'OpthalmicVisualFieldStaticPerimetryMeasurementsStorag' : '1.2.840.10008.5.1.4.1.1.80.1',
'OpthalmicThicknessMapStorage' : '1.2.840.10008.5.1.4.1.1.81.1',
'CornealTopographyMapStorage' : '1.2.840.10008.5.1.4.1.1.82.1',
'BasicTextSRStorage' : '1.2.840.10008.5.1.4.1.1.88.11',
'EnhancedSRStorage' : '1.2.840.10008.5.1.4.1.1.88.22',
'ComprehensiveSRStorage' : '1.2.840.10008.5.1.4.1.1.88.33',
'Comprehenseice3DSRStorage' : '1.2.840.10008.5.1.4.1.1.88.34',
'ExtensibleSRStorage' : '1.2.840.10008.5.1.4.1.1.88.35',
'ProcedureSRStorage' : '1.2.840.10008.5.1.4.1.1.88.40',
'MammographyCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.50',
'KeyObjectSelectionStorage' : '1.2.840.10008.5.1.4.1.1.88.59',
'ChestCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.65',
'XRayRadiationDoseSRStorage' : '1.2.840.10008.5.1.4.1.1.88.67',
'RadiopharmaceuticalRadiationDoseSRStorage' : '1.2.840.10008.5.1.4.1.1.88.68',
'ColonCADSRStorage' : '1.2.840.10008.5.1.4.1.1.88.69',
'ImplantationPlanSRDocumentStorage' : '1.2.840.10008.5.1.4.1.1.88.70',
'EncapsulatedPDFStorage' : '1.2.840.10008.5.1.4.1.1.104.1',
'EncapsulatedCDAStorage' : '1.2.840.10008.5.1.4.1.1.104.2',
'PositronEmissionTomographyImageStorage' : '1.2.840.10008.5.1.4.1.1.128',
'EnhancedPETImageStorage' : '1.2.840.10008.5.1.4.1.1.130',
'LegacyConvertedEnhancedPETImageStorage' : '1.2.840.10008.5.1.4.1.1.128.1',
'BasicStructuredDisplayStorage' : '1.2.840.10008.5.1.4.1.1.131',
'RTImageStorage' : '1.2.840.10008.5.1.4.1.1.481.1',
'RTDoseStorage' : '1.2.840.10008.5.1.4.1.1.481.2',
'RTStructureSetStorage' : '1.2.840.10008.5.1.4.1.1.481.3',
'RTBeamsTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.4',
'RTPlanStorage' : '1.2.840.10008.5.1.4.1.1.481.5',
'RTBrachyTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.6',
'RTTreatmentSummaryRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.7',
'RTIonPlanStorage' : '1.2.840.10008.5.1.4.1.1.481.8',
'RTIonBeamsTreatmentRecordStorage' : '1.2.840.10008.5.1.4.1.1.481.9',
'RTBeamsDeliveryInstructionStorage' : '1.2.840.10008.5.1.4.34.7',
'GenericImplantTemplateStorage' : '1.2.840.10008.5.1.4.43.1',
'ImplantAssemblyTemplateStorage' : '1.2.840.10008.5.1.4.44.1',
'ImplantTemplateGroupStorage' : '1.2.840.10008.5.1.4.45.1'
}
_QR_CLASSES = {
'PatientRootQueryRetrieveInformationModelFind' : '1.2.840.10008.5.1.4.1.2.1.1',
'PatientRootQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.1.2',
'PatientRootQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.1.3',
'StudyRootQueryRetrieveInformationModelFind' : '1.2.840.10008.5.1.4.1.2.2.1',
'StudyRootQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.2.2',
'StudyRootQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.2.3',
'PatientStudyOnlyQueryRetrieveInformationModelFind' : '1.2.840.10008.5.1.4.1.2.3.1',
'PatientStudyOnlyQueryRetrieveInformationModelMove' : '1.2.840.10008.5.1.4.1.2.3.2',
'PatientStudyOnlyQueryRetrieveInformationModelGet' : '1.2.840.10008.5.1.4.1.2.3.3',
}
_BASIC_WORKLIST_CLASSES = {
'ModalityWorklistInformationFind' : '1.2.840.10008.5.1.4.31',
}
# pylint: enable=line-too-long
_generate_sop_classes(_VERIFICATION_CLASSES)
_generate_sop_classes(_STORAGE_CLASSES)
_generate_sop_classes(_QR_CLASSES)
_generate_sop_classes(_BASIC_WORKLIST_CLASSES)
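# Example lookups against the generated classes (values taken from the
# dictionaries above):
#
# >>> VerificationSOPClass.UID
# '1.2.840.10008.1.1'
# >>> uid_to_service_class(VerificationSOPClass.uid)
# <class 'pynetdicom3.service_class.VerificationServiceClass'>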
def uid_to_sop_class(uid):
"""Given a `uid` return the corresponding SOPClass.
Parameters
----------
uid : pydicom.uid.UID
Returns
-------
sop_class.SOPClass subclass
The SOP class corresponding to `uid`.
Raises
------
NotImplementedError
If the SOP Class corresponding to the given UID has not been
implemented.
"""
# Get a list of all the class members of the current module
members = inspect.getmembers(
sys.modules[__name__],
lambda mbr: isinstance(mbr, tuple)
)
for obj in members:
if hasattr(obj[1], 'uid') and obj[1].uid == uid:
return obj[1]
raise NotImplementedError("The SOP Class for UID '{}' has not been " \
"implemented".format(uid))
| 49.699187 | 108 | 0.679699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9,389 | 0.767954 |
b53df049332ea39e2f7827214e41edfb7e42ca6c | 7,885 | py | Python | feed_forward_model.py | karlschrader/deepPD | 678793c9026eab2681d2d0a3b7e7f9f91c0f3bc5 | [
"MIT"
]
| null | null | null | feed_forward_model.py | karlschrader/deepPD | 678793c9026eab2681d2d0a3b7e7f9f91c0f3bc5 | [
"MIT"
]
| null | null | null | feed_forward_model.py | karlschrader/deepPD | 678793c9026eab2681d2d0a3b7e7f9f91c0f3bc5 | [
"MIT"
]
| null | null | null | import os
from datetime import datetime
import numpy as np
import tensorflow as tf
from tensorflow.python.training import moving_averages
TF_DTYPE = tf.float64
MOMENTUM = 0.99
EPSILON = 1e-6
DELTA_CLIP = 50.0
class FeedForwardModel():
"""
Abstract class for creating neural networks.
    Offers functions to build or clone individual layers or complete networks.
"""
def __init__(self, bsde, run_name):
self._bsde = bsde
# ops for statistics update of batch normalization
self._extra_train_ops = []
self.tb_dir = tf.app.flags.FLAGS.tensorboard_dir + run_name + "_" + datetime.now(
).strftime('%Y_%m_%d_%H_%M_%S')
os.mkdir(self.tb_dir)
def _clone_subnetwork(self, input_, timestep, layer_count, weights):
"""
        Clone a neural network, using the same weights as the source network.
Args:
input_ (Tensor): Input of the neural network that will be build
timestep (float): Time index, used for tensor names
layer_count (int): number of layers in the neural network that should be cloned
            weights (np.array(size=[num_timesteps, layer_count])): Matrix weights of the source network, indexed by timestep and layer
Returns:
Tensor: Output of the last layer of the neural network
"""
with tf.variable_scope(str(timestep)):
hiddens = self._batch_norm(input_, name='path_input_norm')
for i in range(1, layer_count - 1):
hiddens = self._copy_batch_layer(hiddens, 'layer_{}'.format(i),
i, timestep, weights)
output = self._copy_batch_layer(hiddens, 'final_layer',
layer_count - 1, timestep, weights)
return output
def _subnetwork(self, input_, timestep, num_hiddens):
"""
Generate a neural network
Args:
input_ (Tensor): Input of the neural network that will be build
timestep (float): Time index, used for tensor name
num_hiddens (np.array(size=[layer_count])): Specifies the number
of additional dimensions for each layer of the neural network.
Returns:
Tensor: Output of the last layer of the neural network
"""
matrix_weights = []
with tf.variable_scope(str(timestep)):
# input norm
hiddens = self._batch_norm(input_, name='path_input_norm')
for i in range(1, len(num_hiddens) - 1):
hiddens, weight = self._dense_batch_layer(
hiddens,
num_hiddens[i] + self._bsde.dim,
activation_fn=tf.nn.relu,
layer_name='layer_{}'.format(i),
)
matrix_weights.append(weight)
# last layer without relu
output, weight = self._dense_batch_layer(
hiddens,
num_hiddens[-1] + self._bsde.dim,
activation_fn=None,
layer_name='final_layer',
)
matrix_weights.append(weight)
return output, matrix_weights
def _dense_batch_layer(self,
input_,
output_size,
activation_fn=None,
stddev=5.0,
layer_name="linear"):
"""
Generate one fully connected layer
Args:
input_ (Tensor): Input of layer
output_size (int): Number of outputs this layer should have
KwArgs:
activation_fn (Function): activation function for the neurons in
this layer. Will usually be ReLU, but can be left blank for the last layer.
stddev (float): stddev to use for the initial distribution of weights in this layer
layer_name (string): tensorflow name used for the variables in this layer
Returns:
Tensor: Output of the layer
tf.Variable: Reference to the used Matrix weight
"""
with tf.variable_scope(layer_name):
shape = input_.get_shape().as_list()
weight = tf.get_variable(
'Matrix', [shape[1], output_size],
TF_DTYPE,
tf.random_normal_initializer(
stddev=stddev / np.sqrt(shape[1] + output_size)))
# matrix weight
hiddens = tf.matmul(input_, weight)
#batch norm
hiddens_bn = self._batch_norm(hiddens)
if activation_fn:
return activation_fn(hiddens_bn), weight
return hiddens_bn, weight
def _copy_batch_layer(self, input_, layer_name, layer, timestep, weights):
"""
Copy one fully connected layer, reusing the weights of the previous layer
Args:
input_ (Tensor): Input of layer
layer_name (string): tensorflow name used for the variables in this layer
layer (int): index of the layer in the current timestep
timestep (int): index of the current timestep
weights (np.array(size=[num_timesteps, layer_count])): weight database to copy from
Returns:
Tensor: Output of the layer
"""
with tf.variable_scope(layer_name):
# init matrix weight with matrix weights from primal stage
weight = tf.Variable(weights[timestep - 1][layer - 1], 'Matrix')
hiddens = tf.matmul(input_, weight)
hiddens_bn = self._batch_norm(hiddens)
return hiddens_bn
def _batch_norm(self, input_, name='batch_norm'):
"""
Batch normalize the data
Args:
input_ (Tensor): Input of layer
KwArgs:
name (string): Used as tensorflow name
Returns:
Tensor: Output of the layer
See https://arxiv.org/pdf/1502.03167v3.pdf p.3
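        Computes: output = gamma * (input_ - mean) / sqrt(variance + EPSILON) + beta,
        using batch statistics during training and the moving averages otherwise.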
"""
with tf.variable_scope(name):
params_shape = [input_.get_shape()[-1]]
beta = tf.get_variable(
'beta',
params_shape,
TF_DTYPE,
initializer=tf.random_normal_initializer(
0.0, stddev=0.1, dtype=TF_DTYPE))
gamma = tf.get_variable(
'gamma',
params_shape,
TF_DTYPE,
initializer=tf.random_uniform_initializer(
0.1, 0.5, dtype=TF_DTYPE))
moving_mean = tf.get_variable(
'moving_mean',
params_shape,
TF_DTYPE,
initializer=tf.constant_initializer(0.0, TF_DTYPE),
trainable=False)
moving_variance = tf.get_variable(
'moving_variance',
params_shape,
TF_DTYPE,
initializer=tf.constant_initializer(1.0, TF_DTYPE),
trainable=False)
# These ops will only be performed when training
mean, variance = tf.nn.moments(input_, [0], name='moments')
self._extra_train_ops.append(
moving_averages.assign_moving_average(moving_mean, mean,
MOMENTUM))
self._extra_train_ops.append(
moving_averages.assign_moving_average(moving_variance,
variance, MOMENTUM))
mean, variance = tf.cond(self._is_training,
lambda: (mean, variance),
lambda: (moving_mean, moving_variance))
hiddens_bn = tf.nn.batch_normalization(input_, mean, variance,
beta, gamma, EPSILON)
hiddens_bn.set_shape(input_.get_shape())
return hiddens_bn
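# A minimal subclassing sketch (hypothetical names; the real solvers in this
# project wire these helpers into a full training graph):
#
# class ExampleSolver(FeedForwardModel):
#     def build(self):
#         self._is_training = tf.placeholder(tf.bool)
#         x = tf.placeholder(TF_DTYPE, [None, self._bsde.dim])
#         # two hidden ReLU layers and a linear output, each bsde.dim + 10 wide
#         return self._subnetwork(x, timestep=0, num_hiddens=[0, 10, 10, 10])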
| 38.842365 | 95 | 0.557261 | 7,672 | 0.972987 | 0 | 0 | 0 | 0 | 0 | 0 | 2,971 | 0.376791 |
b53f8bde038bcadd2b2dcbf0be630c6ab3f7dcdd | 122 | py | Python | semg_bss/hyser/__init__.py | nihil21/semg-bss | c8abd8aad6ed35e22a31d238adafe28f22bbcb3f | [
"MIT"
]
| 2 | 2021-12-09T15:27:42.000Z | 2021-12-16T12:48:54.000Z | semg_bss/hyser/__init__.py | nihil21/semg-bss | c8abd8aad6ed35e22a31d238adafe28f22bbcb3f | [
"MIT"
]
| null | null | null | semg_bss/hyser/__init__.py | nihil21/semg-bss | c8abd8aad6ed35e22a31d238adafe28f22bbcb3f | [
"MIT"
]
| null | null | null | from .dataset import load_pr, load_1dof, load_mvc, load_ndof
__all__ = ["load_pr", "load_1dof", "load_mvc", "load_ndof"]
| 30.5 | 60 | 0.737705 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.336066 |
b540b40d9aaf331bef2f785083b2bbd7ed30bfe6 | 619 | py | Python | Fibonacci/Python/fibonacci.py | IanDoarn/LearningRepo | 4c5906b3c1f497a979c3fce89a66d1e571cd6b42 | [
"MIT"
]
| null | null | null | Fibonacci/Python/fibonacci.py | IanDoarn/LearningRepo | 4c5906b3c1f497a979c3fce89a66d1e571cd6b42 | [
"MIT"
]
| null | null | null | Fibonacci/Python/fibonacci.py | IanDoarn/LearningRepo | 4c5906b3c1f497a979c3fce89a66d1e571cd6b42 | [
"MIT"
]
| null | null | null | """
Fibonacci sequence using python
generators
Written by: Ian Doarn
"""
def fib():
    # Generator that yields Fibonacci numbers
a, b = 0, 1
while True: # First iteration:
yield a # yield 0 to start with and then
a, b = b, a + b # a will now be 1, and b will also be 1, (0 + 1)
if __name__ == '__main__':
# Maximum fib numbers to print
max_i = 20
for i, fib_n in enumerate(fib()):
        # Print each yielded fib number
print('{i:3}: {f:3}'.format(i=i, f=fib_n))
# Break when we hit max_i value
if i == max_i:
break
| 23.807692 | 75 | 0.55412 | 0 | 0 | 254 | 0.410339 | 0 | 0 | 0 | 0 | 327 | 0.528271 |
b543980e156f1837cc8c91284aa02b3f5bbf8218 | 974 | py | Python | Chapter 6/glossary_2.py | WilliamJaber/Python-Crash-Course | d87621785011039fbe0b42f0d8b6cd2364246577 | [
"MIT"
]
| null | null | null | Chapter 6/glossary_2.py | WilliamJaber/Python-Crash-Course | d87621785011039fbe0b42f0d8b6cd2364246577 | [
"MIT"
]
| null | null | null | Chapter 6/glossary_2.py | WilliamJaber/Python-Crash-Course | d87621785011039fbe0b42f0d8b6cd2364246577 | [
"MIT"
]
| 5 | 2021-09-22T16:53:47.000Z | 2022-03-24T00:56:49.000Z | glossary = {
    'integer': 'is colloquially defined as a number that can be written without a fractional component.\n',
'iterate': 'is the repetition of a process in order to generate a sequence of outcomes.\n',
'indentation': 'is an empty space at the beginning of a line that groups particular blocks of code.\n',
    'concatenate': 'is the operation of joining character strings end-to-end.\n',
'boolean': 'is a logical data type that can have only the values True or False.\n',
    'loop': 'a for loop iterates over an object until that object is exhausted.\n',
'tuple': 'is a immutable data structure that store an ordered sequence of values.\n',
'dictionary': 'is an unordered and mutable Python container that stores mappings of unique keys to values.\n',
    'parse': 'is the process of dividing program code into smaller pieces so that its syntax can be analyzed.\n',
}
for k, v in glossary.items():
print(f'{k.title()}: {v}')
| 64.933333 | 125 | 0.710472 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 840 | 0.862423 |
b543f58cf6e8b8dc209086801165057172e20d3f | 1,711 | py | Python | scripts/test_spider_roundtrip.py | mattr1/seq2struct_forPRs | cdc9e3c94380fb479ed3e3c77f370038d27cf2d6 | [
"MIT"
]
| 25 | 2019-07-16T22:32:44.000Z | 2022-01-25T05:23:07.000Z | scripts/test_spider_roundtrip.py | mattr1/seq2struct_forPRs | cdc9e3c94380fb479ed3e3c77f370038d27cf2d6 | [
"MIT"
]
| 19 | 2018-12-17T20:42:11.000Z | 2020-02-12T21:29:51.000Z | scripts/test_spider_roundtrip.py | mattr1/seq2struct_forPRs | cdc9e3c94380fb479ed3e3c77f370038d27cf2d6 | [
"MIT"
]
| 22 | 2019-03-16T05:57:27.000Z | 2020-10-25T04:34:54.000Z | import ast
import argparse
import json
import os
import pprint
import astor
import tqdm
import _jsonnet
from seq2struct import datasets
from seq2struct import grammars
from seq2struct.utils import registry
from third_party.spider import evaluation
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--config', required=True)
parser.add_argument('--config-args')
parser.add_argument('--output', required=True)
args = parser.parse_args()
if args.config_args:
config = json.loads(_jsonnet.evaluate_file(args.config, tla_codes={'args': args.config_args}))
else:
config = json.loads(_jsonnet.evaluate_file(args.config))
os.makedirs(args.output, exist_ok=True)
gold = open(os.path.join(args.output, 'gold.txt'), 'w')
predicted = open(os.path.join(args.output, 'predicted.txt'), 'w')
train_data = registry.construct('dataset', config['data']['train'])
grammar = registry.construct('grammar', config['model']['decoder_preproc']['grammar'])
evaluator = evaluation.Evaluator(
'data/spider-20190205/database',
evaluation.build_foreign_key_map_from_json('data/spider-20190205/tables.json'),
'match')
for i, item in enumerate(tqdm.tqdm(train_data, dynamic_ncols=True)):
parsed = grammar.parse(item.code, 'train')
sql = grammar.unparse(parsed, item)
evaluator.evaluate_one(
item.schema.db_id,
item.orig['query'].replace('\t', ' '),
sql)
gold.write('{}\t{}\n'.format(item.orig['query'].replace('\t', ' '), item.schema.db_id))
predicted.write('{}\n'.format(sql))
if __name__ == '__main__':
main()
| 30.553571 | 102 | 0.663939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 269 | 0.157218 |
b5441f2ff301c902adbb89a228a3e18af8032444 | 1,632 | py | Python | tune/noniterative/objective.py | fugue-project/tune | bf2288ddcb29c8345d996a9b22c0910da9002da1 | [
"Apache-2.0"
]
| 14 | 2021-03-03T20:02:09.000Z | 2021-11-10T20:32:22.000Z | tune/noniterative/objective.py | fugue-project/tune | bf2288ddcb29c8345d996a9b22c0910da9002da1 | [
"Apache-2.0"
]
| 26 | 2021-04-30T19:56:06.000Z | 2022-01-18T04:40:00.000Z | tune/noniterative/objective.py | fugue-project/tune | bf2288ddcb29c8345d996a9b22c0910da9002da1 | [
"Apache-2.0"
]
| 2 | 2021-04-30T03:12:21.000Z | 2022-02-05T12:13:37.000Z | from tune.constants import TUNE_STOPPER_DEFAULT_CHECK_INTERVAL
from typing import Any, Callable, Optional
from tune._utils import run_monitored_process
from tune.concepts.flow import Trial, TrialReport
class NonIterativeObjectiveFunc:
def generate_sort_metric(self, value: float) -> float: # pragma: no cover
return value
def run(self, trial: Trial) -> TrialReport: # pragma: no cover
raise NotImplementedError
def safe_run(self, trial: Trial) -> TrialReport:
report = self.run(trial)
return report.with_sort_metric(self.generate_sort_metric(report.metric))
class NonIterativeObjectiveLocalOptimizer:
@property
def distributable(self) -> bool:
return True
def run(self, func: NonIterativeObjectiveFunc, trial: Trial) -> TrialReport:
# TODO: how to utilize execution_engine?
return func.safe_run(trial)
def run_monitored_process(
self,
func: NonIterativeObjectiveFunc,
trial: Trial,
stop_checker: Callable[[], bool],
interval: Any = TUNE_STOPPER_DEFAULT_CHECK_INTERVAL,
) -> TrialReport:
return run_monitored_process(
self.run, [func, trial], {}, stop_checker=stop_checker, interval=interval
)
def validate_noniterative_objective(
func: NonIterativeObjectiveFunc,
trial: Trial,
validator: Callable[[TrialReport], None],
optimizer: Optional[NonIterativeObjectiveLocalOptimizer] = None,
) -> None:
_optimizer = optimizer or NonIterativeObjectiveLocalOptimizer()
validator(_optimizer.run_monitored_process(func, trial, lambda: False, "1sec"))
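# A minimal validation sketch (assumes a concrete NonIterativeObjectiveFunc
# subclass `MyObjective` and an already-constructed `trial`; these names are
# illustrative, not part of this module):
#
# optimizer = NonIterativeObjectiveLocalOptimizer()
# report = optimizer.run(MyObjective(), trial)
# validate_noniterative_objective(
#     MyObjective(), trial, lambda rep: print(rep.metric), optimizer)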
| 33.306122 | 85 | 0.717525 | 1,051 | 0.643995 | 0 | 0 | 66 | 0.040441 | 0 | 0 | 82 | 0.050245 |
b545de61a4d0708ce0bd62bccdbaa4f9ddf7238d | 371 | py | Python | ThinkPython/chap10/ex7.py | sokolowskik/Tutorials | d2681d4f18b03e00f90f9132c77f0b23b74d2629 | [
"MIT"
]
| null | null | null | ThinkPython/chap10/ex7.py | sokolowskik/Tutorials | d2681d4f18b03e00f90f9132c77f0b23b74d2629 | [
"MIT"
]
| null | null | null | ThinkPython/chap10/ex7.py | sokolowskik/Tutorials | d2681d4f18b03e00f90f9132c77f0b23b74d2629 | [
"MIT"
]
| null | null | null | a1 = 'mary'
b1 = 'army'
a2 = 'mary'
b2 = 'mark'
def is_anagram(a, b):
"""
Return True if words a and b are anagrams.
    Return False otherwise.
"""
a_list = list(a)
b_list = list(b)
a_list.sort()
b_list.sort()
if a_list == b_list:
return True
else:
return False
print(is_anagram(a1, b1))
print(is_anagram(a2, b2))
| 16.130435 | 46 | 0.574124 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.304582 |
b54662251afe3923c7f22e2ffa35d6fb5d4cc63b | 459 | py | Python | pymotion/models/notification.py | LeResKP/motion | 4a7c6200ca6fd20edb3b98c6ea1215c90d988a78 | [
"MIT"
]
| null | null | null | pymotion/models/notification.py | LeResKP/motion | 4a7c6200ca6fd20edb3b98c6ea1215c90d988a78 | [
"MIT"
]
| null | null | null | pymotion/models/notification.py | LeResKP/motion | 4a7c6200ca6fd20edb3b98c6ea1215c90d988a78 | [
"MIT"
]
| null | null | null | from sqlalchemy import (
Column,
ForeignKey,
Integer,
Text,
)
from sqlalchemy.orm import relationship
from .meta import Base
class Notification(Base):
__tablename__ = 'notification'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
subscription = Column(Text, nullable=True, default=None)
user = relationship('User', back_populates='notification', uselist=False)
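# A minimal usage sketch (assumes the matching `User` model declares
# `notification = relationship('Notification', ...)` and an open session):
#
# note = Notification(user_id=user.id, subscription='{"endpoint": "..."}')
# dbsession.add(note)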
| 21.857143 | 77 | 0.714597 | 313 | 0.681917 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.093682 |
b5473421d6c0b8e5ed5978ee678700c80296d6a9 | 1,340 | py | Python | utils/model_helper.py | CocoBir/django-restful-demo | aeb7f8a0bcff5c52b528c7b0c48f87de5f392320 | [
"MIT"
]
| null | null | null | utils/model_helper.py | CocoBir/django-restful-demo | aeb7f8a0bcff5c52b528c7b0c48f87de5f392320 | [
"MIT"
]
| null | null | null | utils/model_helper.py | CocoBir/django-restful-demo | aeb7f8a0bcff5c52b528c7b0c48f87de5f392320 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""
model helper
~~~~~~~~~~~~
:Created: 2016-8-5
:Copyright: (c) 2016<[email protected]>
"""
from customer_exceptions import OffsetOutOfRangeException
class ListModelHelper(object):
"""get the object list"""
@classmethod
def list(cls, index=0, limit=8, sort=None, order='asc'):
"""get the list of the model object
:param index: page index
:param limit: page entry number
:param sort: sort condition
:param order: asc or desc
:return: object list
"""
if not sort:
sort = 'id'
order_by = '-' + sort if order != 'asc' else sort
offset = index * limit
# check the offset
total = cls.objects.count()
        if offset > total:
            raise OffsetOutOfRangeException()
return {
'total': total,
'datalist': cls.objects.order_by(order_by)\
[offset:offset + limit]
}
class ViewModelHelper(object):
"""get a single instance"""
@classmethod
def view(cls, pk):
"""
        get a specific object
:param pk: primary key
:return:
"""
return cls.objects.get(id=pk)
class GenericModelHelper(ListModelHelper, ViewModelHelper):
pass
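# A minimal usage sketch (hypothetical `Article` Django model mixing in the
# helper; `objects` is the model's default manager):
#
# class Article(GenericModelHelper, models.Model):
#     title = models.CharField(max_length=100)
#
# page = Article.list(index=2, limit=8, sort='title', order='desc')
# print(page['total'], len(page['datalist']))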
| 23.103448 | 60 | 0.568657 | 1,132 | 0.844776 | 0 | 0 | 931 | 0.694776 | 0 | 0 | 600 | 0.447761 |
b54756c2e6e68e661aab45de212b547f340ad633 | 2,603 | py | Python | djadmin2/templatetags/admin2_tags.py | beezz/django-admin2 | 4aec1a3836011cd46e5eb8b6375590bf5a76c044 | [
"BSD-3-Clause"
]
| 1 | 2015-04-30T13:34:03.000Z | 2015-04-30T13:34:03.000Z | djadmin2/templatetags/admin2_tags.py | taxido/django-admin2 | 6a6b3d5f790b8289b0dd0f9194d80799af8804dc | [
"BSD-3-Clause"
]
| 1 | 2021-03-19T23:57:09.000Z | 2021-03-19T23:57:09.000Z | djadmin2/templatetags/admin2_tags.py | RyanBalfanz/django-admin2 | e7f0611eea22370bb3418e25e9cd10ddbac4fd6d | [
"BSD-3-Clause"
]
| null | null | null | from django import template
register = template.Library()
from .. import utils
@register.filter
def admin2_urlname(view, action):
"""
Converts the view and the specified action into a valid namespaced URLConf name.
"""
return utils.admin2_urlname(view, action)
@register.filter
def model_app_label(obj):
"""
Returns the app label of a model instance or class.
"""
return utils.model_app_label(obj)
@register.filter
def model_verbose_name(obj):
"""
Returns the verbose name of a model instance or class.
"""
return utils.model_verbose_name(obj)
@register.filter
def model_verbose_name_plural(obj):
"""
Returns the pluralized verbose name of a model instance or class.
"""
return utils.model_verbose_name_plural(obj)
@register.filter
def formset_visible_fieldlist(formset):
"""
    Returns the labels of a formset's visible fields as a list.
"""
return [f.label for f in formset.forms[0].visible_fields()]
@register.filter
def for_admin(permissions, admin):
"""
Only useful in the permission handling. This filter binds a new admin to
the permission handler to allow checking views of an arbitrary admin.
"""
# some permission check has failed earlier, so we don't bother trying to
# bind a new admin to it.
if permissions == '':
return permissions
return permissions.bind_admin(admin)
@register.filter
def for_view(permissions, view):
"""
Only useful in the permission handling. This filter binds a new view to
the permission handler to check for view names that are not known during
template compile time.
"""
# some permission check has failed earlier, so we don't bother trying to
# bind a new admin to it.
if permissions == '':
return permissions
return permissions.bind_view(view)
@register.filter
def for_object(permissions, obj):
"""
Only useful in the permission handling. This filter binds a new object to
the permission handler to check for object-level permissions.
"""
# some permission check has failed earlier, so we don't bother trying to
# bind a new object to it.
if permissions == '':
return permissions
return permissions.bind_object(obj)
@register.simple_tag
def get_attr(record, attribute_name):
""" Allows dynamic fetching of model attributes in templates """
if attribute_name == "__str__":
return record.__unicode__()
attribute = getattr(record, attribute_name)
if callable(attribute):
return attribute()
return attribute
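# Template usage sketch (illustrative; assumes `{% load admin2_tags %}`):
#
# {{ view|admin2_urlname:"detail" }} -> namespaced URLConf name for the view
# {{ permissions|for_view:"update" }} -> permission handler bound to that view
# {% get_attr record "name" %} -> record.name, called if it is callable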
| 26.835052 | 84 | 0.702651 | 0 | 0 | 0 | 0 | 2,495 | 0.958509 | 0 | 0 | 1,270 | 0.487899 |
b5484bee48cb34153d413c1639f3e4d36037235a | 2,323 | py | Python | tests/test_filters/test_edges.py | luluricketts/biothings_explorer | ae2009ff285f96a08e0145f242846ca613b5069c | [
"Apache-2.0"
]
| null | null | null | tests/test_filters/test_edges.py | luluricketts/biothings_explorer | ae2009ff285f96a08e0145f242846ca613b5069c | [
"Apache-2.0"
]
| null | null | null | tests/test_filters/test_edges.py | luluricketts/biothings_explorer | ae2009ff285f96a08e0145f242846ca613b5069c | [
"Apache-2.0"
]
| null | null | null | """
Tests for edges.py
"""
import unittest
import pandas as pd
from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher
from biothings_explorer.filters.edges import filter_node_degree
class TestFilterEdges(unittest.TestCase):
# test for count values
def test_count_values(self):
counts = [10, 20, 40, 50, 100]
seqd = SingleEdgeQueryDispatcher(input_cls='Gene',
output_cls='ChemicalSubstance',
input_id='NCBIGene',
values='1017')
seqd.query()
for count in counts:
newG = filter_node_degree(seqd.G, count)
self.assertEqual(len(newG.nodes), count+1)
# edge case test if count > num nodes, then returns num_nodes results
def test_num_nodes(self):
count = 1000
seqd = SingleEdgeQueryDispatcher(input_cls='Gene',
output_cls='ChemicalSubstance',
input_id='NCBIGene',
values='1017')
seqd.query()
newG = filter_node_degree(seqd.G, count)
self.assertEqual(len(newG.nodes), len(seqd.G.nodes))
# test for correct ordering of ranks
def test_ranks(self):
seqd = SingleEdgeQueryDispatcher(input_cls='Disease',
input_id='MONDO',
output_cls='PhenotypicFeature',
pred='related_to',
values='MONDO:0010997')
seqd.query()
newG = filter_node_degree(seqd.G)
for i1,node1 in enumerate(newG.nodes):
if node1 == 'MONDO:MONDO:0010997':
continue
for i2,node2 in enumerate(newG.nodes):
if node2 == 'MONDO:MONDO:0010997':
continue
if newG.degree(node1) > newG.degree(node2):
self.assertLess(newG.nodes.data()[node1]['rank'], newG.nodes.data()[node2]['rank'])
elif newG.degree(node1) < newG.degree(node2):
self.assertGreater(newG.nodes.data()[node1]['rank'], newG.nodes.data()[node2]['rank'])
if __name__ == '__main__':
unittest.main()
| 40.754386 | 106 | 0.54025 | 2,066 | 0.889367 | 0 | 0 | 0 | 0 | 0 | 0 | 374 | 0.160999 |
b54ed986a0849287fd62118ba89a87ae8732ba9e | 974 | py | Python | get_data.py | ryanw3bb/fpl | a06fbf8ada5f549f0750ed9af46f53b3a1a0149e | [
"MIT"
]
| 1 | 2018-08-15T02:52:52.000Z | 2018-08-15T02:52:52.000Z | get_data.py | ryanw3bb/fpl | a06fbf8ada5f549f0750ed9af46f53b3a1a0149e | [
"MIT"
]
| null | null | null | get_data.py | ryanw3bb/fpl | a06fbf8ada5f549f0750ed9af46f53b3a1a0149e | [
"MIT"
]
| null | null | null | """
Retrieves data as JSON files from fantasy.premierleague.com
"""
import json
import requests
LAST_SEASON_DATA_FILENAME = "data/player_data_20_21.json"
DATA_URL = "https://fantasy.premierleague.com/api/bootstrap-static/"
DATA_FILENAME = "data/player_data_21_22.json"
FIXTURES_URL = "https://fantasy.premierleague.com/api/fixtures/"
FIXTURES_FILENAME = "data/fixtures_data_21_22.json"
# Download all player data and write file
def get_player_data(use_last_season):
if use_last_season:
return LAST_SEASON_DATA_FILENAME
r = requests.get(DATA_URL)
json_response = r.json()
with open(DATA_FILENAME, 'w') as out_file:
json.dump(json_response, out_file)
return DATA_FILENAME
# Download all fixtures data and write file
def get_fixtures_data():
r = requests.get(FIXTURES_URL)
json_response = r.json()
with open(FIXTURES_FILENAME, 'w') as out_file:
json.dump(json_response, out_file)
return FIXTURES_FILENAME
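# A minimal sketch of running both downloads (assumes the data/ directory
# referenced by the constants above already exists):
if __name__ == '__main__':
    print(get_player_data(use_last_season=False))
    print(get_fixtures_data())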
| 24.974359 | 68 | 0.74846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 352 | 0.361396 |
b54f720607fa63d495bc79cd36045e62028217a1 | 5,587 | py | Python | examples/spawning5.py | MissMeriel/BeamNGpy | a8467c57537441802bc5b56f0012dfee2b5f5af0 | [
"MIT"
]
| 1 | 2021-08-10T19:29:52.000Z | 2021-08-10T19:29:52.000Z | examples/spawning5.py | MissMeriel/BeamNGpy | a8467c57537441802bc5b56f0012dfee2b5f5af0 | [
"MIT"
]
| null | null | null | examples/spawning5.py | MissMeriel/BeamNGpy | a8467c57537441802bc5b56f0012dfee2b5f5af0 | [
"MIT"
]
| null | null | null | from beamngpy import BeamNGpy, Vehicle, Scenario, ScenarioObject
from beamngpy import setup_logging, Config
from beamngpy.sensors import Camera, GForces, Lidar, Electrics, Damage, Timer
import beamngpy
import time, random
# globals
default_model = 'pickup'
default_scenario = 'west_coast_usa' #'cliff' # smallgrid
dt = 20
def spawn_point(scenario_locale):
    if scenario_locale == 'cliff':
        #return {'pos':(-124.806, 142.554, 465.489), 'rot':None, 'rot_quat':(0, 0, 0.3826834, 0.9238795)}
        return {'pos': (-124.806, 190.554, 465.489), 'rot': None, 'rot_quat': (0, 0, 0.3826834, 0.9238795)}
    elif scenario_locale == 'west_coast_usa':
        #return {'pos':(-717.121, 101, 118.675), 'rot':None, 'rot_quat':(0, 0, 0.3826834, 0.9238795)}
        return {'pos': (-717.121, 101, 118.675), 'rot': None, 'rot_quat': (0, 0, 0.918812, -0.394696)}
        #906, 118.78 rot:
    elif scenario_locale == 'smallgrid':
        return {'pos':(0.0, 0.0, 0.0), 'rot':None, 'rot_quat':(0, 0, 0.3826834, 0.9238795)}
def setup_sensors(vehicle):
# Set up sensors
pos = (-0.3, 1, 1.0)
direction = (0, 1, 0)
fov = 120
resolution = (512, 512)
front_camera = Camera(pos, direction, fov, resolution,
colour=True, depth=True, annotation=True)
pos = (0.0, 3, 1.0)
direction = (0, -1, 0)
fov = 90
resolution = (512, 512)
back_camera = Camera(pos, direction, fov, resolution,
colour=True, depth=True, annotation=True)
gforces = GForces()
electrics = Electrics()
damage = Damage()
damage.encode_vehicle_request()
lidar = Lidar(visualized=False)
timer = Timer()
# Attach them
vehicle.attach_sensor('front_cam', front_camera)
vehicle.attach_sensor('back_cam', back_camera)
vehicle.attach_sensor('gforces', gforces)
vehicle.attach_sensor('electrics', electrics)
vehicle.attach_sensor('damage', damage)
vehicle.attach_sensor('timer', timer)
return vehicle
def compare_damage(d1, d2):
for key in d1['damage']:
if d1['damage'][key] != d2['damage'][key]:
print("d1['damage'][{}] == {}; d2['damage'][{}] == {}".format(key, d1['damage'][key], key, d2['damage'][key]))
try:
# handle specific keys
if key == 'deform_group_damage' or key == 'part_damage':
for k in d1['damage'][key].keys():
print("\td1['damage'][{}][{}] == {}; d2['damage'][{}][{}] == {}".format(key, k, d1['damage'][key][k], key, k,
d2['damage'][key][k]))
else:
if d1['damage'][key] < d2['damage'][key]:
print("\td2[damage][{}] is greater".format(key))
else:
print("\td1[damage][{}] is greater".format(key))
except:
continue
print()
return
def backup(cum_list, sec):
#return "1_24"
dt = sec * 5.0
index = len(cum_list) - int(dt)
if index < 0:
index = 0
elif index >= len(cum_list):
        index = len(cum_list) - 1
print("cum_list={}".format(cum_list))
print("index={}".format(index))
#try:
return cum_list[index]
#except:
#return "0_0"
def main():
global default_model, default_scenario
beamng = BeamNGpy('localhost', 64256, home='C:/Users/merie/Documents/BeamNG.research.v1.7.0.1')
#scenario = Scenario('smallgrid', 'spawn_objects_example')
scenario = Scenario(default_scenario, 'research_test', description='Random driving for research')
vehicle = Vehicle('ego_vehicle', model=default_model, licence='PYTHON')
vehicle = setup_sensors(vehicle)
spawn = spawn_point(default_scenario)
scenario.add_vehicle(vehicle, pos=spawn['pos'], rot=spawn['rot'], rot_quat=spawn['rot_quat'])
scenario.make(beamng)
bng = beamng.open()
bng.load_scenario(scenario)
bng.start_scenario()
vehicle.update_vehicle()
d1 = bng.poll_sensors(vehicle)
cum_list = []
bound = 0.0
for i in range(3):
for _ in range(45):
bound = bound + 0.0 # 0.1
# vehicle.save()
vehicle.update_vehicle()
d2 = bng.poll_sensors(vehicle)
throttle = 1.0
#throttle = random.uniform(0.0, 1.0)
steering = random.uniform(-1 * bound, bound)
brake = 0.0 #random.choice([0, 0, 0, 1])
vehicle.control(throttle=throttle, steering=steering, brake=brake)
pointName = "{}_{}".format(i, _)
cum_list.append(pointName)
vehicle.saveRecoveryPoint(pointName)
bng.step(20)
print("SEGMENT #{}: COMPARE DAMAGE".format(i))
damage_diff = compare_damage(d1, d2)
d1 = d2
# "Back up" 1 second -- load vehicle at that time in that position.
backup_pointName = backup(cum_list, 0.001)
        print('recovering to {}'.format(backup_pointName))
loadfile = vehicle.loadRecoveryPoint(backup_pointName)
print('loadfile is {}'.format(loadfile))
bng.pause()
vehicle.update_vehicle()
vehicle.load(loadfile)
#vehicle.load("vehicles/pickup/vehicle.save.json")
bng.resume()
#vehicle.startRecovering()
#time.sleep(1.5)
#vehicle.stopRecovering()
vehicle.update_vehicle()
bng.pause()
time.sleep(2)
# vehicle.load("vehicles/pickup/vehicle.save.json")
bng.resume()
bng.close()
if __name__ == "__main__":
main() | 36.51634 | 133 | 0.583139 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,407 | 0.251835 |
b55212239eec52a949a21799291c19c62811b1b2 | 17,827 | py | Python | steam/check_acf.py | DarkStarSword/junk | f0de649a3ef92715fbc6aeff7599ad843763c32b | [
"MIT"
]
| 19 | 2015-02-02T16:49:32.000Z | 2021-12-04T02:33:10.000Z | steam/check_acf.py | DarkStarSword/junk | f0de649a3ef92715fbc6aeff7599ad843763c32b | [
"MIT"
]
| 1 | 2015-07-11T13:57:34.000Z | 2015-11-16T02:36:23.000Z | steam/check_acf.py | DarkStarSword/junk | f0de649a3ef92715fbc6aeff7599ad843763c32b | [
"MIT"
]
| 4 | 2017-02-06T21:11:17.000Z | 2019-04-04T15:11:50.000Z | #!/usr/bin/env python
from __future__ import print_function
import os, optparse, glob
import depotcache, acf
from ui import ui_tty as ui
import hashlib
import sys
g_indent = ' '
colours = {
False: 'back_red black',
True: ''
}
class UnknownLen(list): pass
def depot_summary_ok(mounted):
if len(mounted) > 0:
return True
return False
def str_depot_summary(mounted, managed):
if isinstance(managed, UnknownLen):
l = ui._ctext('back_yellow black', '?')
else:
l = str(len(managed))
ret = '%i/%s depotcaches mounted' % (len(mounted), l)
if len(mounted) == 0:
ret += ' - Not released on this platform yet?'
return ret
def manifest_filename(depot, timestamp):
return '%s_%s.manifest' % (depot, timestamp)
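# e.g. manifest_filename('228983', '1928377113992970150') returns
# '228983_1928377113992970150.manifest' (illustrative depot and timestamp)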
def manifest_path(library_root, filename):
return os.path.join(library_root, 'depotcache/%s' % filename)
def find_library_root(acf_filename):
return os.path.relpath(os.path.realpath(os.path.join(
os.path.curdir, os.path.dirname(acf_filename), '..')))
def find_steam_path_from_registry(opts, reg):
if opts.verbose:
ui._print("Looking for steam path from registry...")
key = reg.OpenKey(reg.HKEY_CURRENT_USER,
'Software\\Valve\\Steam', 0,
reg.KEY_READ | reg.KEY_WOW64_32KEY)
return reg.QueryValueEx(key, 'SteamPath')[0]
def cygwin_path(path):
import subprocess
return subprocess.check_output(['cygpath.exe', '-u', path]).strip()
def guess_steam_path_win(opts, translate = lambda x: x):
for path in [
r'c:\program files (x86)\steam',
r'c:\program files\steam',
r'c:\steam'
]:
if opts.verbose:
ui._print("Searching '%s'..." % translate(path))
if os.path.isdir(translate(path)):
return path
ui._cprint('red', 'Unable to find Steam root - rerun with --steam-root=')
sys.exit(1)
def find_steam_root(opts, acf_filename = None):
if acf_filename is not None:
# If this library has a depotcache, assume it is also the steam root
# XXX: This could be tricked if someone has created or copied a
# depotcache folder into the library, or if several steam
# installations are sharing libraries. In these cases the user
# will just have to specify --steam-root= to override it.
library_root = find_library_root(acf_filename)
if os.path.isdir(os.path.join(library_root, 'depotcache')):
return library_root
path = None
if sys.platform.startswith('linux'):
path = os.path.expanduser('~/.steam/root')
elif sys.platform == 'cygwin':
try:
import cygwinreg
except ImportError:
if opts.verbose:
ui._print('python-cygwinreg not installed, searching common Steam paths...')
else:
if not hasattr(cygwinreg, 'KEY_WOW64_32KEY'):
cygwinreg.KEY_WOW64_32KEY = 512
path = cygwin_path(find_steam_path_from_registry(opts, cygwinreg))
path = path or guess_steam_path_win(opts, cygwin_path)
elif sys.platform == 'win32':
import _winreg
path = find_steam_path_from_registry(opts, _winreg)
path = path or guess_steam_path_win(opts)
if path:
return path
ui._cprint('red', 'Unable to find Steam root - rerun with --steam-root=')
sys.exit(1)
class FilenameSet(set):
# It may be more efficient to convert the paths to a tree structure,
# but for the moment this is easier.
def add(self, element):
"""
Override add method to ensure all directory components are also
added to the set individually.
"""
set.add(self, element)
dirname = os.path.dirname(element)
if dirname != '':
self.add(dirname)
def verify_file_hash(filename, depot_hash, indent, opts):
if depot_hash.filetype == 'directory':
return os.path.isdir(filename)
s = hashlib.sha1()
f = open(filename, 'rb')
bad_found = False
off = 0
for chunk in sorted(depot_hash):
assert(chunk.off == off)
buf = f.read(chunk.len)
off += chunk.len
s.update(buf)
sha = hashlib.sha1(buf).hexdigest()
if sha != chunk.sha:
if opts.verify == 1:
return False
if not bad_found:
ui._cprint('red', ' (BAD CHECKSUM)')
bad_found = True
ui._print(indent, end='')
ui._cprint('red', '%.10i:%.10i found %s expected %s' % \
(chunk.off, chunk.off+chunk.len, sha, chunk.sha))
assert(off == depot_hash.filesize)
if bad_found:
ui._print(indent, end='')
eof_garbage = False
while True:
buf = f.read(1024*1024)
if buf == '':
break
if not eof_garbage:
ui._cprint('red', ' (Garbage found at end of file!)', end='')
eof_garbage = True
s.update(buf)
if bad_found:
return False
return s.hexdigest() == depot_hash.sha
def verify_manifest_files_exist(manifest_path, game_path, indent, opts):
def verify_hash():
if (opts.verify or opts.delete_bad) and not verify_file_hash(filename, depot_hash, indent+g_indent, opts):
ui._cprint('red', ' (BAD CHECKSUM)', end='')
return True
def check_filesize():
if depot_hash.filetype == 'directory':
return True
return filesize == depot_hash.filesize
def warn_filesize():
if not check_filesize():
ui._cprint('red', ' (Filesize != %i, %+i)' % \
(depot_hash.filesize, filesize - depot_hash.filesize))
return True
ok = True
filenames = FilenameSet()
for (orig_filename, depot_hash) in depotcache.decode_depotcache(manifest_path):
filename = os.path.join(game_path, orig_filename.replace('\\', os.path.sep))
(found, correct, filename, pretty) = insensitive_path(filename, opts)
filenames.add(filename)
if opts.file_filter is not None and orig_filename not in opts.file_filter:
continue
if found:
filesize = os.stat(filename).st_size
corrupt = False
if not correct:
ui._print(indent, end='')
ui._print(pretty, end='')
if found:
corrupt = warn_filesize()
sys.stdout.flush()
corrupt = corrupt or verify_hash()
if corrupt and opts.delete_bad:
ui._cprint('red', ' (DELETED)')
os.remove(filename)
else:
ui._print(' (CASE MISMATCH, ', end='')
if not opts.rename:
ui._print('rerun with -r to fix)')
else:
ui._print('renamed)')
else:
ok = False
ui._print(' (FILE MISSING)')
elif opts.verbose > 2 or opts.verify or opts.delete_bad or not check_filesize():
ui._print(indent + filename, end='')
corrupt = warn_filesize()
sys.stdout.flush()
corrupt = corrupt or verify_hash()
if corrupt and opts.delete_bad:
ui._cprint('red', ' (DELETED)', end='')
os.remove(filename)
ui._print()
return (ok, filenames)
def check_depots_exist(mounted_depots, managed_depots, library_root, indent, opts):
ok = True
num_mounted = 0
for depot in managed_depots:
if depot in mounted_depots:
num_mounted += 1
manifest = manifest_filename(depot, mounted_depots[depot])
path = manifest_path(library_root, manifest)
if not os.path.exists(path):
ui._cprint('red', '%s%s NOT FOUND!' % (indent, manifest), end='')
ui._print(' (Verify the game cache and try again)')
ok = False
elif opts.verbose > 1:
ui._print('%s%s (not mounted)' % (indent, depot))
assert(num_mounted == len(mounted_depots))
return ok
def check_all_depot_files_exist(mounted_depots, library_root, game_path, indent, opts):
ok = True
filenames = set()
for depot in mounted_depots:
manifest = manifest_filename(depot, mounted_depots[depot])
if opts.depot_filter is not None and \
depot not in opts.depot_filter and \
manifest not in opts.depot_filter:
continue
path = manifest_path(library_root, manifest)
if opts.verbose:
ui._print('%s%s' % (indent, manifest))
(all_files_exist, manifest_filenames) = \
verify_manifest_files_exist(path, game_path, indent + g_indent, opts)
filenames.update(manifest_filenames)
ok = ok and all_files_exist
return (ok, filenames)
def mkdir_recursive(path):
if os.path.isdir(path):
return
    dirname = os.path.dirname(path)
mkdir_recursive(dirname)
os.mkdir(path)
def insensitive_path(path, opts):
if os.path.exists(path):
return (True, True, path, path)
basename = os.path.basename(path)
dirname = pretty_dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
(found, correct, dirname, pretty_dirname) = insensitive_path(dirname, opts)
if not found:
return (False, False, dirname, os.path.join(pretty_dirname, basename))
pretty_basename = ''
for entry in os.listdir(dirname):
if entry.lower() == basename.lower():
for i in range(len(entry)):
if entry[i] != basename[i]:
pretty_basename += ui._ctext('back_yellow black', entry[i])
else:
pretty_basename += entry[i]
if opts.rename:
os.rename(os.path.join(dirname, entry), os.path.join(dirname, basename))
return (True, False, os.path.join(dirname, basename), os.path.join(pretty_dirname, pretty_basename))
return (True, False, os.path.join(dirname, entry), os.path.join(pretty_dirname, pretty_basename))
return (False, False, path, ui._ctext('back_red black', path))
def find_extra_files(game_path, known_filenames, indent, opts):
known_filenames_l = set(map(str.lower, known_filenames))
if opts.move:
dest_root = os.path.realpath(os.path.join(game_path, '..'))
dest_root = os.path.join(dest_root, os.path.basename(game_path) + '~EXTRANEOUS')
for (root, dirs, files) in os.walk(game_path, topdown = not (opts.delete or opts.move)):
for fname in dirs + files:
path = os.path.join(root, fname)
if path in known_filenames:
continue
ui._print(indent, end='')
extra='\n'
if opts.move:
if fname in dirs:
try:
os.rmdir(path)
extra = ' (REMOVED)\n'
except OSError as e:
extra = ' %s\n' % str(e)
else:
dest = os.path.join(dest_root, os.path.relpath(path, game_path))
try:
mkdir_recursive(os.path.dirname(dest))
os.rename(path, dest)
extra = '\n%s --> %s\n' % (indent, os.path.relpath(dest))
except OSError as e:
extra = ' %s\n' % str(e)
elif opts.delete:
extra = ' (DELETED)\n'
if fname in dirs:
os.rmdir(path)
else:
os.remove(path)
if path.lower() in known_filenames_l:
ui._cprint('back_blue yellow', path, end=' (DUPLICATE WITH DIFFERING CASE)%s' % extra)
else:
ui._cprint('back_blue yellow', path, end=extra)
def find_game_path(app_state, library_root, acf_filename, opts):
# XXX TODO: acf games can be installed in other libraries, I need to
# try it to find if that would change this logic.
#
# NOTE: There is also a UserConfig.appinstalldir, however it may be
# unreliable if the acf has been copied from another location and the
# game has not yet been launched.
install_dir = app_state['installdir']
if install_dir == '':
ui._cprint('yellow', g_indent + 'WARNING: Blank installdir in %s, trying UserConfig.appinstalldir...' % acf_filename)
install_dir = os.path.basename(app_state['UserConfig']['appinstalldir'])
# Occasionally the install_dir is the full path in the Windows format.
# This seems to happen sometimes when moving games from one install to
# another. AFAICT the full path is never used - the acf file must be in
# the same steam library as the install regardless, so discard the rest
# of the path.
install_dir = install_dir.split('\\')[-1]
(found, correct, game_path, pretty) = insensitive_path(os.path.join(library_root, 'SteamApps/common/%s' %
install_dir), opts)
if found:
# TODO: Warn if a second directory exists with the same name
# but differing case, since that may confuse Steam or the game
pass
else:
ui._print(g_indent, end='')
ui._cprint(colours[False], 'Missing game directory', end=': ')
ui._print(pretty)
return None
if not correct:
ui._print(g_indent, end='')
ui._cprint('back_yellow black', 'WARNING: Case Mismatch', end='')
if not opts.rename:
ui._print(' (rerun with -r to fix)', end='')
ui._print(': ', end='')
ui._print(pretty)
return game_path
def get_installed_depots(app_state):
installed_depots = app_state['InstalledDepots']
return {k: v['manifest'] for k,v in installed_depots.items()}
def get_mounted_depots(app_state):
try:
mounted_depots = app_state['MountedDepots']
except KeyError:
# NOTE: Windows acf files seem to use 'ActiveDepots' instead of
# 'MountedDepots'. Not sure why the difference.
# XXX: Double check 'ActiveDepots' is the right key on
# my Windows box
try:
return app_state['ActiveDepots']
except KeyError:
# Seems some acf files no longer have either Mounted or Active Depots section
return get_installed_depots(app_state)
assert('ActiveDepots' not in app_state)
return mounted_depots
def check_acf(acf_filename, opts):
app_state = acf.parse_acf(acf_filename)['AppState']
if 'appID' in app_state:
app_id = app_state['appID']
else:
app_id = app_state['appid']
try:
name = app_state['UserConfig']['name']
	except KeyError:
name = app_state['name']
ui._print('%s (%s):' % (name, app_id))
library_root = find_library_root(acf_filename)
game_path = find_game_path(app_state, library_root, acf_filename, opts)
if game_path is None: return
mounted_depots = get_mounted_depots(app_state)
try:
managed_depots = app_state['ManagedDepots'].split(',')
except KeyError:
#ui._cprint('back_yellow black', 'WARNING: No ManagedDepots, using MountedDepots instead!')
managed_depots = UnknownLen(mounted_depots.keys())
ok = depot_summary_ok(mounted_depots)
colour = colours[ok]
if opts.verbose or not ok:
ui._print(g_indent, end='')
ui._cprint(colour, str_depot_summary(mounted_depots, managed_depots))
if not ok:
if opts.uninstall:
ui._print(g_indent, end='')
path = os.path.join(os.path.curdir, acf_filename)
os.rename(path, path + '~')
ui._cprint('back_yellow black', 'UNINSTALLED!')
return
ok = check_depots_exist(mounted_depots, managed_depots, opts.steam_root, g_indent*2, opts)
if not ok: return
(ok, filenames) = check_all_depot_files_exist(mounted_depots, opts.steam_root, game_path, g_indent*2, opts)
if opts.extra or opts.delete or opts.move:
if opts.verbose: # So they don't appear to be under a manifest heading
ui._print(g_indent*2 + 'Untracked files:')
find_extra_files(game_path, filenames, g_indent*3, opts)
if not ok: return
ui._cprint('green', 'OK')
def main():
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='count',
		help='Print out info about things that passed. Use multiple times for more info.')
parser.add_option('-r', '--rename', action='store_true',
help='Rename files & directories to correct case mismatches')
parser.add_option('-e', '--extra', '--extraneous', action='store_true',
help='List any files in the game directory that are not tracked by any manifest files. Extraneous files are highlighted in ' + \
ui._ctext('back_blue yellow', 'blue'))
parser.add_option('--verify', action='count',
help='Validate files integrity (Note: may show false positives if a file is in multiple depots). Specify twice to identify corrupt chunks.')
parser.add_option('--file-filter', action='append',
help='Specify file to check. Useful with --verify on large games when the bad files are already known. Can be specified multiple times.')
parser.add_option('--depot-filter', action='append',
help='Specify which mounted depots to process. Can be specified multiple times.')
	# '-d': Interactively delete (implies -e) files that are not listed in the manifest file
parser.add_option('-D', '--delete', action='store_true',
help='Delete any extraneous files, without asking for confirmation (implies -e). CAUTION: Some games may store legitimate files in their directory that are not tracked by Steam which this option will delete. BE CAREFUL WITH THIS OPTION!')
parser.add_option('--delete-bad', action='store_true',
help='Delete any files with bad checksums, without asking for confirmation (implies --verify). CAUTION: Some games may store legitimate configuration files in their directory which this option may delete, potentially losing settings. BE CAREFUL WITH THIS OPTION!')
parser.add_option('-M', '--move', action='store_true', help="Move any extraneous files to SteamApps/common/game~EXTRANEOUS (implies -e). rsync may be used to merge them back into the game directory later.")
parser.add_option('-U', '--uninstall', action='store_true',
help="Mark games with bad acf files (Currently that means 0 depotcaches mounted, but that definition may change in the future) as uninstalled. This WILL NOT DELETE THE GAME - it is intended to quickly remove bad acf files that may be interfering with launching or updating particular games. These games will need to be manually re-installed in Steam. (NOTE: Restart Steam afterwards)")
parser.add_option('--steam-root',
help="Specify where Steam is installed. This is usually detected automatically based on the acf path, but it may be necessary to specify it if working with games installed in an alternate steam library and this script can't find the game's manifest files.")
# TODO:
	# '--verify': Mark game as needing verification on next launch (XXX: What option is that in the .acf? XXX: What happens if Steam is running at the time?)
# Also, when I can do this it might be an idea for some of the above rename/delete options to imply this.
(opts, args) = parser.parse_args()
	# TODO: If a directory is specified, interactively ask which game to check by name (maybe change the default to do this too)
if opts.file_filter is not None:
opts.file_filter = [ x.replace('/', '\\') for x in opts.file_filter ]
if len(args) == 0:
if opts.steam_root is None:
opts.steam_root = find_steam_root(opts)
args = glob.glob(os.path.join(opts.steam_root, 'SteamApps/appmanifest_*.acf'))
elif opts.steam_root is None:
opts.steam_root = find_steam_root(opts, args[0])
else:
opts.steam_root = os.path.expanduser(opts.steam_root)
if opts.verbose:
ui._print("Using Steam root: '%s'" % opts.steam_root)
for filename in args:
check_acf(filename, opts)
ui._print()
if __name__ == '__main__':
main()
# vi:noet:ts=8:sw=8
| 35.941532 | 388 | 0.710664 | 398 | 0.022326 | 0 | 0 | 0 | 0 | 0 | 0 | 5,998 | 0.336456 |
b5523d39a4d4c8cb3b8be163ac345c9888bb29a1 | 178 | py | Python | reference/old/distance-simple.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null | reference/old/distance-simple.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null | reference/old/distance-simple.py | Art31/trekking-pro-cefetrj | 37ab58759b42978cbd8d950bd75c487e1292cb2b | ["Apache-1.1"] | null | null | null |
from gpiozero import DistanceSensor
from time import sleep
sensor = DistanceSensor(echo=23, trigger=22)
while True:
print('Distance: ', sensor.distance * 100)
sleep(1)
| 19.777778 | 46 | 0.735955 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.067416 |
b5526b9490a6617e9343309ab67db978943793e5 | 1,070 | py | Python | SmallTips/RemoveDuplication.py | Akasan/PythonTips | eee85c35fd25576c7b2b01af838749608bf8989c | ["MIT"] | null | null | null | SmallTips/RemoveDuplication.py | Akasan/PythonTips | eee85c35fd25576c7b2b01af838749608bf8989c | ["MIT"] | null | null | null | SmallTips/RemoveDuplication.py | Akasan/PythonTips | eee85c35fd25576c7b2b01af838749608bf8989c | ["MIT"] | null | null | null |
import pickle
def remove_duplicate_from_list(data):
""" remove duplications from specific list
any data can be contained in the data.
if the data is hashable, you can implement this function easily like below.
data = list(set(data))
but if the data is unhashable, you have to implement in other ways.
This function use pickle.dumps to convert any data to binary.
Binary data is hashable, so after that, we can implement like with hashable data.
Arguments:
data {list(any)} -- list that contains any type of data
Returns:
{list(any)} -- list that contains any type of data without duplications
"""
pickled_data = [pickle.dumps(d) for d in data]
removed_pickled_data = list(set(pickled_data))
result = [pickle.loads(d) for d in removed_pickled_data]
return result
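# Caveat: deduplication is by pickle byte-equality, not by ==. Two equal dicts
# built in different insertion orders pickle to different bytes (e.g.
# pickle.dumps({1: 'a', 2: 'b'}) != pickle.dumps({2: 'b', 1: 'a'})), so both
# would be kept, and output order is arbitrary because the bytes pass through a set.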
if __name__ == "__main__":
data = [1, 2, 2, 3, 2, 2, 2, 6]
print(remove_duplicate_from_list(data))
data = ["hoge", 1, "hdf", 3.4, "hoge", 2, 2, 2]
print(remove_duplicate_from_list(data))
| 36.896552 | 89 | 0.66729 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 651 | 0.608411 |
b5533e6640dc60d29a04f82e1a7722aa55036807 | 7,226 | py | Python | ultraviolet_cli/commands/fixtures.py | mnyrop/ultraviolet-cli | f177adde71a899ca6775bd4673d30e19ccdb2a30 | ["MIT"] | 1 | 2022-02-08T18:28:30.000Z | 2022-02-08T18:28:30.000Z | ultraviolet_cli/commands/fixtures.py | mnyrop/ultraviolet-cli | f177adde71a899ca6775bd4673d30e19ccdb2a30 | ["MIT"] | null | null | null | ultraviolet_cli/commands/fixtures.py | mnyrop/ultraviolet-cli | f177adde71a899ca6775bd4673d30e19ccdb2a30 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2022 NYU Libraries.
#
# ultraviolet-cli is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio module for custom UltraViolet commands."""
import click
import glob
import json
import os
import requests
import sys
from jsonschema import Draft4Validator
from time import sleep
from urllib3.exceptions import InsecureRequestWarning
from .. import config, utils
# Suppress InsecureRequestWarning warnings from urllib3.
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
def create_record_draft(metadata, api, token):
sleep(1)
try:
r = requests.get(api, timeout=5, verify=False)
r.raise_for_status()
except requests.exceptions.RequestException as e:
print(f'Couldn\'t connect to api at {api}. Is the application running?')
raise SystemExit(e)
headers = {
'content-type': 'application/json',
'authorization': f'Bearer {token}'
}
response = requests.post(url=api,
data=json.dumps(metadata),
headers=headers,
verify=False)
response.raise_for_status()
return response.json()
def delete_record_draft(pid, api, token):
sleep(1)
url = '/'.join((api.strip('/'), pid, 'draft'))
try:
r = requests.get(api, timeout=5, verify=False)
r.raise_for_status()
except requests.exceptions.RequestException as e:
print(f'Couldn\'t connect to api at {api}. Is the application running?')
raise SystemExit(e)
headers = {
'authorization': f'Bearer {token}'
}
try:
response = requests.delete(url=url, headers=headers, verify=False)
        return response
    except requests.exceptions.RequestException:
        print(f'Unable to delete draft with pid {pid}')
def publish_record(record_metadata, access_token):
sleep(1)
url = record_metadata['links']['publish']
headers = {
'authorization': f'Bearer {access_token}'
}
response = requests.post(url=url,
headers=headers,
verify=False)
return response.json()
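# Illustrative flow (hypothetical endpoint and metadata, for reference only):
#   draft = create_record_draft(metadata, 'https://127.0.0.1:5000/api/records', token)
#   record = publish_record(draft, token)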
@click.group()
def fixtures():
"""
An entry point for fixtures subcommands, e.g., ingest, purge
"""
pass
@fixtures.command()
@click.option('-a', '--api', required=True, type=str,
default=config.DEFAULT_RECORDS_API_URL,
help=f'Invenio REST API base URL. Default={config.DEFAULT_RECORDS_API_URL}')
@click.option('-d', '--dir', required=True,
type=click.Path(exists=True),
default=config.DEFAULT_FIXTURES_DIR, help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-o', '--output', required=True, type=str,
default=config.DEFAULT_FIXTURES_OUTFILE,
help=f'Where new fixture pid mappings will be written')
@click.option('-t', '--token', help='REST API token')
def ingest(api, dir, output, token):
"""
Post local dir of UV fixture draft records via REST API.
"""
click.secho('REST API: ', nl=False, bold=True, fg='green')
click.secho(api)
click.secho('Fixtures directory: ', nl=False, bold=True, fg='green')
click.secho(dir)
if token is None:
token = utils.token_from_user(email=config.DEFAULT_FIXTURES_USER, name='default-su-token')
click.secho('Auth Token: ', nl=False, bold=True, fg='green')
click.secho(token)
records = glob.glob(f'{dir}/**/*.json', recursive=True)
click.secho(f'\nFound {len(records)} records', nl=True, bold=True, fg='blue')
results = json.loads(open(output).read()) if os.path.exists(output) else {}
for file in records:
click.secho(f'Posting record from {file}', nl=True, fg='blue')
dict = json.loads(open(file).read())
draft = create_record_draft(dict, api, token)
uv_id = os.path.dirname(file).split('/')[-1]
results[draft['id']] = uv_id
os.makedirs(os.path.dirname(output), exist_ok=True)
with open(output, "w") as f:
json.dump(results, f)
# record = publish_record(draft, token)
@fixtures.command()
@click.option('-a', '--api', required=True, type=str,
default=config.DEFAULT_RECORDS_API_URL,
help=f'Invenio REST API base URL. Default={config.DEFAULT_RECORDS_API_URL}')
@click.option('-d', '--dir', required=True,
type=click.Path(exists=True),
default=config.DEFAULT_FIXTURES_DIR, help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-o', '--output', required=True, type=str,
default=config.DEFAULT_FIXTURES_OUTFILE,
help=f'Where new fixture pid mappings will be written')
@click.option('-t', '--token', help='REST API token')
def purge(api, dir, output, token):
"""
Delete all UV fixture draft records via REST API.
"""
click.secho('REST API: ', nl=False, bold=True, fg='green')
click.secho(api)
if token is None:
token = utils.token_from_user(email=config.DEFAULT_FIXTURES_USER, name='default-su-token')
click.secho('Auth Token: ', nl=False, bold=True, fg='green')
click.secho(token)
results = json.loads(open(output).read()) if os.path.exists(output) else {}
for pid, uv_id in results.copy().items():
res = delete_record_draft(pid, api, token)
        if res is not None and res.ok:
            click.secho(f'Deleting draft record {uv_id} aka {pid}', nl=True, bold=True, fg='blue')
results.pop(pid)
os.makedirs(os.path.dirname(output), exist_ok=True)
with open(output, "w") as f:
json.dump(results, f)
@fixtures.command()
@click.option('-d', '--dir', required=True,
type=click.Path(exists=True),
default=config.DEFAULT_FIXTURES_DIR, help=f'Path to directory of fixtures. Default={config.DEFAULT_FIXTURES_DIR}')
@click.option('-s', '--schema-file', required=True,
type=click.Path(exists=True),
default=config.DEFAULT_SCHEMA_PATH, help=f'Path to json schema. Default={config.DEFAULT_SCHEMA_PATH}')
def validate(dir, schema_file):
"""
Validate local dir of fixture records against JSON schema.
"""
click.secho('Fixtures directory: ', nl=False, bold=True, fg='green')
click.secho(dir)
click.secho('JSON Schema: ', nl=False, bold=True, fg='green')
click.secho(schema_file)
records = glob.glob(f'{dir}/**/*.json', recursive=True)
click.secho(f'\nFound {len(records)} records', nl=True, bold=True, fg='blue')
schema = json.loads(open(schema_file).read())
Draft4Validator.check_schema(schema)
validator = Draft4Validator(schema, format_checker=None)
for file in records:
dict = json.loads(open(file).read())
try:
validator.validate(dict)
click.secho(f'{file} passes', nl=True, fg='blue')
except BaseException as error:
click.secho(f'{file} fails', nl=True, fg='red')
print('An exception occurred: {}'.format(error))
| 33.146789 | 128 | 0.63313 | 0 | 0 | 0 | 0 | 4,900 | 0.678107 | 0 | 0 | 2,124 | 0.293939 |
b553c83a33407279287a7fa673284b12f5a363b1 | 1,320 | py | Python | consts.py | twyair/hebrew-verb-inflection | a85773a008ea3fc1b8c36f92ac3100315edee6b6 | ["MIT"] | null | null | null | consts.py | twyair/hebrew-verb-inflection | a85773a008ea3fc1b8c36f92ac3100315edee6b6 | ["MIT"] | null | null | null | consts.py | twyair/hebrew-verb-inflection | a85773a008ea3fc1b8c36f92ac3100315edee6b6 | ["MIT"] | null | null | null |
from __future__ import annotations
from enum import Enum, auto
class Paradigm(Enum):
NONE = auto()
KFULIM = auto()
KFULIM_2 = auto() # used only for HUFAL
NO_PREFIX = auto() # used for words like 'hUnDA!s', 'hU_wA!n', 'nI_sa!H', 'nI_qa!H'
PE_ALEF = auto() # used only for PAAL
PAAL_1 = auto()
PAAL_2 = auto()
PAAL_3 = auto() # some of the verbs that start with "[QRhj]"
PAAL_4 = auto()
PAAL_5 = auto() # some of the verbs that end with "a!Q"
def is_kfulim(self) -> bool:
return self in (Paradigm.KFULIM, Paradigm.KFULIM_2)
def is_paal(self) -> bool:
return self in (
Paradigm.PE_ALEF,
Paradigm.PAAL_1,
Paradigm.PAAL_2,
Paradigm.PAAL_3,
Paradigm.PAAL_4,
Paradigm.PAAL_5,
)
class Binyan(Enum):
PAAL = auto()
PIEL = auto()
PUAL = auto()
NIFAL = auto()
HIFIL = auto()
HUFAL = auto()
HITPAEL = auto()
class Pronoun(Enum):
ANI = auto()
ATA = auto()
AT = auto()
HU = auto()
HI = auto()
ANACNU = auto()
ATEM = auto()
ATEN = auto()
HEM = auto()
HEN = auto()
# TODO: rename
class Present(Enum):
MALE_SINGULAR = auto()
MALE_PLURAL = auto()
FEMALE_SINGULAR = auto()
FEMALE_PLURAL = auto()
| 22 | 88 | 0.559848 | 1,230 | 0.931818 | 0 | 0 | 0 | 0 | 0 | 0 | 202 | 0.15303 |
b55c52f4a65b506287148bb4b6f73c63cbf60fe9 | 266 | py | Python | cbsettings/exceptions.py | matthewwithanm/django-classbasedsettings | 4b208f1c73a2acedc5cd3bfa2b73541607ed9ce8 | ["MIT"] | 23 | 2015-02-17T13:35:33.000Z | 2020-10-02T07:06:24.000Z | cbsettings/exceptions.py | matthewwithanm/django-classbasedsettings | 4b208f1c73a2acedc5cd3bfa2b73541607ed9ce8 | ["MIT"] | 8 | 2015-12-23T19:42:49.000Z | 2021-10-01T20:13:40.000Z | cbsettings/exceptions.py | matthewwithanm/django-classbasedsettings | 4b208f1c73a2acedc5cd3bfa2b73541607ed9ce8 | ["MIT"] | 4 | 2015-12-23T19:17:39.000Z | 2020-09-27T19:29:13.000Z |
class SettingsFactoryDoesNotExist(Exception):
pass
class InvalidSettingsFactory(Exception):
pass
class NoMatchingSettings(Exception):
"""Raised when a suitable settings class cannot be found."""
pass
class InvalidCondition(Exception):
pass
| 16.625 | 64 | 0.75188 | 256 | 0.962406 | 0 | 0 | 0 | 0 | 0 | 0 | 60 | 0.225564 |
b55d244aa62443aced945674009694fb76ee238b | 1,834 | py | Python | src/function_manager/function_manager.py | lzjzx1122/FaaSFlow | c4a32a04797770c21fe6a0dcacd85ac27a3d29ec | ["Apache-2.0"] | 24 | 2021-12-02T01:00:54.000Z | 2022-03-27T00:50:28.000Z | src/function_manager/function_manager.py | lzjzx1122/FaaSFlow | c4a32a04797770c21fe6a0dcacd85ac27a3d29ec | ["Apache-2.0"] | null | null | null | src/function_manager/function_manager.py | lzjzx1122/FaaSFlow | c4a32a04797770c21fe6a0dcacd85ac27a3d29ec | ["Apache-2.0"] | 3 | 2021-12-02T01:00:47.000Z | 2022-03-04T07:33:09.000Z |
import gevent
import docker
import os
from function_info import parse
from port_controller import PortController
from function import Function
import random
repack_clean_interval = 5.000 # repack and clean every 5 seconds
dispatch_interval = 0.005 # 200 qps at most
# the class for scheduling functions' inter-operations
class FunctionManager:
def __init__(self, config_path, min_port):
self.function_info = parse(config_path)
self.port_controller = PortController(min_port, min_port + 4999)
self.client = docker.from_env()
self.functions = {
x.function_name: Function(self.client, x, self.port_controller)
for x in self.function_info
}
self.init()
def init(self):
print("Clearing previous containers.")
os.system('docker rm -f $(docker ps -aq --filter label=workflow)')
gevent.spawn_later(repack_clean_interval, self._clean_loop)
gevent.spawn_later(dispatch_interval, self._dispatch_loop)
def _clean_loop(self):
gevent.spawn_later(repack_clean_interval, self._clean_loop)
for function in self.functions.values():
gevent.spawn(function.repack_and_clean)
def _dispatch_loop(self):
gevent.spawn_later(dispatch_interval, self._dispatch_loop)
for function in self.functions.values():
gevent.spawn(function.dispatch_request)
def run(self, function_name, request_id, runtime, input, output, to, keys):
# print('run', function_name, request_id, runtime, input, output, to, keys)
if function_name not in self.functions:
raise Exception("No such function!")
return self.functions[function_name].send_request(request_id, runtime, input, output, to, keys)
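# Minimal usage sketch (hypothetical config path and arguments, for illustration
# only -- real values come from the workflow configuration):
#   manager = FunctionManager('config/functions.yaml', min_port=20000)
#   manager.run('resize', request_id='r1', runtime='python', input=..., output=..., to=..., keys=...)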
| 37.428571 | 104 | 0.681025 | 1,497 | 0.816249 | 0 | 0 | 0 | 0 | 0 | 0 | 289 | 0.157579 |
b55f0296b5c70a3898760614021a220983b1ac36 | 333 | py | Python | getall.py | bransorem/Magic-Scraper | f68ae25122391efe440c8652119d146a75be4cea | ["Unlicense"] | 2 | 2018-07-16T21:24:34.000Z | 2020-12-19T21:34:00.000Z | getall.py | bransorem/Magic-Scraper | f68ae25122391efe440c8652119d146a75be4cea | ["Unlicense"] | null | null | null | getall.py | bransorem/Magic-Scraper | f68ae25122391efe440c8652119d146a75be4cea | ["Unlicense"] | null | null | null |
import sets
import scan_set
import os
path = 'ids/'
setlist = os.listdir(path)
def getall(set_name):
	ids = scan_set.scan_set(set_name)
	scan_set.write_ids(set_name, ids)
for set_name in sets.set_info:
	s = set_name + '.txt'
	if s not in setlist:
		print("Getting " + set_name)
		getall(set_name)
print("\n\nCompletely Finished........")
| 15.857143 | 39 | 0.615616 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55 | 0.165165 |
b55f0ffd6458d9da1434363a2f94293d840e899b | 6,717 | py | Python | MalmoEnv/run.py | chemgymrl/malmo | 207e2530ec94af46450ba6d0e62d691ade91e282 | ["MIT"] | 1 | 2022-02-17T07:58:06.000Z | 2022-02-17T07:58:06.000Z | MalmoEnv/run.py | chemgymrl/malmo | 207e2530ec94af46450ba6d0e62d691ade91e282 | ["MIT"] | null | null | null | MalmoEnv/run.py | chemgymrl/malmo | 207e2530ec94af46450ba6d0e62d691ade91e282 | ["MIT"] | null | null | null |
# ------------------------------------------------------------------------------------------------
# Copyright (c) 2018 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
import os
import numpy as np
import matplotlib.pyplot as plt
import malmoenv
import argparse
from pathlib import Path
import time
from PIL import Image
from stable_baselines3.common import results_plotter
from stable_baselines3.common.monitor import Monitor
from stable_baselines3.common.results_plotter import load_results, ts2xy, plot_results
from stable_baselines3.common.noise import NormalActionNoise
from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.env_checker import check_env
from stable_baselines3 import PPO
class SaveOnBestTrainingRewardCallback(BaseCallback):
"""
Callback for saving a model (the check is done every ``check_freq`` steps)
based on the training reward (in practice, we recommend using ``EvalCallback``).
    :param check_freq: Number of steps between reward checks.
    :param log_dir: Path to the folder where the model will be saved.
      It must contain the file created by the ``Monitor`` wrapper.
:param verbose: Verbosity level.
"""
def __init__(self, check_freq: int, log_dir: str, verbose: int = 1):
super(SaveOnBestTrainingRewardCallback, self).__init__(verbose)
self.check_freq = check_freq
self.log_dir = log_dir
self.save_path = os.path.join(log_dir, 'best_model')
self.best_mean_reward = -np.inf
# def _init_callback(self) -> None:
# # # Create folder if needed
# # if self.save_path is not None:
# # os.makedirs(self.save_path, exist_ok=True)
def _on_step(self) -> bool:
if self.n_calls % self.check_freq == 0:
# Retrieve training reward
x, y = ts2xy(load_results(self.log_dir), 'timesteps')
if len(x) > 0:
# Mean training reward over the last 100 episodes
mean_reward = np.mean(y[-100:])
if self.verbose > 0:
print(f"Num timesteps: {self.num_timesteps}")
print(f"Best mean reward: {self.best_mean_reward:.2f} - Last mean reward per episode: {mean_reward:.2f}")
# New best model, you could save the agent here
if mean_reward > self.best_mean_reward:
self.best_mean_reward = mean_reward
# Example for saving best model
if self.verbose > 0:
print(f"Saving new best model to {self.save_path}")
self.model.save(self.save_path)
return True
log_dir = "tmp/"
os.makedirs(log_dir, exist_ok=True)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='malmoenv test')
parser.add_argument('--mission', type=str, default='missions/jumping.xml', help='the mission xml')
parser.add_argument('--port', type=int, default=9000, help='the mission server port')
parser.add_argument('--server', type=str, default='127.0.0.1', help='the mission server DNS or IP address')
parser.add_argument('--port2', type=int, default=None, help="(Multi-agent) role N's mission port. Defaults to server port.")
parser.add_argument('--server2', type=str, default=None, help="(Multi-agent) role N's server DNS or IP")
    parser.add_argument('--episodes', type=int, default=100, help='the number of resets to perform - default is 100')
parser.add_argument('--episode', type=int, default=0, help='the start episode - default is 0')
parser.add_argument('--role', type=int, default=0, help='the agent role - defaults to 0')
parser.add_argument('--episodemaxsteps', type=int, default=100, help='max number of steps per episode')
parser.add_argument('--saveimagesteps', type=int, default=0, help='save an image every N steps')
parser.add_argument('--resync', type=int, default=0, help='exit and re-sync every N resets'
' - default is 0 meaning never.')
parser.add_argument('--experimentUniqueId', type=str, default='test1', help="the experiment's unique id.")
args = parser.parse_args()
if args.server2 is None:
args.server2 = args.server
xml = Path(args.mission).read_text()
env = malmoenv.make()
env.init(xml, args.port,
server=args.server,
server2=args.server2, port2=args.port2,
role=args.role,
exp_uid=args.experimentUniqueId,
episode=args.episode, resync=args.resync)
env = Monitor(env, log_dir)
# print("checking env")
check_env(env, True)
s = SaveOnBestTrainingRewardCallback(2000, log_dir)
# print("checked env")
model = PPO("MlpPolicy", env, verbose=1, tensorboard_log="./ppo_test_tensorboard/")
#model.load("tmp/best_model.zip")
model.learn(total_timesteps=100000, callback=s, reset_num_timesteps=False)
# print("trained and saved model")
# for i in range(args.episodes):
# print("reset " + str(i))
# obs = env.reset()
# steps = 0
# done = False
# while not done and (args.episodemaxsteps <= 0 or steps < args.episodemaxsteps):
# # h, w, d = env.observation_space.shape
# # print(done)
# action, _states = model.predict(obs, deterministic=True)
# # action = env.action_space.sample()
# obs, reward, done, info = env.step(action)
# steps += 1
# # print("reward: " + str(reward))
# # print(obs)
# time.sleep(.05)
env.close()
| 46.645833 | 128 | 0.650737 | 1,867 | 0.277951 | 0 | 0 | 0 | 0 | 0 | 0 | 3,538 | 0.526723 |
b55f2629add10c43d98efae9012f1f13e3691bd5 | 1,172 | py | Python | example/wrapper/common/5001-get_tgpio_digital.py | krasin/xArm-Python-SDK-ssh | 9c854e8bfa78d0e91b67efbab79f733ddf19e916 | [
"BSD-3-Clause"
]
| 62 | 2018-11-30T05:53:32.000Z | 2022-03-20T13:15:22.000Z | example/wrapper/common/5001-get_tgpio_digital.py | krasin/xArm-Python-SDK-ssh | 9c854e8bfa78d0e91b67efbab79f733ddf19e916 | [
"BSD-3-Clause"
]
| 25 | 2019-08-12T18:53:41.000Z | 2021-12-28T10:17:39.000Z | example/wrapper/common/5001-get_tgpio_digital.py | krasin/xArm-Python-SDK-ssh | 9c854e8bfa78d0e91b67efbab79f733ddf19e916 | [
"BSD-3-Clause"
]
| 43 | 2019-01-03T04:47:13.000Z | 2022-03-18T06:40:59.000Z | #!/usr/bin/env python3
# Software License Agreement (BSD License)
#
# Copyright (c) 2019, UFACTORY, Inc.
# All rights reserved.
#
# Author: Vinman <[email protected]> <[email protected]>
"""
Example: Get GPIO Digital
"""
import os
import sys
import time
sys.path.append(os.path.join(os.path.dirname(__file__), '../../..'))
from xarm.wrapper import XArmAPI
from configparser import ConfigParser
parser = ConfigParser()
parser.read('../robot.conf')
try:
ip = parser.get('xArm', 'ip')
except Exception:
ip = input('Please input the xArm ip address[192.168.1.194]:')
if not ip:
ip = '192.168.1.194'
arm = XArmAPI(ip)
time.sleep(0.5)
if arm.warn_code != 0:
arm.clean_warn()
if arm.error_code != 0:
arm.clean_error()
last_digitals = [-1, -1]
while arm.connected and arm.error_code != 19 and arm.error_code != 28:
code, digitals = arm.get_tgpio_digital()
if code == 0:
if digitals[0] == 1 and digitals[0] != last_digitals[0]:
print('IO0 input high level')
if digitals[1] == 1 and digitals[1] != last_digitals[1]:
print('IO1 input high level')
last_digitals = digitals
time.sleep(0.1)
| 23.44 | 70 | 0.648464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 365 | 0.311433 |
b560237f424501f39681590b33c61e9846076455 | 2,429 | py | Python | sdk/python/pulumi_lxd/__init__.py | soupdiver/pulumi-lxd | 258395aefd6a4cf138d470d7de70babed3310063 | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_lxd/__init__.py | soupdiver/pulumi-lxd | 258395aefd6a4cf138d470d7de70babed3310063 | ["ECL-2.0", "Apache-2.0"] | null | null | null | sdk/python/pulumi_lxd/__init__.py | soupdiver/pulumi-lxd | 258395aefd6a4cf138d470d7de70babed3310063 | ["ECL-2.0", "Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from . import _utilities
import typing
# Export this package's modules as members:
from .cached_image import *
from .container import *
from .container_file import *
from .network import *
from .profile import *
from .provider import *
from .publish_image import *
from .snapshot import *
from .storage_pool import *
from .volume import *
from .volume_container_attach import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_lxd.config as config
else:
config = _utilities.lazy_import('pulumi_lxd.config')
_utilities.register(
resource_modules="""
[
{
"pkg": "lxd",
"mod": "index/profile",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/profile:Profile": "Profile"
}
},
{
"pkg": "lxd",
"mod": "index/storagePool",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/storagePool:StoragePool": "StoragePool"
}
},
{
"pkg": "lxd",
"mod": "index/volumeContainerAttach",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/volumeContainerAttach:VolumeContainerAttach": "VolumeContainerAttach"
}
},
{
"pkg": "lxd",
"mod": "index/cachedImage",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/cachedImage:CachedImage": "CachedImage"
}
},
{
"pkg": "lxd",
"mod": "index/container",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/container:Container": "Container"
}
},
{
"pkg": "lxd",
"mod": "index/network",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/network:Network": "Network"
}
},
{
"pkg": "lxd",
"mod": "index/volume",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/volume:Volume": "Volume"
}
},
{
"pkg": "lxd",
"mod": "index/containerFile",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/containerFile:ContainerFile": "ContainerFile"
}
},
{
"pkg": "lxd",
"mod": "index/publishImage",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/publishImage:PublishImage": "PublishImage"
}
},
{
"pkg": "lxd",
"mod": "index/snapshot",
"fqn": "pulumi_lxd",
"classes": {
"lxd:index/snapshot:Snapshot": "Snapshot"
}
}
]
""",
resource_packages="""
[
{
"pkg": "lxd",
"token": "pulumi:providers:lxd",
"fqn": "pulumi_lxd",
"class": "Provider"
}
]
"""
)
| 19.58871 | 87 | 0.627419 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,865 | 0.767806 |
b56057ff5dbd4cdc1d25d244ff87b18b26455492 | 544 | py | Python | 49-group anagrams/main.py | ytong82/leetcode | 34e08c430d654b14b1608211f74702f57e507189 | ["Apache-2.0"] | null | null | null | 49-group anagrams/main.py | ytong82/leetcode | 34e08c430d654b14b1608211f74702f57e507189 | ["Apache-2.0"] | null | null | null | 49-group anagrams/main.py | ytong82/leetcode | 34e08c430d654b14b1608211f74702f57e507189 | ["Apache-2.0"] | null | null | null |
class Solution:
    def groupAnagrams(self, strs):
        if len(strs) == 0:
            return []
        groups = dict()  # sorted characters -> indices of anagrams
        for i in range(len(strs)):
            key = ''.join(sorted(strs[i]))
            if key in groups:
                groups[key].append(i)
            else:
                groups[key] = [i]
        res = []
        for key in groups:
            res.append([strs[k] for k in groups[key]])
        return res
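    # Complexity: sorting each string makes this O(n * k log k); for lowercase
    # input, a 26-slot character-count tuple key would reduce it to O(n * k).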
strs = ["eat", "tea", "tan", "ate", "nat", "bat"]
sol = Solution()
print(sol.groupAnagrams(strs))
| 22.666667 | 51 | 0.443015 | 444 | 0.816176 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.058824 |
b561af012e5087c35cc2997a33fe02fbbdb5ae5d | 2,429 | py | Python | vending.py | mit-dci/litvending | 28f8f2b51691eac7c69de153aafbe72663d9892c | [
"MIT"
]
| 1 | 2018-06-20T01:42:54.000Z | 2018-06-20T01:42:54.000Z | vending.py | mit-dci/litvending | 28f8f2b51691eac7c69de153aafbe72663d9892c | [
"MIT"
]
| null | null | null | vending.py | mit-dci/litvending | 28f8f2b51691eac7c69de153aafbe72663d9892c | [
"MIT"
]
| 1 | 2022-02-15T06:48:15.000Z | 2022-02-15T06:48:15.000Z | #!/usr/bin/env python3
import os
import time
import sys
gpio = None
try:
import RPi.GPIO
gpio = RPi.GPIO
except:
print('RPi library not found. We\'re probably on a dev machine. Moving on...')
import lvconfig
import litrpc
# This could be more efficient, we're making a lot more requests than we need to.
def check_deposit(cointype):
bals = conn.balance()['Balances']
sum = 0
for b in bals:
if b['CoinType'] == int(cointype):
# I'm not sure how this works, can it return dupes?
sum += b['ChanTotal'] + b['TxoTotal']
return sum
def main(cfg):
if cfg['trigger_pin_num'] == -1:
print('You need to configure me first. Come back later.')
sys.exit(1)
# Find important commonly-used variables.
trigger_pin = cfg['trigger_pin_num']
sleep_time = cfg['pin_high_time']
deposit_delay = cfg['deposit_delay_time']
# Set up the GPIO pins.
if gpio is not None:
gpio.setmode(gpio.BOARD)
gpio.setwarnings(False)
gpio.setup(trigger_pin, gpio.OUT)
# Set up the connection and connect.
print('Connecting to lit at', cfg['lit_ip'], 'on port', cfg['lit_port'])
global conn
conn = litrpc.LitClient(cfg['lit_ip'], cfg['lit_port'])
print('Set up client.')
# Then just enter the main loop.
print('Waiting for payment...')
last_bal = {}
for ty in cfg['coin_type_ids']:
last_bal[ty] = -1
while True:
# First figure out how much might have been sent to us.
to_insert = 0
for ty in cfg['coin_type_ids']:
bal = check_deposit(ty)
if last_bal[ty] != -1:
diff = bal - last_bal[ty]
if diff <= 0: # when we withdraw it would break everything
continue
unit_cost = cfg['unit_costs'][ty]
units = int(diff // unit_cost)
extra = diff - units * unit_cost
to_insert += units
print('Balance for', ty, 'is now', bal, ', got a spend of', diff, 'sat worth', units, 'units with an extra', extra, 'sat left over')
last_bal[ty] = bal
# Then send that many quarters.
if to_insert != 0:
print('Total to insert:', to_insert)
if gpio is not None:
for i in range(to_insert):
# Just turn it on, wait a bit, and turn it off.
gpio.output(trigger_pin, gpio.HIGH)
time.sleep(sleep_time)
gpio.output(trigger_pin, gpio.LOW)
time.sleep(deposit_delay)
print('Done')
else:
print('Not running on RPi, doing nothing!')
else:
print('No payment')
time.sleep(cfg['poll_rate'])
if __name__ == '__main__':
main(lvconfig.load_config())
| 26.11828 | 136 | 0.669823 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,025 | 0.421984 |
b563b116c6a5501886a56db83d13d435b9bc033e | 3,070 | py | Python | python-study/2.py | yu1k/python-study | 0bf2e2927696f58f36c05602446ecd71e31596e3 | [
"MIT"
]
| null | null | null | python-study/2.py | yu1k/python-study | 0bf2e2927696f58f36c05602446ecd71e31596e3 | [
"MIT"
]
| null | null | null | python-study/2.py | yu1k/python-study | 0bf2e2927696f58f36c05602446ecd71e31596e3 | [
"MIT"
]
| null | null | null | #*** 文字列 ***
#Pythonで文字列を作るには, ' (シングルクォーテーション)もしくは, " (ダブルクォーテーション)で囲む.
print('some eggs')
print("some eggs")
print('some eggs\nsome eggs')
#a == b は aとb同値であればTrue, そうでなければFalseを返す演算子です
print('some eggs' == "some eggs") #True
#'...' の中で ' ,または "..." の中で " を使う場合には,
#各記号のまえに \ (バックスラッシュ) を入力する.
print("I don't Know him") #I don't know him
print('"Python"') #"Python"
print("I don\'t know him") #I don't know him
print("\"Python\"") #"Python"
#\nは改行文字を表す. \nは一文字として扱われる.
print("一行目\n二行目")
#一行目
#二行目
#\nを改行文字としてではなく,
#バックスラッシュ+nという文字列として使いたい場合は\\nと入力するか,
#引用符の前にrをつけます.
print("一行目\\n二行目")
#一行目\n二行目
print(r"一行目\n二行目")
#一行目\n二行目
#複数行の文字列を作りたいときは,
#三連引用符("""...""" または ''' ... ''')を利用する.
#改行は自動で含まれますが, 行末に \ を付けることで改行を無視することができる
print("""改行あり
改行\
なし""")
#改行あり
#改行なし
#文字列にも演算子がある.
#+は文字列を連結して1つの文字列を作る.
print("a lot of" + " eggs")
#a lot of eggs
#* は文字列を繰り返します
print("Python" * 3)
#PythonPythonPython
#文字列も変数に代入して使うことができる.
first_name = "太郎"
last_name = "ドワンゴ"
print(first_name + last_name)
#太郎ドワンゴ
#*** インデックス, スライス ***
#文字列は添字表記,
#(インデックス表記, つまり, ある文字列の何文字目かを指定)することができる.
#最初の文字は 0番目になる.
word = "Python"
print(word)
#Python
#インデックスの指定には[]を使用します.
#1文字目(0番目)
print(word[0])
#P
#5文字目(4番目)
print(word[4])
#o
#添字には負の数を指定するこもでき,
#その場合は右から数えます. ただし, 0と-0は等しいので, 負の添字は-1から始まることに注意する.
#最終文字
print(word[-1])
#n
#後ろから2文字目
print(word[-2])
#o
#まとめると正のインデックスと負のインデックスの関係は以下のようになる.
# P y t h o n
#正のインデックス 0 1 2 3 4 5
#負のインデックス -0 -5 -4 -3 -2 -1
#上の例にもあるように, oという文字は正のインデックスでは4番目, 負のインデックスでは, -2番目になる.
#文字列のi番目からj番目までというように一部を切り出して取得することもできます. これをスライスと呼ぶ.
#以下の例では, 0番目から1番目の文字列を取り出します.
#終端の添字は1つ大きいことに注意する.
#つまり開始番目の文字は含まれ, 終了番目は文字は列に含まれない(終了番目の1つ前まで取り出される)
print(word[0:2])
#Py
#開始番目を省略すると0とみなされます.
#先頭から2番目まで
print(word[:3])
#Pyt
#逆に終了添字を省略すると文字列の最後までとみなされます.
#3番目から最後まで
print(word[3:])
#hon
print(word[:3] + word[3:])
#Python
#文字列の長さより大きい番目を指定した場合は範囲外エラーが発生します.
#print(word[42])
#文字列の長さの取得にはlen()関数を使います.
print("length:", len(word))
#length: 6
#ただし, スライスを利用した場合はエラーは発生せず適切に処理されます.
print(word[4:42])
#on
#またPythonの文字列は後から変更することができません.
#word[0] = "J"
#文字列の変更をするためには, 例えば修正したい文字列を再定義し直します.
#1文字目をJにして, 以降はword[1:]を使う
word = "J" + word[1:]
print(word)
#Jython
#*** Format ***
#ここでは文字列を特定のフォーマットで出力する方法を見ていきます.
#print()関数を使って文字と数値などを同時に表示したいということがあると思います.
#Pythonではそのような場合,
#フォーマット済み文字列リテラル(f-string)というのを利用する.
#名前が大げさですが, これは文字列を作るときに接頭辞としてfまたはFを付けることで生成される文字列である.
#これらの文字列には波括弧{}を使って変数や式を埋め込むことができる.
#下の例では{word}の部分を変数wordの内容で置き換える.
word = "Python"
print(f"Hello {word}")
#Hello Python
#{}の中では変数だけでなくPythonの気泡をそのまま使うことができます.
print(f"length: {len(word)}")
#length: 6
print(f"slice: {word[:2]}")
#slice: Py
#また数値であれば小数点以下の表示する桁数, 桁を揃えるために0や空白で埋める,
#配置を中央に揃えるなど様々なフォーマットの文字列を作成することができる.
pi = 3.14159265359
#そのまま表示
print(f"πの値は{pi}です")
#πの値は3.14159265359です
#小数点以下2桁まで
print(f"πの値は{pi:.2f}です")
#πの値は3.14です
#最大10桁で不足分は空白で埋める
print(f"πの値は{pi:10.2f}です")
#πの値は 3.14です
#最大5桁で不足分は0で埋める
print(f"πの値は{pi:05.2f}です")
#πの値は03.14です
#右寄せ 空白埋め
print(f"'{word:>10s}'")
#' Python'
#中央揃え
print(f"'{word:^10s}'")
#' Python ' | 17.150838 | 59 | 0.70684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,108 | 0.888039 |
b5656eebed6c2a64ded792a644ba9d21bbe79fe1 | 570 | py | Python | tests/test_delete_query.py | RaduG/fluentql | 653a77bb95b40724eb58744f5f8dbed9c88eaebd | [
"MIT"
]
| 4 | 2020-04-15T10:50:03.000Z | 2021-07-22T12:23:50.000Z | tests/test_delete_query.py | RaduG/fluentql | 653a77bb95b40724eb58744f5f8dbed9c88eaebd | [
"MIT"
]
| 2 | 2020-05-24T08:54:56.000Z | 2020-05-24T09:04:31.000Z | tests/test_delete_query.py | RaduG/fluentql | 653a77bb95b40724eb58744f5f8dbed9c88eaebd | [
"MIT"
]
| null | null | null | import pytest
from fluentql import GenericSQLDialect, Q
from fluentql.types import Table
test_table = Table("test_table")
@pytest.fixture
def dialect_cls():
return GenericSQLDialect
@pytest.mark.parametrize(
["q", "expected"],
[
(Q.delete().from_(test_table), "delete from test_table;"),
(
Q.delete().from_(test_table).where(test_table["col1"] > 100),
"delete from test_table where col1 > 100;",
),
],
)
def test_delete_query(q, expected, dialect_cls):
assert q.compile(dialect_cls) == expected
| 21.111111 | 73 | 0.652632 | 0 | 0 | 0 | 0 | 439 | 0.770175 | 0 | 0 | 98 | 0.17193 |
b56b02915f5cdfb61babcb70fc1c32bc2970b2fa | 597 | py | Python | Section02/ParsingChart.py | fosterleejoe/Developing-NLP-Applications-Using-NLTK-in-Python | f2cac32c02d0632fb89f32446388ef15d9926bbc | [
"MIT"
]
| 67 | 2017-11-23T18:48:47.000Z | 2022-03-29T08:03:25.000Z | Section02/ParsingChart.py | fosterleejoe/Developing-NLP-Applications-Using-NLTK-in-Python | f2cac32c02d0632fb89f32446388ef15d9926bbc | [
"MIT"
]
| null | null | null | Section02/ParsingChart.py | fosterleejoe/Developing-NLP-Applications-Using-NLTK-in-Python | f2cac32c02d0632fb89f32446388ef15d9926bbc | [
"MIT"
]
| 49 | 2017-12-06T16:10:14.000Z | 2021-11-25T09:02:49.000Z | from nltk.grammar import CFG
from nltk.parse.chart import ChartParser, BU_LC_STRATEGY
grammar = CFG.fromstring("""
S -> T1 T4
T1 -> NNP VBZ
T2 -> DT NN
T3 -> IN NNP
T4 -> T3 | T2 T3
NNP -> 'Tajmahal' | 'Agra' | 'Bangalore' | 'Karnataka'
VBZ -> 'is'
IN -> 'in' | 'of'
DT -> 'the'
NN -> 'capital'
""")
cp = ChartParser(grammar, BU_LC_STRATEGY, trace=True)
sentence = "Bangalore is the capital of Karnataka"
tokens = sentence.split()
chart = cp.chart_parse(tokens)
parses = list(chart.parses(grammar.start()))
print("Total Edges :", len(chart.edges()))
for tree in parses: print(tree)
tree.draw()
| 22.961538 | 56 | 0.676717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.403685 |
b56b4c5922bce77528a0bba1e6eec4b2613a9cca | 62 | py | Python | msm_pele/AdaptivePELE/docs/makehtml.py | danielSoler93/msm_pele | 80b187ceb6446059f6c7b0dd2c0968f0db4a17a1 | [
"MIT"
]
| 13 | 2017-06-14T14:42:22.000Z | 2022-01-25T08:46:04.000Z | msm_pele/AdaptivePELE/docs/makehtml.py | danielSoler93/msm_pele | 80b187ceb6446059f6c7b0dd2c0968f0db4a17a1 | [
"MIT"
]
| 16 | 2018-01-16T01:32:02.000Z | 2021-02-19T17:05:12.000Z | msm_pele/AdaptivePELE/docs/makehtml.py | danielSoler93/msm_pele | 80b187ceb6446059f6c7b0dd2c0968f0db4a17a1 | [
"MIT"
]
| 8 | 2018-02-20T10:47:07.000Z | 2022-03-21T12:28:07.000Z | from sphinx import cmdline
import sys
cmdline.main(sys.argv)
| 12.4 | 26 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
b56c623a069eaa852720532015deec19073b3d1a | 5,526 | py | Python | sirbot/slack/wrapper.py | Ovvovy/sirbot-slack | 2d27e49cfbc2cb12e87ef3814823d2ad68d0a788 | [
"MIT"
]
| 7 | 2017-05-06T11:37:25.000Z | 2018-11-22T09:46:32.000Z | sirbot/slack/wrapper.py | Ovvovy/sirbot-slack | 2d27e49cfbc2cb12e87ef3814823d2ad68d0a788 | [
"MIT"
]
| 19 | 2017-05-07T16:25:02.000Z | 2017-09-22T08:02:59.000Z | sirbot/slack/wrapper.py | Ovvovy/sirbot-slack | 2d27e49cfbc2cb12e87ef3814823d2ad68d0a788 | [
"MIT"
]
| 3 | 2017-05-06T11:37:28.000Z | 2017-07-07T09:32:54.000Z | import logging
from .store.user import User
from .errors import SlackInactiveDispatcher, SlackNoThread
logger = logging.getLogger(__name__)
class SlackWrapper:
"""
A class to compose all available functionality of the slack plugin.
    An instance is offered to every incoming message across all plugins to
    allow cross-service messaging.
"""
def __init__(self, http_client, users, channels, groups, messages, threads,
bot, dispatcher):
self._http_client = http_client
self._threads = threads
self._dispatcher = dispatcher
self.messages = messages
self.users = users
self.channels = channels
self.groups = groups
self.bot = bot
async def send(self, *messages):
"""
Send the messages provided and update their timestamp
:param messages: Messages to send
"""
for message in messages:
message.frm = self.bot
if self.bot.type == 'rtm' and isinstance(message.to, User):
await self.users.ensure_dm(message.to)
if message.response_url:
# Message with a response url are response to actions or slash
# commands
data = message.serialize(type_='response')
await self._http_client.response(
data=data,
url=message.response_url
)
elif isinstance(message.to, User) and self.bot.type == 'rtm':
data = message.serialize(type_='send', to=self.bot.type)
message.raw = await self._http_client.message_send(
data=data,
token='bot'
)
elif isinstance(message.to, User) and self.bot.type == 'event':
data = message.serialize(type_='send', to=self.bot.type)
message.raw = await self._http_client.message_send(data=data)
else:
data = message.serialize(type_='send', to=self.bot.type)
message.raw = await self._http_client.message_send(data=data)
async def update(self, *messages):
"""
Update the messages provided and update their timestamp
:param messages: Messages to update
"""
for message in messages:
if isinstance(message.to, User):
await self.users.ensure_dm(message.to)
message.frm = self.bot
message.subtype = 'message_changed'
message.raw = await self._http_client.message_update(
message=message)
message.ts = message.raw.get('ts')
# await self._save_outgoing_message(message)
async def delete(self, *messages):
"""
Delete the messages provided
:param messages: Messages to delete
"""
for message in messages:
message.timestamp = await self._http_client.message_delete(message)
async def add_reaction(self, message, reaction):
"""
Add a reaction to a message
:Example:
>>> chat.add_reaction(Message, 'thumbsup')
Add the thumbup and robotface reaction to the message
:param messages: List of message and reaction to add
"""
await self._http_client.add_reaction(message, reaction)
async def delete_reaction(self, message, reaction):
"""
Delete reactions from messages
:Example:
>>> chat.delete_reaction(Message, 'thumbsup')
Delete the thumbup and robotface reaction from the message
:param messages: List of message and reaction to delete
"""
await self._http_client.delete_reaction(message, reaction)
async def get_reactions(self, message):
"""
Query the reactions of messages
:param messages: Messages to query reaction from
:return: dictionary of reactions by message
:rtype: dict
"""
reactions = await self._http_client.get_reaction(message)
for reaction in reactions:
reaction['users'] = [
self.users.get(id_=user_id)
for user_id in reaction.get('users', list())
]
message.reactions = reactions
return reactions
def add_action(self, id_, func, public=False):
if 'action' in self._dispatcher:
self._dispatcher['action'].register(id_, func, public=public)
else:
raise SlackInactiveDispatcher
def add_event(self, event, func):
if 'event' in self._dispatcher:
self._dispatcher['event'].register(event, func)
else:
raise SlackInactiveDispatcher
def add_command(self, command, func):
if 'command' in self._dispatcher:
self._dispatcher['command'].register(command, func)
else:
raise SlackInactiveDispatcher
def add_message(self, match, func, flags=0, mention=False, admin=False,
channel_id='*'):
        if 'message' in self._dispatcher:
self._dispatcher['message'].register(match, func, flags, mention,
admin, channel_id)
else:
raise SlackInactiveDispatcher
def add_thread(self, message, func, user_id='all'):
if message.thread or message.timestamp:
self._threads[message.thread or message.timestamp][user_id] = func
else:
raise SlackNoThread()
| 32.892857 | 79 | 0.596091 | 5,381 | 0.97376 | 0 | 0 | 0 | 0 | 3,545 | 0.641513 | 1,497 | 0.270901 |
b56d3d57d3b008ef213624e96067cf823658819f | 4,321 | py | Python | rc/returninfo/classifier.py | ddangelorb/gthbmining | a7d18623cd14a2ffd2508a4bb6a71b06a5f26215 | ["MIT"] | 4 | 2019-09-17T02:53:51.000Z | 2020-10-23T14:48:16.000Z | rc/returninfo/classifier.py | ddangelorb/gthbmining | a7d18623cd14a2ffd2508a4bb6a71b06a5f26215 | ["MIT"] | null | null | null | rc/returninfo/classifier.py | ddangelorb/gthbmining | a7d18623cd14a2ffd2508a4bb6a71b06a5f26215 | ["MIT"] | null | null | null |
import warnings
warnings.filterwarnings('ignore') #ignore warnings to print values properly
import logging
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn import metrics
from datetime import datetime
from plotter import Plotter
class Classifier:
# constructor
def __init__(self, conn, repo_user, repo_name):
self.conn = conn
self.repository_id = self._get_repository_id(repo_user, repo_name)
self.dic_classifier = {
'decisiontree': ["../output/decisiontreeplot.png", "Decision Tree", DecisionTreeClassifier(criterion="entropy", max_depth=3)],
'naivebayes': ["../output/nbplot.png", "Naive Bayes", GaussianNB()],
'knn': ["../output/knnplot.png", "K-Nearest Neighbors (3)", KNeighborsClassifier(n_neighbors=3)]
}
logging.basicConfig(filename="../output/returninfo.log", level=logging.INFO)
def _get_repository_id(self, repo_user, repo_name):
cursor_conn = self.conn.cursor()
sql = "SELECT Id FROM Repositories WHERE Name = ?"
cursor_conn.execute(sql, ["{}/{}".format(repo_user, repo_name)])
id = 0
cursor_fetch = cursor_conn.fetchone()
if cursor_fetch:
id = cursor_fetch[0]
return id
def _print_scores(self, classifier, X, y, test_size):
# Split dataset into training set and test set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size, random_state=1)
        # Train the classifier on the training set
classifier.fit(X_train, y_train)
# Predict the response for test dataset
y_pred = classifier.predict(X_test)
print(" Accuracy:", metrics.accuracy_score(y_test, y_pred))
logging.info(" Accuracy: {}".format(metrics.accuracy_score(y_test, y_pred)))
print(" F1-Score:", metrics.f1_score(y_test, y_pred))
logging.info(" F1-Score: {}".format(metrics.f1_score(y_test, y_pred)))
print(" Precision:", metrics.precision_score(y_test, y_pred))
logging.info(" Precision: {}".format(metrics.precision_score(y_test, y_pred)))
print(" Recall:", metrics.recall_score(y_test, y_pred))
logging.info(" Recall: {}".format(metrics.recall_score(y_test, y_pred)))
#print(" Confusion Matrix:", metrics.confusion_matrix(y_test, y_pred))
def classify(self, classifier_key):
if classifier_key in self.dic_classifier:
dic_item = self.dic_classifier[classifier_key]
classifier_path_plot_file = dic_item[0]
classifier_name = dic_item[1]
classifier_obj = dic_item[2]
print("repository_id = '{}'".format(self.repository_id))
#Get X, y arrays for classification, normalized data
sql = "SELECT AuthorInfluencer, ClosedIssues, ClosedPullRequests, ClosedIssuesInfluencer, ClosedPullRequestsInfluencer, PrereleaseClass FROM ReleasesData WHERE IdRepository = ?;"
dataset = pd.read_sql_query(sql, self.conn, params=str(self.repository_id))
X = dataset[['ClosedIssuesInfluencer', 'ClosedPullRequestsInfluencer']]
y = dataset['PrereleaseClass'] # contains the values from the "Class" column
self._print_scores(classifier_obj, X, y, test_size = 0.2)
plotter = Plotter(classifier_name, classifier_obj, X, y)
plotter.plot(classifier_path_plot_file)
print("File '{}' plotted from current data and classifier '{}'".format(classifier_path_plot_file, classifier_name))
logging.info("File '{}' plotted from current data and classifier '{}'".format(classifier_path_plot_file, classifier_name))
else:
print("{} :: classifier_key{} not found. Supported ones are: 'decisiontree', 'naivebayes', 'knn'".format(datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), classifier_key))
logging.info("{} :: classifier_key{} not found. Supported ones are: 'decisiontree', 'naivebayes', 'knn'".format(datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), classifier_key))
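# Illustrative usage (hypothetical sqlite3 connection and repository names):
#   clf = Classifier(sqlite3.connect('gthbmining.db'), 'octocat', 'hello-world')
#   clf.classify('decisiontree')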
| 50.244186 | 190 | 0.682249 | 3,813 | 0.882435 | 0 | 0 | 0 | 0 | 0 | 0 | 1,315 | 0.304328 |
b56d8510f015d44a803fb673140f624e3488a4d1 | 326 | py | Python | pctiler/pctiler/colormaps/mtbs.py | hobu/planetary-computer-apis | 27f5b8ce78737f43b306fa4738007c207a329b5b | [
"MIT"
]
| 1 | 2021-11-02T16:13:43.000Z | 2021-11-02T16:13:43.000Z | pctiler/pctiler/colormaps/mtbs.py | moradology/planetary-computer-apis-1 | 81a666e843cd0d8592708b35e1360fb68815816d | [
"MIT"
]
| null | null | null | pctiler/pctiler/colormaps/mtbs.py | moradology/planetary-computer-apis-1 | 81a666e843cd0d8592708b35e1360fb68815816d | [
"MIT"
]
| null | null | null | from typing import Dict, List
mtbs_colormaps: Dict[str, Dict[int, List[int]]] = {
"mtbs-severity": {
0: [0, 0, 0, 0],
1: [0, 100, 0, 255],
2: [127, 255, 212, 255],
3: [255, 255, 0, 255],
4: [255, 0, 0, 255],
5: [127, 255, 0, 255],
6: [255, 255, 255, 255],
},
}
| 23.285714 | 51 | 0.435583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 0.046012 |
b56dd907e3a9ba7c7134351a3ded86b0fead6823 | 183 | py | Python | run.py | sgilhuly/mire | 8ac07af9083831a03a1901c1bb655932111ae4cf | [
"MIT"
]
| 2 | 2020-06-15T10:51:43.000Z | 2020-08-02T07:38:44.000Z | run.py | sgilhuly/mire | 8ac07af9083831a03a1901c1bb655932111ae4cf | [
"MIT"
]
| null | null | null | run.py | sgilhuly/mire | 8ac07af9083831a03a1901c1bb655932111ae4cf | [
"MIT"
]
| 1 | 2018-05-15T04:45:37.000Z | 2018-05-15T04:45:37.000Z | import sys
from app import app, socketio
if __name__ == "__main__":
if len(sys.argv) > 1:
port = int(sys.argv[1])
else:
port=5000
socketio.run(app, host="0.0.0.0", port=port) | 18.3 | 45 | 0.661202 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19 | 0.103825 |
b56fc2f3040d889070f9fe524690dd7b2af07b3c | 1,004 | py | Python | pyFoam/extractForces.py | mjsauvinen/P4US | ba7bbc77a6e482f612ba5aa5f021a41fcbb23345 | [
"MIT"
]
| 4 | 2017-06-10T13:34:29.000Z | 2021-10-08T14:33:43.000Z | pyFoam/extractForces.py | mjsauvinen/P4US | ba7bbc77a6e482f612ba5aa5f021a41fcbb23345 | [
"MIT"
]
| 8 | 2018-07-10T12:00:49.000Z | 2021-09-16T13:58:59.000Z | pyFoam/extractForces.py | mjsauvinen/P4US | ba7bbc77a6e482f612ba5aa5f021a41fcbb23345 | [
"MIT"
]
| 6 | 2019-05-03T07:29:12.000Z | 2022-01-21T03:10:27.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import numpy as np
import pylab as pl
from txtTools import openIOFile
# =*=*=*=* FUNCTION DEFINITIONS *=*=*=*=*=*=*=*=*=*=*=*
def isolateValues( line , stripChars ):
    v = []
    sl = line.split()
    for i in range(len(sl)):
        for sc in stripChars:
            sl[i] = sl[i].strip(sc)
    for s in sl:
        v.append(float(s))
    return v
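# Example: isolateValues("(1.0 2.0) (3.0)", ['(', ')']) returns [1.0, 2.0, 3.0]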
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
try:
factor = sys.argv[1]
except:
factor = 1.
factor = float(factor)
f = openIOFile('forces.dat', 'r')
oc = openIOFile('forces.cmp', 'w')
ot = openIOFile('forces.tot', 'w')
lines = f.readlines()
spr = ['(',')']
Fx = np.zeros(4,float)
for l in lines[1:]:
    x = np.array(isolateValues(l,spr))
    if( len(x) == 13 ):
        x.tofile(oc,sep=" \t"); oc.write("\n")
        Fx[0] = x[0]
        for i in range(1,len(Fx)):
            Fx[i]=factor*(x[i]+x[i+3]) # Pressure + Viscous
        Fx.tofile(ot, sep=" \t"); ot.write("\n")
f.close(); oc.close(); ot.close() | 20.489796 | 55 | 0.531873 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 243 | 0.242032 |
b5708d1fb897807ff8443226c727f7b6941ec2ec | 580 | py | Python | fluentcheck/tests/tests_is/test_basic_checks_is.py | jstoebel/fluentcheck | 9258dab4f46776b5df50528f5028ce2d11a443c5 | [
"MIT"
]
| 83 | 2018-05-31T13:21:06.000Z | 2022-03-20T14:27:49.000Z | fluentcheck/tests/tests_is/test_basic_checks_is.py | jstoebel/fluentcheck | 9258dab4f46776b5df50528f5028ce2d11a443c5 | [
"MIT"
]
| 20 | 2019-02-10T15:07:44.000Z | 2021-04-02T13:18:52.000Z | fluentcheck/tests/tests_is/test_basic_checks_is.py | jstoebel/fluentcheck | 9258dab4f46776b5df50528f5028ce2d11a443c5 | [
"MIT"
]
| 11 | 2019-02-16T21:33:11.000Z | 2022-03-25T03:39:52.000Z | import unittest
from fluentcheck import Is
from fluentcheck.exceptions import CheckError
# noinspection PyStatementEffect
class TestIsBasicChecks(unittest.TestCase):
def test_is_none_pass(self):
self.assertIsInstance(Is(None).none, Is)
def test_is_none_fail(self):
with self.assertRaises(CheckError):
Is("I am not none").none
def test_is_not_none_pass(self):
self.assertIsInstance(Is("I am not none").not_none, Is)
def test_is_not_none_fail(self):
with self.assertRaises(CheckError):
Is(None).not_none
| 26.363636 | 63 | 0.712069 | 455 | 0.784483 | 0 | 0 | 0 | 0 | 0 | 0 | 62 | 0.106897 |
b57121e74eb14c59a54321fc4b77f18535e2b4a3 | 414 | py | Python | back_to_back/apps/calculator/migrations/0003_calculation_last_occurrence.py | mhotwagner/back-to-back | ce8e0a58406d4fd6c20ecc71b8d1c49e6851f752 | [
"MIT"
]
| null | null | null | back_to_back/apps/calculator/migrations/0003_calculation_last_occurrence.py | mhotwagner/back-to-back | ce8e0a58406d4fd6c20ecc71b8d1c49e6851f752 | [
"MIT"
]
| null | null | null | back_to_back/apps/calculator/migrations/0003_calculation_last_occurrence.py | mhotwagner/back-to-back | ce8e0a58406d4fd6c20ecc71b8d1c49e6851f752 | [
"MIT"
]
| null | null | null | # Generated by Django 2.0.2 on 2018-02-18 17:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('calculator', '0002_calculation_add_occurrences'),
]
operations = [
migrations.AddField(
model_name='calculation',
name='last_occurrence',
field=models.DateTimeField(auto_now=True),
),
]
| 21.789474 | 59 | 0.625604 | 321 | 0.775362 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0.297101 |
b57208d1dd6013bfe4af88d2691bee115d3cc089 | 627 | py | Python | api/interests/models.py | sorinburghiu2323/Supervisio | df0b682d031914904547efafb3ec4d060bf96526 | [
"MIT"
]
| null | null | null | api/interests/models.py | sorinburghiu2323/Supervisio | df0b682d031914904547efafb3ec4d060bf96526 | [
"MIT"
]
| null | null | null | api/interests/models.py | sorinburghiu2323/Supervisio | df0b682d031914904547efafb3ec4d060bf96526 | [
"MIT"
]
| null | null | null | from django.db import models
from django.core.exceptions import ValidationError
from rest_framework.exceptions import ValidationError as DRFValidationError
from api.models import TimestampedModel
class Interest(TimestampedModel):
name = models.CharField(max_length=255, unique=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.name = self.name.lower()
try:
self.validate_unique()
except ValidationError:
raise DRFValidationError({"detail": "Name must be unique."})
return super(Interest, self).save(*args, **kwargs)
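# Behaviour sketch: Interest(name="Django").save() stores "django"; saving a
# second Interest whose lowercased name collides raises a DRF ValidationError
# with {"detail": "Name must be unique."}.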
| 29.857143 | 75 | 0.700159 | 427 | 0.681021 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.047847 |
b572d60eef0c25ee409f0d4d3f9822ab96421622 | 3,652 | py | Python | src/eddington_gui/boxes/initial_guess_box.py | tsarpaul/eddington-gui | 6022c28fde22a97d97e0bcbe357a45d168bdf251 | [
"Apache-2.0"
]
| null | null | null | src/eddington_gui/boxes/initial_guess_box.py | tsarpaul/eddington-gui | 6022c28fde22a97d97e0bcbe357a45d168bdf251 | [
"Apache-2.0"
]
| null | null | null | src/eddington_gui/boxes/initial_guess_box.py | tsarpaul/eddington-gui | 6022c28fde22a97d97e0bcbe357a45d168bdf251 | [
"Apache-2.0"
]
| null | null | null | """Box for specifying initial guess for the fitting algorithm."""
from typing import Callable, List, Optional
import numpy as np
import toga
from eddington import EddingtonException
from toga.style import Pack
from eddington_gui.boxes.line_box import LineBox
from eddington_gui.consts import SMALL_INPUT_WIDTH
class InitialGuessBox(LineBox):
"""Visual box for specifying initial guess."""
main_label: toga.Label
initial_guess_labels: List[toga.Label] = []
initial_guess_inputs: List[toga.TextInput] = []
__n: int = 0
__a0: Optional[np.ndarray] = None
__on_initial_guess_change: Optional[Callable[[], None]] = None
def __init__(self, on_initial_guess_change):
"""Initial box."""
super().__init__()
self.on_initial_guess_change = on_initial_guess_change
self.main_label = toga.Label(text="Initial Guess:")
self.add(self.main_label)
@property
def n(self): # pylint: disable=invalid-name
"""Getter of the expected number of parameters."""
return self.__n
@n.setter
def n(self, n): # pylint: disable=invalid-name
"""Setter of the expected number of parameters."""
self.reset_initial_guess()
old_n = 0 if self.__n is None else self.__n
self.__n = n
if self.n > len(self.initial_guess_inputs):
for i in range(len(self.initial_guess_inputs), self.n):
self.initial_guess_labels.append(toga.Label(f"a[{i}]:"))
self.initial_guess_inputs.append(
toga.TextInput(
style=Pack(width=SMALL_INPUT_WIDTH),
on_change=lambda widget: self.reset_initial_guess(),
)
)
if old_n < self.n:
for i in range(old_n, self.n):
self.add(self.initial_guess_labels[i], self.initial_guess_inputs[i])
if self.n < old_n:
for i in range(self.n, old_n):
self.remove(self.initial_guess_labels[i], self.initial_guess_inputs[i])
@property
def a0(self): # pylint: disable=invalid-name
"""Getter of the initial guess."""
if self.__a0 is None:
self.__calculate_a0()
return self.__a0
@a0.setter
def a0(self, a0): # pylint: disable=invalid-name
"""
Setter of the initial guess.
Whenever a new initial guess is set, run handlers to update dependant
components.
"""
self.__a0 = a0
if self.on_initial_guess_change is not None:
self.on_initial_guess_change()
@property
def on_initial_guess_change(self):
"""on_initial_guess_change getter."""
return self.__on_initial_guess_change
@on_initial_guess_change.setter
def on_initial_guess_change(self, on_initial_guess_change):
"""on_initial_guess_change setter."""
self.__on_initial_guess_change = on_initial_guess_change
def reset_initial_guess(self):
"""Reset the initial guess."""
self.a0 = None # pylint: disable=invalid-name
def __calculate_a0(self):
if self.n is None:
return
try:
a0_values = [
self.initial_guess_inputs[i].value.strip() for i in range(self.n)
]
if all([value == "" for value in a0_values]):
return
self.a0 = np.array(list(map(float, a0_values)))
except ValueError as exc:
raise EddingtonException(
"Unable to parse initial guess. "
"Initial guess should be written as floats."
) from exc
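# Parsing sketch: with n == 2 and inputs "1.5" / "2.0", __calculate_a0 sets
# a0 to np.array([1.5, 2.0]); all-blank inputs leave a0 unset, and any
# non-numeric input raises an EddingtonException.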
| 34.780952 | 87 | 0.618839 | 3,337 | 0.913746 | 0 | 0 | 2,019 | 0.552848 | 0 | 0 | 773 | 0.211665 |
b5742eb898932211cf75e05e216d0c94c86949cb | 418 | py | Python | examples/select.py | GBS3/cues | 09bce776f9275b71a4028e5c59103e45d81ebed6 | [
"MIT"
]
| 1 | 2021-09-13T02:29:43.000Z | 2021-09-13T02:29:43.000Z | examples/select.py | giosali/cues | 09bce776f9275b71a4028e5c59103e45d81ebed6 | [
"MIT"
]
| null | null | null | examples/select.py | giosali/cues | 09bce776f9275b71a4028e5c59103e45d81ebed6 | [
"MIT"
]
| 1 | 2021-05-26T04:35:47.000Z | 2021-05-26T04:35:47.000Z | """
examples.select
===============
An example that demonstrates the Select child class.
"""
from cues.cues import Select
def main():
name = 'programming_language'
message = 'Which of these is your favorite programming language?'
options = ['Python', 'JavaScript', 'C++', 'C#']
cue = Select(name, message, options)
answer = cue.send()
print(answer)
if __name__ == '__main__':
main()
| 18.173913 | 69 | 0.629187 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 209 | 0.5 |
b576d5aece893c6e7258835bd199ea37f39fabca | 6,685 | py | Python | ckanext/satreasury/tests/test_plugin.py | OpenUpSA/ckanext-satreasury | b6e71009ee5fdfe8cc477304b10536b5af614de6 | [
"MIT"
]
| 1 | 2019-09-08T08:56:54.000Z | 2019-09-08T08:56:54.000Z | ckanext/satreasury/tests/test_plugin.py | vulekamali/ckanext-satreasury | b6e71009ee5fdfe8cc477304b10536b5af614de6 | [
"MIT"
]
| 3 | 2020-03-24T17:05:46.000Z | 2021-02-02T22:01:44.000Z | ckanext/satreasury/tests/test_plugin.py | vulekamali/ckanext-satreasury | b6e71009ee5fdfe8cc477304b10536b5af614de6 | [
"MIT"
]
| 1 | 2019-05-30T19:30:22.000Z | 2019-05-30T19:30:22.000Z | import json
import unittest
from functools import partial
import ckan.model as model
import responses
from ckanext.satreasury.plugin import SATreasuryDatasetPlugin
from mock import MagicMock, Mock, PropertyMock, patch
TRAVIS_ENDPOINT = "https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal"
TRAVIS_COMMIT_MESSAGE = 'Rebuild with new/modified dataset'
TRAVIS_WEB_URL = "https://travis-ci.org/vulekamali/static-budget-portal/builds/"
class TestNotifyMethod(unittest.TestCase):
@responses.activate
def setUp(self):
self.entity = Mock(spec=model.Package)
self.entity.owner_org = PropertyMock(return_value=True)
self.plugin = SATreasuryDatasetPlugin()
flash_success_patch = patch(
'ckanext.satreasury.plugin.ckan_helpers.flash_success')
self.flash_success_mock = flash_success_patch.start()
flash_error_patch = patch(
'ckanext.satreasury.plugin.ckan_helpers.flash_error')
self.flash_error_mock = flash_error_patch.start()
self.addCleanup(flash_success_patch.stop)
@patch(
'ckanext.satreasury.plugin.travis.build_trigger_enabled',
return_value=True)
def test_notify_already_building(self, build_trigger_enabled_mock):
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
json={
'builds': [
{
'id': 535878234,
'commit': {
'message': TRAVIS_COMMIT_MESSAGE
},
}]},
status=200,
content_type='application/json')
self.plugin.notify(self.entity, None)
message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/535878234' >Check progress of the update process.</a>"
self.flash_success_mock.assert_called_with(
message, allow_html=True)
@patch(
'ckanext.satreasury.plugin.travis.build_trigger_enabled',
return_value=True)
def test_notify_build_triggered(self, build_trigger_enabled_mock):
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
json={
'builds': []},
status=200,
content_type='application/json')
rsps.add(
responses.POST,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
json={
'request': {
'id': 12345}},
status=200,
content_type='application/json')
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/request/12345",
json={
'builds': [
{
'commit': {
'message': TRAVIS_COMMIT_MESSAGE},
'id': 535878234,
}]},
status=200,
content_type='application/json')
self.plugin.notify(self.entity, None)
message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/535878234' >Check progress of the update process.</a>"
self.flash_success_mock.assert_called_with(
message, allow_html=True)
@patch(
'ckanext.satreasury.plugin.travis.build_trigger_enabled',
return_value=True)
def test_notify_build_request_but_no_build(self, build_trigger_enabled_mock):
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
json={
'builds': []},
status=200,
content_type='application/json')
rsps.add(
responses.POST,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
json={
'request': {
'id': 12345}},
status=200,
content_type='application/json')
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/request/12345",
json={
'builds': []},
status=200,
content_type='application/json')
self.plugin.notify(self.entity, None)
message = "vulekamali will be updated in less than an hour. <a href='https://travis-ci.org/vulekamali/static-budget-portal/builds/' >Check progress of the update process.</a>"
self.flash_success_mock.assert_called_with(
message, allow_html=True)
@patch(
'ckanext.satreasury.plugin.travis.build_trigger_enabled',
return_value=True)
def test_notify_build_trigger_errored(self, build_trigger_enabled_mock):
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/builds",
json={
'builds': []},
status=200,
content_type='application/json')
rsps.add(
responses.POST,
"https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests",
json={
'request': {
'id': 12345}},
status=500,
content_type='application/json')
self.plugin.notify(self.entity, None)
message = 'An error occurred when updating the static site data. Technical details: 500 Server Error: Internal Server Error for url: https://api.travis-ci.org/repo/vulekamali%2Fstatic-budget-portal/requests'
self.flash_error_mock.assert_called_with(message)
@patch(
'ckanext.satreasury.plugin.travis.build_trigger_enabled',
return_value=False)
def test_notify_build_not_enabled(self, build_trigger_enabled_mock):
self.plugin.notify(self.entity, None)
self.assertTrue(True)
| 43.129032 | 219 | 0.57457 | 6,236 | 0.932835 | 0 | 0 | 6,159 | 0.921316 | 0 | 0 | 2,228 | 0.333283 |
b57c5c0aef7e6b76df1071bc270b08f415d805ee | 2,011 | py | Python | src/kbpo/db_evaluation.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
]
| 4 | 2017-08-09T14:05:48.000Z | 2018-12-25T01:34:23.000Z | src/kbpo/db_evaluation.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
]
| 12 | 2017-01-19T23:18:18.000Z | 2018-12-23T18:57:54.000Z | src/kbpo/db_evaluation.py | arunchaganty/kbp-online | 9f8763d8f4bfb1fb8a01f1f4f506f56625dd38d8 | [
"MIT"
]
| 2 | 2017-08-08T09:48:20.000Z | 2018-07-09T09:12:43.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Routines to evaluate the system.
"""
import logging
from collections import Counter
from . import db
logger = logging.getLogger(__name__)
def get_exhaustive_samples(corpus_tag):
"""
Use the document_sample table to get which documents have been exhaustively sampled.
"""
rows = db.select("""
SELECT e.doc_id, e.subject_id, e.object_id, e.relation, e.weight
    FROM evaluation_relation e
         JOIN document_sample s ON (e.doc_id = s.doc_id)
         JOIN document_tag t ON (e.doc_id = t.doc_id AND t.tag = %(tag)s)
WHERE e.weight > 0.5 AND e.relation <> 'no_relation'
""", tag=corpus_tag)
return [((row.subject_id, row.relation, row.object_id), 1.0) for row in rows]
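# Output sketch: every element is ((subject_id, relation, object_id), 1.0),
# with ids and relation strings taken directly from the evaluation_relation rows.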
def get_submission_samples(corpus_tag, scheme, submission_id):
rows = db.select("""
SELECT r.doc_id, r.subject_id, r.object_id, r.relation AS predicted_relation, e.relation AS gold_relation, b.params
FROM submission_relation r,
submission s,
evaluation_relation e,
evaluation_batch b
WHERE e.question_batch_id = b.id
AND r.doc_id = e.doc_id AND r.subject_id = e.subject_id AND r.object_id = e.object_id
AND r.submission_id = s.id
AND b.corpus_tag = %(tag)s
AND b.batch_type = 'selective_relations'
AND b.params ~ %(scheme)s
AND b.params ~ %(submission_f)s
AND r.submission_id = %(submission_id)s
""", tag=corpus_tag,
scheme='"method":"{}"'.format(scheme),
submission_id=submission_id,
submission_f='"submission_id":{}'.format(submission_id)
)
# TODO: ^^ is a hack to get the right rows from the database. we
# should probably do differently.
return [((row.subject_id, row.predicted_relation, row.object_id), 1.0 if row.predicted_relation == row.gold_relation else 0.0) for row in rows]
| 40.22 | 147 | 0.635007 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,302 | 0.647439 |
b57f76841f0c85c583ef9797290a21bbf823a12e | 2,212 | py | Python | model_metadata/utils.py | csdms/model_metadata | 62acab7ae2a152bec64bc1f52751f7a8aa1d4184 | [
"MIT"
]
| 1 | 2021-05-25T14:38:10.000Z | 2021-05-25T14:38:10.000Z | model_metadata/utils.py | csdms/model_metadata | 62acab7ae2a152bec64bc1f52751f7a8aa1d4184 | [
"MIT"
]
| 3 | 2018-04-05T21:50:24.000Z | 2021-04-02T03:54:04.000Z | model_metadata/utils.py | csdms/model_metadata | 62acab7ae2a152bec64bc1f52751f7a8aa1d4184 | [
"MIT"
]
| null | null | null | #! /usr/bin/env python
import os
import sys
from .api import install as install_mmd
def model_data_dir(name, datarootdir=None):
"""Get a model's data dir.
Parameters
----------
name : str
The name of the model.
Returns
-------
str
The absolute path to the data directory for the model.
"""
datarootdir = datarootdir or os.path.join(sys.prefix, "share")
return os.path.join(datarootdir, "csdms", name)
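# Example (default datarootdir): model_data_dir("child") resolves to
# os.path.join(sys.prefix, "share", "csdms", "child").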
def get_cmdclass(paths, cmdclass=None):
cmdclass = {} if cmdclass is None else cmdclass.copy()
if "setuptools" in sys.modules:
from setuptools.command.develop import develop as _develop
from setuptools.command.install import install as _install
else:
        # distutils has no ``develop`` command (it exists only in setuptools),
        # so fall back to setuptools for develop in either case.
        from setuptools.command.develop import develop as _develop
        from distutils.command.install import install as _install
sharedir = os.path.join(sys.prefix, "share")
class install(_install):
def run(self):
_install.run(self)
for name, path in paths:
name = name.split(":")[-1]
install_mmd(
os.path.abspath(path),
os.path.join(sharedir, "csdms", name),
silent=False,
clobber=True,
develop=False,
)
class develop(_develop):
def run(self):
_develop.run(self)
for name, path in paths:
name = name.split(":")[-1]
install_mmd(
os.path.abspath(path),
os.path.join(sharedir, "csdms", name),
silent=False,
clobber=True,
develop=True,
)
cmdclass["install"] = install
cmdclass["develop"] = develop
return cmdclass
def get_entry_points(components, entry_points=None):
entry_points = {} if entry_points is None else entry_points
pymt_plugins = entry_points.get("pymt.plugins", [])
for entry_point, _ in components:
pymt_plugins.append(entry_point)
if len(pymt_plugins) > 0:
entry_points["pymt.plugins"] = pymt_plugins
return entry_points
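# Sketch (component names are illustrative): with
# components = [("Child = mymodel:Child", None)] this returns
# {"pymt.plugins": ["Child = mymodel:Child"]}; the second tuple element is
# ignored here.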
| 26.97561 | 66 | 0.574593 | 819 | 0.370253 | 0 | 0 | 0 | 0 | 0 | 0 | 328 | 0.148282 |
b582e5842d21e445f1825c2debc8042c425aedda | 8,060 | py | Python | solution/serverlist.py | ksh0165/lhms | 8848a74ac5c0f309e3ab28583af4bd574575ab8a | [
"Apache-2.0"
]
| null | null | null | solution/serverlist.py | ksh0165/lhms | 8848a74ac5c0f309e3ab28583af4bd574575ab8a | [
"Apache-2.0"
]
| null | null | null | solution/serverlist.py | ksh0165/lhms | 8848a74ac5c0f309e3ab28583af4bd574575ab8a | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/python3
import os
import subprocess
import re
import pymysql
from datetime import datetime
strPath = r"/etc/webmin/servers";# file dir
files = os.listdir(strPath)
lists = [];# file lists
host = [];
user = [];
pwd = [];
val = 0;# extractServer use
test = "";# grep host
test1 = "";# grep user
test2 = "";# grep pass
test3 = "";# Text = remove
test5 = "";# Text /n remove
test7 = "";# Text1 ' remove
test9 = "";# Text1 /n remove
#retry = "";# fail use filename show : no use
cnt1 = 0;# line count of the current file (as string)
filelenlist = [];# line counts for all files
filelentotallist = ""; # current file's line count, reset each iteration
finallist = []; # final list after removing files with fewer than 11 rows
lenlist = [];
fcnt = [];# list of files with fewer than 11 lines
frows = 0;# number of files with fewer than 11 lines
hs = "";# host
us = "";# user
ps = "";# pass
rows = 0;# file wc -l
row = 0;# file wc -l
count = 0;# total file count for 11 less count
servers = "";
#total = [];# value total : no use
##########################################################################################
# FUNCTION
##########################################################################################
def extractServer(server):
val = server.index('.')
result = server[:val]
return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')
def extractText1(text1):
#result = re.findall(r'^=[0-9]+(?:\.[0-9]+)', text)
result = re.findall(r'\d+',str(text1))
return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')
#def extractFile(file):
# result = re.search(r'.*[.].*$', file)
# return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')
def extractIp(ip):
result = re.findall(r'[0-9]+(?:\.[0-9]+){3}', ip)
return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')
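# Example: extractIp("host=192.168.0.10\n") -> "192.168.0.10"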
#regex1 = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')
def extractText(text):
#result = re.findall(r'^=[0-9]+(?:\.[0-9]+)', text)
test3 = text.index('=')
test5 = text.index('\n')
result = text[test3+1:test5]
return str(result).replace('[]','').replace('[', '').replace(']', '').replace("'",'')
print("files = %s" % files)
servs = [file for file in files if file.endswith(".serv")]
cnt = 0;
now1 = datetime.now()
now = now1.strftime("%Y")+now1.strftime("%m")+now1.strftime("%d")
print("now = %s" %now);
print("servs = %s" % servs);
print("servs len = %s" % len(servs));
db = pymysql.connect(host='172.20.0.3', port=3306, user='root', passwd='', db='hms',charset='utf8',autocommit=True)
cursor = db.cursor()
##########################################################################################
# SERVER LIST PASING & MARIADB INSERT
##########################################################################################
for serve in servs:
print("===================================================");
print("start row 11 less count check servs = %s : " % servs);
print("start row 11 less count check serve = %s : " % serve);
print("===================================================");
print("now count = %s :" % count);
lenlist.append(serve)
print("all lenlist count = %s :" % lenlist);
cnt2 = subprocess.check_output('cat /etc/webmin/servers/%s | wc -l' % lenlist[count],shell=True)
cnt1 = extractText1(cnt2)
filelenlist.append(cnt1)
print("now filelenlist = %s :" % filelenlist[count]);
#print("filelenlist.split() = %s : " % " ".join(filelenlist[count]));
#for y in range(filelenlist):
##filelenlist[count]
##for fll in filelenlist:
print("filelenlist[%d] = %s :" % (count, filelenlist));
## print("len(filelenlist) = %s :" % len(filelenlist));
#print("now fll = %s :" % fll);
#fl = fll.split(",")
filelentotallist = filelenlist[count]
print("now filelentotallist = %s :" % filelentotallist);
if filelentotallist == '11':
if count < len(servs):
#count = count + 1;
print("11 length ! pass ~~");
else:
fcnt.append(serve)
print(" no 11 length find ~~~ add value in fcnt + 1 = %s :" % count);
if count < len(servs):
#count = count + 1;
filelentotallist = "";
count = count + 1;
print("===================================================");
print("end row count = %s :" % count);
print("fcnt = %s :" % fcnt);
print("===================================================");
frows = len(fcnt)
print("frows = %s:" % frows);
##########################################################################################
# frows : files with fewer than 11 rows -> collect their filenames in a new list and remove them
##########################################################################################
for removes in fcnt:
servs.remove(removes)
print(" alter remove less 11 rows servs = %s :" % servs);
try:
with cursor:
sql_d = "DELETE FROM tests"
cursor.execute(sql_d)
db.commit()
for serv in servs:
lists.append(serv)
print("-----------------------------------------------------");
print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
rows = subprocess.check_output('cat /etc/webmin/servers/%s | wc -l' % lists[cnt],shell=True)
row = extractText1(rows)
print("-----------------------------------------------------");
print("row = %s cnt = %d : " % (row, cnt));
print("-----------------------------------------------------");
servers = extractServer(serv)
#total.append(servers)
print("fname = %s" % servers);
if row == "11":
test = subprocess.check_output('cat /etc/webmin/servers/%s | grep host' % lists[cnt],shell=True)
test1 = subprocess.check_output('cat /etc/webmin/servers/%s | grep user' % lists[cnt],shell=True)
test2 = subprocess.check_output('cat /etc/webmin/servers/%s | grep pass' % lists[cnt],shell=True)
hs = extractIp(test.decode('utf-8'))
host.append(hs)
print("host =%s" % host[cnt]);
print("host[%d] =%s" % (cnt,host[cnt]));
#total.append(hs)
us = extractText(test1.decode('utf-8'))
user.append(us)
print("user =%s" % user[cnt]);
print("user[%d] =%s" % (cnt,user[cnt]));
#total.append(us)
ps = extractText(test2.decode('utf-8'))
pwd.append(ps)
print("pwd =%s" %pwd[cnt]);
print("pwd[%d] =%s" % (cnt,pwd[cnt]));
#total.append(ps)
#cursor.execute("INSERT INTO tests(fname,host,user,pwd,inputdt) VALUES (%s,%s,%s,%s,%s)" % (servers,host[cnt],user[cnt],pwd[cnt],now))
sql = "INSERT INTO `tests` (`fname`,`host`,`user`,`pwd`,`inputdt`) VALUES (%s,%s,%s,%s,%s)"
#for i in servs:
cursor.execute(sql, (servers,host[cnt],user[cnt],pwd[cnt],now))
data = cursor.fetchall()
db.commit()
if cnt < len(servs):
cnt = cnt+1;
else:
#print("cnt = %d:" % cnt);
#retry = servs[cnt]
#print("retry = %s : " % retry);
#if cnt < len(servs)-1:
# cnt = cnt;
# print("cnt = %d , cnt < len(servs):" % cnt);
# print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
# continue
pass
#else:
# cnt = cnt;
# print("cnt = %d , cnt = len(servs): " % cnt);
# print("lists[cnt] = %s cnt = %d : " % (lists[cnt], cnt));
# continue
# pass
finally:
db.close()
print("servs = %s" % servs)
print("The currnt directory is: %s" % strPath)
| 42.198953 | 150 | 0.458561 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,909 | 0.484988 |
b58403121af69cb7645522d11585b8ed10c27038 | 579 | py | Python | algorithms/tree_level_width.py | danielhgasparin/algorithms-python | 4b27c3cddd22762599fe55d3b760f388733c4fa7 | [
"MIT"
]
| null | null | null | algorithms/tree_level_width.py | danielhgasparin/algorithms-python | 4b27c3cddd22762599fe55d3b760f388733c4fa7 | [
"MIT"
]
| null | null | null | algorithms/tree_level_width.py | danielhgasparin/algorithms-python | 4b27c3cddd22762599fe55d3b760f388733c4fa7 | [
"MIT"
]
| null | null | null | """Tree level width module."""
from collections import deque
def tree_level_width(tree):
"""Return a list containing the width of each level of the specified tree."""
result = []
count = 0
queue = deque([tree.root, "s"])
while len(queue) > 0:
node = queue.popleft()
if node == "s":
            if count == 0:
break
else:
result.append(count)
count = 0
queue.append("s")
else:
count += 1
queue.extend(node.children)
return result | 26.318182 | 81 | 0.504318 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 116 | 0.200345 |
b5843a61298c91586c4a21bf7567496274b5cfe9 | 592 | py | Python | tests/utils/test_bbox.py | Swall0w/clib | 46f659783d5a0a6ec5994c3c707c1cc8a7934385 | [
"MIT"
]
| 1 | 2017-08-27T00:01:27.000Z | 2017-08-27T00:01:27.000Z | tests/utils/test_bbox.py | Swall0w/clib | 46f659783d5a0a6ec5994c3c707c1cc8a7934385 | [
"MIT"
]
| 49 | 2017-08-20T02:09:26.000Z | 2017-12-31T11:58:27.000Z | tests/utils/test_bbox.py | Swall0w/clib | 46f659783d5a0a6ec5994c3c707c1cc8a7934385 | [
"MIT"
]
| 1 | 2017-12-08T08:31:38.000Z | 2017-12-08T08:31:38.000Z | import os
import unittest
import clib
from clib.utils import Box
class BboxTest(unittest.TestCase):
def setUp(self):
self.bbox = Box(50, 50, 40, 60)
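        # Inferred from the assertions below: Box(50, 50, 40, 60) is
        # (center_x, center_y, width, height), giving corners (30, 20)
        # and (70, 80).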
def test_vi_bbox(self):
self.assertEqual(self.bbox.int_left_top(), (30, 20))
self.assertEqual(self.bbox.int_right_bottom(), (70, 80))
self.assertEqual(self.bbox.left_top(), [30.0, 20.0])
self.assertEqual(self.bbox.right_bottom(), [70.0, 80.0])
self.bbox.crop_region(5, 5)
self.assertEqual(self.bbox.right_bottom(), [5.0, 5.0])
if __name__ == '__main__':
unittest.main()
| 26.909091 | 64 | 0.646959 | 475 | 0.802365 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.016892 |
b586010d4e3392e2a65d444fb3bb7f33c20e15b9 | 67,167 | py | Python | iriusrisk-python-client-lib/iriusrisk_python_client_lib/api/users_api.py | iriusrisk/iriusrisk-python-client-lib | 4912706cd1e5c0bc555dbc7da02fb64cbeab3b18 | [
"Apache-2.0"
]
| null | null | null | iriusrisk-python-client-lib/iriusrisk_python_client_lib/api/users_api.py | iriusrisk/iriusrisk-python-client-lib | 4912706cd1e5c0bc555dbc7da02fb64cbeab3b18 | [
"Apache-2.0"
]
| null | null | null | iriusrisk-python-client-lib/iriusrisk_python_client_lib/api/users_api.py | iriusrisk/iriusrisk-python-client-lib | 4912706cd1e5c0bc555dbc7da02fb64cbeab3b18 | [
"Apache-2.0"
]
| null | null | null | # coding: utf-8
"""
IriusRisk API
Products API # noqa: E501
OpenAPI spec version: 1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from iriusrisk_python_client_lib.api_client import ApiClient
class UsersApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def groups_group_users_delete(self, api_token, group, unassing_users_group_request_body, **kwargs): # noqa: E501
"""Unassign a list of users from a group # noqa: E501
Unassign a list of users from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign users from a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_delete(api_token, group, unassing_users_group_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param UnassingUsersGroupRequestBody unassing_users_group_request_body: JSON object that contains information to unassign users from group (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, **kwargs) # noqa: E501
else:
(data) = self.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, **kwargs) # noqa: E501
return data
def groups_group_users_delete_with_http_info(self, api_token, group, unassing_users_group_request_body, **kwargs): # noqa: E501
"""Unassign a list of users from a group # noqa: E501
Unassign a list of users from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign users from a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_delete_with_http_info(api_token, group, unassing_users_group_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param UnassingUsersGroupRequestBody unassing_users_group_request_body: JSON object that contains information to unassign users from group (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'group', 'unassing_users_group_request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method groups_group_users_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_delete`") # noqa: E501
# verify the required parameter 'group' is set
if ('group' not in params or
params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `groups_group_users_delete`") # noqa: E501
# verify the required parameter 'unassing_users_group_request_body' is set
if ('unassing_users_group_request_body' not in params or
params['unassing_users_group_request_body'] is None):
raise ValueError("Missing the required parameter `unassing_users_group_request_body` when calling `groups_group_users_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'unassing_users_group_request_body' in params:
body_params = params['unassing_users_group_request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/groups/{group}/users', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
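    # Usage sketch (token and group below are placeholder values):
    #
    #     api = UsersApi()
    #     # synchronous call
    #     users = api.groups_group_users_get("my-api-token", "developers")
    #     # asynchronous call: pass async_req=True and join the thread
    #     thread = api.groups_group_users_get("my-api-token", "developers",
    #                                         async_req=True)
    #     users = thread.get()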
def groups_group_users_get(self, api_token, group, **kwargs): # noqa: E501
"""List users from a group # noqa: E501
List users who belongs to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to list users of a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_get(api_token, group, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.groups_group_users_get_with_http_info(api_token, group, **kwargs) # noqa: E501
else:
(data) = self.groups_group_users_get_with_http_info(api_token, group, **kwargs) # noqa: E501
return data
def groups_group_users_get_with_http_info(self, api_token, group, **kwargs): # noqa: E501
"""List users from a group # noqa: E501
List users who belongs to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to list users of a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_get_with_http_info(api_token, group, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'group'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method groups_group_users_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_get`") # noqa: E501
# verify the required parameter 'group' is set
if ('group' not in params or
params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `groups_group_users_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/groups/{group}/users', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[User]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def groups_group_users_put(self, api_token, group, assign_user_group_request_body, **kwargs): # noqa: E501
"""Assigns users to a group # noqa: E501
Assigns users to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to assign users to a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_put(api_token, group, assign_user_group_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param AssignUserGroupRequestBody assign_user_group_request_body: JSON object that contains information to assign users to group (required)
:return: InlineResponse201
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, **kwargs) # noqa: E501
else:
(data) = self.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, **kwargs) # noqa: E501
return data
def groups_group_users_put_with_http_info(self, api_token, group, assign_user_group_request_body, **kwargs): # noqa: E501
"""Assigns users to a group # noqa: E501
Assigns users to a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to assign users to a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_put_with_http_info(api_token, group, assign_user_group_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param AssignUserGroupRequestBody assign_user_group_request_body: JSON object that contains information to assign users to group (required)
:return: InlineResponse201
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'group', 'assign_user_group_request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method groups_group_users_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_put`") # noqa: E501
# verify the required parameter 'group' is set
if ('group' not in params or
params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `groups_group_users_put`") # noqa: E501
# verify the required parameter 'assign_user_group_request_body' is set
if ('assign_user_group_request_body' not in params or
params['assign_user_group_request_body'] is None):
raise ValueError("Missing the required parameter `assign_user_group_request_body` when calling `groups_group_users_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'assign_user_group_request_body' in params:
body_params = params['assign_user_group_request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/groups/{group}/users', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse201', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def groups_group_users_user_delete(self, api_token, group, user, **kwargs): # noqa: E501
"""Removes a user from a group # noqa: E501
Unassign a user from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign user from a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_user_delete(api_token, group, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param str user: user to be removed from group (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.groups_group_users_user_delete_with_http_info(api_token, group, user, **kwargs) # noqa: E501
else:
(data) = self.groups_group_users_user_delete_with_http_info(api_token, group, user, **kwargs) # noqa: E501
return data
def groups_group_users_user_delete_with_http_info(self, api_token, group, user, **kwargs): # noqa: E501
"""Removes a user from a group # noqa: E501
Unassign a user from a group. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you will be able to unassign user from a group, **if you belong to this group**. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.groups_group_users_user_delete_with_http_info(api_token, group, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str group: name of the group (required)
:param str user: user to be removed from group (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'group', 'user'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method groups_group_users_user_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `groups_group_users_user_delete`") # noqa: E501
# verify the required parameter 'group' is set
if ('group' not in params or
params['group'] is None):
raise ValueError("Missing the required parameter `group` when calling `groups_group_users_user_delete`") # noqa: E501
# verify the required parameter 'user' is set
if ('user' not in params or
params['user'] is None):
raise ValueError("Missing the required parameter `user` when calling `groups_group_users_user_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'group' in params:
path_params['group'] = params['group'] # noqa: E501
if 'user' in params:
path_params['user'] = params['user'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/groups/{group}/users/{user}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def products_ref_users_delete(self, api_token, ref, unassign_users_product_request_body, **kwargs): # noqa: E501
"""Unassigns a list of users from a product. # noqa: E501
Unassign a list of users from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_delete(api_token, ref, unassign_users_product_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param UnassignUsersProductRequestBody unassign_users_product_request_body: JSON object that contains information to unassign users from product (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, **kwargs) # noqa: E501
else:
(data) = self.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, **kwargs) # noqa: E501
return data
def products_ref_users_delete_with_http_info(self, api_token, ref, unassign_users_product_request_body, **kwargs): # noqa: E501
"""Unassigns a list of users from a product. # noqa: E501
Unassign a list of users from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_delete_with_http_info(api_token, ref, unassign_users_product_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param UnassignUsersProductRequestBody unassign_users_product_request_body: JSON object that contains information to unassign users from product (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'ref', 'unassign_users_product_request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method products_ref_users_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_delete`") # noqa: E501
# verify the required parameter 'ref' is set
if ('ref' not in params or
params['ref'] is None):
raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_delete`") # noqa: E501
# verify the required parameter 'unassign_users_product_request_body' is set
if ('unassign_users_product_request_body' not in params or
params['unassign_users_product_request_body'] is None):
raise ValueError("Missing the required parameter `unassign_users_product_request_body` when calling `products_ref_users_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ref' in params:
path_params['ref'] = params['ref'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'unassign_users_product_request_body' in params:
body_params = params['unassign_users_product_request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/products/{ref}/users', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def products_ref_users_get(self, api_token, ref, **kwargs): # noqa: E501
"""List all users assigned to a product # noqa: E501
List all users assigned to a product. Conditions to be able to perform the action: - No permissions are required to perform this action. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_get(api_token, ref, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference to product (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.products_ref_users_get_with_http_info(api_token, ref, **kwargs) # noqa: E501
else:
(data) = self.products_ref_users_get_with_http_info(api_token, ref, **kwargs) # noqa: E501
return data
def products_ref_users_get_with_http_info(self, api_token, ref, **kwargs): # noqa: E501
"""List all users assigned to a product # noqa: E501
List all users assigned to a product. Conditions to be able to perform the action: - No permissions are required to perform this action. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_get_with_http_info(api_token, ref, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference to product (required)
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'ref'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method products_ref_users_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_get`") # noqa: E501
# verify the required parameter 'ref' is set
if ('ref' not in params or
params['ref'] is None):
raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ref' in params:
path_params['ref'] = params['ref'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/products/{ref}/users', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[str]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def products_ref_users_put(self, api_token, ref, assign_users_product_request_body, **kwargs): # noqa: E501
"""Assigns users to a product. # noqa: E501
Assigns users to a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_put(api_token, ref, assign_users_product_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param AssignUsersProductRequestBody assign_users_product_request_body: JSON data that contains the information to assign users to product (required)
:return: ProductShortUsers
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, **kwargs) # noqa: E501
else:
(data) = self.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, **kwargs) # noqa: E501
return data
def products_ref_users_put_with_http_info(self, api_token, ref, assign_users_product_request_body, **kwargs): # noqa: E501
"""Assigns users to a product. # noqa: E501
Assigns users to a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_put_with_http_info(api_token, ref, assign_users_product_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param AssignUsersProductRequestBody assign_users_product_request_body: JSON data that contains the information to assign users to product (required)
:return: ProductShortUsers
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'ref', 'assign_users_product_request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method products_ref_users_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_put`") # noqa: E501
# verify the required parameter 'ref' is set
if ('ref' not in params or
params['ref'] is None):
raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_put`") # noqa: E501
# verify the required parameter 'assign_users_product_request_body' is set
if ('assign_users_product_request_body' not in params or
params['assign_users_product_request_body'] is None):
raise ValueError("Missing the required parameter `assign_users_product_request_body` when calling `products_ref_users_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ref' in params:
path_params['ref'] = params['ref'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'assign_users_product_request_body' in params:
body_params = params['assign_users_product_request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/products/{ref}/users', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProductShortUsers', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def products_ref_users_user_delete(self, api_token, ref, user, **kwargs): # noqa: E501
"""Unassigns a user from a product # noqa: E501
Unassigns a user from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_user_delete(api_token, ref, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param str user: Username of the user who will be unassigned from the product (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.products_ref_users_user_delete_with_http_info(api_token, ref, user, **kwargs) # noqa: E501
else:
(data) = self.products_ref_users_user_delete_with_http_info(api_token, ref, user, **kwargs) # noqa: E501
return data
def products_ref_users_user_delete_with_http_info(self, api_token, ref, user, **kwargs): # noqa: E501
"""Unassigns a user from a product # noqa: E501
Unassigns a user from a product. Conditions to be able to perform the action: - To have the permission **PRODUCT_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.products_ref_users_user_delete_with_http_info(api_token, ref, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str ref: Reference for product (required)
:param str user: Username of the user who will be unassigned from the product (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'ref', 'user'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method products_ref_users_user_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `products_ref_users_user_delete`") # noqa: E501
# verify the required parameter 'ref' is set
if ('ref' not in params or
params['ref'] is None):
raise ValueError("Missing the required parameter `ref` when calling `products_ref_users_user_delete`") # noqa: E501
# verify the required parameter 'user' is set
if ('user' not in params or
params['user'] is None):
raise ValueError("Missing the required parameter `user` when calling `products_ref_users_user_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ref' in params:
path_params['ref'] = params['ref'] # noqa: E501
if 'user' in params:
path_params['user'] = params['user'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/products/{ref}/users/{user}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def users_get(self, api_token, **kwargs): # noqa: E501
"""List of all Users. # noqa: E501
Returns a list of all the users of the system. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_get(api_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.users_get_with_http_info(api_token, **kwargs) # noqa: E501
else:
(data) = self.users_get_with_http_info(api_token, **kwargs) # noqa: E501
return data
def users_get_with_http_info(self, api_token, **kwargs): # noqa: E501
"""List of all Users. # noqa: E501
Returns a list of all the users of the system. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_get_with_http_info(api_token, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:return: list[User]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method users_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `users_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/users', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[User]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def users_post(self, api_token, create_user_request_body, **kwargs): # noqa: E501
"""Creates a new user # noqa: E501
Creates a new user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_post(api_token, create_user_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
    :param CreateUserRequestBody create_user_request_body: JSON data that contains the information to create a new user (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.users_post_with_http_info(api_token, create_user_request_body, **kwargs) # noqa: E501
else:
(data) = self.users_post_with_http_info(api_token, create_user_request_body, **kwargs) # noqa: E501
return data
def users_post_with_http_info(self, api_token, create_user_request_body, **kwargs): # noqa: E501
"""Creates a new user # noqa: E501
Creates a new user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_post_with_http_info(api_token, create_user_request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
    :param CreateUserRequestBody create_user_request_body: JSON data that contains the information to create a new user (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'create_user_request_body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method users_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `users_post`") # noqa: E501
# verify the required parameter 'create_user_request_body' is set
if ('create_user_request_body' not in params or
params['create_user_request_body'] is None):
raise ValueError("Missing the required parameter `create_user_request_body` when calling `users_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'create_user_request_body' in params:
body_params = params['create_user_request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/users', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def users_username_delete(self, api_token, username, **kwargs): # noqa: E501
"""Deletes a user # noqa: E501
        Deletes a user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you can delete users who belong to one of your user groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_delete(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.users_username_delete_with_http_info(api_token, username, **kwargs) # noqa: E501
else:
(data) = self.users_username_delete_with_http_info(api_token, username, **kwargs) # noqa: E501
return data
def users_username_delete_with_http_info(self, api_token, username, **kwargs): # noqa: E501
"""Deletes a user # noqa: E501
        Deletes a user. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you can delete users who belong to one of your user groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_delete_with_http_info(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'username'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method users_username_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `users_username_delete`") # noqa: E501
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `users_username_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/users/{username}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def users_username_get(self, api_token, username, **kwargs): # noqa: E501
"""Get all the information of a user # noqa: E501
Get all the relevant information of a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_get(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: UserDetailed
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.users_username_get_with_http_info(api_token, username, **kwargs) # noqa: E501
else:
(data) = self.users_username_get_with_http_info(api_token, username, **kwargs) # noqa: E501
return data
def users_username_get_with_http_info(self, api_token, username, **kwargs): # noqa: E501
"""Get all the information of a user # noqa: E501
Get all the relevant information of a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_get_with_http_info(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: UserDetailed
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'username'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method users_username_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `users_username_get`") # noqa: E501
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `users_username_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/users/{username}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserDetailed', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def users_username_token_post(self, api_token, username, **kwargs): # noqa: E501
"""Generates a user API token # noqa: E501
        Generates a new user API token. If the user already has a generated API token, generates a new one. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you can generate an API token for users who belong to one of your user groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_token_post(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.users_username_token_post_with_http_info(api_token, username, **kwargs) # noqa: E501
else:
(data) = self.users_username_token_post_with_http_info(api_token, username, **kwargs) # noqa: E501
return data
def users_username_token_post_with_http_info(self, api_token, username, **kwargs): # noqa: E501
"""Generates a user API token # noqa: E501
        Generates a new user API token. If the user already has a generated API token, generates a new one. Conditions to be able to perform the action: - To have the permission **ALL_USERS_UPDATE** granted, or - To have the permission **MANAGE_USERS_BU** granted. With this permission you can generate an API token for users who belong to one of your user groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_username_token_post_with_http_info(api_token, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_token: Authentication token (required)
:param str username: User's username (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_token', 'username'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method users_username_token_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_token' is set
if ('api_token' not in params or
params['api_token'] is None):
raise ValueError("Missing the required parameter `api_token` when calling `users_username_token_post`") # noqa: E501
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `users_username_token_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in params:
path_params['username'] = params['username'] # noqa: E501
query_params = []
header_params = {}
if 'api_token' in params:
header_params['api-token'] = params['api_token'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/users/{username}/token', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
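    # ------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated client): every
    # method above shares the same sync/async calling convention. The
    # client and configuration class names below are assumptions and may
    # differ in the actual generated package.
    #
    #     api = UsersApi(ApiClient())                       # hypothetical setup
    #     users = api.users_get(api_token='...')            # synchronous call
    #     thread = api.users_get(api_token='...', async_req=True)
    #     users = thread.get()                              # asynchronous call
    # ------------------------------------------------------------------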
| 46.322069 | 390 | 0.634448 | 66,800 | 0.994536 | 0 | 0 | 0 | 0 | 0 | 0 | 37,773 | 0.562374 |
b5888f3a509cecfa1613e21a4254b8b55ec9d01b | 1,246 | py | Python | gcd/lop.py | bthate/genocide | 8de7a2cccee7315ae6cf5661738ba1335e30a5ba | ["DOC"] | null | null | null | gcd/lop.py | bthate/genocide | 8de7a2cccee7315ae6cf5661738ba1335e30a5ba | ["DOC"] | null | null | null | gcd/lop.py | bthate/genocide | 8de7a2cccee7315ae6cf5661738ba1335e30a5ba | ["DOC"] | null | null | null |
# This file is placed in the Public Domain.
import queue
import threading
from .dpt import Dispatcher
from .obj import Object
from .thr import launch
from .utl import get_exception
class Restart(Exception):
pass
class Stop(Exception):
pass
class Loop(Object):
def __init__(self):
super().__init__()
self.queue = queue.Queue()
self.speed = "normal"
self.stopped = threading.Event()
def do(self, e):
Dispatcher.dispatch(self, e)
def error(self, txt):
pass
def loop(self):
dorestart = False
self.stopped.clear()
        while not self.stopped.is_set():
e = self.queue.get()
try:
self.do(e)
except Restart:
dorestart = True
break
except Stop:
break
except Exception:
self.error(get_exception())
if dorestart:
self.restart()
def restart(self):
self.stop()
self.start()
def put(self, e):
self.queue.put_nowait(e)
def start(self):
launch(self.loop)
return self
def stop(self):
self.stopped.set()
self.queue.put(None)
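# ----------------------------------------------------------------------
# Hedged usage sketch (assumes the surrounding gcd package is importable;
# the event object is illustrative):
#
#     loop = Loop()
#     loop.start()        # runs Loop.loop in a background thread via launch()
#     loop.put(event)     # queued events are handled by Dispatcher.dispatch
#     loop.stop()         # unblocks queue.get() and ends the thread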
| 18.878788 | 43 | 0.544944 | 1,054 | 0.845907 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 0.040931 |
b58c5490649547fd191436f9730cc2a2c51f3b00 | 3,619 | py | Python | src/utils.py | Flantropy/TelegramChatAnalyzer | 88e879fa771361d47292721ff8adfd82a74e9b93 | ["MIT"] | null | null | null | src/utils.py | Flantropy/TelegramChatAnalyzer | 88e879fa771361d47292721ff8adfd82a74e9b93 | ["MIT"] | null | null | null | src/utils.py | Flantropy/TelegramChatAnalyzer | 88e879fa771361d47292721ff8adfd82a74e9b93 | ["MIT"] | null | null | null |
import json
import logging
from io import BytesIO
from typing import List
from typing import Optional
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from telegram import InputMediaPhoto
def __convert_plot_to_telegram_photo(plot) -> InputMediaPhoto:
with BytesIO() as buffer:
plot.figure.savefig(buffer)
plot.clear()
photo = InputMediaPhoto(buffer.getvalue())
return photo
def _unpack_telegram_document(update) -> dict:
"""
This function retrieves JSON representation of a chat history
from given telegram.Update
"""
document = update.message.document.get_file()
chat_file = BytesIO(document.download_as_bytearray())
chat_json = json.load(chat_file)
return chat_json
def _form_data_frame_from_json(chat_json) -> Optional[pd.DataFrame]:
try:
messages_df = pd.DataFrame(
chat_json['messages'],
columns=['id', 'type', 'date', 'from', 'text', 'media_type'])
except KeyError as e:
logging.getLogger().error(
msg=f'Unable to form DataFrame from json. '
f'Key "messages" not found. {e}'
)
return
else:
messages_df.set_index('id', inplace=True)
messages_df['date'] = pd.to_datetime(messages_df['date'])
return messages_df
def _make_barplot(messages_df: pd.DataFrame) -> InputMediaPhoto:
"""
:param messages_df: DataFrame with user messaging history
:return: telegram.InputMediaPhoto
"""
messages_per_month = messages_df['date'] \
.groupby(messages_df['date'].dt.to_period('M')) \
.agg('count')
plot = sns.barplot(
x=messages_per_month.index,
y=messages_per_month.values,
color=(0.44, 0.35, 0.95)
)
plt.xticks(rotation=45)
plt.title('All time history')
return __convert_plot_to_telegram_photo(plot)
def _make_kde_plot(messages_df: pd.DataFrame) -> InputMediaPhoto:
plot = sns.kdeplot(
x=messages_df['date'],
hue=messages_df['from'],
shade=True
)
plt.title('Activity by user')
plt.xticks(rotation=45)
plt.xlabel('')
return __convert_plot_to_telegram_photo(plot)
def _make_media_distribution_bar_plot(messages_df: pd.DataFrame) -> Optional[InputMediaPhoto]:
logging.getLogger().info('Enter media dist function')
media_dist_df = messages_df[['from', 'media_type']].value_counts()
if media_dist_df.empty:
return
media_dist_plot = media_dist_df.unstack().plot(
kind='bar',
stacked=True,
ylabel='Media messages',
xlabel='User'
)
plt.xticks(rotation=0)
plt.title('Distribution of media messages')
return __convert_plot_to_telegram_photo(media_dist_plot)
def _make_weekday_distribution_bar_plot(messages_df: pd.DataFrame) -> InputMediaPhoto:
dist_by_day_of_week = messages_df['from']\
.groupby(messages_df['date'].dt.weekday)\
.agg('value_counts')
plot = dist_by_day_of_week.unstack().plot(kind='bar')
plt.xlabel('')
plt.ylabel('Messages')
plt.xticks(
list(range(7)),
['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'],
rotation=0
)
return __convert_plot_to_telegram_photo(plot)
def make_plots(messages_df: pd.DataFrame) -> List[InputMediaPhoto]:
sns.set_theme(context='paper')
photo_list = [
_make_barplot(messages_df),
_make_media_distribution_bar_plot(messages_df),
_make_kde_plot(messages_df),
_make_weekday_distribution_bar_plot(messages_df),
]
return [p for p in photo_list if p is not None]
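# ----------------------------------------------------------------------
# Hedged usage sketch (assumes pandas, seaborn, matplotlib and
# python-telegram-bot are installed; the columns mirror those produced by
# _form_data_frame_from_json):
#
#     df = pd.DataFrame({
#         'date': pd.to_datetime(['2021-01-01', '2021-01-02']),
#         'from': ['alice', 'bob'],
#         'media_type': [None, 'photo'],
#     })
#     photos = make_plots(df)  # list of telegram.InputMediaPhoto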
| 30.411765 | 94 | 0.671733 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 619 | 0.170947 |
b58c5890c2ea7e046b469064a62ceb8bea1ea212 | 2,215 | py | Python | pyxrd/calculations/improve.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | ["BSD-2-Clause"] | 27 | 2018-06-15T15:28:18.000Z | 2022-03-10T12:23:50.000Z | pyxrd/calculations/improve.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | ["BSD-2-Clause"] | 22 | 2018-06-14T08:29:16.000Z | 2021-07-05T13:33:44.000Z | pyxrd/calculations/improve.py | PyXRD/pyxrd | 26bacdf64f3153fa74b8caa62e219b76d91a55c1 | ["BSD-2-Clause"] | 8 | 2019-04-13T13:03:51.000Z | 2021-06-19T09:29:11.000Z |
# coding=UTF-8
# ex:ts=4:sw=4:et=on
# Copyright (c) 2013, Mathijs Dumon
# All rights reserved.
# Complete license can be found in the LICENSE file.
from io import StringIO
from scipy.optimize import fmin_l_bfgs_b
from .exceptions import wrap_exceptions
def setup_project(projectf):
from pyxrd.file_parsers.json_parser import JSONParser
from pyxrd.project.models import Project
type(Project).object_pool.clear()
f = StringIO(projectf)
project = JSONParser.parse(f)
f.close()
return project
@wrap_exceptions
def run_refinement(projectf, mixture_index):
"""
Runs a refinement setup for
- projectf: project data
- mixture_index: what mixture in the project to use
"""
if projectf is not None:
from pyxrd.data import settings
settings.initialize()
# Retrieve project and mixture:
project = setup_project(projectf)
del projectf
import gc
gc.collect()
mixture = project.mixtures[mixture_index]
mixture.refinement.update_refinement_treestore()
refiner = mixture.refinement.get_refiner()
refiner.refine()
return list(refiner.history.best_solution), refiner.history.best_residual
@wrap_exceptions
def improve_solution(projectf, mixture_index, solution, residual, l_bfgs_b_kwargs={}):
if projectf is not None:
from pyxrd.data import settings
settings.initialize()
# Retrieve project and mixture:
project = setup_project(projectf)
del projectf
mixture = project.mixtures[mixture_index]
with mixture.data_changed.ignore():
# Setup context again:
mixture.update_refinement_treestore()
refiner = mixture.refinement.get_refiner()
# Refine solution
vals = fmin_l_bfgs_b(
refiner.get_residual,
solution,
approx_grad=True,
bounds=refiner.ranges,
**l_bfgs_b_kwargs
)
new_solution, new_residual = tuple(vals[0:2])
# Return result
return new_solution, new_residual
else:
return solution, residual
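# ----------------------------------------------------------------------
# Hedged, self-contained sketch of the fmin_l_bfgs_b call pattern used by
# improve_solution above, applied to a toy residual so it runs without a
# PyXRD project; the residual and bounds are made up for illustration.
if __name__ == '__main__':
    import numpy as np

    def toy_residual(x):
        return float((x[0] - 1.0) ** 2 + (x[1] + 2.0) ** 2)

    vals = fmin_l_bfgs_b(
        toy_residual,
        np.zeros(2),            # start solution
        approx_grad=True,       # let scipy estimate the gradient
        bounds=[(-5.0, 5.0), (-5.0, 5.0)],
    )
    new_solution, new_residual = tuple(vals[0:2])
    print(new_solution, new_residual)  # ~ [1, -2] and ~ 0.0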
| 27.012195 | 86 | 0.648307 | 0 | 0 | 0 | 0 | 1,687 | 0.761625 | 0 | 0 | 408 | 0.184199 |
b58ccbfff32cc054d600f5f7877ef4514f099933 | 931 | py | Python | enforceTH.py | Multivalence/enforceTypeHint | fb87fd48baa525044516ddbdf2160128e03fb7b7 | ["MIT"] | null | null | null | enforceTH.py | Multivalence/enforceTypeHint | fb87fd48baa525044516ddbdf2160128e03fb7b7 | ["MIT"] | null | null | null | enforceTH.py | Multivalence/enforceTypeHint | fb87fd48baa525044516ddbdf2160128e03fb7b7 | ["MIT"] | 1 | 2020-12-16T18:34:19.000Z | 2020-12-16T18:34:19.000Z |
import functools
def enforceType(func):
@functools.wraps(func)
def wrapper(*args):
wrapper.has_been_called = True
x = func.__annotations__
t = [x[i] for i in x if i != 'return']
if len(args) != len(t):
raise TypeError("Missing required positional arguments and/or annotations.")
for i in range(len(t)):
if not isinstance(args[i],t[i]):
raise ValueError(f"Invalid literal for {t[i]}: {args[i]}")
try:
ReturnValue = x['return']
except KeyError:
raise TypeError("Missing required return value annotation.")
        RV = func(*args)
        # treat a `-> None` annotation as NoneType so the isinstance check works
        ReturnValue = type(ReturnValue) if ReturnValue is None else ReturnValue
if not isinstance(RV, ReturnValue):
raise SyntaxWarning(f"Expected function to return {ReturnValue}. Got {type(RV)} instead.")
return RV
wrapper.has_been_called = False
return wrapper
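# ----------------------------------------------------------------------
# Hedged usage sketch of the decorator above; `add` is an illustrative
# function, not part of this module.
if __name__ == '__main__':
    @enforceType
    def add(a: int, b: int) -> int:
        return a + b

    print(add(1, 2))    # 3; add.has_been_called is now True
    # add(1, '2')       # would raise ValueError: '2' fails the int check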
| 23.275 | 94 | 0.664876 | 0 | 0 | 0 | 0 | 837 | 0.899033 | 0 | 0 | 227 | 0.243824 |
b591052db3d50aa3c4ca4b5f6cbba2c5ca1708a6 | 3,239 | py | Python | examples/DataRecording/runDataRecording.py | mumuwoyou/pytrader | 6b94e0c8ecbc3ef238cf31715acf8474b9d26b4a | ["MIT"] | 4 | 2019-03-14T05:30:59.000Z | 2021-11-21T20:05:22.000Z | examples/DataRecording/runDataRecording.py | mumuwoyou/pytrader | 6b94e0c8ecbc3ef238cf31715acf8474b9d26b4a | ["MIT"] | null | null | null | examples/DataRecording/runDataRecording.py | mumuwoyou/pytrader | 6b94e0c8ecbc3ef238cf31715acf8474b9d26b4a | ["MIT"] | 4 | 2019-02-14T14:30:46.000Z | 2021-01-05T09:46:19.000Z |
# encoding: UTF-8
from __future__ import print_function
import sys
try:
reload(sys) # Python 2
sys.setdefaultencoding('utf8')
except NameError:
pass # Python 3
import multiprocessing
from time import sleep
from datetime import datetime, time
from cyvn.trader.vtEvent import EVENT_LOG, EVENT_RECORDER_DAY,EVENT_ERROR
from cyvn.trader.eventEngine import EventEngine2, Event
from cyvn.trader.vtEngine import MainEngine, LogEngine
from cyvn.trader.gateway.CtpGateway import ctpGateway
from cyvn.trader.app import dataRecorder
#----------------------------------------------------------------------
def processErrorEvent(event):
"""
处理错误事件
错误信息在每次登陆后,会将当日所有已产生的均推送一遍,所以不适合写入日志
"""
error = event.dict_['data']
print(u'错误代码:%s,错误信息:%s' %(error.errorID, error.errorMsg))
#----------------------------------------------------------------------
def runChildProcess():
"""子进程运行函数"""
print('-'*20)
# 创建日志引擎
le = LogEngine()
le.setLogLevel(le.LEVEL_INFO)
le.addConsoleHandler()
le.info(u'启动行情记录运行子进程')
ee = EventEngine2()
le.info(u'事件引擎创建成功')
me = MainEngine(ee)
me.addGateway('CTP')
me.addApp(dataRecorder)
le.info(u'主引擎创建成功')
ee.register(EVENT_LOG, le.processLogEvent)
ee.register(EVENT_ERROR, processErrorEvent)
le.info(u'注册日志事件监听')
me.connect('CTP')
le.info(u'连接CTP接口')
has_recorder_day = False
while True:
sleep(1)
if has_recorder_day == False:
time_now = datetime.now()
if time_now.time().hour ==15 and time_now.time().minute > 5:
event1 = Event(type_=EVENT_RECORDER_DAY)
ee.put(event1)
has_recorder_day = True
#----------------------------------------------------------------------
def runParentProcess():
"""父进程运行函数"""
# 创建日志引擎
le = LogEngine()
le.setLogLevel(le.LEVEL_INFO)
le.addConsoleHandler()
le.info(u'启动行情记录守护父进程')
DAY_START = time(8, 57) # 日盘启动和停止时间
DAY_END = time(15, 18)
NIGHT_START = time(20, 57) # 夜盘启动和停止时间
NIGHT_END = time(2, 33)
p = None # 子进程句柄
while True:
currentTime = datetime.now().time()
recording = False
        # Determine which trading session we are currently in
if ((currentTime >= DAY_START and currentTime <= DAY_END) or
(currentTime >= NIGHT_START) or
(currentTime <= NIGHT_END)):
recording = True
        # Skip weekend periods: all of Saturday, the Friday night session and the Sunday day session
if ((datetime.today().weekday() == 6) or
(datetime.today().weekday() == 5 and currentTime > NIGHT_END) or
(datetime.today().weekday() == 0 and currentTime < DAY_START)):
recording = False
        # Start the child process during recording hours
        if recording and p is None:
            le.info(u'Starting child process')
            p = multiprocessing.Process(target=runChildProcess)
            p.start()
            le.info(u'Child process started')
        # Stop the child process outside recording hours
        if not recording and p is not None:
            le.info(u'Stopping child process')
            p.terminate()
            p.join()
            p = None
            le.info(u'Child process stopped')
sleep(5)
if __name__ == '__main__':
#runChildProcess()
runParentProcess()
| 26.120968 | 77 | 0.562211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,084 | 0.293052 |
b5913a9cb235f8d0707df68f9171ff05c9b0c7aa | 232 | py | Python | 2.4 lcm.py | rahul263-stack/PROJECT-Dump | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | ["MIT"] | 1 | 2020-04-06T04:41:56.000Z | 2020-04-06T04:41:56.000Z | 2.4 lcm.py | rahul263-stack/quarantine | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | ["MIT"] | null | null | null | 2.4 lcm.py | rahul263-stack/quarantine | d8b1cfe0da8cad9fe2f3bbd427334b979c7d2c09 | ["MIT"] | null | null | null |
# Python3
a, b = [int(i) for i in input().split()]
def euclid_gcd(a, b):
if b == 0:
return a
c = a%b
return euclid_gcd(b, c)
if a>b:
gcd = euclid_gcd(a, b)
else:
gcd = euclid_gcd(b, a)
print(a*b//gcd)
| 14.5 | 40 | 0.534483 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.038793 |
b59341fea7f0dca1f065a80700c550e7ea708375 | 711 | py | Python | environments/__init__.py | electronicarts/character-motion-vaes | d8ff62f3a9f7fd330ebd797df3e7f8f7ec7c3816 | ["BSD-3-Clause"] | 132 | 2021-03-12T18:45:19.000Z | 2022-03-28T16:54:33.000Z | environments/__init__.py | electronicarts/character-motion-vaes | d8ff62f3a9f7fd330ebd797df3e7f8f7ec7c3816 | ["BSD-3-Clause"] | 5 | 2021-03-14T08:41:32.000Z | 2022-01-29T21:58:22.000Z | environments/__init__.py | electronicarts/character-motion-vaes | d8ff62f3a9f7fd330ebd797df3e7f8f7ec7c3816 | ["BSD-3-Clause"] | 13 | 2021-03-16T13:06:20.000Z | 2021-12-29T11:57:38.000Z |
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
import gym
from gym.envs.registration import registry, make, spec
def register(id, *args, **kvargs):
if id in registry.env_specs:
return
else:
return gym.envs.registration.register(id, *args, **kvargs)
register(id="RandomWalkEnv-v0", entry_point="environments.mocap_envs:RandomWalkEnv")
register(id="TargetEnv-v0", entry_point="environments.mocap_envs:TargetEnv")
register(id="JoystickEnv-v0", entry_point="environments.mocap_envs:JoystickEnv")
register(id="PathFollowEnv-v0", entry_point="environments.mocap_envs:PathFollowEnv")
register(id="HumanMazeEnv-v0", entry_point="environments.mocap_envs:HumanMazeEnv")
| 33.857143 | 84 | 0.773558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 271 | 0.381153 |
b59346ae49540ddec35f1665aa5dcc8cc4da2da0 | 471 | py | Python | libya_elections/tests/test_schedule.py | SmartElect/SmartElect | d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8 | ["Apache-2.0"] | 23 | 2015-10-28T14:08:23.000Z | 2021-09-11T21:38:41.000Z | libya_elections/tests/test_schedule.py | SmartElect/SmartElect | d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8 | ["Apache-2.0"] | 4 | 2019-12-05T20:36:10.000Z | 2020-06-05T18:41:54.000Z | libya_elections/tests/test_schedule.py | SmartElect/SmartElect | d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8 | ["Apache-2.0"] | 11 | 2015-10-28T15:49:56.000Z | 2021-09-14T14:18:36.000Z |
import datetime
from django.test import TestCase
from libya_elections.utils import at_noon
class ScheduleTest(TestCase):
def test_at_noon(self):
# at_noon returns a datetime with the right values
dt = datetime.datetime(1970, 2, 3, 4, 5, 6, 7)
result = at_noon(dt)
self.assertEqual(12, result.hour)
self.assertEqual(0, result.minute)
self.assertEqual(0, result.second)
self.assertEqual(0, result.microsecond)
| 29.4375 | 58 | 0.685775 | 376 | 0.798301 | 0 | 0 | 0 | 0 | 0 | 0 | 50 | 0.106157 |
b5934a3e23d4a7debafa86c699f518b3070881a9 | 153 | py | Python | website/website/apps/statistics/urls.py | SimonGreenhill/Language5 | c59f502dda7be27fc338f0338cc3b03e63bad9c8 | ["MIT"] | 1 | 2020-08-17T05:56:16.000Z | 2020-08-17T05:56:16.000Z | website/website/apps/statistics/urls.py | SimonGreenhill/Language5 | c59f502dda7be27fc338f0338cc3b03e63bad9c8 | ["MIT"] | 5 | 2020-06-05T17:51:56.000Z | 2022-01-13T00:42:51.000Z | website/website/apps/statistics/urls.py | SimonGreenhill/Language5 | c59f502dda7be27fc338f0338cc3b03e63bad9c8 | ["MIT"] | 1 | 2015-02-23T22:54:00.000Z | 2015-02-23T22:54:00.000Z |
from django.conf.urls import url
from website.apps.statistics.views import statistics
urlpatterns = [
url(r'^$', statistics, name="statistics"),
]
| 19.125 | 52 | 0.732026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 17 | 0.111111 |
b593abbfc1101fb51b4b3e49fd3161d9712060e7 | 12,779 | py | Python | sitk_rtss_io.py | Auto-segmentation-in-Radiation-Oncology/Chapter-3 | 307330c848c7ddb650353484e18fa9bc7903f737 | ["BSD-3-Clause"] | 1 | 2020-06-28T01:57:46.000Z | 2020-06-28T01:57:46.000Z | sitk_rtss_io.py | Auto-segmentation-in-Radiation-Oncology/Chapter-12 | 307330c848c7ddb650353484e18fa9bc7903f737 | ["BSD-3-Clause"] | null | null | null | sitk_rtss_io.py | Auto-segmentation-in-Radiation-Oncology/Chapter-12 | 307330c848c7ddb650353484e18fa9bc7903f737 | ["BSD-3-Clause"] | 1 | 2021-11-15T21:47:17.000Z | 2021-11-15T21:47:17.000Z |
from skimage import measure
import pydicom
from pydicom.dataset import Dataset, FileDataset
from pydicom.sequence import Sequence
import os
import numpy as np
import SimpleITK as sITK
import time
import glob
import sitk_ct_io as imio
from skimage.draw import polygon
# for debugging
# import matplotlib.pyplot as plt
# import matplotlib.image as mpimg
def read_rtss_to_sitk(rtss_file, image_dir, return_names=True, return_image=False):
# modified code from xuefeng
# http://aapmchallenges.cloudapp.net/forums/3/2/
#
# The image directory is required to set the spacing on the label map
# read the rtss
contours, label_names = read_contours(pydicom.read_file(rtss_file))
# read the ct
dcms = []
for subdir, dirs, files in os.walk(image_dir):
dcms = glob.glob(os.path.join(subdir, "*.dcm"))
slices = [pydicom.read_file(dcm) for dcm in dcms]
slices.sort(key=lambda x: float(x.ImagePositionPatient[2]))
image = np.stack([s.pixel_array for s in slices], axis=-1)
# convert to mask
atlas_labels = get_mask(contours, slices, image)
atlas_image = imio.read_sitk_image_from_dicom(image_dir)
atlas_labels.SetOrigin(atlas_image.GetOrigin())
atlas_labels.SetSpacing(atlas_image.GetSpacing())
if not return_names:
return atlas_labels
elif not return_image:
return atlas_labels, label_names
else:
return atlas_labels, label_names, atlas_image
def write_rtss_from_sitk(labels, label_names, ct_directory, output_filename):
# labels is a sITK image volume with integer labels for the objects
    # assumes 0 for background and consecutive label numbers starting from 1
# corresponding to the label_names
# the ct_directory is required to correctly link the UIDs
# load ct to get slice UIDs, z-slices and anything else we might need
slice_info = {}
series_info = {}
z_values = []
first_slice = True
spacing = [0, 0]
origin = [0, 0]
with os.scandir(ct_directory) as it:
for entry in it:
if not entry.name.startswith('.') and entry.is_file():
slice_file = ct_directory + entry.name
dicom_info = pydicom.read_file(slice_file)
slice_info[str(float(dicom_info.SliceLocation))] = dicom_info.SOPInstanceUID
z_values.append(float(dicom_info.SliceLocation))
if first_slice:
# get generic information
series_info['SOPClassUID'] = dicom_info.SOPClassUID
series_info['FrameOfReferenceUID'] = dicom_info.FrameOfReferenceUID
series_info['StudyInstanceUID'] = dicom_info.StudyInstanceUID
series_info['SeriesInstanceUID'] = dicom_info.SeriesInstanceUID
series_info['PatientName'] = dicom_info.PatientName
series_info['PatientID'] = dicom_info.PatientID
series_info['PatientBirthDate'] = dicom_info.PatientBirthDate
series_info['PatientSex'] = dicom_info.PatientSex
spacing[0] = float(dicom_info.PixelSpacing[0])
spacing[1] = float(dicom_info.PixelSpacing[1])
origin[0] = float(dicom_info.ImagePositionPatient[0])
origin[1] = float(dicom_info.ImagePositionPatient[1])
# Assuming axial for now
first_slice = False
z_values = np.sort(z_values)
current_time = time.localtime()
modification_time = time.strftime("%H%M%S", current_time)
modification_time_long = modification_time + '.123456' # madeup
modification_date = time.strftime("%Y%m%d", current_time)
file_meta = Dataset()
file_meta.FileMetaInformationGroupLength = 192
file_meta.MediaStorageSOPClassUID = '1.2.840.10008.5.1.4.1.1.481.3'
file_meta.MediaStorageSOPInstanceUID = "1.2.826.0.1.3680043.2.1125." + modification_time + ".3" + modification_date
file_meta.ImplementationClassUID = "1.2.3.771212.061203.1"
file_meta.TransferSyntaxUID = '1.2.840.10008.1.2'
pydicom.dataset.validate_file_meta(file_meta, True)
ds = FileDataset(output_filename, {},
file_meta=file_meta, preamble=b"\0" * 128)
# Add the data elements
ds.PatientName = series_info['PatientName']
ds.PatientID = series_info['PatientID']
ds.PatientBirthDate = series_info['PatientBirthDate']
ds.PatientSex = series_info['PatientSex']
# Set the transfer syntax
ds.is_little_endian = True
ds.is_implicit_VR = True
# Set lots of tags
ds.ContentDate = modification_date
ds.SpecificCharacterSet = 'ISO_IR 100' # probably not true TODO Check
ds.InstanceCreationDate = modification_date
ds.InstanceCreationTime = modification_time_long
ds.StudyDate = modification_date
ds.SeriesDate = modification_date
ds.ContentTime = modification_time
ds.StudyTime = modification_time_long
ds.SeriesTime = modification_time_long
ds.AccessionNumber = ''
    ds.SOPClassUID = '1.2.840.10008.5.1.4.1.1.481.3'  # RT Structure Set Storage
ds.SOPInstanceUID = "1.2.826.0.1.3680043.2.1125." + modification_time + ".3" + modification_date
ds.Modality = "RTSTRUCT"
ds.Manufacturer = "Python software"
    ds.ManufacturerModelName = 'sitk_rtss_io.py'
    ds.ReferringPhysicianName = ''
ds.StudyDescription = ""
ds.SeriesDescription = "RTSS from SimpleITK data"
ds.StudyInstanceUID = series_info['StudyInstanceUID']
ds.SeriesInstanceUID = "1.2.826.0.1.3680043.2.1471." + modification_time + ".4" + modification_date
ds.StructureSetLabel = "RTSTRUCT"
ds.StructureSetName = ''
ds.StructureSetDate = modification_time
ds.StructureSetTime = modification_time
contour_sequence = Sequence()
for slice_z in z_values:
contour_data = Dataset()
contour_data.ReferencedSOPClassUID = series_info['SOPClassUID']
contour_data.ReferencedSOPInstanceUID = slice_info[str(slice_z)]
contour_sequence.append(contour_data)
referenced_series = Dataset()
referenced_series.SeriesInstanceUID = series_info['SeriesInstanceUID']
referenced_series.ContourImageSequence = contour_sequence
referenced_study = Dataset()
referenced_study.ReferencedSOPClassUID = '1.2.840.10008.3.1.2.3.2'
referenced_study.ReferencedSOPInstanceUID = series_info['StudyInstanceUID']
referenced_study.RTReferencedSeriesSequence = Sequence([referenced_series])
frame_of_ref_data = Dataset()
frame_of_ref_data.FrameOfReferenceUID = series_info['FrameOfReferenceUID']
frame_of_ref_data.RTReferencedStudySequence = Sequence([referenced_study])
ds.ReferencedFrameOfReferenceSequence = Sequence([frame_of_ref_data])
roi_sequence = Sequence()
roi_observations = Sequence()
for label_number in range(0, len(label_names)):
roi_data = Dataset()
roi_obs = Dataset()
roi_data.ROINumber = label_number + 1
roi_obs.ObservationNumber = label_number + 1
roi_obs.ReferencedROINumber = label_number + 1
roi_data.ReferencedFrameOfReferenceUID = series_info['FrameOfReferenceUID']
roi_data.ROIName = label_names[label_number]
roi_data.ROIObservationDescription = ''
roi_data.ROIGenerationAlgorithm = 'Atlas-based'
roi_data.ROIGenerationMethod = 'Python'
roi_obs.RTROIInterpretedType = ''
roi_obs.ROIInterpreter = ''
roi_sequence.append(roi_data)
roi_observations.append(roi_obs)
ds.StructureSetROISequence = roi_sequence
ds.RTROIObservationsSequence = roi_observations
# as if that wasn't bad enough, now we have to add the contours!
label_data = sITK.GetArrayFromImage(labels)
roi_contour_sequence = Sequence()
for label_number in range(0, len(label_names)):
roi_contour_data = Dataset()
roi_contour_data.ROIDisplayColor = '255\\0\\0'
roi_contour_data.ReferencedROINumber = label_number + 1
contour_sequence = Sequence()
# convert labels to polygons
contour_number = 0
        for slice_number in range(0, labels.GetSize()[2]):  # include the last slice (was off by one)
slice_data = label_data[slice_number, :, :]
slice_for_label = np.where(slice_data != label_number + 1, 0, slice_data)
if np.any(np.isin(slice_for_label, label_number + 1)):
contours = measure.find_contours(slice_for_label, (float(label_number + 1) / 2.0))
for contour in contours:
# Convert to real world and add z_position
# plt.imshow(slice_data)
# plt.plot(contour[:, 1], contour[:, 0], color='#ff0000')
contour_as_string = ''
is_first_point = True
for point in contour[:-1]:
real_contour = [point[1] * spacing[0] + origin[0], point[0] * spacing[1] + origin[1],
z_values[slice_number]]
if not is_first_point:
contour_as_string = contour_as_string + '\\'
else:
is_first_point = False
contour_as_string = contour_as_string + str(real_contour[0]) + '\\'
contour_as_string = contour_as_string + str(real_contour[1]) + '\\'
contour_as_string = contour_as_string + str(real_contour[2])
contour_number = contour_number + 1
contour_data = Dataset()
contour_data.ContourGeometricType = 'CLOSED_PLANAR'
contour_data.NumberOfContourPoints = str(len(contour))
contour_data.ContourNumber = str(contour_number)
image_data = Dataset()
image_data.ReferencedSOPClassUID = series_info['SOPClassUID']
image_data.ReferencedSOPInstanceUID = slice_info[str(z_values[slice_number])]
contour_data.ContourImageSequence = Sequence([image_data])
contour_data.ContourData = contour_as_string
contour_sequence.append(contour_data)
roi_contour_data.ContourSequence = contour_sequence
roi_contour_sequence.append(roi_contour_data)
ds.ROIContourSequence = roi_contour_sequence
ds.ApprovalStatus = 'UNAPPROVED'
ds.save_as(output_filename)
return
def read_contours(structure_file):
# code from xuefeng
# http://aapmchallenges.cloudapp.net/forums/3/2/
contours = []
contour_names = []
for i in range(len(structure_file.ROIContourSequence)):
contour = {'color': structure_file.ROIContourSequence[i].ROIDisplayColor,
'number': structure_file.ROIContourSequence[i].ReferencedROINumber,
'name': structure_file.StructureSetROISequence[i].ROIName}
assert contour['number'] == structure_file.StructureSetROISequence[i].ROINumber
contour['contours'] = [s.ContourData for s in structure_file.ROIContourSequence[i].ContourSequence]
contours.append(contour)
contour_names.append(contour['name'])
return contours, contour_names
def get_mask(contours, slices, image):
# code from xuefeng
# http://aapmchallenges.cloudapp.net/forums/3/2/
z = [s.ImagePositionPatient[2] for s in slices]
pos_r = slices[0].ImagePositionPatient[1]
spacing_r = slices[0].PixelSpacing[1]
pos_c = slices[0].ImagePositionPatient[0]
spacing_c = slices[0].PixelSpacing[0]
im_dims = image.shape
label = np.zeros([im_dims[2], im_dims[1], im_dims[0]], dtype=np.uint8)
z_index = 0
for con in contours:
num = int(con['number'])
for c in con['contours']:
nodes = np.array(c).reshape((-1, 3))
assert np.amax(np.abs(np.diff(nodes[:, 2]))) == 0
zNew = [round(elem, 1) for elem in z]
try:
z_index = z.index(nodes[0, 2])
except ValueError:
try:
z_index = zNew.index(round(nodes[0, 2], 1))
except ValueError:
print('Slice not found for ' + con['name'] + ' at z = ' + str(nodes[0, 2]))
r = (nodes[:, 1] - pos_r) / spacing_r
c = (nodes[:, 0] - pos_c) / spacing_c
rr, cc = polygon(r, c)
label[z_index, rr, cc] = num
return sITK.GetImageFromArray(label)
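# ----------------------------------------------------------------------
# Hedged usage sketch (the paths are placeholders; a DICOM CT series and a
# matching RTSTRUCT file are required on disk):
#
#     labels, names = read_rtss_to_sitk('rtss.dcm', 'ct_series/')
#     write_rtss_from_sitk(labels, names, 'ct_series/', 'rtss_copy.dcm')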
| 44.526132 | 120 | 0.642069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,982 | 0.155098 |
b59742af888cb2d88c4cbf6cba219ceb64599613 | 2,364 | py | Python | code/opt_algo/downhillsimplex_untested.py | nicolai-schwartze/Masterthesis | 7857af20c6b233901ab3cedc325bd64704111e16 | ["MIT"] | 1 | 2020-06-13T10:02:02.000Z | 2020-06-13T10:02:02.000Z | code/opt_algo/downhillsimplex_untested.py | nicolai-schwartze/Masterthesis | 7857af20c6b233901ab3cedc325bd64704111e16 | ["MIT"] | null | null | null | code/opt_algo/downhillsimplex_untested.py | nicolai-schwartze/Masterthesis | 7857af20c6b233901ab3cedc325bd64704111e16 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 20 14:03:18 2020
@author: Nicolai
"""
import sys
sys.path.append("../differential_evolution")
from JADE import JADE
import numpy as np
import scipy as sc
import scipy.optimize  # ensure sc.optimize is available; `import scipy` alone may not load it
import testFunctions as tf
def downhillsimplex(population, function, minError, maxFeval):
    '''
    Nelder-Mead downhill simplex wrapper around scipy.optimize.fmin:
    a random candidate from the population is used as the start solution
    and the whole budget of function evaluations is spent on the simplex.
    The return signature matches JADE, so FDynamic and CRDynamic stay empty.

    Parameters
    ----------
    population: numpy array
        2D numpy array where lines are candidates and columns is the dimension
    function: function
        fitness function that is optimised
    minError: float
        stopping condition on function value
    maxFeval: int
        stopping condition on max number of function evaluation

    Returns
    -------
    history: tuple
        tupel[0] - popDynamic
        tupel[1] - FEDynamic
        tupel[2] - FDynamic (always empty)
        tupel[3] - CRDynamic (always empty)

    Examples
    --------
    >>> import numpy as np
    >>> def sphere(x):
    ...     return np.dot(x, x)
    >>> minError = 10**-10
    >>> maxFeval = 10**3
    >>> population = 100*np.random.rand(50, 2)
    >>> (popDynamic, FEDynamic, FDynamic, CRDynamic) = \
    ...     downhillsimplex(population, sphere, minError, maxFeval)
    '''
psize, dim = population.shape
startSolution = population[np.random.randint(0, high=psize)]
_, _, _, _, _, allvecs = sc.optimize.fmin(function, startSolution, ftol=minError, \
maxfun=maxFeval, \
full_output = True, retall = True)
FDynamic = []
CRDynamic = []
popDynamic = []
FEDynamic = []
for x in allvecs:
popDynamic.append(np.array([x]))
FEDynamic.append(function(allvecs[-1]))
return (popDynamic, FEDynamic, FDynamic, CRDynamic)
if __name__ == "__main__":
import matplotlib.pyplot as plt
population = 100*np.random.rand(4,2)
minError = 10**-200
maxFeval = 10**3
(popDynamic, FEDynamic, FDynamic, CRDynamic) = downhillsimplex(population, \
tf.sphere, minError, maxFeval)
plt.semilogy(FEDynamic)
| 26.863636 | 87 | 0.593909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,243 | 0.525804 |
b5977a635ec36409ea956827d6f1116885b0d8e3 | 7,030 | py | Python | bot_instagram.py | Lucas-Okamura/Instagram-Bot | e5e515f55a351c20add2389723dbbef452dc371b | ["MIT"] | 1 | 2020-08-24T20:05:13.000Z | 2020-08-24T20:05:13.000Z | bot_instagram.py | Lucas-Okamura/Instagram-Bot | e5e515f55a351c20add2389723dbbef452dc371b | ["MIT"] | null | null | null | bot_instagram.py | Lucas-Okamura/Instagram-Bot | e5e515f55a351c20add2389723dbbef452dc371b | ["MIT"] | null | null | null |
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import random
import pandas as pd
import os
class InstagramBot:
def __init__(self, username, password, function, url, num_people=1):
'''
Bot that comment on photos on Instagram
Args:
username:string: username to an Instagram account
password:string: password to an Instagram account
function:string: 'comment' if only comment or 'get_comments' to get comments (scrapper)
url:string/list: unique url if 'comment', list of url if 'get_comments'
num_people(optional):int: number of people to tag, valid only if 'comment'
Attributes:
username:string: username given
password:string: password given
base_url:string: instagram website (https://www.instagram.com)
driver:selenium.webdriver.Chrome: driver that performs actions in the browser
'''
self.username = username
self.password = password
self.function = function
self.url = url
self.num_people = num_people
self.driver = webdriver.Chrome(executable_path = "INPUT CHROME DRIVER PATH HERE")
    def login(self):
        '''
        Logs into the Instagram account with the given username and password
        Args:
            None
        '''
        driver = self.driver
        driver.get("https://www.instagram.com")
        time.sleep(3)
        user_box = driver.find_element_by_xpath("//input[@name = 'username']")
        user_box.click()
        user_box.clear()
        user_box.send_keys(self.username)
        password_box = driver.find_element_by_xpath("//input[@name = 'password']")
        password_box.click()
        password_box.clear()
        password_box.send_keys(self.password)
        password_box.send_keys(Keys.RETURN)
        time.sleep(3)
        not_now_login = driver.find_element_by_xpath("//button[contains(text(), 'Agora não')]")  # If your language is not Portuguese, change "Agora não" to "Not Now" or similar
        not_now_login.click()
        time.sleep(1)
        self.run_bot()
    @staticmethod
    def human_type(phrase, input_comment):
        '''
        Type letter by letter, with random intervals of time in between
        Args:
            phrase:string: text that will be written by the function
            input_comment:selenium.webdriver.Chrome.find_element_by_xpath: path to the comment box
        '''
        for letter in phrase:
            input_comment.send_keys(letter)
            time.sleep(random.randint(1, 5)/30)
        input_comment.send_keys(" ")
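    # The random.randint(1, 5)/30 delay above pauses roughly 33-167 ms between
    # keystrokes, mimicking a human typing rhythm; the trailing space
    # separates successive @tags in the comment box.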
    def comment_on_post(self, num_people):
        '''
        Comment on the chosen URL post, tagging random entries from the people
        list, as many times as specified by "num_people"
        Args:
            num_people(optional):int: number of people to pick from the people list
        '''
        i = 0  # Counter
        driver = self.driver
        driver.get(self.url)
        time.sleep(3)
        people = [
            "@person1",
            "@person2",
            "@person3"
        ]
        while True:
            try:
                driver.find_element_by_class_name("Ypffh").click()
                commentary_box = driver.find_element_by_class_name("Ypffh")
                time.sleep(random.randint(1, 10)/40)
                cache = []
                for num in range(num_people):
                    person = random.choice(people)
                    if person not in cache:
                        cache.append(person)
                    else:
                        check = True
                        while check:
                            person = random.choice(people)
                            if person not in cache:
                                check = False
                                cache.append(person)
                    self.human_type(person, commentary_box)
                    time.sleep(random.randint(1, 4)/4)
                time.sleep(random.randint(1, 4))
                publish = driver.find_element_by_xpath("//button[contains(text(), 'Publicar')]")  # If your language is not Portuguese, change "Publicar" to "Publish" or similar
                publish.click()
                i += 1
                print("You have published", i, "comments")
                time.sleep(random.randint(45, 90))
                if i % 100 == 0:
                    time.sleep(60*5)  # longer pause every 100 comments to reduce the chance of rate limiting
            except Exception as e:
                print(e)
                time.sleep(5)
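    # The de-duplication loop in comment_on_post assumes num_people is at most
    # len(people); requesting more unique tags than the list holds would make
    # the inner while loop spin forever.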
    def scroll(self):
        """
        Scroll the screen to load all comments
        Args:
            None
        """
        try:
            driver = self.driver
            while True:
                # Click on the "plus" sign that loads more comments
                print("Loading more comments...")
                driver.find_element_by_xpath("//button[contains(@class, 'dCJp8')]").click()
                # Wait for the page to load
                time.sleep(1)
        except Exception:
            # No "load more" button left: all comments are visible
            pass
    def get_comments(self):
        """
        Get all the comments from Instagram URLs
        Args:
            None
        """
        try:
            # Getting all the comments from the posts
            all_comments = []
            for url in self.url:
                driver = self.driver
                driver.get(url)
                time.sleep(3)
                # Scroll to load all the comments
                self.scroll()
                comments = driver.find_elements_by_class_name('gElp9')
                for c in comments:
                    container = c.find_element_by_class_name('C4VMK')
                    content = container.find_elements_by_xpath('(.//span)')[1].text
                    content = content.replace('\n', ' ').strip().rstrip()
                    print(content)
                    all_comments.append(content)
                time.sleep(3)
            # Exporting comments to csv
            df = pd.DataFrame({"comments": all_comments})
            # Find an unused filename before exporting to csv
            i = 0
            exists = True
            while exists:
                filename = f'comments{i}.csv'
                filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename)
                if os.path.isfile(filepath):
                    i += 1
                else:
                    df.to_csv(filename, sep=';', index=False)
                    exists = False
        except Exception as e:
            print(e)
            time.sleep(5)
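    # 'gElp9' and 'C4VMK' above are obfuscated Instagram CSS class names; they
    # change whenever Instagram redeploys its frontend, so these selectors
    # must be re-inspected periodically in the browser's dev tools.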
    def run_bot(self):
        if self.function == 'comment':
            self.comment_on_post(self.num_people)
            print('done')  # unreachable in practice: comment_on_post loops forever
        elif self.function == 'get_comments':
            self.get_comments()
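
# Minimal usage sketch (assumption: valid credentials and a real post URL are
# substituted for the placeholders below, and the chromedriver path in
# __init__ is set):
if __name__ == "__main__":
    bot = InstagramBot("your_username", "your_password",
                       function="comment",
                       url="https://www.instagram.com/p/EXAMPLE/",
                       num_people=2)
    bot.login()  # login() hands off to run_bot() after authenticating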
| 32.546296 | 187 | 0.52845 | 6,895 | 0.980378 | 0 | 0 | 502 | 0.071378 | 0 | 0 | 2,451 | 0.3485 |