#!/usr/bin/python3
from __future__ import division
import random
import numpy as np
import argparse
from models.log_reg import LogRegModel
from models.nn_layer import NN_Model
from pdb import set_trace
def get_accuracy(X_test, Y_test, predict_func):
"""
Returns classification accuracy
"""
Y_predictions = predict_func(X_test)
correct = np.sum(Y_predictions == Y_test)
return (correct / Y_test.shape[1]) * 100
def split_data(X,Y):
"""
returns X_train,Y_train, X_test, Y_test
    Eventually we'll also create a cross-validation set to tune hyperparams
"""
    test_percentage = 0.2
    m = X.shape[1]
    test_indices = random.sample(range(m), int(test_percentage * m))
    train_indices = [x for x in range(m) if x not in test_indices]
X_train = X[:, train_indices]
Y_train = Y[:, train_indices]
X_test = X[:, test_indices]
Y_test = Y[:, test_indices]
return X_train, Y_train, X_test, Y_test
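# Note (descriptive comment): the data is column-major -- X has shape
# (num_features, m) and Y has shape (1, m), so examples are columns and
# split_data samples column indices.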
def model_logreg(X_train, Y_train, X_test, Y_test):
"""
    Returns:
    - accuracy on the training set
    - accuracy on the test set
"""
print("Modeling Logistic Regression")
num_features = X_train.shape[0]
learning_rate = 0.05
num_iterations = 1500
regularization_constant = 0
model = LogRegModel(num_features, learning_rate, num_iterations, regularization_constant)
cost_hist_train = model.train(X_train,Y_train)
accuracy_trainset = get_accuracy(X_train, Y_train, model.predict)
accuracy_testset = get_accuracy(X_test, Y_test, model.predict)
return accuracy_trainset, accuracy_testset
def model_nn(X_train, Y_train, X_test, Y_test, layer_dims):
print("Modeling Neural Network w/ layers: {}".format(layer_dims))
alpha = 0.05
num_iters = 1500
model = NN_Model(layer_dims, alpha, num_iters)
costs = model.train(X_train, Y_train)
acc_test = get_accuracy(X_test, Y_test, model.predict)
acc_train = get_accuracy(X_train, Y_train, model.predict)
return acc_train, acc_test
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Control which models to train on the spam data set")
parser.add_argument('model', help="Train a logistic regression model")
args = parser.parse_args()
model = args.model
    # model is either "l" (logistic regression) or "nn" (neural network)
    x_data = np.genfromtxt('data/practice/X_data.csv', delimiter=',')
    y_data = np.genfromtxt('data/practice/Y_data.csv', delimiter=',')
# Format data
X = x_data.T
Y = y_data.reshape((1,y_data.shape[0]))
X_train, Y_train, X_test, Y_test = split_data(X,Y)
if model == "l":
acc_train_log, acc_test_log = model_logreg(X_train, Y_train, X_test, Y_test)
print("Logistic Regression Accuracy on Train Set: {}".format(acc_train_log))
print("Logistic Regression Accuracy on Test Set: {}".format(acc_test_log))
elif model == "nn":
layer_dims = [X_train.shape[0],20,5,1]
acc_train_nn, acc_test_nn = model_nn(X_train, Y_train, X_test, Y_test, layer_dims)
print("NN Accuracy on Train Set: {}".format(acc_train_nn))
print("NN Accuracy on Test Set: {}".format(acc_test_nn))
else:
print("Unrecognized cmd argument, pass in either 'nn' or 'l'")
import psycopg2
import psycopg2.extras
from settings.const import *
from hashlib import md5
import logging
logger = logging.getLogger("importer")
def dictequal(dictnew, dictold, ignore_keys=[]):
for k in dictnew:
if k in ignore_keys:
continue
if k not in dictold:
print(k + str(dictold) + str(dictnew))
return False
elif str(dictold[k]) != str(dictnew[k]):
if k in ['latitude', 'longitude', 'rd_x', 'rd_y'] or (
k not in ['stoparearef'] and len(k) > 3 and k[-3:] == 'ref'):
try:
if dictold[k] is None or dictnew[k] is None or float(dictold[k]) != float(dictnew[k]):
return False
except:
logger.error('%s: %s==%s' % (k, dictold[k], dictnew[k]), exc_info=True)
raise
else:
print(k + str(dictold) + str(dictnew))
return False
return True
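# For example (hypothetical rows): numeric *ref columns are compared as
# floats, so dictequal({'lineref': '1.0'}, {'lineref': '1'}) is True, while
# any other changed column makes it False.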
def simple_dict_insert(conn, table, dict_item, check_existing=True, return_id=True):
cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
columns = dict_item.keys()
if table == 'STOPAREA':
ignore_keys = ['latitude', 'longitude']
else:
ignore_keys = []
if check_existing:
query = "SELECT * from %s WHERE operator_id = %%s" % table
cur.execute(query, [str(dict_item['operator_id'])])
for dictold in cur.fetchall():
if dictequal(dict_item, dictold, ignore_keys=ignore_keys):
dict_item['id'] = dictold['id']
return True, dictold['id']
if not return_id:
query = "INSERT INTO %s (%s) VALUES (%s)" % (
table, ','.join(columns), ','.join(['%s' for i in range(len(columns))]))
cur.execute(query, [dict_item[key] for key in columns])
cur.close()
return False
else:
query = "INSERT INTO %s (%s) VALUES (%s) returning id" % (
table, ','.join(columns), ','.join(['%s' for i in range(len(columns))]))
cur.execute(query, [dict_item[key] for key in columns])
id = cur.fetchone()['id']
dict_item['id'] = id
cur.close()
return (False, id)
def simple_dictdict_insert(conn, table, dictionary, return_id=True):
cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
if table == 'STOPAREA':
ignore_keys = ['latitude', 'longitude']
else:
ignore_keys = []
for dict_key, item in dictionary.items():
columns = item.keys()
query = "SELECT * from %s WHERE operator_id = %%s" % table
cur.execute(query, [str(item['operator_id'])])
record_exists = False
for dictold in cur.fetchall():
if dictequal(item, dictold, ignore_keys=ignore_keys):
record_exists = True
dictionary[dict_key] = dictold['id']
if record_exists:
continue
if return_id:
query = "INSERT INTO %s (%s) VALUES (%s) returning id" % (
table, ','.join(columns), ','.join(['%s' for i in range(len(columns))]))
cur.execute(query, [item[key] for key in columns])
id = cur.fetchone()['id']
item['id'] = id
dictionary[dict_key] = id
else:
query = "INSERT INTO %s (%s) VALUES (%s)" % (
table, ','.join(columns), ','.join(['%s' for i in range(len(columns))]))
cur.execute(query, [item[key] for key in columns])
cur.close()
def checkIfExistingVersion(conn, dictionary):
cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
for key, item in dictionary.items():
columns = item.keys()
query = "SELECT * from version WHERE operator_id = %s"
cur.execute(query, [item['operator_id']])
if len(cur.fetchall()) > 0:
raise Exception('Version already imported')
def import_availabilityconditions(conn, data):
for key, item in data['AVAILABILITYCONDITION'].items():
validdays = None
if 'DAYS' in item:
validdays = item['DAYS']
del (item['DAYS'])
item['versionref'] = data['VERSION'][item['versionref']]
exists, id = simple_dict_insert(conn, 'AVAILABILITYCONDITION', item, check_existing=False)
data['AVAILABILITYCONDITION'][key] = id
if exists or validdays is None:
continue
availabilityday = {'availabilityconditionRef': id, 'isavailable': True}
for day in validdays['validdates']:
availabilityday['validdate'] = day
simple_dict_insert(conn, 'AVAILABILITYCONDITIONDAY', availabilityday, check_existing=False)
def import_routes(conn, routes):
for key, item in routes.items():
points = item['POINTS']
del (item['POINTS'])
exists, id = simple_dict_insert(conn, 'ROUTE', item)
routes[key] = id
if exists:
continue
for point in points:
point['routeref'] = id
exists = simple_dict_insert(conn, 'POINTINROUTE', point, check_existing=False, return_id=False)
def import_timedemandgroups(conn, timedemandgroups):
for key, item in timedemandgroups.items():
points = item['POINTS']
del (item['POINTS'])
exists, id = simple_dict_insert(conn, 'TIMEDEMANDGROUP', item)
timedemandgroups[key] = id
if exists:
continue
for point in points:
point['timedemandgroupref'] = id
exists = simple_dict_insert(conn, 'POINTINTIMEDEMANDGROUP', point, check_existing=False, return_id=False)
def setRefsDict(item, reftable, columnname, ignore_null=False):
if columnname in item:
if ignore_null and item[columnname] is None:
return
item[columnname] = reftable[item[columnname]]
def setRefs(table, reftable, columnname, ignore_null=False):
for key, item in table.items():
setRefsDict(item, reftable, columnname, ignore_null=ignore_null)
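# e.g. (hypothetical values): once the DATASOURCE rows are inserted and
# data['DATASOURCE'] == {'DS:1': 42}, calling
# setRefs(data['VERSION'], data['DATASOURCE'], 'datasourceref') rewrites each
# version's 'datasourceref' from the operator code 'DS:1' to the database id 42.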
def import_journeypatterns(conn, data):
setRefs(data['JOURNEYPATTERN'], data['ROUTE'], 'routeref')
setRefs(data['JOURNEYPATTERN'], data['DESTINATIONDISPLAY'], 'destinationdisplayref')
for code, pattern in data['JOURNEYPATTERN'].items():
points = pattern['POINTS']
for point in pattern['POINTS']:
setRefsDict(point, data['DESTINATIONDISPLAY'], 'destinationdisplayref', ignore_null=True)
setRefsDict(point, data['STOPPOINT'], 'pointref')
setRefsDict(point, data['STOPPOINT'], 'onwardpointref', ignore_null=True)
if 'ADMINISTRATIVEZONE' in data:
setRefsDict(point, data['ADMINISTRATIVEZONE'], 'administrativezoneref', ignore_null=True)
setRefsDict(point, data['NOTICEASSIGNMENT'], 'noticeassignmentref', ignore_null=True)
m = md5()
m.update(str(pattern).encode('utf-8'))
pattern['privatecode'] = m.hexdigest()
del (pattern['POINTS'])
exists, id = simple_dict_insert(conn, 'JOURNEYPATTERN', pattern)
data['JOURNEYPATTERN'][code] = id
if exists:
continue
for point in points:
point['journeypatternref'] = id
exists = simple_dict_insert(conn, 'POINTINJOURNEYPATTERN', point, check_existing=False, return_id=False)
"""
{'haswifi': None, 'departuretime': None, 'operator_id': '5:EBS:w103: 3906:Sunday:22103:7001', 'operatorref': 'EBS', 'name': 7001, 'ondemand':
None, 'lowfloor': True, 'blockref': None, 'departuretime': 32640, 'journeypatternref': 'EBS:22103: 58', 'noticeassignmentref': None,
'productcategoryref': None, 'biycleallowed': None, 'hasliftorramp': True, 'availabilityconditionref': 'EBS:w103: 3906:Sunday',
'timedemandgroupref': 'aad863b6220e60df872628cab1916c07', 'privatecode': 'EBS:22103:7001'}
"""
def deleteJourney(conn, id):
cur = conn.cursor()
cur.execute("DELETE FROM journeytransfers WHERE journeyref = %s", [id])
cur.execute("DELETE FROM journeytransfers WHERE onwardjourneyref = %s", [id])
cur.execute("DELETE FROM journey WHERE id = %s", [id])
cur.close()
def getConnection():
return psycopg2.connect(database_connect)
def import_journeys(conn, data, recycle_journeyids=None):
if recycle_journeyids is not None:
recycle_journeyids(conn, data)
for key, journey in data['JOURNEY'].items():
try:
setRefsDict(journey, data['AVAILABILITYCONDITION'], 'availabilityconditionref')
setRefsDict(journey, data['JOURNEYPATTERN'], 'journeypatternref')
setRefsDict(journey, data['TIMEDEMANDGROUP'], 'timedemandgroupref')
setRefsDict(journey, data['NOTICEASSIGNMENT'], 'noticeassignmentref', ignore_null=True)
setRefsDict(journey, data['PRODUCTCATEGORY'], 'productcategoryref')
if 'id' in journey:
deleteJourney(conn, journey['id'])
exists, id = simple_dict_insert(conn, 'JOURNEY', journey, check_existing=False, return_id=True)
data['JOURNEY'][key] = id
if exists:
raise Exception('duplicate journey')
except KeyError:
continue
"""
Merge strategies:
{'type' : 'DATASOURCE', datasourceref : '1'}
Datasource, replace entire datasource
{'type' : 'DATASOURCE', datasourceref : '1', fromdate : '2013-01-01'}
Datasource on x, replace entire datasoure starting on data x
{'type' : 'DATASOURCE', datasourceref : '1', fromdate : '2013-01-01', todate : '2014-01-12'}
Datasource between x and y, replace entire datasource between x and y
{'type' : 'UNITCODE', unitcode : 'ARR:LS', fromdate : '2013-01-01'}
Unitcode after x, replace unitcode on days >= x
{'type' : 'UNITCODE', unitcode : 'ARR:LS', fromdate : '2013-01-01', todate : '2014-01-14'}
Unitcode between x and y
"""
def merge(conn, data, mergestrategies):
cur = conn.cursor()
for item in mergestrategies:
if item['type'] == 'DATASOURCE':
datasource = data['DATASOURCE'][item['datasourceref']]
print(datasource)
if 'fromdate' in item and 'todate' in item:
print('remove datesource ' + str(datasource) + ' between ' + str(item['fromdate']) + ' and ' + str(
item['todate']))
cur.execute("""
update availabilityconditionday set isavailable = false
WHERE availabilityconditionref in (select ac.id from availabilitycondition as ac LEFT JOIN version as v ON (v.id = ac.versionref) WHERE datasourceref = %s)
AND validdate between %s and %s;
""", [datasource, item['fromdate'], item['todate']])
elif 'fromdate' in item:
print('remove datesource ' + str(datasource) + ' after and on ' + str(item['fromdate']))
cur.execute("""
update availabilityconditionday set isavailable = false
WHERE availabilityconditionref in (select ac.id from availabilitycondition as ac JOIN version as v ON (v.id = ac.versionref) WHERE datasourceref = %s)
AND validdate >= %s;
""", [datasource, item['fromdate']])
else:
print('remove datesource ' + str(datasource))
cur.execute("""
update availabilityconditionday set isavailable = false
WHERE availabilityconditionref in (select ac.id from availabilitycondition as ac LEFT JOIN version as v ON (v.id = ac.versionref) WHERE datasourceref = %s)
""", [datasource])
elif item['type'] == 'UNITCODE':
unitcode = item['unitcode']
if 'fromdate' in item and 'todate' in item:
print('remove unitcode ' + str(unitcode) + ' between ' + str(item['fromdate']) + ' and ' + str(
item['todate']))
cur.execute("""
update availabilityconditionday set isavailable = false where availabilityconditionref in (select id from availabilitycondition where unitcode = %s) and validdate between %s and %s;
""", [unitcode, item['fromdate'], item['todate']])
elif 'fromdate' in item:
print('remove unitcode ' + str(unitcode) + ' after and on ' + str(item['fromdate']))
cur.execute("""
update availabilityconditionday set isavailable = false where availabilityconditionref in (select id from availabilitycondition where unitcode = %s) and validdate >= %s;
""", [unitcode, item['fromdate']])
else:
print('remove unitcode')
cur.execute("""
update availabilityconditionday set isavailable = false where availabilityconditionref in (select id from availabilitycondition where unitcode = %s)
""", [unitcode])
cur.close()
def versions_imported(datasourceid):
conn = psycopg2.connect(database_connect)
cur = conn.cursor()
cur.execute("""
SELECT version.operator_id FROM version LEFT JOIN datasource ON (datasourceref = datasource.id)
WHERE datasource.operator_id = %s
UNION
SELECT rejectedversion.operator_id FROM rejectedversion LEFT JOIN datasource ON (datasourceref = datasource.id)
WHERE datasource.operator_id = %s
""", [datasourceid] * 2)
versions = set([])
for row in cur.fetchall():
versions.add(row[0])
cur.close()
return versions
def version_imported(operator_id):
conn = psycopg2.connect(database_connect)
cur = conn.cursor()
cur.execute("""
SELECT true FROM version WHERE operator_id = %s
UNION
SELECT true FROM rejectedversion WHERE operator_id = %s
""", [operator_id] * 2)
result = cur.fetchall()
cur.close()
conn.close()
return len(result) > 0
def import_noticegroups(conn, data):
cur = conn.cursor()
for operator_id, group in data['NOTICEGROUP'].items():
noticerefs = []
for notice in group['noticerefs']:
noticerefs.append(data['NOTICE'][notice])
cur.execute("""
SELECT g.id,array_agg(n.id ORDER BY n.id)
FROM noticegroup as g JOIN notice AS n ON (n.id = g.noticeref)
WHERE g.operator_id = %s
GROUP BY g.id;""", [operator_id])
id = -1
for match in cur.fetchall():
if set(match[1]) == set(noticerefs):
id = match[0]
break
if id == -1:
cur.execute("SELECT nextval('noticegroup_id_seq')")
id = cur.fetchone()[0]
for noticeref in noticerefs:
cur.execute("INSERT INTO noticegroup (id,operator_id,noticeref) VALUES (%s,%s,%s)",
[id, operator_id, noticeref])
data['NOTICEGROUP'][operator_id] = id
cur.close()
def reject(data):
conn = psycopg2.connect(database_connect)
try:
simple_dictdict_insert(conn, 'DATASOURCE', data['DATASOURCE'])
setRefs(data['VERSION'], data['DATASOURCE'], 'datasourceref')
simple_dictdict_insert(conn, 'REJECTEDVERSION', data['VERSION'])
except:
conn.rollback()
conn.close()
raise
conn.commit()
conn.close()
def insert(data, recycle_journeyids=None):
conn = psycopg2.connect(database_connect, host="localhost", user='asfandyar')
try:
# checkIfExistingVersion(conn,data['VERSION'])
simple_dictdict_insert(conn, 'DATASOURCE', data['DATASOURCE'])
merge(conn, data, data['MERGESTRATEGY'])
simple_dictdict_insert(conn, 'OPERATOR', data['OPERATOR'])
setRefs(data['VERSION'], data['DATASOURCE'], 'datasourceref')
simple_dictdict_insert(conn, 'VERSION', data['VERSION'])
simple_dictdict_insert(conn, 'DESTINATIONDISPLAY', data['DESTINATIONDISPLAY'])
simple_dictdict_insert(conn, 'PRODUCTCATEGORY', data['PRODUCTCATEGORY'])
if 'NOTICE' in data:
simple_dictdict_insert(conn, 'NOTICE', data['NOTICE'])
import_noticegroups(conn, data)
setRefs(data['NOTICEASSIGNMENT'], data['NOTICEGROUP'], 'noticegroupref')
simple_dictdict_insert(conn, 'NOTICEASSIGNMENT', data['NOTICEASSIGNMENT'])
if 'ADMINISTRATIVEZONE' in data:
simple_dictdict_insert(conn, 'ADMINISTRATIVEZONE', data['ADMINISTRATIVEZONE'])
setRefs(data['LINE'], data['OPERATOR'], 'operatorref')
simple_dictdict_insert(conn, 'LINE', data['LINE'])
setRefs(data['ROUTE'], data['LINE'], 'lineref')
import_routes(conn, data['ROUTE'])
import_timedemandgroups(conn, data['TIMEDEMANDGROUP'])
simple_dictdict_insert(conn, 'STOPAREA', data['STOPAREA'])
setRefs(data['STOPPOINT'], data['STOPAREA'], 'stoparearef', ignore_null=True)
simple_dictdict_insert(conn, 'STOPPOINT', data['STOPPOINT'])
import_availabilityconditions(conn, data)
import_journeypatterns(conn, data)
import_journeys(conn, data, recycle_journeyids=recycle_journeyids)
if 'JOURNEYTRANSFERS' in data:
setRefs(data['JOURNEYTRANSFERS'], data['JOURNEY'], 'journeyref')
setRefs(data['JOURNEYTRANSFERS'], data['STOPPOINT'], 'pointref')
setRefs(data['JOURNEYTRANSFERS'], data['JOURNEY'], 'onwardjourneyref')
setRefs(data['JOURNEYTRANSFERS'], data['STOPPOINT'], 'onwardpointref')
fromTo = set([])
            # iterate over a copy: entries may be deleted while looping
            for id, transfer in list(data['JOURNEYTRANSFERS'].items()):
                key = ':'.join(str(x) for x in
                               [transfer['journeyref'], transfer['onwardjourneyref'], transfer['pointref'],
                                transfer['onwardpointref']])
                if key in fromTo:
                    del data['JOURNEYTRANSFERS'][id]
                    continue
                fromTo.add(key)
simple_dictdict_insert(conn, 'JOURNEYTRANSFERS', data['JOURNEYTRANSFERS'], return_id=False)
conn.commit()
except:
conn.rollback()
conn.close()
raise
conn.close()
from django.urls import path
from . import views
# app_name = 'articles'  # commented out; kept just in case I later add named URLs that clash
urlpatterns = [
path('', views.article_list, name="article_list"),
path('create/', views.article_create, name="article_create"),
path('<slug:slug>/', views.article_detail, name="article_detail"),
]
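# With these names in place (standard Django reversing, shown for reference):
#   reverse('article_detail', kwargs={'slug': article.slug})
#   {% url 'article_detail' slug=article.slug %}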
# Generated by Django 3.2.9 on 2021-12-09 21:47
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0012_alter_user_first_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('nickname', models.CharField(max_length=64)),
('avatar', models.ImageField(default='iamge/default.png', upload_to='image/avatar')),
('email', models.EmailField(max_length=254)),
('password', models.TextField()),
('isqualified', models.BooleanField(default=False)),
('selfintroduce', models.TextField(default='一名低调的用户')),
('identity', models.IntegerField(default=0)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='emailVerify',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=64)),
('randomCode', models.TextField()),
],
),
migrations.CreateModel(
name='HotKey',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('visit', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Record',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('neckname', models.CharField(max_length=64)),
('dataTime', models.DateTimeField()),
('DOI', models.CharField(max_length=64)),
],
),
migrations.CreateModel(
name='Administrators',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='pattern.user')),
('realName', models.CharField(max_length=64)),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
bases=('pattern.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Mechanism',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='pattern.user')),
('mecName', models.CharField(max_length=128)),
('briefIntro', models.TextField(null=True)),
('ranking', models.IntegerField(null=True)),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
bases=('pattern.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('userName', models.CharField(max_length=64)),
('commentTime', models.DateTimeField(default=django.utils.timezone.now)),
('nextComment', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pattern.comment')),
],
),
migrations.CreateModel(
name='Attestation',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mecName', models.CharField(max_length=128)),
('operatorID', models.CharField(max_length=32)),
('certificateFile', models.FileField(upload_to='')),
('applicant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Expert',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, to='pattern.user')),
('EID', models.CharField(max_length=32, primary_key=True, serialize=False)),
('domain', models.CharField(max_length=64)),
('realName', models.CharField(max_length=128)),
('pubNum', models.IntegerField(null=True)),
('isCertified', models.BooleanField(default=False)),
('mec', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pattern.mechanism')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
bases=('pattern.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
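# Auto-generated initial schema migration; applied with the standard
# `python manage.py migrate` command.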
import copy
from collections import namedtuple
from uuid import uuid1
from crum import get_current_user
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.db.models.signals import m2m_changed
from py2neo import Node, Relationship, Graph
from py2neo.packages.httpstream import SocketError
def proxy(name, d):
return namedtuple(name, d.keys())(**d)
class AbstractCachedModel(models.Model):
cached_vars = ['status']
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(AbstractCachedModel, self).__init__(*args, **kwargs)
self.var_cache = {}
for var in self.cached_vars:
try:
if type(var) is str:
value = getattr(self, var)
else:
var, use_id = var
value = getattr(self, var).id
self.var_cache[var] = copy.copy(value)
except:
self.var_cache[var] = None
def get_current_domain_id(user=None):
domain_id = None
try:
# use in shell
domain_id = settings.CURRENT_DOMAIN_ID
return domain_id
except AttributeError:
pass
if not user:
user = get_current_user()
if not domain_id and user and user.id and user.domain:
domain_id = user.domain.id
#elif request:
# try:
# domain_id = Domain.objects.get(code=request.get_host()).id
# except Domain.DoesNotExist:
# pass
return domain_id
class DomainManager(models.Manager):
def get_queryset(self):
queryset = super(DomainManager, self).get_queryset()
#return queryset
if self.model is Domain:
return queryset
domain_id = get_current_domain_id()
if not domain_id:
return queryset
if self.model.is_multi_domain:
queryset = queryset.filter(Q(domains__id=domain_id) | Q(id=0))
else:
queryset = queryset.filter(Q(domain__id=domain_id) | Q(id=0))
return queryset
class CommonTrashManager(DomainManager):
def filter_without_trash(self, *args, **kwargs):
if not kwargs.get('is_deleted'):
return super(CommonTrashManager, self).filter(*args, **kwargs).exclude(is_deleted=True)
else:
return super(CommonTrashManager, self).filter(*args, **kwargs)
def exclude(self, *args, **kwargs):
if not kwargs.get('is_deleted'):
return super(CommonTrashManager, self).exclude(*args, **kwargs).exclude(is_deleted=True)
def filter(self, *args, **kwargs):
return self.filter_without_trash(*args, **kwargs)
def all(self, *args, **kwargs):
return self.filter(*args, **kwargs)
def get_without_trash(self, *args, **kwargs):
if not kwargs.get('is_deleted'):
kwargs['is_deleted'] = False
return super(CommonTrashManager, self).get(*args, **kwargs)
def get(self, *args, **kwargs):
return self.get_without_trash(*args, **kwargs)
def annotate(self, *args, **kwargs):
return super(CommonTrashManager, self).exclude(is_deleted=True).annotate(*args, **kwargs)
def count(self, *args, **kwargs):
return self.filter(*args, **kwargs).count()
def latest(self, *args, **kwargs):
return super(CommonTrashManager, self).exclude(is_deleted=True).latest(*args, **kwargs)
class DomainMixin(models.Model):
domain = models.ForeignKey('common.Domain', related_name='domain_%(class)s', verbose_name='Current domain')
objects = DomainManager()
default_manager = models.Manager()
is_multi_domain = False
graph_node = False
graph_fields = []
graph_relations = []
class Meta:
abstract = True
def graph_execute(self, command):
graph = Graph(settings.NEO4J_CONNECTION_URL)
try:
results = graph.run(command)
except SocketError:
raise Exception('Please, start neo4j server')
return [proxy('Record', record) for record in results]
def get_graph_name(self, field_name=None):
if field_name:
return '%s_%s' % (type(self).__name__, field_name)
else:
return type(self).__name__
def get_or_create_graph_node(self):
graph = Graph(settings.NEO4J_CONNECTION_URL)
extras = {'name': '%s' % self}
for field in self.graph_fields:
field_split = field.split('.')
field_name = field_split[0]
value = self
for key in field_split:
try:
value = getattr(value, key)
except AttributeError:
break
extras[field_name] = value
node = Node(self.get_graph_name(), id=self.id, name='%s' % self, domain_id=self.domain_id)
graph.merge(node)
        for key, value in extras.items():  # items() works on both py2 and py3
node[key] = value
node.push()
return node
def update_graph_node(self):
if not self.graph_node:
return
node = self.get_or_create_graph_node()
if node:
            node['name'] = '%s' % self  # same item-style access as used above
node.push()
def update_graph_relations(self, field_names=None, add_instance_list=[]):
if not self.graph_relations:
return
node = self.get_or_create_graph_node()
relationships = []
field_names = field_names or self.graph_relations
for field_name in field_names:
related_insts = getattr(self, field_name)
if add_instance_list:
related_insts = add_instance_list
elif type(self._meta.get_field(field_name)) is models.ManyToManyField:
related_insts = related_insts.all()
else:
related_insts = related_insts and [related_insts]
if not add_instance_list:
self.graph_execute(
"MATCH (n:%s{id:%d, domain_id:%s})-[r:%s]->() DETACH DELETE r" % (
self.get_graph_name(),
self.id,
self.domain_id,
self.get_graph_name(field_name)
)
)
if not related_insts:
continue
for related_inst in related_insts:
related_node = related_inst.get_or_create_graph_node()
relationship = Relationship(node, self.get_graph_name(field_name), related_node)
relationships.append(relationship)
graph = Graph(settings.NEO4J_CONNECTION_URL)
tx = graph.begin()
for relationship in relationships:
tx.merge(relationship)
tx.commit()
def save(self, *args, **kwargs):
if not self.id and not self.domain_id:
self.domain_id = get_current_domain_id()
self.full_clean()
if hasattr(self, 'created_by') and self.created_by and self.created_by.is_anonymous:
raise Exception('Not allow created by anonymous')
super(DomainMixin, self).save(*args, **kwargs)
self.update_graph_node()
self.update_graph_relations()
def delete(self, using=None):
self.graph_execute(
"MATCH (n:%s{id:%d, domain_id:%s}) DETACH DELETE n" % (
self.get_graph_name(),
self.id,
self.domain_id
)
)
super(DomainMixin, self).delete(using=using)
def graph_m2m_changed(sender, **kwargs):
action = kwargs['action']
instance = kwargs['instance']
if action not in ['post_clear', 'post_add', 'post_remove']:
return
if not (hasattr(instance, 'graph_node') and instance.graph_node and hasattr(instance, 'graph_relations') and instance.graph_relations):
return
field_name = str(sender._meta.model._meta).replace('%s_' % str(instance._meta), '')
if field_name in instance.graph_relations:
if action == 'post_add':
add_instance_list = getattr(instance, field_name).model.objects.filter(id__in=kwargs.get('pk_set', []))
instance.update_graph_relations(field_names=[field_name], add_instance_list=add_instance_list)
else:
instance.update_graph_relations(field_names=[field_name])
m2m_changed.connect(graph_m2m_changed)
class MultiDomainMixin(DomainMixin):
domains = models.ManyToManyField('common.Domain', related_name='multi_domain_%(class)s', verbose_name='Allowed in domains')
is_multi_domain = True
class Meta:
abstract = True
def save(self, *args, **kwargs):
super(MultiDomainMixin, self).save(*args, **kwargs)
if self.domain and self.domains.filter(id=self.domain.id).count() == 0:
self.domains.add(self.domain)
class AbstractCommonTrashModel(models.Model):
is_deleted = models.BooleanField(default=False)
objects = CommonTrashManager()
class Meta:
abstract = True
def save(self, commit=True, force_insert=False, force_update=False, *args, **kwargs):
super(AbstractCommonTrashModel, self).save(*args, **kwargs)
def trash(self, *args, **kwargs):
self.is_deleted = True
self.save()
return self
def delete(self, *args, **kwargs):
return self.trash(self, *args, **kwargs)
def remove(self, *args, **kwargs):
return super(AbstractCommonTrashModel, self).delete(*args, **kwargs)
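# Soft-delete behaviour in practice: instance.delete() only flags
# is_deleted=True, so Model.objects.filter(...)/all()/get(...) hide the row,
# while instance.remove() issues the real SQL DELETE.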
class Domain(AbstractCommonTrashModel):
name = models.CharField(max_length=512)
code = models.CharField(max_length=255, unique=True)
description = models.TextField(null=True, blank=True)
default_language = models.CharField(max_length=20, default=settings.LANGUAGE_CODE)
timezone = models.FloatField(default=7.0)
is_active = models.BooleanField(default=False)
# logo ?
def __unicode__(self):
return '%s(%s)' % (self.name, self.code)
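# A minimal usage sketch (hypothetical model): a subclass opts in to Neo4j
# syncing by setting the graph_* attributes, e.g.
#
#   class Report(DomainMixin):
#       name = models.CharField(max_length=255)
#       areas = models.ManyToManyField('common.Area')
#       graph_node = True
#       graph_relations = ['areas']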
#!/usr/bin/env python3
"""
Find the nearest clone
:author: <NAME>
:hackerrank: https://hackerrank.com/delaanthonio
:problem: https://www.hackerrank.com/challenges/find-the-nearest-clone
"""
from collections import defaultdict, deque
from typing import Iterable, Optional, Set
class Graph(object):
def __init__(self, edges, colors):
self.nodes = defaultdict(list)
self.colors = {i: color for i, color in enumerate(colors, start=1)}
for src, dest in edges:
self.add_edge(src, dest)
def add_edge(self, src: int, dest: int):
self.nodes[src].append(dest)
self.nodes[dest].append(src)
def _shortest_path(self, color: int) -> Iterable[int]:
seen = set()
for n, c in self.colors.items():
if color == c:
yield self.color_bfs(n, color, seen)
def shortest_path(self, color: int) -> int:
distances = [x for x in self._shortest_path(color) if x]
return min(distances) if distances else -1
def color_bfs(self, node: int, color: int,
seen: Set[int]) -> Optional[int]:
nodes = deque(self.nodes[node])
seen.add(node)
distances = {n: 1 for n in nodes}
while nodes:
n = nodes.popleft()
if n in seen:
continue
seen.add(n)
if self.colors[n] == color:
return distances[n]
nodes.extend(self.nodes[n])
distances.update((adj, distances[n] + 1) for adj in self.nodes[n])
return None
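# Hypothetical example: for edges (1, 2), (1, 3), (2, 4) and colors [1, 2, 1, 1],
# Graph([(1, 2), (1, 3), (2, 4)], [1, 2, 1, 1]).shortest_path(1) == 1,
# since the color-1 clones at nodes 1 and 3 are one edge apart.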
def main():
n, e = [int(x) for x in input().split()]
edges = []
for _ in range(e):
edge = [int(x) for x in input().split()]
edges.append(edge)
colors = [int(x) for x in input().split()]
graph = Graph(edges, colors)
target_color = int(input())
distance = graph.shortest_path(target_color)
print(distance)
if __name__ == '__main__':
main()
import threading, wget
class ParallelDownloader:
def __init__(self):
pass
def dUrl(self,url,i):
        try:
            wget.download(url)
        except Exception:  # avoid a bare except; report the failing URL
            print("Error with : " + url)
def logic1(self):
urls = ["https://images.s3.amazonaws.com/PdfLabelFiles/flipkartShippingLabel_OD107312205540085000-1731220554008500.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559338486.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559338426.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559338357.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559338279.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/404-9012833-0137142_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/171-5056321-1155509_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/403-4455185-5905913_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559295457.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559295397.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559148777.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559148776.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559148775.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559148770.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19805014659-SLP1140406657.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19803429605-SLP1140286741.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/171-7456146-3809129_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559131926.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559131850.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538921681-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538853123-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/171-9284133-0781116_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19801906394-SLP1140178106.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/171-5670213-6464363_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559087648.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/171-0998013-5440314_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/402-3428884-0889148_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/403-3179019-2162765_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/402-2892189-3625157_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559045947.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559045879.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559045815.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/flipkartShippingLabel_OD107310867834425001-1731086783442500.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/402-9459255-6661948_shippinglabel.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538638382-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538630871-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538512662-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538508341-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/flipkartShippingLabel_OD107310694756347000-1731069475634700.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19799680099-SLP1140008175.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19799407603-SLP1139999699.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19798917481-SLP1139967832.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19798845649-SLP1139957984.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559010233.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559010142.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559010038.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/paytm_packing_slip_order_559007311.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19799239237-SLP1139987041.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19798716880-SLP1139950403.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19787010456-SLP1139961489.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19797915979-SLP1139887878.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538385725-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538361501-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538330738-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/ebayShippinglabel_2538321921-15242.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/SnapDealLabel_19798049434-SLP1139897601.pdf",
"https://images.s3.amazonaws.com/PdfLabelFiles/jabong_161010359170961_ship_label_path.pdf"]
count = 0
threadLists = []
for i,url in enumerate(urls):
thread = threading.Thread(target=self.dUrl,args=(url,i))
count = count+1
thread.name = "T%d" % count
threadLists.append(thread)
for it in threadLists:
it.start()
for it in threadLists:
it.join()
obj = ParallelDownloader()
obj.logic1() | [
"wget.download",
"threading.Thread"
] | [((113, 131), 'wget.download', 'wget.download', (['url'], {}), '(url)\n', (126, 131), False, 'import threading, wget\n'), ((5646, 5695), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.dUrl', 'args': '(url, i)'}), '(target=self.dUrl, args=(url, i))\n', (5662, 5695), False, 'import threading, wget\n')] |
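# A hedged alternative sketch (not part of the class above): the same download
# fan-out with a bounded worker pool via concurrent.futures, still using
# wget.download as the worker. max_workers=8 is an illustrative choice, not a
# value taken from the source.
from concurrent.futures import ThreadPoolExecutor
import wget

def download_all(urls, max_workers=8):
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # consume the iterator so worker exceptions are surfaced
        list(pool.map(wget.download, urls))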
# Copyright 2017 TsumiNa. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# -*- coding: utf-8 -*-
import sys
import os
cvm_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, cvm_path)
import cvm | [
"os.path.dirname",
"sys.path.insert"
] | [((271, 299), 'sys.path.insert', 'sys.path.insert', (['(0)', 'cvm_path'], {}), '(0, cvm_path)\n', (286, 299), False, 'import sys\n'), ((237, 262), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (252, 262), False, 'import os\n')] |
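# An equivalent sketch of the sys.path tweak above using pathlib (illustrative
# only; the repository itself uses the os.path form):
# import pathlib, sys
# sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent / '..'))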
# import the necessary packages
from threading import Thread
import cv2
import time
class FileVideoStream:
def __init__(self, path, transform=None, queue_size=16, num_queues=1, queue_type="Q"):
self.stream = cv2.VideoCapture(path)
self.stopped = False
self.transform = transform
self.num_queues = num_queues
self.queue_type = queue_type
self.qlist = []
if self.queue_type == "mQ":
from multiprocessing import Queue
else:
from queue import Queue
for _ in range(self.num_queues):
q = Queue(maxsize=queue_size)
self.qlist.append(q)
self.thread = Thread(target=self.update, args=())
self.thread.daemon = True
def start(self):
self.thread.start()
return self
def update(self):
while True:
if self.stopped:
break
if not self.qlist[0].full():
(grabbed, frame) = self.stream.read()
                if not grabbed:
                    # End of stream: stop and bail out instead of pushing an
                    # empty frame through the transform and the queues.
                    self.stopped = True
                    break
if self.transform:
frame = self.transform(frame)
for i in range(self.num_queues):
self.qlist[i].put(frame)
else:
time.sleep(0.1)
self.stream.release()
def read(self):
return self.qlist[0].get()
def running(self):
return self.more() or not self.stopped
def more(self):
tries = 0
while self.qlist[0].qsize() == 0 and not self.stopped and tries < 5:
time.sleep(0.1)
tries += 1
return self.qlist[0].qsize() > 0
def stop(self):
self.stopped = True
self.thread.join()
| [
"threading.Thread",
"queue.Queue",
"cv2.VideoCapture",
"time.sleep"
] | [((222, 244), 'cv2.VideoCapture', 'cv2.VideoCapture', (['path'], {}), '(path)\n', (238, 244), False, 'import cv2\n'), ((677, 712), 'threading.Thread', 'Thread', ([], {'target': 'self.update', 'args': '()'}), '(target=self.update, args=())\n', (683, 712), False, 'from threading import Thread\n'), ((596, 621), 'queue.Queue', 'Queue', ([], {'maxsize': 'queue_size'}), '(maxsize=queue_size)\n', (601, 621), False, 'from queue import Queue\n'), ((1597, 1612), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1607, 1612), False, 'import time\n'), ((1295, 1310), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1305, 1310), False, 'import time\n')] |
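# A minimal consumer sketch for the class above ("video.mp4" is a hypothetical
# path): start the reader thread, drain frames from the queue, then stop.
fvs = FileVideoStream("video.mp4", queue_size=32).start()
while fvs.running():
    frame = fvs.read()
    # ... process `frame` here, e.g. cv2.imshow("frame", frame) ...
fvs.stop()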
# ./sedater/test/test_rawconverter.py
# Author: <NAME> <<EMAIL>>
# Created: Wed, 28.10.2015 - 19:34:06
# Modified: Sun, 08.11.2015 - 01:12:59
import unittest
import os
from testfixtures import TempDirectory
from sedater.rawvalidation import Sensor, RawConverter
from sedater.lib import shared
class TestSensor(unittest.TestCase):
def setUp(self):
self.tmp = TempDirectory()
self.calibration = self.tmp.write('calibration.csv',
b"1.1,2,3\n4,5.5,6\n7,8,9.9")
def tearDown(self):
TempDirectory.cleanup_all()
def test_missing_sensor_calibration_values(self):
calibration = self.tmp.write('calibration.csv',
b"1.1,9")
self.assertRaises(TypeError, Sensor,
shared.Orientation.left, calibration)
def test_invalid_sensor_calibration_values(self):
calibration = self.tmp.write('calibration.csv',
b"1.1,2,3\n4,foobar,6\n7,8,9.9")
self.assertRaises(ValueError, Sensor,
shared.Orientation.left, calibration)
def test_sensor_calibration_file(self):
calibration = self.tmp.write('calibration.csv',
b"1.1,2,3\n4,5.5,6\n7,8,9.9")
sens = Sensor(shared.Orientation.left, calibration)
        self.assertEqual(sens.calibration[1], 2)
        self.assertEqual(sens.calibration[4], 5.5)
        self.assertEqual(sens.calibration[8], 9.9)
def test_normalizing_return(self):
sens = Sensor(shared.Orientation(1).name, self.calibration)
foo = shared.Sensorsegment._make([1,2,3,4,5,6])
bar = sens._normalizeRawSegment(foo)
self.assertIsInstance(foo, type(foo))
def test_normalizing(self):
# eGaiT_database/P100_E4_left.dat
# A917.csv
calibrationLeft = self.tmp.write('left.csv',
b"2367.1,2274.9,2271.2\n1871.8,1795.5,1753.1\n1785.8,1684.4,1855.4")
# A6DF.csv
calibrationRight = self.tmp.write('right.csv',
b"2403.5,2254.8,2266.1\n1899.7,1769.3,1773.2\n1835.2, 1709.6,1860.5")
# first 12 bytes of P100_E4_left.dat
rawRight = shared.Sensorsegment._make([1762, 2155, 2024, 1849, 1713, 1864])
# first 12 bytes of P100_E4_right.dat
rawLeft = shared.Sensorsegment._make([1797, 2109, 2013, 1777, 1688, 1850])
# expected left end results
resL = shared.Sensorsegment._make([-1.3020391681808998,0.30788485607008736,0.003281219841729923,-3.2222629073599247,1.318198462101761,-1.9772976931527246])
# expected right end results
resR = shared.Sensorsegment._make([-1.5466454942437473,0.5888774459320278,0.01765063907486269,5.053094104723528,1.2449652142072833,1.2815818381545223])
sensL = Sensor(shared.Orientation.left, calibrationLeft)
sensR = Sensor(shared.Orientation.right, calibrationRight)
bar = sensL._normalizeRawSegment(rawLeft)
foo = sensR._normalizeRawSegment(rawRight)
self.assertEqual(resL, bar)
self.assertEqual(resR, foo)
class TestRawConverter(unittest.TestCase):
def setUp(self):
self.tmp = TempDirectory()
foo = self.tmp.write('foo.csv', b"1,2,3\n4,5,6\n7,8,9")
bar = self.tmp.write('bar.csv', b"9,8,7\n6,5,4\n3,2,1")
self.rawleft = self.tmp.write('P100_E4_left.dat',
b'\x05\x07\x3d\x08\xdd\x07\xf1\x06\x98\x06\x3a\x07')
self.rawright = self.tmp.write('P100_E4_right.dat',
b'\xe2\x06\x6b\x08\xe8\x07\x39\x07\xb1\x06\x48\x07')
self.foo = shared.UninitializedSensor._make([shared.Orientation(1), foo])
self.bar = shared.UninitializedSensor._make([shared.Orientation(2), bar])
def tearDown(self):
TempDirectory.cleanup_all()
def test_sensor_setup(self):
conv = RawConverter([], [self.foo, self.bar])
conv.processDatFiles()
self.assertIsInstance(conv.initSensors[
shared.Orientation(1).name], Sensor)
self.assertIsInstance(conv.initSensors[
shared.Orientation(2).name], Sensor)
        self.assertEqual(conv.initSensors[
            shared.Orientation(1).name].orientation, shared.Orientation(1))
        self.assertEqual(conv.initSensors[
            shared.Orientation(2).name].orientation, shared.Orientation(2))
def test_invalid_filesToConvert_parameter(self):
conv = RawConverter(
[self.rawleft, self.rawright], [self.foo, self.bar])
self.assertRaises(TypeError, conv.processDatFiles)
def test_dat_file_processing(self):
first = shared.Sourcefile._make(
[os.path.dirname(self.rawleft), 'P100_E4_left.dat', '01', '01', shared.Orientation(1)])
second = shared.Sourcefile._make(
[os.path.dirname(self.rawright), 'P100_E4_right.dat', '01', '01', shared.Orientation(2)])
conv = RawConverter(
[(first, second)], [self.foo, self.bar])
conv.processDatFiles()
#TODO: continue
| [
"sedater.lib.shared.Sensorsegment._make",
"testfixtures.TempDirectory",
"sedater.lib.shared.Orientation",
"os.path.dirname",
"sedater.rawvalidation.RawConverter",
"sedater.rawvalidation.Sensor",
"testfixtures.TempDirectory.cleanup_all"
] | [((377, 392), 'testfixtures.TempDirectory', 'TempDirectory', ([], {}), '()\n', (390, 392), False, 'from testfixtures import TempDirectory\n'), ((533, 560), 'testfixtures.TempDirectory.cleanup_all', 'TempDirectory.cleanup_all', ([], {}), '()\n', (558, 560), False, 'from testfixtures import TempDirectory\n'), ((1221, 1265), 'sedater.rawvalidation.Sensor', 'Sensor', (['shared.Orientation.left', 'calibration'], {}), '(shared.Orientation.left, calibration)\n', (1227, 1265), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((1541, 1587), 'sedater.lib.shared.Sensorsegment._make', 'shared.Sensorsegment._make', (['[1, 2, 3, 4, 5, 6]'], {}), '([1, 2, 3, 4, 5, 6])\n', (1567, 1587), False, 'from sedater.lib import shared\n'), ((2131, 2195), 'sedater.lib.shared.Sensorsegment._make', 'shared.Sensorsegment._make', (['[1762, 2155, 2024, 1849, 1713, 1864]'], {}), '([1762, 2155, 2024, 1849, 1713, 1864])\n', (2157, 2195), False, 'from sedater.lib import shared\n'), ((2260, 2324), 'sedater.lib.shared.Sensorsegment._make', 'shared.Sensorsegment._make', (['[1797, 2109, 2013, 1777, 1688, 1850]'], {}), '([1797, 2109, 2013, 1777, 1688, 1850])\n', (2286, 2324), False, 'from sedater.lib import shared\n'), ((2376, 2539), 'sedater.lib.shared.Sensorsegment._make', 'shared.Sensorsegment._make', (['[-1.3020391681808998, 0.30788485607008736, 0.003281219841729923, -\n 3.2222629073599247, 1.318198462101761, -1.9772976931527246]'], {}), '([-1.3020391681808998, 0.30788485607008736, \n 0.003281219841729923, -3.2222629073599247, 1.318198462101761, -\n 1.9772976931527246])\n', (2402, 2539), False, 'from sedater.lib import shared\n'), ((2577, 2736), 'sedater.lib.shared.Sensorsegment._make', 'shared.Sensorsegment._make', (['[-1.5466454942437473, 0.5888774459320278, 0.01765063907486269, \n 5.053094104723528, 1.2449652142072833, 1.2815818381545223]'], {}), '([-1.5466454942437473, 0.5888774459320278, \n 0.01765063907486269, 5.053094104723528, 1.2449652142072833, \n 1.2815818381545223])\n', (2603, 2736), False, 'from sedater.lib import shared\n'), ((2738, 2786), 'sedater.rawvalidation.Sensor', 'Sensor', (['shared.Orientation.left', 'calibrationLeft'], {}), '(shared.Orientation.left, calibrationLeft)\n', (2744, 2786), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((2803, 2853), 'sedater.rawvalidation.Sensor', 'Sensor', (['shared.Orientation.right', 'calibrationRight'], {}), '(shared.Orientation.right, calibrationRight)\n', (2809, 2853), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((3111, 3126), 'testfixtures.TempDirectory', 'TempDirectory', ([], {}), '()\n', (3124, 3126), False, 'from testfixtures import TempDirectory\n'), ((3709, 3736), 'testfixtures.TempDirectory.cleanup_all', 'TempDirectory.cleanup_all', ([], {}), '()\n', (3734, 3736), False, 'from testfixtures import TempDirectory\n'), ((3786, 3824), 'sedater.rawvalidation.RawConverter', 'RawConverter', (['[]', '[self.foo, self.bar]'], {}), '([], [self.foo, self.bar])\n', (3798, 3824), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((4358, 4423), 'sedater.rawvalidation.RawConverter', 'RawConverter', (['[self.rawleft, self.rawright]', '[self.foo, self.bar]'], {}), '([self.rawleft, self.rawright], [self.foo, self.bar])\n', (4370, 4382), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((4848, 4901), 'sedater.rawvalidation.RawConverter', 'RawConverter', (['[(first, second)]', '[self.foo, self.bar]'], {}), '([(first, second)], [self.foo, self.bar])\n', (4860, 4901), False, 'from sedater.rawvalidation import Sensor, RawConverter\n'), ((4147, 4168), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (4165, 4168), False, 'from sedater.lib import shared\n'), ((4267, 4288), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(2)'], {}), '(2)\n', (4285, 4288), False, 'from sedater.lib import shared\n'), ((1481, 1502), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (1499, 1502), False, 'from sedater.lib import shared\n'), ((3566, 3587), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (3584, 3587), False, 'from sedater.lib import shared\n'), ((3648, 3669), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(2)'], {}), '(2)\n', (3666, 3669), False, 'from sedater.lib import shared\n'), ((4598, 4627), 'os.path.dirname', 'os.path.dirname', (['self.rawleft'], {}), '(self.rawleft)\n', (4613, 4627), False, 'import os\n'), ((4661, 4682), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (4679, 4682), False, 'from sedater.lib import shared\n'), ((4744, 4774), 'os.path.dirname', 'os.path.dirname', (['self.rawright'], {}), '(self.rawright)\n', (4759, 4774), False, 'import os\n'), ((4809, 4830), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(2)'], {}), '(2)\n', (4827, 4830), False, 'from sedater.lib import shared\n'), ((3916, 3937), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (3934, 3937), False, 'from sedater.lib import shared\n'), ((4013, 4034), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(2)'], {}), '(2)\n', (4031, 4034), False, 'from sedater.lib import shared\n'), ((4106, 4127), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(1)'], {}), '(1)\n', (4124, 4127), False, 'from sedater.lib import shared\n'), ((4226, 4247), 'sedater.lib.shared.Orientation', 'shared.Orientation', (['(2)'], {}), '(2)\n', (4244, 4247), False, 'from sedater.lib import shared\n')]
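# Side note (a decoding sketch, not part of the test suite above): the 12 raw
# bytes written to P100_E4_left.dat are six little-endian uint16 samples,
# which is why rawLeft is expected to be [1797, 2109, 2013, 1777, 1688, 1850].
import struct
assert list(struct.unpack('<6H', b'\x05\x07\x3d\x08\xdd\x07\xf1\x06\x98\x06\x3a\x07')) == [1797, 2109, 2013, 1777, 1688, 1850]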
from server.instance import server
import sys, os
# Need to import all resources
# so that they register with the server
#from resources.routes import *
from resources.update import *
from resources.delete import *
from resources.create import *
from resources.status import *
from resources.health import *
from resources.configof import *
from resources.list import *
if __name__ == '__main__':
server.run() | [
"server.instance.server.run"
] | [((404, 416), 'server.instance.server.run', 'server.run', ([], {}), '()\n', (414, 416), False, 'from server.instance import server\n')] |
from __future__ import annotations
import dataclasses
import enum
import logging
import re
from typing import Dict, Iterator, List, Optional, Tuple, Union
from urllib.parse import urljoin, urlparse
from uuid import uuid4
import bs4
import pydantic
from . import SpecFetcher, config, models, utils
from .exceptions import CameraLensDatabaseException, ParseError
_logger = logging.getLogger(__name__)
@enum.unique
class EquipmentType(int, enum.Enum):
NEW_CAMERA = enum.auto()
OLD_CAMERA = enum.auto()
class Mount(str, enum.Enum):
A = "Sony A"
E = "Sony E"
@staticmethod
def parse(s: str) -> Mount:
if "Eマウント" in s or "Eマウント" in s:
return Mount.E
elif "Aマウント" in s or "ソニー製αレンズ" in s:
return Mount.A
else:
msg = f"unrecognizable mount description: {s}"
raise ParseError(msg)
@dataclasses.dataclass
class SpecParseParams(object):
subpath: Optional[str]
table_selector: str
key_cell_selector: str
value_cell_selector: str
_models_to_ignore: List[str] = []
_known_lens_specs: Dict[str, Dict[str, Union[float, str]]] = {}
_known_camera_specs: Dict[str, Dict[str, Union[float, str]]] = {
"DSLR-A900": {models.KEY_CAMERA_MOUNT: Mount.A},
}
def enum_cameras(target: EquipmentType) -> Iterator[Tuple[str, str, SpecFetcher]]:
card: bs4.ResultSet
if target == EquipmentType.NEW_CAMERA:
base_uri = "https://www.sony.jp/ichigan/lineup/"
item_selector = "div[data-s5lineup-pid]"
name_selector = ".s5-listItem4__modelName"
anchor_selector = ".s5-listItem4__mainLink"
elif target == EquipmentType.OLD_CAMERA:
# TODO: Generalize the pattern using JSONP with HTML scraping
import json
base_uri = "https://www.sony.jp/ichigan/lineup/past.html"
jsonp_uri = "https://www.sony.jp/webapi/past_product/previous_product.php?callback=PreviousProduct&categoryId=2508,3729,4588&startDate=20010101&flag=3&sort=2" # noqa: E501
html_text = utils.fetch(jsonp_uri)
obj = json.loads(re.findall(r"PreviousProduct\((.*)\)", html_text)[0])
eval_name = lambda obj: [x["modelName"] for x in obj["product"]]
eval_href = lambda obj: [x["productLink"] for x in obj["product"]]
for name, href in zip(eval_name(obj), eval_href(obj)):
yield name, urljoin(base_uri, href), fetch_camera
return
else:
msg = f"unsupported type to enumerate: {target}"
raise ValueError(msg)
html_text = utils.fetch(base_uri)
soup = bs4.BeautifulSoup(html_text, features=config["bs_features"])
for card in soup.select(item_selector):
name = card.select(name_selector)[0].text.strip()
anchor = card.select(anchor_selector)[0]
# Get raw value of href attribute
raw_dest = anchor["href"]
if raw_dest.startswith("javascript:"):
continue
# Check the destination looks fine
pr = urlparse(raw_dest)
        if pr.hostname and pr.hostname != urlparse(base_uri).hostname:
msg = "skipped an item because it's not on the same server"
msg += f": {anchor['href']!r} <> {base_uri!r}"
_logger.warning(msg)
continue
# Construct an absolute URI
rel_dest = pr.path
abs_dest = urljoin(base_uri, rel_dest)
yield name, abs_dest, fetch_camera
def fetch_camera(name: str, uri: str) -> models.Camera:
parse_params: List[SpecParseParams] = [
SpecParseParams("spec.html", ".s5-specTable > table", "th", "td"),
SpecParseParams("spec.html", ".mod-specTable", "table th", "table th ~ td"),
]
errors = []
for idx, params in enumerate(parse_params):
try:
return _fetch_camera(name, uri, params)
except ParseError as ex:
errors.append((idx, ex))
msglines = [f'cannot read spec of "{name}" from "{uri}"']
for idx, e in errors:
msglines.append(f" mode {idx}: {str(e)}")
raise CameraLensDatabaseException("\n".join(msglines))
def _fetch_camera(name: str, uri: str, pp: SpecParseParams) -> models.Camera:
if pp.subpath is not None:
uri = urljoin(uri, pp.subpath)
html_text = utils.fetch(uri)
soup = bs4.BeautifulSoup(html_text, config["bs_features"])
selection = soup.select(pp.table_selector)
if len(selection) <= 0:
msg = f"spec table not found: {uri}"
raise ParseError(msg)
# Set initial values
pairs: Dict[str, Union[float, str]] = {
models.KEY_CAMERA_ID: str(uuid4()),
models.KEY_CAMERA_NAME: name,
models.KEY_CAMERA_BRAND: "Sony",
models.KEY_CAMERA_KEYWORDS: "",
}
# Collect and parse interested th-td pairs from the spec table
spec_table: bs4.Tag = selection[0]
for row in spec_table.select("tr"):
key_cells: bs4.ResultSet = row.select(pp.key_cell_selector)
value_cells: bs4.ResultSet = row.select(pp.value_cell_selector)
if len(key_cells) != 1 or len(value_cells) != 1:
continue
key_cell_text = key_cells[0].text.strip()
value_cell_text = value_cells[0].text.strip()
for k, v in _recognize_camera_property(key_cell_text, value_cell_text).items():
pairs[k] = v
# Force using some spec data which is not available or hard to recognize
for k, v in _known_camera_specs.get(name, {}).items():
pairs[k] = v
# Infer media size name if not set
if models.KEY_CAMERA_SIZE_NAME not in pairs:
w = pairs.get(models.KEY_CAMERA_MEDIA_WIDTH)
h = pairs.get(models.KEY_CAMERA_MEDIA_HEIGHT)
if w and h:
assert isinstance(w, float) and isinstance(h, float)
size_name = models.infer_media_size_name(w, h, for_nikon=False)
if size_name:
pairs[models.KEY_CAMERA_SIZE_NAME] = size_name
# Compose a spec object from the table content
try:
return models.Camera(**pairs)
except pydantic.ValidationError as ex:
msg = f"unexpected spec: {pairs}, {uri}"
raise CameraLensDatabaseException(msg) from ex
def _recognize_camera_property(key: str, value: str) -> Dict[str, Union[float, str]]:
# TODO: This can be merged with nikon._recognize_camera_prop... as a static dict?
if key in ("レンズマウント", "使用レンズ"):
mount = Mount.parse(value)
if mount is not None:
return {models.KEY_CAMERA_MOUNT: mount}
elif key == "撮像素子":
props: Dict[str, Union[float, str]] = {}
areas = list(utils.enum_square_millimeters(value))
if len(areas) == 1:
w, h = areas[0]
props[models.KEY_CAMERA_MEDIA_WIDTH] = w
props[models.KEY_CAMERA_MEDIA_HEIGHT] = h
if props:
return props
return {}
| [
"logging.getLogger",
"enum.auto",
"urllib.parse.urlparse",
"uuid.uuid4",
"bs4.BeautifulSoup",
"urllib.parse.urljoin",
"re.findall"
] | [((375, 402), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (392, 402), False, 'import logging\n'), ((472, 483), 'enum.auto', 'enum.auto', ([], {}), '()\n', (481, 483), False, 'import enum\n'), ((501, 512), 'enum.auto', 'enum.auto', ([], {}), '()\n', (510, 512), False, 'import enum\n'), ((2562, 2622), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['html_text'], {'features': "config['bs_features']"}), "(html_text, features=config['bs_features'])\n", (2579, 2622), False, 'import bs4\n'), ((4249, 4300), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['html_text', "config['bs_features']"], {}), "(html_text, config['bs_features'])\n", (4266, 4300), False, 'import bs4\n'), ((2976, 2994), 'urllib.parse.urlparse', 'urlparse', (['raw_dest'], {}), '(raw_dest)\n', (2984, 2994), False, 'from urllib.parse import urljoin, urlparse\n'), ((3315, 3342), 'urllib.parse.urljoin', 'urljoin', (['base_uri', 'rel_dest'], {}), '(base_uri, rel_dest)\n', (3322, 3342), False, 'from urllib.parse import urljoin, urlparse\n'), ((4179, 4203), 'urllib.parse.urljoin', 'urljoin', (['uri', 'pp.subpath'], {}), '(uri, pp.subpath)\n', (4186, 4203), False, 'from urllib.parse import urljoin, urlparse\n'), ((4555, 4562), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (4560, 4562), False, 'from uuid import uuid4\n'), ((2073, 2123), 're.findall', 're.findall', (['"""PreviousProduct\\\\((.*)\\\\)"""', 'html_text'], {}), "('PreviousProduct\\\\((.*)\\\\)', html_text)\n", (2083, 2123), False, 'import re\n'), ((2362, 2385), 'urllib.parse.urljoin', 'urljoin', (['base_uri', 'href'], {}), '(base_uri, href)\n', (2369, 2385), False, 'from urllib.parse import urljoin, urlparse\n')] |
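# A small usage sketch of Mount.parse above (the inputs are illustrative
# strings of the kind the scraper encounters, not taken from sony.jp):
assert Mount.parse("ソニー Eマウント") is Mount.E
assert Mount.parse("Aマウント用レンズ") is Mount.A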
import os
import sys
import json
import logging
from simple_mask_ui import Ui_SimpleMask as Ui
from simple_mask_kernel import SimpleMask
from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow
import numpy as np
import pyqtgraph as pg
home_dir = os.path.join(os.path.expanduser('~'), '.simple-mask')
if not os.path.isdir(home_dir):
os.mkdir(home_dir)
log_filename = os.path.join(home_dir, 'simple-mask.log')
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-24s: %(message)s',
handlers=[
logging.FileHandler(log_filename, mode='a'),
logging.StreamHandler()
])
logger = logging.getLogger(__name__)
def exception_hook(exc_type, exc_value, exc_traceback):
logger.error("Uncaught exception",
exc_info=(exc_type, exc_value, exc_traceback))
sys.excepthook = exception_hook
def text_to_array(pts):
for symbol in '[](),':
pts = pts.replace(symbol, ' ')
pts = pts.split(' ')
pts = [int(x) for x in pts if x != '']
pts = np.array(pts).astype(np.int64)
return pts
class SimpleMaskGUI(QMainWindow, Ui):
def __init__(self, path=None):
super(SimpleMaskGUI, self).__init__()
self.setupUi(self)
self.btn_load.clicked.connect(self.load)
self.btn_plot.clicked.connect(self.plot)
self.btn_compute_qpartition.clicked.connect(self.compute_partition)
self.btn_select_raw.clicked.connect(self.select_raw)
# self.btn_select_txt.clicked.connect(self.select_txt)
self.btn_update_parameters.clicked.connect(self.update_parameters)
self.btn_swapxy.clicked.connect(
lambda: self.update_parameters(swapxy=True))
# need a function for save button -- simple_mask_ui
self.pushButton.clicked.connect(self.save_mask)
self.plot_index.currentIndexChanged.connect(self.mp1.setCurrentIndex)
        # simple mask kernel
self.sm = SimpleMask(self.mp1, self.infobar)
self.mp1.sigTimeChanged.connect(self.update_index)
self.state = 'lock'
# mask_list
self.btn_mask_list_load.clicked.connect(self.mask_list_load)
self.btn_mask_list_clear.clicked.connect(self.mask_list_clear)
self.btn_mask_list_add.clicked.connect(self.mask_list_add)
self.btn_mask_list_evaluate.clicked.connect(
lambda: self.mask_evaluate('mask_list'))
self.btn_mask_list_apply.clicked.connect(
lambda: self.mask_apply('mask_list'))
# blemish
self.btn_select_blemish.clicked.connect(self.select_blemish)
self.btn_apply_blemish.clicked.connect(
lambda: self.mask_evaluate('mask_blemish'))
self.btn_mask_blemish_apply.clicked.connect(
lambda: self.mask_apply('mask_blemish'))
# mask_file
self.btn_select_maskfile.clicked.connect(self.select_maskfile)
self.btn_apply_maskfile.clicked.connect(
lambda: self.mask_evaluate('mask_file'))
self.btn_mask_file_apply.clicked.connect(
lambda: self.mask_apply('mask_file'))
# draw method / array
self.btn_mask_draw_add.clicked.connect(self.add_drawing)
self.btn_mask_draw_evaluate.clicked.connect(
lambda: self.mask_evaluate('mask_draw'))
self.btn_mask_draw_apply.clicked.connect(
lambda: self.mask_apply('mask_draw'))
# binary threshold
self.btn_mask_threshold_evaluate.clicked.connect(
lambda: self.mask_evaluate('mask_threshold'))
self.btn_mask_threshold_apply.clicked.connect(
lambda: self.mask_apply('mask_threshold'))
# btn_mask_outlier_evaluate
self.btn_mask_outlier_evaluate.clicked.connect(
lambda: self.mask_evaluate('mask_outlier'))
self.btn_mask_outlier_apply.clicked.connect(
lambda: self.mask_apply('mask_outlier'))
self.mask_outlier_hdl.setBackground((255, 255, 255))
self.mp1.scene.sigMouseClicked.connect(self.mouse_clicked)
self.work_dir = None
if path is not None:
path = os.path.abspath(path)
if os.path.isfile(path):
self.fname.setText(str(path))
self.work_dir = os.path.dirname(path)
elif os.path.isdir(path):
self.work_dir = path
self.MaskWidget.setCurrentIndex(0)
self.setting_fname = os.path.join(home_dir, 'default_setting.json')
self.lastconfig_fname = os.path.join(home_dir, 'last_config.json')
self.load_default_settings()
self.load_last_config()
self.show()
def load_default_settings(self):
# copy the default values
if not os.path.isfile(self.setting_fname):
config = {
"window_size_w": 1400,
"window_size_h": 740
}
with open(self.setting_fname, 'w') as f:
json.dump(config, f, indent=4)
        # the display size might be too big for some laptops
with open(self.setting_fname, 'r') as f:
config = json.load(f)
if "window_size_h" in config:
new_size = (config["window_size_w"], config["window_size_h"])
logger.info('set mainwindow to size %s', new_size)
self.resize(*new_size)
return
def mouse_clicked(self, event):
if not event.double():
return
        # make sure the mask widget is in manual mode
if self.MaskWidget.currentIndex() != 3:
return
if not self.mp1.scene.itemsBoundingRect().contains(event.pos()):
return
mouse_point = self.mp1.getView().mapSceneToView(event.pos())
col = int(mouse_point.x())
row = int(mouse_point.y())
if not self.mask_list_include.isChecked():
self.mask_list_add_pts([np.array([col, row])])
else:
kwargs = {
'radius': self.mask_list_radius.value(),
'variation': self.mask_list_variation.value(),
'cen': (row, col)
}
pos = self.sm.get_pts_with_similar_intensity(**kwargs)
self.mask_list_add_pts(pos)
def mask_evaluate(self, target=None):
if target is None or not self.is_ready():
return
if target == 'mask_blemish':
kwargs = {
'fname': self.blemish_fname.text(),
'key': self.blemish_path.text()
}
elif target == 'mask_file':
kwargs = {
'fname': self.maskfile_fname.text(),
'key': self.maskfile_path.text()
}
elif target == 'mask_list':
num_row = self.mask_list_xylist.count()
val = [str(self.mask_list_xylist.item(i).text())
for i in range(num_row)]
val = ' '.join(val)
xy = text_to_array(val)
xy = xy[0: xy.size // 2 * 2].reshape(-1, 2).T
xy = np.roll(xy, shift=1, axis=0)
kwargs = {
'zero_loc': xy
}
elif target == 'mask_draw':
kwargs = {
'arr': np.logical_not(self.sm.apply_drawing())
}
elif target == 'mask_threshold':
kwargs = {
'low': self.binary_threshold_low.value(),
'high': self.binary_threshold_high.value(),
'scale': ['linear', 'log'][self.binary_scale.currentIndex()]
}
elif target == 'mask_outlier':
num = self.outlier_num_roi.value()
cutoff = self.outlier_cutoff.value()
# iterations = self.outlier_iterations.value()
saxs1d, zero_loc = self.sm.compute_saxs1d(num=num, cutoff=cutoff)
self.mask_outlier_hdl.clear()
p = self.mask_outlier_hdl
p.addLegend()
p.plot(saxs1d[0], saxs1d[1], name='average_ref',
pen=pg.mkPen(color='g', width=2))
p.plot(saxs1d[0], saxs1d[4], name='average_raw',
pen=pg.mkPen(color='m', width=2))
p.plot(saxs1d[0], saxs1d[2], name='cutoff',
pen=pg.mkPen(color='b', width=2))
p.plot(saxs1d[0], saxs1d[3], name='maximum value',
pen=pg.mkPen(color='r', width=2))
p.setLabel('bottom', 'q (Å⁻¹)')
p.setLabel('left', 'Intensity (a.u.)')
p.setLogMode(y=True)
kwargs = {'zero_loc': zero_loc}
msg = self.sm.mask_evaluate(target, **kwargs)
self.statusbar.showMessage(msg, 10000)
self.plot_index.setCurrentIndex(0)
self.plot_index.setCurrentIndex(5)
return
def mask_apply(self, target):
if not self.is_ready():
return
self.sm.mask_apply(target)
# perform evaluate again so the saxs1d shows the new results;
if target == 'mask_outlier':
self.mask_evaluate(target=target)
elif target == 'mask_list':
self.mask_list_clear()
self.plot_index.setCurrentIndex(0)
self.plot_index.setCurrentIndex(1)
def update_index(self):
idx = self.mp1.currentIndex
self.plot_index.setCurrentIndex(idx)
# make the mask and preview binary
if idx in [2, 5]:
self.mp1.setLevels(0, 1)
def is_ready(self):
if not self.sm.is_ready():
self.statusbar.showMessage('No scattering image is loaded.', 500)
return False
return True
def update_parameters(self, swapxy=False):
if not self.is_ready():
return
pvs = (self.db_cenx, self.db_ceny, self.db_energy, self.db_pix_dim,
self.db_det_dist)
values = []
for pv in pvs:
values.append(pv.value())
if swapxy:
y, x = values[0], values[1]
values[0], values[1] = x, y
self.db_cenx.setValue(x)
self.db_ceny.setValue(y)
self.sm.update_parameters(values)
self.groupBox.repaint()
self.plot()
def select_raw(self):
fname = QFileDialog.getOpenFileName(self,
caption='Select raw file hdf',
directory=self.work_dir
)[0]
# fname = """
# /Users/mqichu/Documents/local_dev/pysimplemask/tests/data/
# E0135_La0p65_L2_013C_att04_Rq0_00001/E0135_La0p65_L2_013C_
# att04_Rq0_00001_0001-100000.hdf"
if fname not in [None, '']:
self.fname.setText(fname)
return
def select_blemish(self):
fname = QFileDialog.getOpenFileName(self, 'Select blemish file')[0]
if fname not in [None, '']:
self.blemish_fname.setText(fname)
if fname.endswith('.tif') or fname.endswith('.tiff'):
self.blemish_path.setDisabled(True)
else:
self.blemish_path.setEnabled(True)
return
def select_maskfile(self):
fname = QFileDialog.getOpenFileName(self, 'Select mask file')[0]
# fname = "../tests/data/triangle_mask/mask_lambda_test.h5"
if fname not in [None, '']:
self.maskfile_fname.setText(fname)
if fname.endswith('.tif') or fname.endswith('.tiff'):
self.maskfile_path.setDisabled(True)
else:
self.maskfile_path.setEnabled(True)
return
def load(self):
if not os.path.isfile(self.fname.text()):
self.select_raw()
if not os.path.isfile(self.fname.text()):
self.statusbar.showMessage('select a valid file')
return
fname = self.fname.text()
self.sm.read_data(fname)
self.db_cenx.setValue(self.sm.meta['bcx'])
self.db_ceny.setValue(self.sm.meta['bcy'])
self.db_energy.setValue(self.sm.meta['energy'])
self.db_pix_dim.setValue(self.sm.meta['pix_dim'])
self.db_det_dist.setValue(self.sm.meta['det_dist'])
self.le_shape.setText(str(self.sm.shape))
self.groupBox.repaint()
self.plot()
def plot(self):
kwargs = {
'cmap': self.plot_cmap.currentText(),
'log': self.plot_log.isChecked(),
'invert': self.plot_invert.isChecked(),
# 'rotate': self.plot_rotate.isChecked(),
'plot_center': self.plot_center.isChecked(),
}
self.sm.show_saxs(**kwargs)
self.plot_index.setCurrentIndex(0)
def add_drawing(self):
if not self.is_ready():
return
color = ('g', 'y', 'b', 'r', 'c', 'm', 'k', 'w')[
self.cb_selector_color.currentIndex()]
kwargs = {
'color': color,
'sl_type': self.cb_selector_type.currentText(),
'sl_mode': self.cb_selector_mode.currentText(),
'width': self.plot_width.value()
}
self.sm.add_drawing(**kwargs)
return
def compute_partition(self):
if not self.is_ready():
return
kwargs = {
'sq_num': self.sb_sqnum.value(),
'dq_num': self.sb_dqnum.value(),
'sp_num': self.sb_spnum.value(),
'dp_num': self.sb_dpnum.value(),
'style': self.partition_style.currentText(),
}
self.sm.compute_partition(**kwargs)
self.plot_index.setCurrentIndex(3)
def save_mask(self):
if not self.is_ready():
return
if self.sm.new_partition is None:
self.compute_partition()
save_fname = QFileDialog.getSaveFileName(
self, caption='Save mask/qmap as')[0]
self.sm.save_partition(save_fname)
def mask_list_load(self):
if not self.is_ready():
return
fname = QFileDialog.getOpenFileName(self, 'Select mask file')[0]
if fname in ['', None]:
return
try:
xy = np.loadtxt(fname, delimiter=',')
except ValueError:
xy = np.loadtxt(fname)
except Exception:
self.statusbar.showMessage(
'only support csv and space separated file', 500)
return
if self.mask_list_rowcol.isChecked():
xy = np.roll(xy, shift=1, axis=1)
if self.mask_list_1based.isChecked():
xy = xy - 1
xy = xy.astype(np.int64)
self.mask_list_add_pts(xy)
def mask_list_add(self):
pts = self.mask_list_input.text()
self.mask_list_input.clear()
if len(pts) < 1:
self.statusbar.showMessage('Input list is almost empty.', 500)
return
xy = text_to_array(pts)
xy = xy[0: xy.size // 2 * 2].reshape(-1, 2)
self.mask_list_add_pts(xy)
def mask_list_add_pts(self, pts):
for xy in pts:
xy_str = str(xy)
if xy_str not in self.sm.bad_pixel_set:
self.mask_list_xylist.addItem(xy_str)
self.sm.bad_pixel_set.add(xy_str)
self.groupBox_11.setTitle('xy list: %d' % len(self.sm.bad_pixel_set))
def mask_list_clear(self):
self.sm.bad_pixel_set.clear()
self.mask_list_xylist.clear()
self.groupBox_11.setTitle('xy list')
    def load_last_config(self):
if not os.path.isfile(self.lastconfig_fname):
logger.info('no configure file found. skip')
return
try:
with open(self.lastconfig_fname, 'r') as fhdl:
logger.info('load the last configure.')
config = json.load(fhdl)
for key, val in config.items():
self.__dict__[key].setText(val)
except Exception:
os.remove(self.lastconfig_fname)
logger.info('configuration file damaged. delete it now')
return
def closeEvent(self, e) -> None:
keys = ['blemish_fname', 'blemish_path', 'maskfile_fname',
'maskfile_path']
config = {}
for key in keys:
config[key] = self.__dict__[key].text()
with open(self.lastconfig_fname, 'w') as fhdl:
json.dump(config, fhdl)
def run():
# if os.name == 'nt':
# setup_windows_icon()
# QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling, True)
app = QApplication(sys.argv)
if len(sys.argv) > 1:
window = SimpleMaskGUI(sys.argv[1])
else:
window = SimpleMaskGUI()
app.exec_()
if __name__ == '__main__':
run()
| [
"logging.getLogger",
"logging.StreamHandler",
"simple_mask_kernel.SimpleMask",
"numpy.array",
"PyQt5.QtWidgets.QApplication",
"pyqtgraph.mkPen",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"os.remove",
"os.path.isdir",
"logging.FileHandler",
"os.mkdir",
"os.path.expanduser",
"os.path.isfile",
"os.path.dirname",
"os.path.abspath",
"numpy.roll",
"os.path.join",
"json.load",
"numpy.loadtxt",
"json.dump",
"PyQt5.QtWidgets.QFileDialog.getSaveFileName"
] | [((384, 425), 'os.path.join', 'os.path.join', (['home_dir', '"""simple-mask.log"""'], {}), "(home_dir, 'simple-mask.log')\n", (396, 425), False, 'import os\n'), ((714, 741), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (731, 741), False, 'import logging\n'), ((273, 296), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (291, 296), False, 'import os\n'), ((321, 344), 'os.path.isdir', 'os.path.isdir', (['home_dir'], {}), '(home_dir)\n', (334, 344), False, 'import os\n'), ((350, 368), 'os.mkdir', 'os.mkdir', (['home_dir'], {}), '(home_dir)\n', (358, 368), False, 'import os\n'), ((16458, 16480), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (16470, 16480), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((2019, 2053), 'simple_mask_kernel.SimpleMask', 'SimpleMask', (['self.mp1', 'self.infobar'], {}), '(self.mp1, self.infobar)\n', (2029, 2053), False, 'from simple_mask_kernel import SimpleMask\n'), ((4493, 4539), 'os.path.join', 'os.path.join', (['home_dir', '"""default_setting.json"""'], {}), "(home_dir, 'default_setting.json')\n", (4505, 4539), False, 'import os\n'), ((4572, 4614), 'os.path.join', 'os.path.join', (['home_dir', '"""last_config.json"""'], {}), "(home_dir, 'last_config.json')\n", (4584, 4614), False, 'import os\n'), ((588, 631), 'logging.FileHandler', 'logging.FileHandler', (['log_filename'], {'mode': '"""a"""'}), "(log_filename, mode='a')\n", (607, 631), False, 'import logging\n'), ((657, 680), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (678, 680), False, 'import logging\n'), ((1107, 1120), 'numpy.array', 'np.array', (['pts'], {}), '(pts)\n', (1115, 1120), True, 'import numpy as np\n'), ((4186, 4207), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (4201, 4207), False, 'import os\n'), ((4223, 4243), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (4237, 4243), False, 'import os\n'), ((4791, 4825), 'os.path.isfile', 'os.path.isfile', (['self.setting_fname'], {}), '(self.setting_fname)\n', (4805, 4825), False, 'import os\n'), ((5169, 5181), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5178, 5181), False, 'import json\n'), ((10237, 10331), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self'], {'caption': '"""Select raw file hdf"""', 'directory': 'self.work_dir'}), "(self, caption='Select raw file hdf', directory=\n self.work_dir)\n", (10264, 10331), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((10803, 10859), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""Select blemish file"""'], {}), "(self, 'Select blemish file')\n", (10830, 10859), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((11181, 11234), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""Select mask file"""'], {}), "(self, 'Select mask file')\n", (11208, 11234), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((13724, 13786), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', (['self'], {'caption': '"""Save mask/qmap as"""'}), "(self, caption='Save mask/qmap as')\n", (13751, 13786), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((13945, 13998), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""Select mask file"""'], {}), "(self, 'Select mask file')\n", (13972, 13998), False, 'from PyQt5.QtWidgets import QFileDialog, QApplication, QMainWindow\n'), ((14084, 14116), 'numpy.loadtxt', 'np.loadtxt', (['fname'], {'delimiter': '""","""'}), "(fname, delimiter=',')\n", (14094, 14116), True, 'import numpy as np\n'), ((14394, 14422), 'numpy.roll', 'np.roll', (['xy'], {'shift': '(1)', 'axis': '(1)'}), '(xy, shift=1, axis=1)\n', (14401, 14422), True, 'import numpy as np\n'), ((15438, 15475), 'os.path.isfile', 'os.path.isfile', (['self.lastconfig_fname'], {}), '(self.lastconfig_fname)\n', (15452, 15475), False, 'import os\n'), ((16281, 16304), 'json.dump', 'json.dump', (['config', 'fhdl'], {}), '(config, fhdl)\n', (16290, 16304), False, 'import json\n'), ((4323, 4344), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (4338, 4344), False, 'import os\n'), ((4362, 4381), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4375, 4381), False, 'import os\n'), ((5009, 5039), 'json.dump', 'json.dump', (['config', 'f'], {'indent': '(4)'}), '(config, f, indent=4)\n', (5018, 5039), False, 'import json\n'), ((14161, 14178), 'numpy.loadtxt', 'np.loadtxt', (['fname'], {}), '(fname)\n', (14171, 14178), True, 'import numpy as np\n'), ((15707, 15722), 'json.load', 'json.load', (['fhdl'], {}), '(fhdl)\n', (15716, 15722), False, 'import json\n'), ((15861, 15893), 'os.remove', 'os.remove', (['self.lastconfig_fname'], {}), '(self.lastconfig_fname)\n', (15870, 15893), False, 'import os\n'), ((5953, 5973), 'numpy.array', 'np.array', (['[col, row]'], {}), '([col, row])\n', (5961, 5973), True, 'import numpy as np\n'), ((7086, 7114), 'numpy.roll', 'np.roll', (['xy'], {'shift': '(1)', 'axis': '(0)'}), '(xy, shift=1, axis=0)\n', (7093, 7114), True, 'import numpy as np\n'), ((8055, 8083), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '"""g"""', 'width': '(2)'}), "(color='g', width=2)\n", (8063, 8083), True, 'import pyqtgraph as pg\n'), ((8169, 8197), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '"""m"""', 'width': '(2)'}), "(color='m', width=2)\n", (8177, 8197), True, 'import pyqtgraph as pg\n'), ((8278, 8306), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '"""b"""', 'width': '(2)'}), "(color='b', width=2)\n", (8286, 8306), True, 'import pyqtgraph as pg\n'), ((8394, 8422), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '"""r"""', 'width': '(2)'}), "(color='r', width=2)\n", (8402, 8422), True, 'import pyqtgraph as pg\n')]
from flask import Flask
app = Flask('ai')
app.jinja_env.filters['zip'] = zip
import view
app.route('/')(view.base)
app.route('/ant', methods=['GET', 'POST'])(view.ant)
app.route('/genetic', methods=['GET', 'POST'])(view.genetic)
app.route('/qos')(view.qos)
app.route('/bigram')(view.bigram)
app.route('/art1', methods=['GET', 'POST'])(view.art1) | [
"flask.Flask"
] | [((32, 43), 'flask.Flask', 'Flask', (['"""ai"""'], {}), "('ai')\n", (37, 43), False, 'from flask import Flask\n')] |
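# Note (an equivalence sketch, not from the source): calling route() directly,
# as above, registers the same handler as the usual decorator form would:
#
#     @app.route('/')
#     def base():
#         ...
#
# i.e. app.route('/')(view.base) is simply the undecorated spelling of that pattern.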
import tkinter as tk
import tkinter.messagebox as messagebox
from tkinter.filedialog import askopenfile
"""
Copyright 2020 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class App:
"""
App the class containing the GUI and controls' implementation
"""
window = tk.Tk()
file = None
signatureFile = None
labels = {
"csvPath": tk.Label(text="Choose CSV File", font=("Segoe UI", 18), pady=10),
"schoolName": tk.Label(
text="Enter name of your school", font=("Segoe UI", 18), pady=10
),
"examName": tk.Label(
text="Enter name of the exam", font=("Segoe UI", 18), pady=10
),
"signatureFile": tk.Label(
text="Choose signature image (Optional)", font=("Segoe UI", 18), pady=10
),
"authorityName": tk.Label(
text="Enter the name of the signature holder (Optional; if signature uplaoded)",
font=("Segoe UI", 18),
pady=10,
),
}
inputs = {
"csvPath": tk.Button(text="Insert CSV File", font=("Segoe UI", 16)),
"signatureFile": tk.Button(text="Insert Signature File", font=("Segoe UI", 16)),
"schoolName": tk.Entry(font=("Segoe UI", 18), width=50),
"examName": tk.Entry(font=("Segoe UI", 18), width=50),
"authorityName": tk.Entry(font=("Segoe UI", 18), width=50),
}
waitLabel = tk.Label(text="")
csvFileName = tk.Label(text="")
signatureFileName = tk.Label(text="")
buttonFrame = tk.Frame(highlightbackground="black", highlightthickness=1, pady=10)
generateButton = tk.Button(
master=buttonFrame, text="Generate", font=("Segoe UI", 16)
)
closeButton = tk.Button(master=buttonFrame, text="Close", font=("Segoe UI", 16))
def __init__(self):
super().__init__()
self.window.title("Automated Marksheet Generator")
self.window.geometry("900x700")
heading = tk.Label(
text="Automated Marksheet Generator", font=("Segoe UI Bold", 24), pady=5
)
heading.pack()
for key in self.labels.keys():
self.labels[key].pack()
if key in self.inputs.keys():
self.inputs[key].pack()
if key == "csvPath":
self.csvFileName.pack()
elif key == "signatureFile":
self.signatureFileName.pack()
self.waitLabel.pack()
self.inputs["csvPath"].bind("<ButtonRelease-1>", self.askForCSVFile)
self.inputs["signatureFile"].bind("<ButtonRelease-1>", self.askForSignatureFile)
self.closeButton.bind("<ButtonRelease-1>", self.kill)
self.generateButton.bind("<ButtonRelease-1>", self.generate)
self.generateButton.pack(side=tk.LEFT, padx=10)
self.closeButton.pack(side=tk.LEFT, padx=10)
self.buttonFrame.pack(side=tk.BOTTOM, fill=tk.X)
def run(self):
self.window.mainloop()
def askForCSVFile(self, event):
file = askopenfile(
mode="r", filetypes=[("CSV Files (Comma delimited)", "*.csv")]
)
if file is not None:
self.file = file
self.csvFileName["text"] = file.name
else:
messagebox.showerror("Error", "Please choose another file!!")
def askForSignatureFile(self, event):
file = askopenfile(
mode="r", filetypes=[("PNG Images", "*.png"), ("JPG Images", "*.jpg")]
)
self.signatureFileName["text"] = file.name
self.signatureFile = file
def kill(self, event):
self.window.destroy()
def generate(self, event):
from src.generator import Generator
import os
schoolName = self.inputs["schoolName"].get()
examName = self.inputs["examName"].get()
authorityName = self.inputs["authorityName"].get()
if (not schoolName) and (not examName):
messagebox.showwarning(
"Required", "School Name and Exam Name are required!!"
)
return
elif not schoolName:
messagebox.showwarning("Required", "School Name is required!!")
return
elif not examName:
messagebox.showwarning("Required", "Exam Name is required!!")
return
try:
self.waitLabel["text"] = "Wait..."
factory = Generator(
schoolName,
examName,
self.file,
self.signatureFile.name if self.signatureFile else None,
authorityName,
)
if not os.path.isdir("pdfs"):
os.mkdir("pdfs", 0o666)
print("Starting to generate PDFs")
for student in factory.getCSV():
factory.generatePDF(student["Roll Number"], log=True)
self.waitLabel["text"] = ""
messagebox.showinfo("Success!!", "Generated PDFs!! Check the 'pdfs' folder")
except Exception as e:
            import traceback
            traceback.print_exc()  # e.with_traceback() needs a traceback argument and would raise here
self.waitLabel["text"] = ""
messagebox.showerror("Error!!", "Something went wrong!! Please try again!!")
if __name__ == "__main__":
try:
App().run()
except Exception as e:
        import traceback
        traceback.print_exc()  # same fix: with_traceback() cannot be called without arguments
messagebox.showerror("Error!!", "Something went wrong. Please try again!!")
# Metadata
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, <NAME>"
__credits__ = ["reportlab documentation"]
__license__ = "Apache 2.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>" | [
"tkinter.messagebox.showwarning",
"tkinter.messagebox.showerror",
"src.generator.Generator",
"tkinter.Entry",
"tkinter.Button",
"tkinter.Tk",
"os.path.isdir",
"os.mkdir",
"tkinter.Label",
"tkinter.filedialog.askopenfile",
"tkinter.messagebox.showinfo",
"tkinter.Frame"
] | [((799, 806), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (804, 806), True, 'import tkinter as tk\n'), ((1915, 1932), 'tkinter.Label', 'tk.Label', ([], {'text': '""""""'}), "(text='')\n", (1923, 1932), True, 'import tkinter as tk\n'), ((1951, 1968), 'tkinter.Label', 'tk.Label', ([], {'text': '""""""'}), "(text='')\n", (1959, 1968), True, 'import tkinter as tk\n'), ((1993, 2010), 'tkinter.Label', 'tk.Label', ([], {'text': '""""""'}), "(text='')\n", (2001, 2010), True, 'import tkinter as tk\n'), ((2030, 2098), 'tkinter.Frame', 'tk.Frame', ([], {'highlightbackground': '"""black"""', 'highlightthickness': '(1)', 'pady': '(10)'}), "(highlightbackground='black', highlightthickness=1, pady=10)\n", (2038, 2098), True, 'import tkinter as tk\n'), ((2121, 2190), 'tkinter.Button', 'tk.Button', ([], {'master': 'buttonFrame', 'text': '"""Generate"""', 'font': "('Segoe UI', 16)"}), "(master=buttonFrame, text='Generate', font=('Segoe UI', 16))\n", (2130, 2190), True, 'import tkinter as tk\n'), ((2223, 2289), 'tkinter.Button', 'tk.Button', ([], {'master': 'buttonFrame', 'text': '"""Close"""', 'font': "('Segoe UI', 16)"}), "(master=buttonFrame, text='Close', font=('Segoe UI', 16))\n", (2232, 2289), True, 'import tkinter as tk\n'), ((882, 946), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Choose CSV File"""', 'font': "('Segoe UI', 18)", 'pady': '(10)'}), "(text='Choose CSV File', font=('Segoe UI', 18), pady=10)\n", (890, 946), True, 'import tkinter as tk\n'), ((970, 1044), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Enter name of your school"""', 'font': "('Segoe UI', 18)", 'pady': '(10)'}), "(text='Enter name of your school', font=('Segoe UI', 18), pady=10)\n", (978, 1044), True, 'import tkinter as tk\n'), ((1088, 1159), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Enter name of the exam"""', 'font': "('Segoe UI', 18)", 'pady': '(10)'}), "(text='Enter name of the exam', font=('Segoe UI', 18), pady=10)\n", (1096, 1159), True, 'import tkinter as tk\n'), ((1208, 1294), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Choose signature image (Optional)"""', 'font': "('Segoe UI', 18)", 'pady': '(10)'}), "(text='Choose signature image (Optional)', font=('Segoe UI', 18),\n pady=10)\n", (1216, 1294), True, 'import tkinter as tk\n'), ((1339, 1469), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Enter the name of the signature holder (Optional; if signature uploaded)"""', 'font': "('Segoe UI', 18)", 'pady': '(10)'}), "(text=\n 'Enter the name of the signature holder (Optional; if signature uploaded)',\n font=('Segoe UI', 18), pady=10)\n", (1347, 1469), True, 'import tkinter as tk\n'), ((1549, 1605), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Insert CSV File"""', 'font': "('Segoe UI', 16)"}), "(text='Insert CSV File', font=('Segoe UI', 16))\n", (1558, 1605), True, 'import tkinter as tk\n'), ((1632, 1694), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Insert Signature File"""', 'font': "('Segoe UI', 16)"}), "(text='Insert Signature File', font=('Segoe UI', 16))\n", (1641, 1694), True, 'import tkinter as tk\n'), ((1718, 1759), 'tkinter.Entry', 'tk.Entry', ([], {'font': "('Segoe UI', 18)", 'width': '(50)'}), "(font=('Segoe UI', 18), width=50)\n", (1726, 1759), True, 'import tkinter as tk\n'), ((1781, 1822), 'tkinter.Entry', 'tk.Entry', ([], {'font': "('Segoe UI', 18)", 'width': '(50)'}), "(font=('Segoe UI', 18), width=50)\n", (1789, 1822), True, 'import tkinter as tk\n'), ((1849, 1890), 'tkinter.Entry', 'tk.Entry', ([], {'font': "('Segoe UI', 18)", 'width': '(50)'}), "(font=('Segoe UI', 18), width=50)\n", (1857, 1890), True, 'import tkinter as tk\n'), ((2459, 2545), 'tkinter.Label', 'tk.Label', ([], {'text': '"""Automated Marksheet Generator"""', 'font': "('Segoe UI Bold', 24)", 'pady': '(5)'}), "(text='Automated Marksheet Generator', font=('Segoe UI Bold', 24),\n pady=5)\n", (2467, 2545), True, 'import tkinter as tk\n'), ((3520, 3595), 'tkinter.filedialog.askopenfile', 'askopenfile', ([], {'mode': '"""r"""', 'filetypes': "[('CSV Files (Comma delimited)', '*.csv')]"}), "(mode='r', filetypes=[('CSV Files (Comma delimited)', '*.csv')])\n", (3531, 3595), False, 'from tkinter.filedialog import askopenfile\n'), ((3871, 3958), 'tkinter.filedialog.askopenfile', 'askopenfile', ([], {'mode': '"""r"""', 'filetypes': "[('PNG Images', '*.png'), ('JPG Images', '*.jpg')]"}), "(mode='r', filetypes=[('PNG Images', '*.png'), ('JPG Images',\n '*.jpg')])\n", (3882, 3958), False, 'from tkinter.filedialog import askopenfile\n'), ((3751, 3812), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error"""', '"""Please choose another file!!"""'], {}), "('Error', 'Please choose another file!!')\n", (3771, 3812), True, 'import tkinter.messagebox as messagebox\n'), ((4437, 4515), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""Required"""', '"""School Name and Exam Name are required!!"""'], {}), "('Required', 'School Name and Exam Name are required!!')\n", (4459, 4515), True, 'import tkinter.messagebox as messagebox\n'), ((4892, 5011), 'src.generator.Generator', 'Generator', (['schoolName', 'examName', 'self.file', '(self.signatureFile.name if self.signatureFile else None)', 'authorityName'], {}), '(schoolName, examName, self.file, self.signatureFile.name if self.\n signatureFile else None, authorityName)\n', (4901, 5011), False, 'from src.generator import Generator\n'), ((5401, 5477), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""Success!!"""', '"""Generated PDFs!! Check the \'pdfs\' folder"""'], {}), '(\'Success!!\', "Generated PDFs!! Check the \'pdfs\' folder")\n', (5420, 5477), True, 'import tkinter.messagebox as messagebox\n'), ((5803, 5878), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error!!"""', '"""Something went wrong. Please try again!!"""'], {}), "('Error!!', 'Something went wrong. Please try again!!')\n", (5823, 5878), True, 'import tkinter.messagebox as messagebox\n'), ((4606, 4669), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""Required"""', '"""School Name is required!!"""'], {}), "('Required', 'School Name is required!!')\n", (4628, 4669), True, 'import tkinter.messagebox as messagebox\n'), ((5122, 5143), 'os.path.isdir', 'os.path.isdir', (['"""pdfs"""'], {}), "('pdfs')\n", (5135, 5143), False, 'import os\n'), ((5161, 5182), 'os.mkdir', 'os.mkdir', (['"""pdfs"""', '(438)'], {}), "('pdfs', 438)\n", (5169, 5182), False, 'import os\n'), ((5599, 5675), 'tkinter.messagebox.showerror', 'messagebox.showerror', (['"""Error!!"""', '"""Something went wrong!! Please try again!!"""'], {}), "('Error!!', 'Something went wrong!! Please try again!!')\n", (5619, 5675), True, 'import tkinter.messagebox as messagebox\n'), ((4728, 4789), 'tkinter.messagebox.showwarning', 'messagebox.showwarning', (['"""Required"""', '"""Exam Name is required!!"""'], {}), "('Required', 'Exam Name is required!!')\n", (4750, 4789), True, 'import tkinter.messagebox as messagebox\n')]
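# A small refactoring sketch (hypothetical, not in the source above): both
# file pickers share the same cancel-safe shape and could be factored out.
def pick_file(label_widget, filetypes):
    f = askopenfile(mode="r", filetypes=filetypes)
    if f is not None:
        label_widget["text"] = f.name
    return f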
import inspect
import sys
from typing import Any, Dict, List, Optional, Protocol, TextIO, Type
import rich
from rich.text import Text
from ..errors import DiagnosticError
try:
import build # noqa
import click
import sphinx_autobuild # type: ignore # noqa
except ImportError as import_error:
rich.print(
DiagnosticError(
reference="missing-command-line-dependencies",
message=(
"Could not import a package that is required for the `stb` command line."
),
context=str(import_error),
note_stmt=(
"The optional [blue]cli[/] dependencies of this package are missing."
),
hint_stmt=(
"During installation, make sure to include the `\\[cli]`. For example:\n"
'pip install "sphinx-theme-builder\\[cli]"'
),
),
file=sys.stderr,
)
sys.exit(1)
class Command(Protocol):
context_settings: Dict[str, Any]
interface: List[click.Parameter]
def run(self, **kwargs: Dict[str, Any]) -> int:
...
def create_click_command(cls: Type[Command]) -> click.Command:
# Use the class docstring as the help string
help_string = inspect.cleandoc(cls.__doc__)
# Infer the name, from the known context.
name = cls.__name__[: -len("Command")].lower()
# Double check that things are named correctly.
assert name.capitalize() + "Command" == cls.__name__
assert name == cls.__module__.split(".")[-1]
context_settings: Optional[Dict[str, Any]] = None
if hasattr(cls, "context_settings"):
context_settings = cls.context_settings
command = click.Command(
name=name,
context_settings=context_settings,
help=help_string,
params=cls.interface,
callback=lambda **kwargs: cls().run(**kwargs),
)
return command
def compose_command_line() -> click.Group:
from .compile import CompileCommand
from .new import NewCommand
from .npm import NpmCommand
from .package import PackageCommand
from .serve import ServeCommand
command_classes: List[Type[Command]] = [
CompileCommand, # type: ignore
NewCommand, # type: ignore
PackageCommand, # type: ignore
ServeCommand, # type: ignore
NpmCommand, # type: ignore
]
# Convert our commands into click objects.
click_commands = [create_click_command(cls) for cls in command_classes]
# Create the main click interface.
cli = click.Group(
name="stb",
help="sphinx-theme-builder helps make it easier to write sphinx themes.",
commands={command.name: command for command in click_commands}, # type: ignore
)
return cli
def present_click_usage_error(error: click.UsageError, *, stream: TextIO) -> None:
assert error.ctx
rich.print(
Text.from_markup("[red]error:[/] ") + Text(error.format_message()),
file=stream,
)
# Usage
usage_parts = error.ctx.command.collect_usage_pieces(error.ctx)
usage = " ".join([error.ctx.command_path] + usage_parts)
print(file=stream)
print("Usage:", file=stream)
print(" ", usage, file=stream)
# --help
option = "--help"
command = error.ctx.command_path
print(file=stream)
rich.print(
f"Try [green]{command} {option}[/] for more information.",
file=stream,
)
def main(args: Optional[List[str]] = None) -> None:
"""The entrypoint for command line stuff."""
cli = compose_command_line()
try:
cli(args=args, standalone_mode=False)
except click.Abort:
rich.print(r"[cyan]\[stb][/] [red]Aborting![/]", file=sys.stderr)
sys.exit(1)
except click.UsageError as error:
present_click_usage_error(error, stream=sys.stderr)
sys.exit(error.exit_code) # uses exit codes 1 and 2
except click.ClickException as error:
error.show(sys.stderr)
sys.exit(error.exit_code) # uses exit codes 1 and 2
except DiagnosticError as error:
rich.print(error, file=sys.stderr)
sys.exit(3)
except Exception:
console = rich.console.Console(stderr=True)
console.print_exception(
width=console.width, show_locals=True, word_wrap=True, suppress=[click]
)
console.print(
DiagnosticError(
reference="crash",
message="A fatal error occurred.",
context="See above for a detailed Python traceback.",
note_stmt=(
"If you file an issue, please include the full traceback above."
),
hint_stmt=(
"This might be due to an issue in sphinx-theme-builder, one of the "
"tools it uses internally, or your code."
),
),
)
sys.exit(4)
| [
"sys.exit",
"inspect.cleandoc",
"rich.console.Console",
"rich.print",
"rich.text.Text.from_markup",
"click.Group"
] | [((1245, 1274), 'inspect.cleandoc', 'inspect.cleandoc', (['cls.__doc__'], {}), '(cls.__doc__)\n', (1261, 1274), False, 'import inspect\n'), ((2544, 2714), 'click.Group', 'click.Group', ([], {'name': '"""stb"""', 'help': '"""sphinx-theme-builder helps make it easier to write sphinx themes."""', 'commands': '{command.name: command for command in click_commands}'}), "(name='stb', help=\n 'sphinx-theme-builder helps make it easier to write sphinx themes.',\n commands={command.name: command for command in click_commands})\n", (2555, 2714), False, 'import click\n'), ((3327, 3414), 'rich.print', 'rich.print', (['f"""Try [green]{command} {option}[/] for more information."""'], {'file': 'stream'}), "(f'Try [green]{command} {option}[/] for more information.', file=\n stream)\n", (3337, 3414), False, 'import rich\n'), ((935, 946), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (943, 946), False, 'import sys\n'), ((2899, 2934), 'rich.text.Text.from_markup', 'Text.from_markup', (['"""[red]error:[/] """'], {}), "('[red]error:[/] ')\n", (2915, 2934), False, 'from rich.text import Text\n'), ((3656, 3721), 'rich.print', 'rich.print', (['"""[cyan]\\\\[stb][/] [red]Aborting![/]"""'], {'file': 'sys.stderr'}), "('[cyan]\\\\[stb][/] [red]Aborting![/]', file=sys.stderr)\n", (3666, 3721), False, 'import rich\n'), ((3730, 3741), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3738, 3741), False, 'import sys\n'), ((3848, 3873), 'sys.exit', 'sys.exit', (['error.exit_code'], {}), '(error.exit_code)\n', (3856, 3873), False, 'import sys\n'), ((3982, 4007), 'sys.exit', 'sys.exit', (['error.exit_code'], {}), '(error.exit_code)\n', (3990, 4007), False, 'import sys\n'), ((4080, 4114), 'rich.print', 'rich.print', (['error'], {'file': 'sys.stderr'}), '(error, file=sys.stderr)\n', (4090, 4114), False, 'import rich\n'), ((4123, 4134), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (4131, 4134), False, 'import sys\n'), ((4175, 4208), 'rich.console.Console', 'rich.console.Console', ([], {'stderr': '(True)'}), '(stderr=True)\n', (4195, 4208), False, 'import rich\n'), ((4907, 4918), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (4915, 4918), False, 'import sys\n')] |
import os
# Create your views here.
from serverside.views import ListNodeView, DetailNodeView
from ssr_test.models import Player
FILE_DIRNAME = os.path.dirname(__file__)
class IndexView(ListNodeView):
model = Player
queryset = Player.objects.order_by("-created_at").all()
filename = os.path.join(FILE_DIRNAME, "node/index.js")
class PlayerView(DetailNodeView):
model = Player
queryset = Player.objects.all()
filename = os.path.join(FILE_DIRNAME, "node/player.js")
| [
"ssr_test.models.Player.objects.order_by",
"os.path.dirname",
"os.path.join",
"ssr_test.models.Player.objects.all"
] | [((146, 171), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (161, 171), False, 'import os\n'), ((299, 342), 'os.path.join', 'os.path.join', (['FILE_DIRNAME', '"""node/index.js"""'], {}), "(FILE_DIRNAME, 'node/index.js')\n", (311, 342), False, 'import os\n'), ((413, 433), 'ssr_test.models.Player.objects.all', 'Player.objects.all', ([], {}), '()\n', (431, 433), False, 'from ssr_test.models import Player\n'), ((449, 493), 'os.path.join', 'os.path.join', (['FILE_DIRNAME', '"""node/player.js"""'], {}), "(FILE_DIRNAME, 'node/player.js')\n", (461, 493), False, 'import os\n'), ((239, 277), 'ssr_test.models.Player.objects.order_by', 'Player.objects.order_by', (['"""-created_at"""'], {}), "('-created_at')\n", (262, 277), False, 'from ssr_test.models import Player\n')] |
# Generated by Django 2.1 on 2019-06-05 22:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main_app', '0023_auto_20190605_1455'),
]
operations = [
migrations.RemoveField(
model_name='devicedetail',
name='device_model',
),
migrations.RemoveField(
model_name='devicedetail',
name='device_sn',
),
migrations.AddField(
model_name='devices',
name='device_model',
field=models.CharField(max_length=100, null=True),
),
migrations.AddField(
model_name='devices',
name='device_sn',
field=models.CharField(max_length=250, null=True),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] | [((234, 304), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devicedetail"""', 'name': '"""device_model"""'}), "(model_name='devicedetail', name='device_model')\n", (256, 304), False, 'from django.db import migrations, models\n'), ((349, 416), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""devicedetail"""', 'name': '"""device_sn"""'}), "(model_name='devicedetail', name='device_sn')\n", (371, 416), False, 'from django.db import migrations, models\n'), ((567, 610), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)'}), '(max_length=100, null=True)\n', (583, 610), False, 'from django.db import migrations, models\n'), ((734, 777), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'null': '(True)'}), '(max_length=250, null=True)\n', (750, 777), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 15 15:57:46 2018
@author:
<NAME>
MIT Kavli Institute for Astrophysics and Space Research,
Massachusetts Institute of Technology,
77 Massachusetts Avenue,
Cambridge, MA 02109,
USA
Email: <EMAIL>
Web: www.mnguenther.com
"""
#::: plotting settings
import seaborn as sns
sns.set(context='paper', style='ticks', palette='deep', font='sans-serif', font_scale=1.5, color_codes=True)
sns.set_style({"xtick.direction": "in","ytick.direction": "in"})
sns.set_context(rc={'lines.markeredgewidth': 1})
#::: modules
import numpy as np
import matplotlib.pyplot as plt
import os
import ellc
from pprint import pprint
np.random.seed(42)
###############################################################################
#::: params
###############################################################################
workdir = 'allesfit'
params = {
'b_radius_1':0.1,
'b_radius_2':0.01,
'b_sbratio':0.,
'b_incl':89.,
'b_epoch':1.1,
'b_period':3.4,
'b_K':0.1,
'b_q':1,
'ld_1_Leonardo':'quad',
'ldc_1_Leonardo':[0.3,0.1],
'ld_1_Michelangelo':'quad',
'ldc_1_Michelangelo':[0.5,0.4]
}
a_1 = 0.019771142 * params['b_K'] * params['b_period']
params['b_a'] = (1.+1./params['b_q'])*a_1
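# a_1 and b_a follow from the RV semi-amplitude K and the period; the numeric
# constant is assumed to convert K [km/s] and P [days] into the length units
# that ellc expects for the semi-major axis.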
###############################################################################
#::: special features
###############################################################################
stellar_rotation_amp = 0.1
stellar_rotation_period = 30.
def get_stellar_var(time):
red_noise = 2e-1*( 2e-4*(time-60)**2 - 2e-4*(time) + 1 )
return stellar_rotation_amp * np.sin(2.*np.pi*time/stellar_rotation_period) + red_noise
def get_rv(time):
return ellc.rv(
t_obs = time,
a = params[planet+'_a'],
incl = params[planet+'_incl'],
t_zero = params[planet+'_epoch'],
period = params[planet+'_period'],
q = params[planet+'_q'],
flux_weighted = False,
)[0]
###############################################################################
#::: "truth" signals
###############################################################################
#==============================================================================
#::: Leonardo
#==============================================================================
planet = 'b'
inst = 'Leonardo'
time_Leonardo = np.arange(0,16,5./60./24.)[::5]
time_Leonardo = time_Leonardo[ (time_Leonardo<2) | (time_Leonardo>4) ]
flux_Leonardo = ellc.lc(
t_obs = time_Leonardo,
radius_1 = params[planet+'_radius_1'],
radius_2 = params[planet+'_radius_2'],
sbratio = params[planet+'_sbratio'],
incl = params[planet+'_incl'],
t_zero = params[planet+'_epoch'],
period = params[planet+'_period'],
ld_1 = params['ld_1_'+inst],
ldc_1 = params['ldc_1_'+inst]
)
flux_Leonardo += np.random.normal(0,2e-3,size=len(flux_Leonardo))
flux_Leonardo += 1e-3*np.exp(time_Leonardo/4.7)*np.sin(time_Leonardo/2.7)
flux_err_Leonardo = 2e-3*np.ones_like(flux_Leonardo)
header = 'time,flux,flux_err'
X = np.column_stack(( time_Leonardo, flux_Leonardo, flux_err_Leonardo ))
np.savetxt(os.path.join(workdir,'Leonardo.csv'), X, delimiter=',', header=header)
#==============================================================================
#::: Michelangelo
#==============================================================================
planet = 'b'
inst = 'Michelangelo'
time_Michelangelo = np.arange(52,52.25,2./60./24.)[::2]
flux_Michelangelo = ellc.lc(
t_obs = time_Michelangelo,
radius_1 = params[planet+'_radius_1'],
radius_2 = params[planet+'_radius_2'],
sbratio = params[planet+'_sbratio'],
incl = params[planet+'_incl'],
t_zero = params[planet+'_epoch'],
period = params[planet+'_period'],
ld_1 = params['ld_1_'+inst],
ldc_1 = params['ldc_1_'+inst]
)
flux_Michelangelo += np.random.normal(0,5e-4,size=len(flux_Michelangelo))
flux_Michelangelo += 2e-3*np.sin(time_Michelangelo*8)
flux_err_Michelangelo = 5e-4*np.ones_like(flux_Michelangelo)
header = 'time,flux,flux_err'
X = np.column_stack(( time_Michelangelo, flux_Michelangelo, flux_err_Michelangelo ))
np.savetxt(os.path.join(workdir,'Michelangelo.csv'), X, delimiter=',', header=header)
#==============================================================================
#::: Donatello
#==============================================================================
planet = 'b'
inst = 'Donatello'
time_Donatello = np.sort(17. + np.random.rand(40)*70.)
rv_Donatello = ellc.rv(
t_obs = time_Donatello,
a = params[planet+'_a'],
incl = params[planet+'_incl'],
t_zero = params[planet+'_epoch'],
period = params[planet+'_period'],
q = params[planet+'_q'],
flux_weighted = False,
)[0]
rv_Donatello += get_stellar_var(time_Donatello)
rv_Donatello += np.random.normal(0,1e-2,size=len(rv_Donatello))
rv_err_Donatello = 6e-3*np.ones_like(rv_Donatello)
header = 'time,flux,flux_err'
X = np.column_stack(( time_Donatello, rv_Donatello, rv_err_Donatello ))
np.savetxt(os.path.join(workdir,'Donatello.csv'), X, delimiter=',', header=header)
#==============================================================================
#::: Raphael
#==============================================================================
planet = 'b'
inst = 'Raphael'
time_Raphael = np.sort(63. + np.random.rand(20)*30.)
rv_Raphael = ellc.rv(
t_obs = time_Raphael,
a = params[planet+'_a'],
incl = params[planet+'_incl'],
t_zero = params[planet+'_epoch'],
period = params[planet+'_period'],
q = params[planet+'_q'],
flux_weighted = False,
)[0]
rv_Raphael += get_stellar_var(time_Raphael)
rv_Raphael += np.random.normal(0,3e-3,size=len(rv_Raphael))
rv_Raphael += 10.7
rv_err_Raphael = 1e-3*np.ones_like(rv_Raphael)
header = 'time,flux,flux_err'
X = np.column_stack(( time_Raphael, rv_Raphael, rv_err_Raphael ))
np.savetxt(os.path.join(workdir,'Raphael.csv'), X, delimiter=',', header=header)
###############################################################################
#::: plot
###############################################################################
t = np.linspace(10,100,1000)
fig, axes = plt.subplots(2,2,figsize=(10,10))
axes[0,0].plot(time_Leonardo, flux_Leonardo, 'b.', label='Leonardo')
axes[0,0].legend()
axes[0,0].set(xlabel='BJD', ylabel='Flux')
axes[0,1].errorbar(time_Michelangelo, flux_Michelangelo, yerr=flux_err_Michelangelo, fmt='b.', label='Michelangelo')
axes[0,1].legend()
axes[0,1].set(xlabel='BJD', ylabel='Flux')
axes[1,0].errorbar(time_Donatello, rv_Donatello, yerr=rv_err_Donatello, fmt='bo', label='Donatello')
axes[1,0].plot(t, get_stellar_var(t), 'g-', label='Stellar var.')
#axes[1,0].plot(t, get_rv(t), color='orange', label='RV')
axes[1,0].plot(t, get_stellar_var(t)+get_rv(t), 'r-', label='SV + planet',lw=0.5)
axes[1,0].legend()
axes[1,0].set(xlabel='BJD', ylabel='RV (km/s)')
axes[1,1].errorbar(time_Raphael, rv_Raphael, yerr=rv_err_Raphael, fmt='bo', label='Raphael')
axes[1,1].plot(t, get_stellar_var(t)+10.7, 'g-', label='Stellar var.')
axes[1,1].plot(t, get_stellar_var(t)+get_rv(t)+10.7, 'r-', label='SV + planet',lw=0.5)
axes[1,1].legend()
axes[1,1].set(xlabel='BJD', ylabel='RV (km/s)')
plt.tight_layout()
fig.savefig(os.path.join(workdir,'data.pdf'), bbox_inches='tight') | [
"numpy.ones_like",
"seaborn.set",
"numpy.random.rand",
"seaborn.set_context",
"os.path.join",
"numpy.column_stack",
"seaborn.set_style",
"numpy.exp",
"numpy.linspace",
"ellc.lc",
"numpy.random.seed",
"matplotlib.pyplot.tight_layout",
"numpy.sin",
"ellc.rv",
"matplotlib.pyplot.subplots",
"numpy.arange"
] | [((341, 453), 'seaborn.set', 'sns.set', ([], {'context': '"""paper"""', 'style': '"""ticks"""', 'palette': '"""deep"""', 'font': '"""sans-serif"""', 'font_scale': '(1.5)', 'color_codes': '(True)'}), "(context='paper', style='ticks', palette='deep', font='sans-serif',\n font_scale=1.5, color_codes=True)\n", (348, 453), True, 'import seaborn as sns\n'), ((450, 515), 'seaborn.set_style', 'sns.set_style', (["{'xtick.direction': 'in', 'ytick.direction': 'in'}"], {}), "({'xtick.direction': 'in', 'ytick.direction': 'in'})\n", (463, 515), True, 'import seaborn as sns\n'), ((515, 563), 'seaborn.set_context', 'sns.set_context', ([], {'rc': "{'lines.markeredgewidth': 1}"}), "(rc={'lines.markeredgewidth': 1})\n", (530, 563), True, 'import seaborn as sns\n'), ((679, 697), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (693, 697), True, 'import numpy as np\n'), ((2742, 3061), 'ellc.lc', 'ellc.lc', ([], {'t_obs': 'time_Leonardo', 'radius_1': "params[planet + '_radius_1']", 'radius_2': "params[planet + '_radius_2']", 'sbratio': "params[planet + '_sbratio']", 'incl': "params[planet + '_incl']", 't_zero': "params[planet + '_epoch']", 'period': "params[planet + '_period']", 'ld_1': "params['ld_1_' + inst]", 'ldc_1': "params['ldc_1_' + inst]"}), "(t_obs=time_Leonardo, radius_1=params[planet + '_radius_1'],\n radius_2=params[planet + '_radius_2'], sbratio=params[planet +\n '_sbratio'], incl=params[planet + '_incl'], t_zero=params[planet +\n '_epoch'], period=params[planet + '_period'], ld_1=params['ld_1_' +\n inst], ldc_1=params['ldc_1_' + inst])\n", (2749, 3061), False, 'import ellc\n'), ((3547, 3613), 'numpy.column_stack', 'np.column_stack', (['(time_Leonardo, flux_Leonardo, flux_err_Leonardo)'], {}), '((time_Leonardo, flux_Leonardo, flux_err_Leonardo))\n', (3562, 3613), True, 'import numpy as np\n'), ((3990, 4313), 'ellc.lc', 'ellc.lc', ([], {'t_obs': 'time_Michelangelo', 'radius_1': "params[planet + '_radius_1']", 'radius_2': "params[planet + '_radius_2']", 'sbratio': "params[planet + '_sbratio']", 'incl': "params[planet + '_incl']", 't_zero': "params[planet + '_epoch']", 'period': "params[planet + '_period']", 'ld_1': "params['ld_1_' + inst]", 'ldc_1': "params['ldc_1_' + inst]"}), "(t_obs=time_Michelangelo, radius_1=params[planet + '_radius_1'],\n radius_2=params[planet + '_radius_2'], sbratio=params[planet +\n '_sbratio'], incl=params[planet + '_incl'], t_zero=params[planet +\n '_epoch'], period=params[planet + '_period'], ld_1=params['ld_1_' +\n inst], ldc_1=params['ldc_1_' + inst])\n", (3997, 4313), False, 'import ellc\n'), ((4795, 4873), 'numpy.column_stack', 'np.column_stack', (['(time_Michelangelo, flux_Michelangelo, flux_err_Michelangelo)'], {}), '((time_Michelangelo, flux_Michelangelo, flux_err_Michelangelo))\n', (4810, 4873), True, 'import numpy as np\n'), ((5871, 5936), 'numpy.column_stack', 'np.column_stack', (['(time_Donatello, rv_Donatello, rv_err_Donatello)'], {}), '((time_Donatello, rv_Donatello, rv_err_Donatello))\n', (5886, 5936), True, 'import numpy as np\n'), ((6928, 6987), 'numpy.column_stack', 'np.column_stack', (['(time_Raphael, rv_Raphael, rv_err_Raphael)'], {}), '((time_Raphael, rv_Raphael, rv_err_Raphael))\n', (6943, 6987), True, 'import numpy as np\n'), ((7249, 7275), 'numpy.linspace', 'np.linspace', (['(10)', '(100)', '(1000)'], {}), '(10, 100, 1000)\n', (7260, 7275), True, 'import numpy as np\n'), ((7287, 7323), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {'figsize': '(10, 10)'}), '(2, 2, figsize=(10, 10))\n', (7299, 7323), True, 'import 
matplotlib.pyplot as plt\n'), ((8323, 8341), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8339, 8341), True, 'import matplotlib.pyplot as plt\n'), ((2623, 2658), 'numpy.arange', 'np.arange', (['(0)', '(16)', '(5.0 / 60.0 / 24.0)'], {}), '(0, 16, 5.0 / 60.0 / 24.0)\n', (2632, 2658), True, 'import numpy as np\n'), ((3433, 3460), 'numpy.sin', 'np.sin', (['(time_Leonardo / 2.7)'], {}), '(time_Leonardo / 2.7)\n', (3439, 3460), True, 'import numpy as np\n'), ((3485, 3512), 'numpy.ones_like', 'np.ones_like', (['flux_Leonardo'], {}), '(flux_Leonardo)\n', (3497, 3512), True, 'import numpy as np\n'), ((3627, 3664), 'os.path.join', 'os.path.join', (['workdir', '"""Leonardo.csv"""'], {}), "(workdir, 'Leonardo.csv')\n", (3639, 3664), False, 'import os\n'), ((3934, 3973), 'numpy.arange', 'np.arange', (['(52)', '(52.25)', '(2.0 / 60.0 / 24.0)'], {}), '(52, 52.25, 2.0 / 60.0 / 24.0)\n', (3943, 3973), True, 'import numpy as np\n'), ((4671, 4700), 'numpy.sin', 'np.sin', (['(time_Michelangelo * 8)'], {}), '(time_Michelangelo * 8)\n', (4677, 4700), True, 'import numpy as np\n'), ((4729, 4760), 'numpy.ones_like', 'np.ones_like', (['flux_Michelangelo'], {}), '(flux_Michelangelo)\n', (4741, 4760), True, 'import numpy as np\n'), ((4887, 4928), 'os.path.join', 'os.path.join', (['workdir', '"""Michelangelo.csv"""'], {}), "(workdir, 'Michelangelo.csv')\n", (4899, 4928), False, 'import os\n'), ((5242, 5450), 'ellc.rv', 'ellc.rv', ([], {'t_obs': 'time_Donatello', 'a': "params[planet + '_a']", 'incl': "params[planet + '_incl']", 't_zero': "params[planet + '_epoch']", 'period': "params[planet + '_period']", 'q': "params[planet + '_q']", 'flux_weighted': '(False)'}), "(t_obs=time_Donatello, a=params[planet + '_a'], incl=params[planet +\n '_incl'], t_zero=params[planet + '_epoch'], period=params[planet +\n '_period'], q=params[planet + '_q'], flux_weighted=False)\n", (5249, 5450), False, 'import ellc\n'), ((5810, 5836), 'numpy.ones_like', 'np.ones_like', (['rv_Donatello'], {}), '(rv_Donatello)\n', (5822, 5836), True, 'import numpy as np\n'), ((5950, 5988), 'os.path.join', 'os.path.join', (['workdir', '"""Donatello.csv"""'], {}), "(workdir, 'Donatello.csv')\n", (5962, 5988), False, 'import os\n'), ((6294, 6500), 'ellc.rv', 'ellc.rv', ([], {'t_obs': 'time_Raphael', 'a': "params[planet + '_a']", 'incl': "params[planet + '_incl']", 't_zero': "params[planet + '_epoch']", 'period': "params[planet + '_period']", 'q': "params[planet + '_q']", 'flux_weighted': '(False)'}), "(t_obs=time_Raphael, a=params[planet + '_a'], incl=params[planet +\n '_incl'], t_zero=params[planet + '_epoch'], period=params[planet +\n '_period'], q=params[planet + '_q'], flux_weighted=False)\n", (6301, 6500), False, 'import ellc\n'), ((6869, 6893), 'numpy.ones_like', 'np.ones_like', (['rv_Raphael'], {}), '(rv_Raphael)\n', (6881, 6893), True, 'import numpy as np\n'), ((7001, 7037), 'os.path.join', 'os.path.join', (['workdir', '"""Raphael.csv"""'], {}), "(workdir, 'Raphael.csv')\n", (7013, 7037), False, 'import os\n'), ((8355, 8388), 'os.path.join', 'os.path.join', (['workdir', '"""data.pdf"""'], {}), "(workdir, 'data.pdf')\n", (8367, 8388), False, 'import os\n'), ((1824, 2023), 'ellc.rv', 'ellc.rv', ([], {'t_obs': 'time', 'a': "params[planet + '_a']", 'incl': "params[planet + '_incl']", 't_zero': "params[planet + '_epoch']", 'period': "params[planet + '_period']", 'q': "params[planet + '_q']", 'flux_weighted': '(False)'}), "(t_obs=time, a=params[planet + '_a'], incl=params[planet + '_incl'],\n t_zero=params[planet + '_epoch'], 
period=params[planet + '_period'], q=\n params[planet + '_q'], flux_weighted=False)\n", (1831, 2023), False, 'import ellc\n'), ((3407, 3434), 'numpy.exp', 'np.exp', (['(time_Leonardo / 4.7)'], {}), '(time_Leonardo / 4.7)\n', (3413, 3434), True, 'import numpy as np\n'), ((1732, 1784), 'numpy.sin', 'np.sin', (['(2.0 * np.pi * time / stellar_rotation_period)'], {}), '(2.0 * np.pi * time / stellar_rotation_period)\n', (1738, 1784), True, 'import numpy as np\n'), ((5203, 5221), 'numpy.random.rand', 'np.random.rand', (['(40)'], {}), '(40)\n', (5217, 5221), True, 'import numpy as np\n'), ((6257, 6275), 'numpy.random.rand', 'np.random.rand', (['(20)'], {}), '(20)\n', (6271, 6275), True, 'import numpy as np\n')] |
import mechanize
url = "http://192.168.48.1:81"
browser = mechanize.Browser()
browser.set_handle_robots(False)
cookies = mechanize.CookieJar()
browser.set_cookiejar(cookies)
browser.addheaders = [
(
"user-agent",
"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.3) Gecko/20100423",
)
]
try:
browser.open(url)
browser.select_form(nr=0)
# SIAPE Number
browser.form["UserUsername"] = "........"
# Password
browser.form["UserPassword"] = "........"
browser.find_control("termo").items[0].selected = True
browser.submit()
except Exception as ex:
print(f"Error: {ex}")
| [
"mechanize.CookieJar",
"mechanize.Browser"
] | [((60, 79), 'mechanize.Browser', 'mechanize.Browser', ([], {}), '()\n', (77, 79), False, 'import mechanize\n'), ((123, 144), 'mechanize.CookieJar', 'mechanize.CookieJar', ([], {}), '()\n', (142, 144), False, 'import mechanize\n')] |
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
from kafka import KafkaProducer, KafkaClient
import json
properties = open('properties.txt', 'r')
lines = properties.read().split('\n')
access_token = lines[0].split('=')[1].replace('"', '').strip()
access_token_secret = lines[1].split('=')[1].replace('"', '').strip()
consumer_key = lines[2].split('=')[1].replace('"', '').strip()
consumer_secret = lines[3].split('=')[1].replace('"', '').strip()
class StdOutListener(StreamListener):
def on_data(self, data):
producer.send("trump", data.encode('utf-8'))
return True
    def on_error(self, status):
        print(status)
producer = KafkaProducer()
l = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, l)
stream.filter(track=["donald trump"])  # track must be a list of phrases
| [
"kafka.KafkaProducer",
"tweepy.Stream",
"tweepy.OAuthHandler"
] | [((711, 726), 'kafka.KafkaProducer', 'KafkaProducer', ([], {}), '()\n', (724, 726), False, 'from kafka import KafkaProducer, KafkaClient\n'), ((755, 798), 'tweepy.OAuthHandler', 'OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (767, 798), False, 'from tweepy import OAuthHandler\n'), ((865, 880), 'tweepy.Stream', 'Stream', (['auth', 'l'], {}), '(auth, l)\n', (871, 880), False, 'from tweepy import Stream\n')] |
import os
import discord
from server import keep_running
intents = discord.Intents.all()
client = discord.Client(intents=intents)
token = os.environ['token']
master_id_string = os.environ["master_id"]
master_id_int = int(master_id_string)
@client.event
async def on_ready():
print("Log: Logged in as {0.user}"
.format(client))
await client.change_presence(activity=discord.Activity(type=discord.ActivityType.watching, name="Pornhub"))
@client.event
async def on_message(message):
if message.author == client.user:
return
if message.author.id == master_id_int:
if not message.content.startswith(">>"):
return
elif message.content == ">> pin":
await pin_last(message)
elif ">> delete msg" in message.content:
await delete(message)
elif ">> write" in message.content:
await write(message)
elif message.content == ">> info":
await info(message)
elif ">> say" in message.content:
await say(message)
elif ">> create txt" in message.content:
await create_text_ch(message)
elif ">> create voice" in message.content:
await create_voice_ch(message)
elif message.content == ">> list members":
await list_members(message)
elif ">> create category" in message.content:
await create_category(message)
elif ">> kick named" in message.content:
await kick_named(message)
elif ">> kick id" in message.content:
await kick_id(message)
elif ">> ban named" in message.content:
await ban_named(message)
elif ">> ban id" in message.content:
await ban_id(message)
elif ">> purge" in message.content:
await purge(message)
elif ">> delete txt" in message.content:
await delete_txt(message)
elif ">> delete voice" in message.content:
await delete_voice(message)
elif ">> delete category" in message.content:
await delete_category(message)
elif ">> wipe voice" in message.content:
await wipe_voice(message)
elif ">> wipe all voice" in message.content:
await wipe_all_voice(message)
else:
return
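# Example invocations (illustrative, based on the handlers below):
#   >> say "hello"       -> deletes the command message and sends "hello"
#   >> delete msg 5      -> deletes the last 5 messages in the channel
#   >> create txt "logs" -> creates a text channel named "logs"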
async def pin_last(message):
counter = 0
async for m in message.channel.history(limit=2):
if counter == 1:
await m.pin()
counter += 1
async def delete(message):
number_string = message.content[message.content.rfind(" "):]
number = int(number_string)
async for m in message.channel.history(limit=number):
await m.delete()
async def write(message):
number_string = message.content[message.content.rfind(" "):]
number = int(number_string)
content_string = message.content[message.content.find("\"") + 1: message.content.rfind("\"")]
for i in range(number):
await message.channel.send(content_string)
async def info(message):
info = """hello @everyone, i am an extremely overpowered bot only at Gökalp Ateş's disposal. I only listen to my master's commands, my master honors me by making me stronger in his spare time."""
await message.channel.send(info)
async def say(message):
await message.channel.last_message.delete()
content_string = message.content[message.content.find("\"")+1: message.content.rfind("\"")]
await message.channel.send(content_string)
async def create_text_ch(message):
text_ch_name = message.content[message.content.find("\"") + 1: message.content.rfind("\"")]
await message.guild.create_text_channel(text_ch_name)
async def create_voice_ch(message):
voice_ch_name = message.content[message.content.find("\"") + 1: message.content.rfind("\"")]
await message.guild.create_voice_channel(voice_ch_name)
async def list_members(message):
member_list = message.guild.members
member_message = ""
for member in member_list:
member_message += member.name + ":\t" + str(member.id) + "\n"
await message.channel.send(member_message)
async def create_category(message):
category_name = message.content[message.content.find("\"") + 1: message.content.rfind("\"")]
await message.guild.create_category(category_name)
async def kick_named(message):
target_name = message.content[message.content.find("\"")+1:message.content.rfind("\"")]
target = message.guild.get_member_named(target_name)
await target.kick()
async def kick_id(message):
target_id_string = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
target_id_int = int(target_id_string)
target = message.guild.get_member(target_id_int)
await target.kick()
async def ban_named(message):
    target_name = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
    target = message.guild.get_member_named(target_name)
    await target.ban()
async def ban_id(message):
    target_id_string = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
target_id_int = int(target_id_string)
target = message.guild.get_member(target_id_int)
await target.ban()
async def purge(message):
await message.channel.purge()
async def delete_txt(message):
txt_ch_name = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
for txt_ch in message.guild.text_channels:
if txt_ch.name == txt_ch_name:
await txt_ch.delete()
return
async def delete_voice(message):
voice_ch_name = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
for voice_ch in message.guild.voice_channels:
if voice_ch.name == voice_ch_name:
await voice_ch.delete()
return
async def delete_category(message):
category_name = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
for cat_item in message.guild.categories:
if cat_item.name == category_name:
await cat_item.delete()
return
async def wipe_voice(message):
voice_ch_name = message.content[message.content.find("\"") + 1:message.content.rfind("\"")]
for voice_ch in message.guild.voice_channels:
if voice_ch.name == voice_ch_name:
for member in voice_ch.members:
await member.move_to(None)
async def wipe_all_voice(message):
for voice_ch in message.guild.voice_channels:
for member in voice_ch.members:
await member.move_to(None)
keep_running()
client.run(token)
| [
"discord.Client",
"server.keep_running",
"discord.Intents.all",
"discord.Activity"
] | [((68, 89), 'discord.Intents.all', 'discord.Intents.all', ([], {}), '()\n', (87, 89), False, 'import discord\n'), ((100, 131), 'discord.Client', 'discord.Client', ([], {'intents': 'intents'}), '(intents=intents)\n', (114, 131), False, 'import discord\n'), ((6156, 6170), 'server.keep_running', 'keep_running', ([], {}), '()\n', (6168, 6170), False, 'from server import keep_running\n'), ((375, 443), 'discord.Activity', 'discord.Activity', ([], {'type': 'discord.ActivityType.watching', 'name': '"""Pornhub"""'}), "(type=discord.ActivityType.watching, name='Pornhub')\n", (391, 443), False, 'import discord\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 17 17:32:40 2021
@author: tekin.evrim.ozmermer
"""
from .MLP_ExactSolution import Model as mlpes
from .KNN import Model as knn
from sys import exit as EXIT
def load(cfg, backbone, dl_coll):
if cfg.classifier == "MLP_ExactSolution":
model_cls = mlpes(cfg)
model_cls.create_collection(backbone, dl_coll = dl_coll, input_batch = None)
model_cls.solve_exact()
elif cfg.classifier == "KNN":
        if not cfg.neighbors:
            cfg.neighbors = 8
model_cls = knn(cfg)
model_cls.create_collection(backbone, dl_coll)
else:
print("Classifier parameter given in the config.json is wrong.\n Choose one of KNN,MLP_ExactSolution")
EXIT(0)
return model_cls
| [
"sys.exit"
] | [((766, 773), 'sys.exit', 'EXIT', (['(0)'], {}), '(0)\n', (770, 773), True, 'from sys import exit as EXIT\n')] |
# coding=utf-8
# Copyright 2017 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import json
import logging
import os
import random
import string
import time
import pytz
import requests
from swiftclient import client
from swiftclient.exceptions import ClientException
from flask import abort, Flask, request
app = Flask(__name__)
app.config.from_envvar('GIT_LFS_SWIFT_SETTINGS_FILE', silent=True)
if 'GIT_LFS_SWIFT_AUTH_URL' in os.environ:
app.config['AUTH_URL'] = os.environ['GIT_LFS_SWIFT_AUTH_URL']
if 'GIT_LFS_SWIFT_BASE_URL' in os.environ:
app.config['BASE_URL'] = os.environ['GIT_LFS_SWIFT_BASE_URL']
if 'GIT_LFS_SWIFT_TOKEN_EXPIRY' in os.environ:
app.config['TOKEN_EXPIRY'] = os.environ['GIT_LFS_SWIFT_TOKEN_EXPIRY']
if 'GIT_LFS_SWIFT_LOGFILE' in os.environ:
app.config['LOGFILE'] = os.environ['GIT_LFS_SWIFT_LOGFILE']
if 'GIT_LFS_SWIFT_LOGLEVEL' in os.environ:
app.config['LOGLEVEL'] = os.environ['GIT_LFS_SWIFT_LOGLEVEL']
if app.config.get('LOGFILE') or app.config.get('LOGLEVEL'):
loglevel = app.config.get('LOGLEVEL', 'WARNING')
logging.basicConfig(
level=loglevel, filename=app.config.get('LOGFILE'),
format='%(asctime)s [%(name)s] %(levelname)s %(message)s')
logger = logging.getLogger(__name__)
def handle_dl(c_url, oid, query, headers, o_size, o_data):
"""Handle download of object by manipulating o_data dict."""
url = c_url + '/' + oid + query
success = False
r = requests.head(url, headers=headers)
try:
r.raise_for_status()
except requests.RequestException as e:
if r.status_code == 404:
o_data['error'] = dict(code=404, message='Not found.')
elif r.status_code in (401, 403):
abort(r.status_code)
else:
logger.exception(
'Failure while heading object with url %s. %s',
url, str(e))
abort(500)
else:
if int(r.headers['content-length']) != o_size:
o_data['error'] = dict(
code=422, message='Size does not match.')
else:
success = True
return success
def handle_ul(
c_url, oid, query, headers, o_size, o_data):
"""Handle upload of object by manipulating o_data dict."""
url = c_url + '/' + oid + query
r = requests.head(url, headers=headers)
try:
r.raise_for_status()
except requests.RequestException as e:
if r.status_code == 404:
pass
elif r.status_code == 401:
abort(401)
elif r.status_code == 403:
# It's possible that a write ACL exist. Test it with a
# post to a random object, which should not exist.
chars = string.ascii_lowercase + string.digits
obj = '_'.join(random.choice(chars) for x in range(32))
url = c_url + '/' + obj
r = requests.post(url, headers=headers)
try:
r.raise_for_status()
# Landing here should be unlikely, but still
# this would mean that a write is possible.
pass
except requests.RequestException as e:
if r.status_code == 404:
# Post is possible, so user has access rights.
pass
elif r.status_code == 403:
abort(403)
else:
logger.exception(
'Failure while posting dummy object with url '
'%s. %s',
url, str(e))
abort(500)
else:
logger.exception(
'Failure while heading object with url %s. %s',
url, str(e))
abort(500)
return True
@app.route(
'/<account>/<container>/read_<readsig>/write_<writesig>/<expires_at>/'
'objects/batch', methods=['POST'])
@app.route('/<account>/<container>/objects/batch', methods=['POST'])
@app.route('/<container>/objects/batch', methods=['POST'])
def batch_api(
account=None, container=None, readsig=None, writesig=None,
expires_at=None):
"""
Implementation of
https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md.
"""
auth = request.authorization
if auth:
# With this option it should be possible
# to use keystone auth, too.
kwargs = app.config.get('AUTH_KWARGS', {})
try:
storage_url, token = client.get_auth(
app.config['AUTH_URL'],
auth.username.replace(';', ':'),
auth.password,
**kwargs)
except ClientException as e:
if e.http_status == 401:
abort(401)
else:
abort(500)
else:
query = ''
if account:
# Replace default storage-account.
storage_url = '/'.join(
storage_url.rstrip('/').split('/')[0:-1] + [account])
else:
token = None
if 'BASE_URL' not in app.config or account is None:
abort(401)
storage_url = app.config['BASE_URL'].rstrip('/') + '/v1/' + account
if not expires_at:
expires_at_iso = datetime.fromtimestamp(
int(time.time()) + app.config.get('TOKEN_EXPIRY', 3600),
pytz.utc).isoformat()
else:
expires_at_iso = datetime.fromtimestamp(
int(expires_at), pytz.utc).isoformat()
data = request.get_json()
logger.debug('Received Data: %s', data)
operation = data.get('operation', None)
if operation not in ('download', 'upload') or 'objects' not in data:
abort(400)
# We currently support basic and swift transfer.
# With swift transfer, the client does also consider LO's.
# swift transfer currently only supports token auth.
if 'swift' in data.get('transfers', []) and token:
transfer = 'swift'
else:
transfer = 'basic'
c_url = storage_url.rstrip('/') + '/' + container
objs = []
if operation == 'download':
handle = handle_dl
if not auth:
query = '?temp_url_prefix=&temp_url_sig={}&temp_url_expires={}'.\
format(readsig, expires_at)
else:
handle = handle_ul
if not auth:
query = '?temp_url_prefix=&temp_url_sig={}&temp_url_expires={}'.\
format(writesig, expires_at)
for o in data['objects']:
try:
oid = o['oid']
o_size = o['size']
except KeyError:
abort(400)
o_data = {'oid': oid}
href = c_url if transfer == 'swift' else c_url + '/' + oid + query
headers = {'x-auth-token': token} if token else {}
if handle(c_url, oid, query, headers, o_size, o_data):
action = dict(
href=href, header=headers, expires_at=expires_at_iso)
o_data['actions'] = {operation: action}
o_data['size'] = o_size
o_data['authenticated'] = True
objs.append(o_data)
result = {'objects': objs, 'transfer': transfer}
logger.debug('Response %s', result)
return json.dumps(result), 200, {'Content-Type': 'application/json'}
if __name__ == "__main__":
if 'AUTH_URL' not in app.config and 'BASE_URL' not in app.config:
raise Exception('AUTH_URL or BASE_URL must be specified.')
app.run()
| [
"logging.getLogger",
"requests.post",
"random.choice",
"flask.Flask",
"json.dumps",
"requests.head",
"flask.request.get_json",
"flask.abort",
"time.time"
] | [((862, 877), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (867, 877), False, 'from flask import abort, Flask, request\n'), ((1775, 1802), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1792, 1802), False, 'import logging\n'), ((1993, 2028), 'requests.head', 'requests.head', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (2006, 2028), False, 'import requests\n'), ((2842, 2877), 'requests.head', 'requests.head', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (2855, 2877), False, 'import requests\n'), ((6041, 6059), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (6057, 6059), False, 'from flask import abort, Flask, request\n'), ((6230, 6240), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (6235, 6240), False, 'from flask import abort, Flask, request\n'), ((7721, 7739), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (7731, 7739), False, 'import json\n'), ((5655, 5665), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (5660, 5665), False, 'from flask import abort, Flask, request\n'), ((7126, 7136), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (7131, 7136), False, 'from flask import abort, Flask, request\n'), ((2264, 2284), 'flask.abort', 'abort', (['r.status_code'], {}), '(r.status_code)\n', (2269, 2284), False, 'from flask import abort, Flask, request\n'), ((2434, 2444), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (2439, 2444), False, 'from flask import abort, Flask, request\n'), ((3056, 3066), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (3061, 3066), False, 'from flask import abort, Flask, request\n'), ((5270, 5280), 'flask.abort', 'abort', (['(401)'], {}), '(401)\n', (5275, 5280), False, 'from flask import abort, Flask, request\n'), ((5315, 5325), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (5320, 5325), False, 'from flask import abort, Flask, request\n'), ((3411, 3446), 'requests.post', 'requests.post', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (3424, 3446), False, 'import requests\n'), ((4283, 4293), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (4288, 4293), False, 'from flask import abort, Flask, request\n'), ((5832, 5843), 'time.time', 'time.time', ([], {}), '()\n', (5841, 5843), False, 'import time\n'), ((3318, 3338), 'random.choice', 'random.choice', (['chars'], {}), '(chars)\n', (3331, 3338), False, 'import random\n'), ((3890, 3900), 'flask.abort', 'abort', (['(403)'], {}), '(403)\n', (3895, 3900), False, 'from flask import abort, Flask, request\n'), ((4123, 4133), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (4128, 4133), False, 'from flask import abort, Flask, request\n')] |
import click
import subprocess
"""
API for interacting with the system shell
"""
def sh(args, **kwargs):
"""
    runs the given command with subprocess.check_call, echoing it first
"""
if isinstance(args, str):
args = args.split()
if not args:
return
click.echo('$ {0}'.format(' '.join(args)))
try:
return subprocess.check_call(args, **kwargs)
except subprocess.CalledProcessError as exc:
click.secho('run error {}'.format(exc))
except OSError as exc:
click.secho('not found error {}'.format(exc))
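# Example usage (illustrative):
#   sh("ls -la")            # echoes "$ ls -la", then runs it
#   sh(["git", "status"])   # a list of args is passed through unchanged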
| [
"subprocess.check_call"
] | [((325, 362), 'subprocess.check_call', 'subprocess.check_call', (['args'], {}), '(args, **kwargs)\n', (346, 362), False, 'import subprocess\n')] |
from subprocess import call
def truthy(val):
    if isinstance(val, str):
if val.lower() == 'true':
return True
return False
return val
class BaseDriver(object):
def __init__(self, config):
self.config = config
def use_sudo(self, cmd):
use_sudo = truthy(self.config.get('use_sudo'))
if use_sudo:
return ['sudo'] + cmd
return cmd
def working_dir(self, cmd):
working_dir = self.config.get('directory')
if working_dir:
cmd = ['cd', working_dir, '&&'] + cmd
return cmd
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
    def command(self, cmd):
        raise NotImplementedError()
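# A minimal illustrative subclass (hypothetical, not part of the original code):
class EchoDriver(BaseDriver):
    def command(self, cmd):
        # build the final argv using the helpers provided by BaseDriver
        return self.use_sudo(self.working_dir(cmd))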
| [
"subprocess.call"
] | [((667, 676), 'subprocess.call', 'call', (['cmd'], {}), '(cmd)\n', (671, 676), False, 'from subprocess import call\n')] |
#!/usr/bin/env python3
import sys
import psycopg2
try:
csv_path = sys.argv[1]
except IndexError:
print('ERROR: Taxonomy CSV file path required.')
sys.exit(1)
conn = psycopg2.connect("host=localhost dbname=bih user=postgres")
cur = conn.cursor()
with open(csv_path, 'r') as fp:
next(fp) # Skip the header row.
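    # copy_from streams the remaining rows through PostgreSQL's COPY protocol,
    # which is much faster than row-by-row INSERTs.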
cur.copy_from(fp, 'taxonomy', sep=',')
conn.commit()
| [
"psycopg2.connect",
"sys.exit"
] | [((179, 238), 'psycopg2.connect', 'psycopg2.connect', (['"""host=localhost dbname=bih user=postgres"""'], {}), "('host=localhost dbname=bih user=postgres')\n", (195, 238), False, 'import psycopg2\n'), ((159, 170), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (167, 170), False, 'import sys\n')] |
from losowe_liczby import losuj
tab = losuj(20)
print(tab)
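# Insertion sort: for each i, find where tab[i] belongs in the sorted prefix
# tab[0..i-1], then rotate it into place.
# Illustrative trace: [5, 2, 4] -> i=1: [2, 5, 4] -> i=2: [2, 4, 5]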
for i in range(1, len(tab)):
tymczasowe_miejsce = i
    for x in range(i, -1, -1): # find the position where the new element belongs
if tab[i] < tab[x]:
tymczasowe_miejsce = x
tmp_var = tab[i]
    for l in range(tymczasowe_miejsce, i+1): # rotate the element into the sorted prefix
tmp_var2 = tab[l]
tab[l] = tmp_var
tmp_var = tmp_var2
print(tab) | [
"losowe_liczby.losuj"
] | [((39, 48), 'losowe_liczby.losuj', 'losuj', (['(20)'], {}), '(20)\n', (44, 48), False, 'from losowe_liczby import losuj\n')] |
#!/router/bin/python
__copyright__ = "Copyright 2015"
"""
Name:
control_plane_general_test.py
Description:
    This script tests the performance of the TRex traffic generator control plane.
    The scenarios assume a WORKING server is listening and processing the requests.
::
Topology:
-------- --------
| | | |
| Client | <-----JSON-RPC------> | Server |
| | | |
-------- --------
"""
from nose.plugins import Plugin
# import misc_methods
import sys
import os
# from CPlatformUnderTest import *
# from CPlatform import *
import termstyle
import threading
from common.trex_exceptions import *
from Client.trex_client import CTRexClient
# import Client.outer_packages
# import Client.trex_client
TREX_SERVER = None
class CTRexCP():
trex_server = None
def setUpModule(module):
pass
def tearDownModule(module):
pass
class CControlPlaneGeneral_Test(object):#(unittest.TestCase):
"""This class defines the general testcase of the control plane service"""
    def __init__(self):
        self.trex_server_name = 'csi-kiwi-02'
        self.trex = CTRexClient(self.trex_server_name)
def setUp(self):
# initialize server connection for single client
# self.server = CTRexClient(self.trex_server)
pass
########################################################################
#### DO NOT ADD TESTS TO THIS FILE ####
#### Added tests here will held once for EVERY test sub-class ####
########################################################################
def tearDown(self):
pass
def check_for_trex_crash(self):
pass
| [
"Client.trex_client.CTRexClient"
] | [((1275, 1309), 'Client.trex_client.CTRexClient', 'CTRexClient', (['self.trex_server_name'], {}), '(self.trex_server_name)\n', (1286, 1309), False, 'from Client.trex_client import CTRexClient\n')] |
import math
import random
import sys
from fractions import Fraction
from functools import lru_cache
from PIL import Image, ImageDraw, ImageFont
from . import conf
__all__ = ["sizes", "source_set", "placeholder"]
def _grid(*, _columns=12, **breakpoint_sizes):
settings = conf.get_settings()
for key in breakpoint_sizes.keys() - settings.BREAKPOINTS.keys():
raise KeyError(
f"Invalid breakpoint: {key}. Choices are: {', '.join(settings.BREAKPOINTS.keys())}"
)
prev_size = _columns
for key, value in settings.BREAKPOINTS.items():
prev_size = breakpoint_sizes.get(key, prev_size)
yield key, prev_size / _columns
def _media_query(*, container_width: int = None, **breakpoints: {str: int}):
settings = conf.get_settings()
prev_ratio = None
prev_width = 0
for key, ratio in breakpoints.items():
width = settings.BREAKPOINTS[key]
if container_width and width >= container_width:
yield f"(min-width: {prev_width}px) and (max-width: {container_width - 1}px) {math.floor(ratio * 100)}vw"
break
if prev_ratio and prev_ratio != ratio:
yield f"(min-width: {prev_width}px) and (max-width: {width - 1}px) {math.floor(prev_ratio * 100)}vw"
prev_width = width
prev_ratio = ratio
yield f"{math.floor(prev_ratio * container_width)}px" if container_width else f"{math.floor(prev_ratio * 100)}vw"
def sizes(*, cols=12, container_width: int = None, **breakpoints: {str: int}) -> str:
breakpoints = dict(_grid(_columns=cols, **breakpoints))
return ", ".join(_media_query(container_width=container_width, **breakpoints))
def source_set(
size: (int, int), *, ratio: str | Fraction | None, max_width: int, cols: int
) -> set:
ratio = Fraction(ratio) if ratio else None
img_width, img_height = size
ratio = ratio or (img_width / img_height)
settings = conf.get_settings()
# calc all widths at 1X resolution
widths = (max_width * (w + 1) / cols for w in range(cols))
# exclude widths above the max width
widths = (w for w in widths if w <= max_width)
# sizes for all screen resolutions
widths = (w * res for w in widths for res in settings.PIXEL_DENSITIES)
# exclude sizes above the original image width or height
return {math.floor(w) for w in widths if w <= img_width and w / ratio <= img_height}
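# Illustrative: for a 1200x800 source, ratio 3/2, max_width=1200, cols=4 and
# PIXEL_DENSITIES = [1, 2] (hypothetical setting), the candidate widths are
# {300, 600, 900, 1200} -- anything wider (or taller) than the source is dropped.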
@lru_cache
def placeholder(width: int, height: int, alt):
hue = random.randint(0, 360) # nosec
img = Image.new("RGB", (width, height), color=f"hsl({hue}, 40%, 80%)")
draw = ImageDraw.Draw(img)
draw.line(((0, 0, width, height)), width=3, fill=f"hsl({hue}, 60%, 20%)")
draw.line(((0, height, width, 0)), width=3, fill=f"hsl({hue}, 60%, 20%)")
draw.rectangle(
(width / 4, height / 4, width * 3 / 4, height * 3 / 4),
fill=f"hsl({hue}, 40%, 80%)",
)
fontsize = 32
if sys.platform == "win32":
font_name = r"C:\WINDOWS\Fonts\CALIBRI.TTF"
elif sys.platform in ["linux", "linux2"]:
font_name = "DejaVuSans-Bold"
elif sys.platform == "darwin":
font_name = "Helvetica"
font = ImageFont.truetype(font_name, fontsize)
text = f"{alt}\n<{width}x{height}>"
while font.getsize(text)[0] < width / 2:
# iterate until the text size is just larger than the criteria
fontsize += 1
font = ImageFont.truetype(font_name, fontsize)
draw.text(
(width / 2, height / 2),
text,
font=font,
fill=f"hsl({hue}, 60%, 20%)",
align="center",
anchor="mm",
)
return img
| [
"math.floor",
"PIL.Image.new",
"fractions.Fraction",
"PIL.ImageFont.truetype",
"PIL.ImageDraw.Draw",
"random.randint"
] | [((2471, 2493), 'random.randint', 'random.randint', (['(0)', '(360)'], {}), '(0, 360)\n', (2485, 2493), False, 'import random\n'), ((2513, 2577), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(width, height)'], {'color': 'f"""hsl({hue}, 40%, 80%)"""'}), "('RGB', (width, height), color=f'hsl({hue}, 40%, 80%)')\n", (2522, 2577), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((2589, 2608), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['img'], {}), '(img)\n', (2603, 2608), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((3158, 3197), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['font_name', 'fontsize'], {}), '(font_name, fontsize)\n', (3176, 3197), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1794, 1809), 'fractions.Fraction', 'Fraction', (['ratio'], {}), '(ratio)\n', (1802, 1809), False, 'from fractions import Fraction\n'), ((2324, 2337), 'math.floor', 'math.floor', (['w'], {}), '(w)\n', (2334, 2337), False, 'import math\n'), ((3391, 3430), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['font_name', 'fontsize'], {}), '(font_name, fontsize)\n', (3409, 3430), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((1337, 1377), 'math.floor', 'math.floor', (['(prev_ratio * container_width)'], {}), '(prev_ratio * container_width)\n', (1347, 1377), False, 'import math\n'), ((1409, 1437), 'math.floor', 'math.floor', (['(prev_ratio * 100)'], {}), '(prev_ratio * 100)\n', (1419, 1437), False, 'import math\n'), ((1060, 1083), 'math.floor', 'math.floor', (['(ratio * 100)'], {}), '(ratio * 100)\n', (1070, 1083), False, 'import math\n'), ((1233, 1261), 'math.floor', 'math.floor', (['(prev_ratio * 100)'], {}), '(prev_ratio * 100)\n', (1243, 1261), False, 'import math\n')] |
import argparse
from pathlib import Path
import math
import colorsys
from typing import Union
from PIL import Image, ImageColor
import numpy as np
class HueCycler:
"""Class that processes the command-line arguments and renders the GIF.
"""
def __init__(self):
self.parser = argparse.ArgumentParser(
description="Turn a .PNG into a .GIF that cycles around the hue spectrum."
)
self.parser.add_argument(
"input",
metavar="IN",
type=Path,
help="Path to the input PNG."
)
self.parser.add_argument(
"output",
metavar="OUT",
type=Path,
help="Path to the output GIF."
)
self.parser.add_argument(
"-p", "--period",
metavar="N",
type=int,
default=10,
help="The number of seconds for the image to make a 360 degree hue cycle. Default 10."
)
self.parser.add_argument(
"-s", "--step",
metavar="N",
type=int,
default=5,
help="The number of degrees to increment the picture between GIF frames. Default 5."
)
self._parse_args()
def _parse_args(self):
"""Parse the command-line arguments.
"""
self.args = self.parser.parse_args()
self.input_file = Path(self.args.input)
self.output_file = Path(self.args.output)
self.period = self.args.period
self.step = self.args.step
self.ms = math.ceil(1000 * self.period * self.step / 360)
self.num_frames = math.ceil(360 / self.step)
self.input_image = Image.open(self.input_file).convert("RGBA")
def process(self):
"""Render the GIF!
"""
rgb_to_hsv = np.vectorize(colorsys.rgb_to_hsv)
hsv_to_rgb = np.vectorize(colorsys.hsv_to_rgb)
arr = np.array(np.asarray(self.input_image).astype("float"))
r, g, b, a = np.rollaxis(arr, axis=-1)
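        # convert to HSV so only the hue channel needs to change per frame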
h, s, v = rgb_to_hsv(r, g, b)
frames = []
for i in range(1, self.num_frames):
shift = (i * self.step) / 360
h_ = (h + shift) % 1.0
r_, g_, b_ = hsv_to_rgb(h_, s, v)
arr_ = np.dstack((r_, g_, b_, a))
frame = Image.fromarray(arr_.astype("uint8"), "RGBA")
frames.append(frame)
self.input_image.save(
self.output_file,
**self.input_image.info,
save_all=True,
append_images=frames,
optimize=True,
duration=self.ms,
loop=0
)
if __name__ == "__main__":
hc = HueCycler()
hc.process()
| [
"numpy.dstack",
"PIL.Image.open",
"math.ceil",
"argparse.ArgumentParser",
"pathlib.Path",
"numpy.rollaxis",
"numpy.asarray",
"numpy.vectorize"
] | [((292, 396), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Turn a .PNG into a .GIF that cycles around the hue spectrum."""'}), "(description=\n 'Turn a .PNG into a .GIF that cycles around the hue spectrum.')\n", (315, 396), False, 'import argparse\n'), ((1318, 1339), 'pathlib.Path', 'Path', (['self.args.input'], {}), '(self.args.input)\n', (1322, 1339), False, 'from pathlib import Path\n'), ((1365, 1387), 'pathlib.Path', 'Path', (['self.args.output'], {}), '(self.args.output)\n', (1369, 1387), False, 'from pathlib import Path\n'), ((1474, 1521), 'math.ceil', 'math.ceil', (['(1000 * self.period * self.step / 360)'], {}), '(1000 * self.period * self.step / 360)\n', (1483, 1521), False, 'import math\n'), ((1546, 1572), 'math.ceil', 'math.ceil', (['(360 / self.step)'], {}), '(360 / self.step)\n', (1555, 1572), False, 'import math\n'), ((1720, 1753), 'numpy.vectorize', 'np.vectorize', (['colorsys.rgb_to_hsv'], {}), '(colorsys.rgb_to_hsv)\n', (1732, 1753), True, 'import numpy as np\n'), ((1773, 1806), 'numpy.vectorize', 'np.vectorize', (['colorsys.hsv_to_rgb'], {}), '(colorsys.hsv_to_rgb)\n', (1785, 1806), True, 'import numpy as np\n'), ((1894, 1919), 'numpy.rollaxis', 'np.rollaxis', (['arr'], {'axis': '(-1)'}), '(arr, axis=-1)\n', (1905, 1919), True, 'import numpy as np\n'), ((2147, 2173), 'numpy.dstack', 'np.dstack', (['(r_, g_, b_, a)'], {}), '((r_, g_, b_, a))\n', (2156, 2173), True, 'import numpy as np\n'), ((1599, 1626), 'PIL.Image.open', 'Image.open', (['self.input_file'], {}), '(self.input_file)\n', (1609, 1626), False, 'from PIL import Image, ImageColor\n'), ((1829, 1857), 'numpy.asarray', 'np.asarray', (['self.input_image'], {}), '(self.input_image)\n', (1839, 1857), True, 'import numpy as np\n')] |
import json
import threading
from os import path
import config
if not config.NO_PI:
import RPi.GPIO as GPIO
import atexit
from subprocess import call
else:
from playsound import playsound
class Soundboard:
def __init__(self, sounds_config=config.SOUNDS_CONFIG):
self.sounds_config = sounds_config
self.sounds = []
self.run_gpio_thread = False
self.load_from_config(sounds_config)
if not config.NO_PI:
self.setup_gpio()
atexit.register(self.cleanup)
def reload_config(self):
print("Reloading config ('{0}')".format(self.sounds_config))
self.load_from_config(self.sounds_config)
if not config.NO_PI:
self.setup_gpio()
@staticmethod
def cleanup():
if config.VERBOSE_LOGGING:
print("Cleaning up the GPIO")
GPIO.cleanup()
def load_from_config(self, sounds_config):
if config.VERBOSE_LOGGING:
print("Loading sound config '{0}'".format(sounds_config))
with open(sounds_config) as f:
self.sounds = json.load(f)
def setup_gpio(self):
if config.NO_PI:
return
if config.VERBOSE_LOGGING:
print("Setting up the GPIO")
GPIO.setmode(GPIO.BOARD)
if not config.VERBOSE_LOGGING:
GPIO.setwarnings(False)
for item in self.sounds:
if "GPIO_pin" not in item:
continue
pin = item["GPIO_pin"]
GPIO.setup(pin, GPIO.IN)
            # Bind the current pin as a default argument; a plain closure would
            # capture the loop variable and fire every callback with the last pin.
            GPIO.add_event_detect(pin, GPIO.RISING, callback=lambda channel, p=pin: self.on_button_pressed(p), bouncetime=200)
def on_button_pressed(self, pin):
if config.VERBOSE_LOGGING:
print("Button on pin {0} was pressed".format(pin))
self.play_sound_from_pin(pin)
@staticmethod
def play_sound_file(file_path):
full_path = path.join(config.SOUNDS_DIR, file_path)
if path.isfile(full_path):
print("Playing sound {0}".format(full_path))
if config.NO_PI:
try:
playsound(full_path, False)
except NotImplementedError:
if config.VERBOSE_LOGGING:
print("Could not use the 'non blocking mode' from playsound, running it in a different thread")
threading.Thread(target=playsound, args=(full_path,)).start()
else:
threading.Thread(target=lambda: call(["aplay", "-q", full_path])).start()
return True
else:
print("Could not find sound at '{0}'".format(full_path))
return False
def play_sound_by_name(self, name):
if config.VERBOSE_LOGGING:
print("Attempting to play sound by name ('{0}')".format(name))
sound = self.get_sound_by_name(name)
if sound is not False:
return self.play_sound_file(sound["file"])
else:
print("Could not find sound '{0}'".format(name))
return False
def play_sound_from_pin(self, pin):
if config.VERBOSE_LOGGING:
print("Playing sound from pin ({0})".format(pin))
sound = self.get_sound_by_pin(pin)
if sound is not False:
self.play_sound_file(sound["file"])
else:
print("There is no sound bound to GPIO pin {0}".format(pin))
def get_sound_by_name(self, name):
for item in self.sounds:
if item["file"] == name:
return item
if path.splitext(item["file"])[0] == name:
return item
if "aliases" not in item:
continue
for alias in item["aliases"]:
if alias == name:
return item
return False
def get_sound_by_pin(self, pin):
for item in self.sounds:
if "GPIO_pin" not in item:
print("no attr")
continue
if item["GPIO_pin"] == pin:
return item
return False
def add_sound(self, file, aliases, gpio_pin=None):
if self.contains_sound_file(file):
print("Sound file '{0}' already registered".format(file))
return
s = {"file": file, "aliases": aliases}
if gpio_pin is not None:
s["gpio_pin"] = gpio_pin
self.sounds.append(s)
self.write_to_config()
def write_to_config(self):
print("Updating sound config file '{0}'".format(config.SOUNDS_CONFIG))
serialized = json.dumps(self.sounds, indent=4)
f = open(config.SOUNDS_CONFIG, "w")
f.write(serialized)
f.close()
def contains_sound_file(self, file):
for item in self.sounds:
if item["file"] == file:
return True
return False
def contains_sound(self, name):
return self.get_sound_by_name(name) is not False
from time import sleep
if __name__ == '__main__':
soundboard = Soundboard()
soundboard.play_sound_from_pin(10)
sleep(3)
soundboard.play_sound_by_name("mySound")
sleep(3)
soundboard.play_sound_by_name("beep")
| [
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"json.dumps",
"os.path.join",
"RPi.GPIO.setwarnings",
"time.sleep",
"playsound.playsound",
"os.path.isfile",
"os.path.splitext",
"subprocess.call",
"json.load",
"threading.Thread",
"atexit.register",
"RPi.GPIO.setmode"
] | [((5097, 5105), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (5102, 5105), False, 'from time import sleep\n'), ((5155, 5163), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (5160, 5163), False, 'from time import sleep\n'), ((869, 883), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (881, 883), True, 'import RPi.GPIO as GPIO\n'), ((1273, 1297), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BOARD'], {}), '(GPIO.BOARD)\n', (1285, 1297), True, 'import RPi.GPIO as GPIO\n'), ((1924, 1963), 'os.path.join', 'path.join', (['config.SOUNDS_DIR', 'file_path'], {}), '(config.SOUNDS_DIR, file_path)\n', (1933, 1963), False, 'from os import path\n'), ((1976, 1998), 'os.path.isfile', 'path.isfile', (['full_path'], {}), '(full_path)\n', (1987, 1998), False, 'from os import path\n'), ((4592, 4625), 'json.dumps', 'json.dumps', (['self.sounds'], {'indent': '(4)'}), '(self.sounds, indent=4)\n', (4602, 4625), False, 'import json\n'), ((507, 536), 'atexit.register', 'atexit.register', (['self.cleanup'], {}), '(self.cleanup)\n', (522, 536), False, 'import atexit\n'), ((1103, 1115), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1112, 1115), False, 'import json\n'), ((1349, 1372), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (1365, 1372), True, 'import RPi.GPIO as GPIO\n'), ((1519, 1543), 'RPi.GPIO.setup', 'GPIO.setup', (['pin', 'GPIO.IN'], {}), '(pin, GPIO.IN)\n', (1529, 1543), True, 'import RPi.GPIO as GPIO\n'), ((2127, 2154), 'playsound.playsound', 'playsound', (['full_path', '(False)'], {}), '(full_path, False)\n', (2136, 2154), False, 'from playsound import playsound\n'), ((3573, 3600), 'os.path.splitext', 'path.splitext', (["item['file']"], {}), "(item['file'])\n", (3586, 3600), False, 'from os import path\n'), ((2386, 2439), 'threading.Thread', 'threading.Thread', ([], {'target': 'playsound', 'args': '(full_path,)'}), '(target=playsound, args=(full_path,))\n', (2402, 2439), False, 'import threading\n'), ((2514, 2546), 'subprocess.call', 'call', (["['aplay', '-q', full_path]"], {}), "(['aplay', '-q', full_path])\n", (2518, 2546), False, 'from subprocess import call\n')] |
from typing import Union
import discord
from discord.ext import commands
from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, \
has_permissions, bot_has_permissions
from poppi import Poppi
from poppi_helpers import PoppiError, success_embed, FetchedUser
class Moderation(commands.Cog, name="Moderation"):
def __init__(self, bot: Poppi):
self.bot = bot
# Moderation commands
# Errors are handled by the error handler in /src/cogs/events.py
@command(help="Ban a user", usage="[user] [string|None]")
@guild_only()
@has_guild_permissions(ban_members=True)
@bot_has_guild_permissions(ban_members=True)
async def ban(self, ctx: commands.Context, user: Union[discord.User, FetchedUser], reason="None"):
await ctx.guild.ban(user, reason=reason)
await ctx.send(embed=success_embed(f"Successfully banned {user.display_name}#{user.discriminator}!"))
@command(help="Unban a user", usage="[user] [string|None]")
@guild_only()
@has_guild_permissions(ban_members=True)
@bot_has_guild_permissions(ban_members=True)
async def unban(self, ctx: commands.Context, user: Union[discord.User, FetchedUser], reason="None"):
await ctx.guild.unban(user, reason=reason)
await ctx.send(embed=success_embed(f"Successfully unbanned {user.display_name}#{user.discriminator}!"))
@command(help="Softban a user", usage="[member] [reason|None]")
@guild_only()
@has_guild_permissions(ban_members=True)
@bot_has_guild_permissions(ban_members=True)
async def softban(self, ctx: commands.Context, user: discord.Member, reason="None"):
await ctx.guild.ban(user, reason=reason)
await ctx.guild.unban(user, reason=reason)
await ctx.send(embed=success_embed(f"Successfully softbanned {user.display_name}#{user.discriminator}!"))
@command(help="Kick a user", usage="[member] [string|None]")
@guild_only()
@has_guild_permissions(kick_members=True)
@bot_has_guild_permissions(kick_members=True)
async def kick(self, ctx: commands.Context, user: discord.Member, reason="None"):
await ctx.guild.kick(user, reason=reason)
await ctx.send(embed=success_embed(f"Successfully kicked {user.display_name}#{user.discriminator}!"))
@command(help="Clear up to 100 messages", usage="[amount]")
@guild_only()
@has_permissions(manage_messages=True)
@bot_has_permissions(manage_messages=True)
async def clear(self, ctx: commands.Context, amount: int):
# Check for boundaries (no more than 100 msgs deleted at once)
if amount > 100:
raise PoppiError("Please specify an amount <= 100!")
deleted = await ctx.channel.purge(limit=amount)
await ctx.send(embed=success_embed(f"Deleted {len(deleted)} messages!"), delete_after=5)
| [
"discord.ext.commands.has_permissions",
"discord.ext.commands.bot_has_guild_permissions",
"poppi_helpers.PoppiError",
"discord.ext.commands.guild_only",
"discord.ext.commands.bot_has_permissions",
"poppi_helpers.success_embed",
"discord.ext.commands.has_guild_permissions",
"discord.ext.commands.command"
] | [((524, 580), 'discord.ext.commands.command', 'command', ([], {'help': '"""Ban a user"""', 'usage': '"""[user] [string|None]"""'}), "(help='Ban a user', usage='[user] [string|None]')\n", (531, 580), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((586, 598), 'discord.ext.commands.guild_only', 'guild_only', ([], {}), '()\n', (596, 598), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((604, 643), 'discord.ext.commands.has_guild_permissions', 'has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (625, 643), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((649, 692), 'discord.ext.commands.bot_has_guild_permissions', 'bot_has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (674, 692), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((962, 1020), 'discord.ext.commands.command', 'command', ([], {'help': '"""Unban a user"""', 'usage': '"""[user] [string|None]"""'}), "(help='Unban a user', usage='[user] [string|None]')\n", (969, 1020), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1026, 1038), 'discord.ext.commands.guild_only', 'guild_only', ([], {}), '()\n', (1036, 1038), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1044, 1083), 'discord.ext.commands.has_guild_permissions', 'has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (1065, 1083), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1089, 1132), 'discord.ext.commands.bot_has_guild_permissions', 'bot_has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (1114, 1132), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1408, 1470), 'discord.ext.commands.command', 'command', ([], {'help': '"""Softban a user"""', 'usage': '"""[member] [reason|None]"""'}), "(help='Softban a user', usage='[member] [reason|None]')\n", (1415, 1470), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1476, 1488), 'discord.ext.commands.guild_only', 'guild_only', ([], {}), '()\n', (1486, 1488), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1494, 1533), 'discord.ext.commands.has_guild_permissions', 'has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (1515, 1533), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1539, 1582), 'discord.ext.commands.bot_has_guild_permissions', 'bot_has_guild_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (1564, 
1582), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1893, 1952), 'discord.ext.commands.command', 'command', ([], {'help': '"""Kick a user"""', 'usage': '"""[member] [string|None]"""'}), "(help='Kick a user', usage='[member] [string|None]')\n", (1900, 1952), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1958, 1970), 'discord.ext.commands.guild_only', 'guild_only', ([], {}), '()\n', (1968, 1970), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((1976, 2016), 'discord.ext.commands.has_guild_permissions', 'has_guild_permissions', ([], {'kick_members': '(True)'}), '(kick_members=True)\n', (1997, 2016), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2022, 2066), 'discord.ext.commands.bot_has_guild_permissions', 'bot_has_guild_permissions', ([], {'kick_members': '(True)'}), '(kick_members=True)\n', (2047, 2066), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2320, 2378), 'discord.ext.commands.command', 'command', ([], {'help': '"""Clear up to 100 messages"""', 'usage': '"""[amount]"""'}), "(help='Clear up to 100 messages', usage='[amount]')\n", (2327, 2378), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2384, 2396), 'discord.ext.commands.guild_only', 'guild_only', ([], {}), '()\n', (2394, 2396), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2402, 2439), 'discord.ext.commands.has_permissions', 'has_permissions', ([], {'manage_messages': '(True)'}), '(manage_messages=True)\n', (2417, 2439), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2445, 2486), 'discord.ext.commands.bot_has_permissions', 'bot_has_permissions', ([], {'manage_messages': '(True)'}), '(manage_messages=True)\n', (2464, 2486), False, 'from discord.ext.commands import command, guild_only, has_guild_permissions, bot_has_guild_permissions, has_permissions, bot_has_permissions\n'), ((2664, 2710), 'poppi_helpers.PoppiError', 'PoppiError', (['"""Please specify an amount <= 100!"""'], {}), "('Please specify an amount <= 100!')\n", (2674, 2710), False, 'from poppi_helpers import PoppiError, success_embed, FetchedUser\n'), ((875, 954), 'poppi_helpers.success_embed', 'success_embed', (['f"""Successfully banned {user.display_name}#{user.discriminator}!"""'], {}), "(f'Successfully banned {user.display_name}#{user.discriminator}!')\n", (888, 954), False, 'from poppi_helpers import PoppiError, success_embed, FetchedUser\n'), ((1319, 1405), 'poppi_helpers.success_embed', 'success_embed', (['f"""Successfully unbanned {user.display_name}#{user.discriminator}!"""'], {}), "(\n f'Successfully unbanned {user.display_name}#{user.discriminator}!')\n", (1332, 1405), False, 'from poppi_helpers import PoppiError, success_embed, FetchedUser\n'), ((1802, 1890), 'poppi_helpers.success_embed', 
'success_embed', (['f"""Successfully softbanned {user.display_name}#{user.discriminator}!"""'], {}), "(\n f'Successfully softbanned {user.display_name}#{user.discriminator}!')\n", (1815, 1890), False, 'from poppi_helpers import PoppiError, success_embed, FetchedUser\n'), ((2233, 2312), 'poppi_helpers.success_embed', 'success_embed', (['f"""Successfully kicked {user.display_name}#{user.discriminator}!"""'], {}), "(f'Successfully kicked {user.display_name}#{user.discriminator}!')\n", (2246, 2312), False, 'from poppi_helpers import PoppiError, success_embed, FetchedUser\n')] |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
import pytest
from ..bounding_box import BoundingBox
from ..mask import RegionMask
from ..pixcoord import PixCoord
from ...shapes import CirclePixelRegion
try:
import matplotlib # noqa
HAS_MATPLOTLIB = True
except ImportError:
HAS_MATPLOTLIB = False
POSITIONS = [(-20, -20), (-20, 20), (20, -20), (60, 60)]
def test_mask_input_shapes():
with pytest.raises(ValueError):
mask_data = np.ones((10, 10))
bbox = BoundingBox(5, 10, 5, 10)
RegionMask(mask_data, bbox)
def test_mask_array():
mask_data = np.ones((10, 10))
bbox = BoundingBox(5, 15, 5, 15)
mask = RegionMask(mask_data, bbox)
data = np.array(mask)
assert_allclose(data, mask.data)
def test_mask_cutout_shape():
mask_data = np.ones((10, 10))
bbox = BoundingBox(5, 15, 5, 15)
mask = RegionMask(mask_data, bbox)
with pytest.raises(ValueError):
mask.cutout(np.arange(10))
with pytest.raises(ValueError):
mask._overlap_slices((10,))
with pytest.raises(ValueError):
mask.to_image((10,))
def test_mask_cutout_copy():
data = np.ones((50, 50))
aper = CirclePixelRegion(PixCoord(25, 25), radius=10.)
mask = aper.to_mask()
cutout = mask.cutout(data, copy=True)
data[25, 25] = 100.
assert cutout[10, 10] == 1.
@pytest.mark.parametrize('position', POSITIONS)
def test_mask_cutout_no_overlap(position):
data = np.ones((50, 50))
aper = CirclePixelRegion(PixCoord(position[0], position[1]), radius=10.)
mask = aper.to_mask()
cutout = mask.cutout(data)
assert cutout is None
weighted_data = mask.multiply(data)
assert weighted_data is None
image = mask.to_image(data.shape)
assert image is None
@pytest.mark.parametrize('position', POSITIONS)
def test_mask_cutout_partial_overlap(position):
data = np.ones((50, 50))
aper = CirclePixelRegion(PixCoord(position[0], position[1]), radius=30.)
mask = aper.to_mask()
cutout = mask.cutout(data)
assert cutout.shape == mask.shape
weighted_data = mask.multiply(data)
assert weighted_data.shape == mask.shape
image = mask.to_image(data.shape)
assert image.shape == data.shape
def test_mask_nan_in_bbox():
"""
Regression test that non-finite data values outside of the mask but
within the bounding box are set to zero.
"""
data = np.ones((101, 101))
data[33, 33] = np.nan
data[67, 67] = np.inf
data[33, 67] = -np.inf
data[22, 22] = np.nan
data[22, 23] = np.inf
radius = 20.
reg1 = CirclePixelRegion(PixCoord(50, 50), radius)
reg2 = CirclePixelRegion(PixCoord(5, 5), radius)
wdata1 = reg1.to_mask(mode='exact').multiply(data)
assert_allclose(np.sum(wdata1), np.pi * radius**2)
wdata2 = reg2.to_mask(mode='exact').multiply(data)
assert_allclose(np.sum(wdata2), 561.6040111923013)
| [
"numpy.ones",
"numpy.testing.assert_allclose",
"pytest.mark.parametrize",
"numpy.array",
"numpy.sum",
"pytest.raises",
"numpy.arange"
] | [((1431, 1477), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""position"""', 'POSITIONS'], {}), "('position', POSITIONS)\n", (1454, 1477), False, 'import pytest\n'), ((1852, 1898), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""position"""', 'POSITIONS'], {}), "('position', POSITIONS)\n", (1875, 1898), False, 'import pytest\n'), ((675, 692), 'numpy.ones', 'np.ones', (['(10, 10)'], {}), '((10, 10))\n', (682, 692), True, 'import numpy as np\n'), ((780, 794), 'numpy.array', 'np.array', (['mask'], {}), '(mask)\n', (788, 794), True, 'import numpy as np\n'), ((799, 831), 'numpy.testing.assert_allclose', 'assert_allclose', (['data', 'mask.data'], {}), '(data, mask.data)\n', (814, 831), False, 'from numpy.testing import assert_allclose\n'), ((880, 897), 'numpy.ones', 'np.ones', (['(10, 10)'], {}), '((10, 10))\n', (887, 897), True, 'import numpy as np\n'), ((1227, 1244), 'numpy.ones', 'np.ones', (['(50, 50)'], {}), '((50, 50))\n', (1234, 1244), True, 'import numpy as np\n'), ((1532, 1549), 'numpy.ones', 'np.ones', (['(50, 50)'], {}), '((50, 50))\n', (1539, 1549), True, 'import numpy as np\n'), ((1958, 1975), 'numpy.ones', 'np.ones', (['(50, 50)'], {}), '((50, 50))\n', (1965, 1975), True, 'import numpy as np\n'), ((2487, 2506), 'numpy.ones', 'np.ones', (['(101, 101)'], {}), '((101, 101))\n', (2494, 2506), True, 'import numpy as np\n'), ((492, 517), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (505, 517), False, 'import pytest\n'), ((539, 556), 'numpy.ones', 'np.ones', (['(10, 10)'], {}), '((10, 10))\n', (546, 556), True, 'import numpy as np\n'), ((984, 1009), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (997, 1009), False, 'import pytest\n'), ((1056, 1081), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1069, 1081), False, 'import pytest\n'), ((1129, 1154), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1142, 1154), False, 'import pytest\n'), ((2840, 2854), 'numpy.sum', 'np.sum', (['wdata1'], {}), '(wdata1)\n', (2846, 2854), True, 'import numpy as np\n'), ((2951, 2965), 'numpy.sum', 'np.sum', (['wdata2'], {}), '(wdata2)\n', (2957, 2965), True, 'import numpy as np\n'), ((1031, 1044), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (1040, 1044), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# coding: utf-8
import sys
import os
import shutil
import struct
"""
This tool automatically generates the common meshes used by the C++ side.
Currently exported meshes:
1. quad
2. cube
3. plane
"""
target = "../resources/engine/common/"
def writef(f,v):
p=struct.pack("f",v)
f.write(p)
def writei(f,v):
p=struct.pack("I",v)
f.write(p)
def writeh(f, indices, verts, atype):
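    # Mesh file header: int16 LOD flag (always 1), uint32 vertex count,
    # uint32 attribute-type code, uint32 index count.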
f.write(struct.pack('h', 1))
writei(f, verts)
writei(f, atype)
writei(f, indices)
def readh(f):
v=f.read(2)
return struct.unpack("h",v)[0]
def readi(f):
v=f.read(4)
return struct.unpack("I",v)[0]
def readf(f):
v=f.read(4)
return struct.unpack("f",v)[0]
quad = [ \
# positions # texCoords \
0.6, -0.6, 0.0, 1.0, \
0.6, -1.0, 0.0, 0.0, \
1.0, -1.0, 1.0, 0.0, \
0.6, -0.6, 0.0, 1.0, \
1.0, -1.0, 1.0, 0.0, \
1.0, -0.6, 1.0, 1.0, \
]
quad2 = [ \
# positions # texCoords \
-1.0, 1.0, 0.0, 1.0, \
-1.0,-1.0, 0.0, 0.0, \
1.0, -1.0, 1.0, 0.0, \
-1.0, 1.0, 0.0, 1.0, \
1.0, -1.0, 1.0, 0.0, \
1.0, 1.0, 1.0, 1.0, \
]
cube = [ \
# //position, texcoord, normal \
# back face \
-1.0, -1.0, -1.0, 0.0, 0.0, 0.0, 0.0, -1.0, # bottom-left \
1.0, 1.0, -1.0, 1.0, 1.0, 0.0, 0.0, -1.0, # top-right \
1.0, -1.0, -1.0, 1.0, 0.0, 0.0, 0.0, -1.0, # bottom-right \
1.0, 1.0, -1.0, 1.0, 1.0, 0.0, 0.0, -1.0, # top-right \
-1.0, -1.0, -1.0, 0.0, 0.0, 0.0, 0.0, -1.0, # bottom-left \
-1.0, 1.0, -1.0, 0.0, 1.0, 0.0, 0.0, -1.0, # top-left \
# front face \
-1.0, -1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, # bottom-left \
1.0, -1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, # bottom-right \
1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, # top-right \
1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, # top-right \
-1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, # top-left \
-1.0, -1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, # bottom-left \
# left face \
-1.0, 1.0, 1.0, 1.0, 0.0, -1.0, 0.0, 0.0, # top-right \
-1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 0.0, 0.0, # top-left \
-1.0, -1.0, -1.0, 0.0, 1.0, -1.0, 0.0, 0.0, # bottom-left \
-1.0, -1.0, -1.0, 0.0, 1.0, -1.0, 0.0, 0.0, # bottom-left \
-1.0, -1.0, 1.0, 0.0, 0.0, -1.0, 0.0, 0.0, # bottom-right \
-1.0, 1.0, 1.0, 1.0, 0.0, -1.0, 0.0, 0.0, # top-right \
# right face \
1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, # top-left \
1.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, 0.0, # bottom-right \
1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 0.0, 0.0, # top-right \
1.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, 0.0, # bottom-right \
1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, # top-left \
1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, # bottom-left \
# bottom face \
-1.0, -1.0, -1.0, 0.0, 1.0, 0.0, -1.0, 0.0, # top-right \
1.0, -1.0, -1.0, 1.0, 1.0, 0.0, -1.0, 0.0, # top-left \
1.0, -1.0, 1.0, 1.0, 0.0, 0.0, -1.0, 0.0, # bottom-left \
1.0, -1.0, 1.0, 1.0, 0.0, 0.0, -1.0, 0.0, # bottom-left \
-1.0, -1.0, 1.0, 0.0, 0.0, 0.0, -1.0, 0.0, # bottom-right \
-1.0, -1.0, -1.0, 0.0, 1.0, 0.0, -1.0, 0.0, # top-right \
# top face \
-1.0, 1.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.0, # top-left \
1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, # bottom-right \
1.0, 1.0, -1.0, 1.0, 1.0, 0.0, 1.0, 0.0, # top-right \
1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, # bottom-right \
-1.0, 1.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.0, # top-left \
-1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, # bottom-left \
]
plane = [ \
# // positions // texCoords // normals \
25.0, -0.5, 25.0, 25.0, 0.0, 0.0, 1.0, 0.0, \
-25.0, -0.5, 25.0, 0.0, 0.0, 0.0, 1.0, 0.0, \
-25.0, -0.5, -25.0, 0.0, 25.0, 0.0, 1.0, 0.0, \
\
25.0, -0.5, 25.0, 25.0, 0.0, 0.0, 1.0, 0.0, \
-25.0, -0.5, -25.0, 0.0, 25.0, 0.0, 1.0, 0.0, \
25.0, -0.5, -25.0, 25.0, 25.0, 0.0, 1.0, 0.0, \
]
grass = [ \
0.0, 0.5, 0.0, 0.0, 0.0, \
0.0, -0.5, 0.0, 0.0, 1.0, \
1.0, -0.5, 0.0, 1.0, 1.0, \
0.0, 0.5, 0.0, 0.0, 0.0, \
1.0, -0.5, 0.0, 1.0, 1.0, \
1.0, 0.5, 0.0, 1.0, 0.0, \
]
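# Vertex layouts by attribute code: 0x0012 = 2D position + uv (quad),
# 0x0111 = 3D position + uv + normal (cube, plane), 0x0011 = 3D position + uv (grass).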
if os.path.exists(target):
shutil.rmtree(target,True)
os.makedirs(target)
def writequad(name, quad):
f = open(target+name,"wb")
writeh(f, 0, 6, 0x0012)
idx = 0
for x in range(0, 6):
writef(f,quad[idx])
idx=idx+1
writef(f,quad[idx])
idx=idx+1
writef(f,quad[idx])
idx=idx+1
writef(f,quad[idx])
idx=idx+1
f.close()
writequad("quad.mesh", quad)
writequad("quad2.mesh", quad2)
f = open(target+"cube.mesh","wb")
writeh(f, 0, 36, 0x0111)
idx = 0
for x in range(0,36):
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
writef(f,cube[idx])
idx=idx+1
f.close()
f = open(target+"plane.mesh","wb")
writeh(f, 0, 6, 0x0111)
idx = 0
for x in range(0,6):
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
writef(f,plane[idx])
idx=idx+1
f.close()
f = open(target+"grass.mesh","wb")
writeh(f, 0, 6, 0x0011)
idx = 0
for x in range(0,6):
writef(f,grass[idx])
idx=idx+1
writef(f,grass[idx])
idx=idx+1
writef(f,grass[idx])
idx=idx+1
writef(f,grass[idx])
idx=idx+1
writef(f,grass[idx])
idx=idx+1
f.close()
# round-trip check: read plane.mesh back and print its header and vertices
f = open(target+"plane.mesh", 'rb')
lod = readh(f)
num=readi(f)
ty = readi(f)
inds=readi(f)
print("lod",lod,"inds:",inds,"num:",num,"type:",ty)
for x in range(0,num):
a1 = readf(f)
a2 = readf(f)
a3 = readf(f)
a4 = readf(f)
a5 = readf(f)
a6 = readf(f)
a7 = readf(f)
a8 = readf(f)
print(a1,a2,a3,a4,a5,a6,a7,a8)
f.close() | [
"os.path.exists",
"os.makedirs",
"struct.pack",
"struct.unpack",
"shutil.rmtree"
] | [((4662, 4684), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (4676, 4684), False, 'import os\n'), ((4722, 4741), 'os.makedirs', 'os.makedirs', (['target'], {}), '(target)\n', (4733, 4741), False, 'import os\n'), ((224, 243), 'struct.pack', 'struct.pack', (['"""f"""', 'v'], {}), "('f', v)\n", (235, 243), False, 'import struct\n'), ((276, 295), 'struct.pack', 'struct.pack', (['"""I"""', 'v'], {}), "('I', v)\n", (287, 295), False, 'import struct\n'), ((4690, 4717), 'shutil.rmtree', 'shutil.rmtree', (['target', '(True)'], {}), '(target, True)\n', (4703, 4717), False, 'import shutil\n'), ((360, 379), 'struct.pack', 'struct.pack', (['"""h"""', '(1)'], {}), "('h', 1)\n", (371, 379), False, 'import struct\n'), ((489, 510), 'struct.unpack', 'struct.unpack', (['"""h"""', 'v'], {}), "('h', v)\n", (502, 510), False, 'import struct\n'), ((549, 570), 'struct.unpack', 'struct.unpack', (['"""I"""', 'v'], {}), "('I', v)\n", (562, 570), False, 'import struct\n'), ((609, 630), 'struct.unpack', 'struct.unpack', (['"""f"""', 'v'], {}), "('f', v)\n", (622, 630), False, 'import struct\n')] |
from model.contact import Contact
import random
import pytest
def test_change_contact(app, db, check_ui):
if app.contacts.count() == 0:
with pytest.allure.step("Create new contact if contact list is empty"):
app.contacts.add_new_contact(Contact(firstname="Firstname", middlename="Middlename", lastname="Lastname",
nickname="Nickname", title="Mr", company="Company", address="Address line, 10317, Walterst.5",
phone_home="+74951234567", phone_mobile="+79031234567",
phone_work="+74961234567",
fax_number="+74961234560", email="<EMAIL>", email2="<EMAIL>", email3="<EMAIL>",
homepage="http://example.com", birthday_day="21", birthday_month="12",
birthday_year="1990", anniversary_day="18", anniversary_month="10",
anniversary_year="2010", secondary_address="1234567",
secondary_phone="123-123456", notes="notes"))
with pytest.allure.step("Given a contact list"):
old_contacts = db.get_contact_list()
with pytest.allure.step("choose random contact"):
contact = random.choice(old_contacts)
with pytest.allure.step("modify this random contact"):
contact.firstname = "Updated"
contact.lastname = "Updated"
app.contacts.change_contact_by_id(contact.id, contact)
with pytest.allure.step("Get contact list again"):
new_contacts = db.get_contact_list()
with pytest.allure.step("Compare length of old list with length of new list "):
assert len(old_contacts) == len(new_contacts)
if check_ui:
app_contacts = app.contacts.get_contacts_list()
for new_contact in new_contacts:
for app_contact in app_contacts:
if new_contact.id == app_contact.id:
if new_contact.id == contact.id:
assert new_contact.firstname == contact.firstname
else:
assert new_contact.firstname == app_contact.firstname
break
#old_contacts[index] = contact
#assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
| [
"model.contact.Contact",
"random.choice",
"pytest.allure.step"
] | [((1206, 1248), 'pytest.allure.step', 'pytest.allure.step', (['"""Given a contact list"""'], {}), "('Given a contact list')\n", (1224, 1248), False, 'import pytest\n'), ((1304, 1347), 'pytest.allure.step', 'pytest.allure.step', (['"""choose random contact"""'], {}), "('choose random contact')\n", (1322, 1347), False, 'import pytest\n'), ((1367, 1394), 'random.choice', 'random.choice', (['old_contacts'], {}), '(old_contacts)\n', (1380, 1394), False, 'import random\n'), ((1404, 1452), 'pytest.allure.step', 'pytest.allure.step', (['"""modify this random contact"""'], {}), "('modify this random contact')\n", (1422, 1452), False, 'import pytest\n'), ((1601, 1645), 'pytest.allure.step', 'pytest.allure.step', (['"""Get contact list again"""'], {}), "('Get contact list again')\n", (1619, 1645), False, 'import pytest\n'), ((1701, 1774), 'pytest.allure.step', 'pytest.allure.step', (['"""Compare length of old list with length of new list """'], {}), "('Compare length of old list with length of new list ')\n", (1719, 1774), False, 'import pytest\n'), ((155, 220), 'pytest.allure.step', 'pytest.allure.step', (['"""Create new contact if contact list is empty"""'], {}), "('Create new contact if contact list is empty')\n", (173, 220), False, 'import pytest\n'), ((263, 870), 'model.contact.Contact', 'Contact', ([], {'firstname': '"""Firstname"""', 'middlename': '"""Middlename"""', 'lastname': '"""Lastname"""', 'nickname': '"""Nickname"""', 'title': '"""Mr"""', 'company': '"""Company"""', 'address': '"""Address line, 10317, Walterst.5"""', 'phone_home': '"""+74951234567"""', 'phone_mobile': '"""+79031234567"""', 'phone_work': '"""+74961234567"""', 'fax_number': '"""+74961234560"""', 'email': '"""<EMAIL>"""', 'email2': '"""<EMAIL>"""', 'email3': '"""<EMAIL>"""', 'homepage': '"""http://example.com"""', 'birthday_day': '"""21"""', 'birthday_month': '"""12"""', 'birthday_year': '"""1990"""', 'anniversary_day': '"""18"""', 'anniversary_month': '"""10"""', 'anniversary_year': '"""2010"""', 'secondary_address': '"""1234567"""', 'secondary_phone': '"""123-123456"""', 'notes': '"""notes"""'}), "(firstname='Firstname', middlename='Middlename', lastname='Lastname',\n nickname='Nickname', title='Mr', company='Company', address=\n 'Address line, 10317, Walterst.5', phone_home='+74951234567',\n phone_mobile='+79031234567', phone_work='+74961234567', fax_number=\n '+74961234560', email='<EMAIL>', email2='<EMAIL>', email3='<EMAIL>',\n homepage='http://example.com', birthday_day='21', birthday_month='12',\n birthday_year='1990', anniversary_day='18', anniversary_month='10',\n anniversary_year='2010', secondary_address='1234567', secondary_phone=\n '123-123456', notes='notes')\n", (270, 870), False, 'from model.contact import Contact\n')] |
#!/usr/bin/env python3
from math import pi, cos, sin
from drone_api import Drone_api
speed = 2 # m/s
angles = [i/180*pi for i in range(0, 360, 5)] * 2
vel_x = [cos(i)*speed for i in angles]
vel_y = [sin(i)*speed for i in angles]
vel_z_up = 2
vel_z_down = -1
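# Flight plan: climb for 5 s, hover, sweep the horizontal velocity heading
# through two full circles while ascending, hover, then repeat descending.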
drone = Drone_api()
drone.start()
drone.sleep(3)
drone.set_velocity(0, 0, speed, 0)
drone.sleep(5)
drone.set_velocity(0, 0, 0, 0)
drone.sleep(10)
for i in range(len(vel_x)):
if drone.is_shutdown():
break
drone.set_velocity(vel_x[i], vel_y[i], vel_z_up)
drone.sleep(0.1)
drone.set_velocity(0, 0, 0, 0)
drone.sleep(10)
for i in range(len(vel_x)):
if drone.is_shutdown():
break
drone.set_velocity(vel_x[i], vel_y[i], vel_z_down)
drone.sleep(0.1)
| [
"math.cos",
"drone_api.Drone_api",
"math.sin"
] | [((270, 281), 'drone_api.Drone_api', 'Drone_api', ([], {}), '()\n', (279, 281), False, 'from drone_api import Drone_api\n'), ((163, 169), 'math.cos', 'cos', (['i'], {}), '(i)\n', (166, 169), False, 'from math import pi, cos, sin\n'), ((202, 208), 'math.sin', 'sin', (['i'], {}), '(i)\n', (205, 208), False, 'from math import pi, cos, sin\n')] |
from sdl2._sdl2 import ffi, lib
class SDLError(Exception):
pass
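# SDL signals failure with a negative return code or a NULL pointer; these
# helpers turn that convention into SDLError exceptions carrying the message
# from SDL_GetError().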
def check_int_err(int_return_value):
if int_return_value >= 0:
return int_return_value
else:
error_message = ffi.string(lib.SDL_GetError())
lib.SDL_ClearError()
raise SDLError(error_message)
def check_ptr_err(ptr_return_value):
if ptr_return_value != ffi.NULL:
return ptr_return_value
else:
error_message = ffi.string(lib.SDL_GetError())
lib.SDL_ClearError()
raise SDLError(error_message)
| [
"sdl2._sdl2.lib.SDL_GetError",
"sdl2._sdl2.lib.SDL_ClearError"
] | [((244, 264), 'sdl2._sdl2.lib.SDL_ClearError', 'lib.SDL_ClearError', ([], {}), '()\n', (262, 264), False, 'from sdl2._sdl2 import ffi, lib\n'), ((483, 503), 'sdl2._sdl2.lib.SDL_ClearError', 'lib.SDL_ClearError', ([], {}), '()\n', (501, 503), False, 'from sdl2._sdl2 import ffi, lib\n'), ((216, 234), 'sdl2._sdl2.lib.SDL_GetError', 'lib.SDL_GetError', ([], {}), '()\n', (232, 234), False, 'from sdl2._sdl2 import ffi, lib\n'), ((455, 473), 'sdl2._sdl2.lib.SDL_GetError', 'lib.SDL_GetError', ([], {}), '()\n', (471, 473), False, 'from sdl2._sdl2 import ffi, lib\n')] |
import logging
from astropy.table import Table
import sys
import os
sys.path.append(os.path.realpath(os.path.dirname(os.path.abspath(__file__)) + "/../../.."))
from scripts.create import insert_many_rows
from taipan.core import polar2cart
def execute(cursor, science_file=None):
logging.info("Loading Science")
if not science_file:
logging.info("No file passed - aborting loading science")
return
# Get science
science_table = Table.read(science_file)
# Do some stuff to convert science_table into values_table
# (This is dependent on the structure of science_file)
values_table1 = [[row['uniqid'], row['ra'], row['dec'],
True, False, False]
+ list(polar2cart((row['ra'], row['dec'])))
for row in science_table]
columns1 = ["TARGET_ID", "RA", "DEC", "IS_SCIENCE", "IS_STANDARD",
"IS_GUIDE", "UX", "UY", "UZ"]
values_table2 = [[row['uniqid'], row['priority'],
row['is_H0'], row['is_vpec'], row['is_lowz']]
for row in science_table]
columns2 = ["TARGET_ID", "PRIORITY", "IS_H0", "IS_VPEC", "IS_LOWZ"]
# Insert into database
if cursor is not None:
insert_many_rows(cursor, "target", values_table1, columns=columns1)
insert_many_rows(cursor, "science_target", values_table2, columns=columns2)
logging.info("Loaded Science")
else:
logging.info("No database - however, dry-run of loading successful")
| [
"taipan.core.polar2cart",
"scripts.create.insert_many_rows",
"os.path.abspath",
"logging.info",
"astropy.table.Table.read"
] | [((286, 317), 'logging.info', 'logging.info', (['"""Loading Science"""'], {}), "('Loading Science')\n", (298, 317), False, 'import logging\n'), ((464, 488), 'astropy.table.Table.read', 'Table.read', (['science_file'], {}), '(science_file)\n', (474, 488), False, 'from astropy.table import Table\n'), ((352, 409), 'logging.info', 'logging.info', (['"""No file passed - aborting loading science"""'], {}), "('No file passed - aborting loading science')\n", (364, 409), False, 'import logging\n'), ((1249, 1316), 'scripts.create.insert_many_rows', 'insert_many_rows', (['cursor', '"""target"""', 'values_table1'], {'columns': 'columns1'}), "(cursor, 'target', values_table1, columns=columns1)\n", (1265, 1316), False, 'from scripts.create import insert_many_rows\n'), ((1325, 1400), 'scripts.create.insert_many_rows', 'insert_many_rows', (['cursor', '"""science_target"""', 'values_table2'], {'columns': 'columns2'}), "(cursor, 'science_target', values_table2, columns=columns2)\n", (1341, 1400), False, 'from scripts.create import insert_many_rows\n'), ((1409, 1439), 'logging.info', 'logging.info', (['"""Loaded Science"""'], {}), "('Loaded Science')\n", (1421, 1439), False, 'import logging\n'), ((1458, 1526), 'logging.info', 'logging.info', (['"""No database - however, dry-run of loading successful"""'], {}), "('No database - however, dry-run of loading successful')\n", (1470, 1526), False, 'import logging\n'), ((117, 142), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (132, 142), False, 'import os\n'), ((744, 779), 'taipan.core.polar2cart', 'polar2cart', (["(row['ra'], row['dec'])"], {}), "((row['ra'], row['dec']))\n", (754, 779), False, 'from taipan.core import polar2cart\n')] |
import os
import re
import bcolz
import keras
import itertools
import numpy as np
import _pickle as pickle
from itertools import chain
from matplotlib import pyplot as plt
from keras.utils.data_utils import get_file
from numpy.random import normal
def plot_confusion_matrix(cm, classes, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
(This function is copied from the scikit docs.)
"""
plt.figure()
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print(cm)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j], horizontalalignment="center", color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
def load_array(fname):
return bcolz.open(fname)[:]
def get_glove_dataset(dataset):
"""Download the requested glove dataset from files.fast.ai
and return a location that can be passed to load_vectors.
"""
md5sums = {'6B.50d': '8e1557d1228decbda7db6dfd81cd9909',
'6B.100d': 'c92dbbeacde2b0384a43014885a60b2c',
'6B.200d': 'af271b46c04b0b2e41a84d8cd806178d',
'6B.300d': '30290210376887dcc6d0a5a6374d8255'}
glove_path = os.path.abspath('data/glove/results')
return get_file(dataset,
'http://files.fast.ai/models/glove/' + dataset + '.tgz',
cache_subdir=glove_path,
md5_hash=md5sums.get(dataset, None),
untar=True)
def load_vectors(loc):
return (load_array(loc+'.dat'),
pickle.load(open(loc+'_words.pkl','rb'), encoding='latin1'),
pickle.load(open(loc+'_idx.pkl','rb'), encoding='latin1'))
def create_embeddings(max_features, vecs, wordidx, idx2word):
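    # Build a (max_features, n_fact) embedding matrix: copy the GloVe vector
    # for each plain alphanumeric token, randomly initialize everything else.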
n_fact = vecs.shape[1]
emb = np.zeros((max_features, n_fact))
for i in range(1,len(emb)):
word = idx2word[i]
if word and re.match(r"^[a-zA-Z0-9\-]*$", word):
src_idx = wordidx[word]
emb[i] = vecs[src_idx]
else:
# If we can't find the word in glove, randomly initialize
emb[i] = normal(scale=0.6, size=(n_fact,))
# This is our "rare word" id - we want to randomly initialize
emb[-1] = normal(scale=0.6, size=(n_fact,))
emb/=3
return emb | [
"matplotlib.pyplot.imshow",
"numpy.random.normal",
"matplotlib.pyplot.text",
"bcolz.open",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.colorbar",
"re.match",
"matplotlib.pyplot.figure",
"numpy.zeros",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.tight_layout",
"os.path.abspath",
"matplotlib.pyplot.title"
] | [((543, 555), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (553, 555), True, 'from matplotlib import pyplot as plt\n'), ((560, 610), 'matplotlib.pyplot.imshow', 'plt.imshow', (['cm'], {'interpolation': '"""nearest"""', 'cmap': 'cmap'}), "(cm, interpolation='nearest', cmap=cmap)\n", (570, 610), True, 'from matplotlib import pyplot as plt\n'), ((615, 631), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (624, 631), True, 'from matplotlib import pyplot as plt\n'), ((636, 650), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (648, 650), True, 'from matplotlib import pyplot as plt\n'), ((696, 740), 'matplotlib.pyplot.xticks', 'plt.xticks', (['tick_marks', 'classes'], {'rotation': '(45)'}), '(tick_marks, classes, rotation=45)\n', (706, 740), True, 'from matplotlib import pyplot as plt\n'), ((745, 776), 'matplotlib.pyplot.yticks', 'plt.yticks', (['tick_marks', 'classes'], {}), '(tick_marks, classes)\n', (755, 776), True, 'from matplotlib import pyplot as plt\n'), ((1093, 1111), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1109, 1111), True, 'from matplotlib import pyplot as plt\n'), ((1116, 1140), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True label"""'], {}), "('True label')\n", (1126, 1140), True, 'from matplotlib import pyplot as plt\n'), ((1145, 1174), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Predicted label"""'], {}), "('Predicted label')\n", (1155, 1174), True, 'from matplotlib import pyplot as plt\n'), ((1665, 1702), 'os.path.abspath', 'os.path.abspath', (['"""data/glove/results"""'], {}), "('data/glove/results')\n", (1680, 1702), False, 'import os\n'), ((2239, 2271), 'numpy.zeros', 'np.zeros', (['(max_features, n_fact)'], {}), '((max_features, n_fact))\n', (2247, 2271), True, 'import numpy as np\n'), ((2680, 2713), 'numpy.random.normal', 'normal', ([], {'scale': '(0.6)', 'size': '(n_fact,)'}), '(scale=0.6, size=(n_fact,))\n', (2686, 2713), False, 'from numpy.random import normal\n'), ((984, 1092), 'matplotlib.pyplot.text', 'plt.text', (['j', 'i', 'cm[i, j]'], {'horizontalalignment': '"""center"""', 'color': "('white' if cm[i, j] > thresh else 'black')"}), "(j, i, cm[i, j], horizontalalignment='center', color='white' if cm[\n i, j] > thresh else 'black')\n", (992, 1092), True, 'from matplotlib import pyplot as plt\n'), ((1214, 1231), 'bcolz.open', 'bcolz.open', (['fname'], {}), '(fname)\n', (1224, 1231), False, 'import bcolz\n'), ((2352, 2387), 're.match', 're.match', (['"""^[a-zA-Z0-9\\\\-]*$"""', 'word'], {}), "('^[a-zA-Z0-9\\\\-]*$', word)\n", (2360, 2387), False, 'import re\n'), ((2565, 2598), 'numpy.random.normal', 'normal', ([], {'scale': '(0.6)', 'size': '(n_fact,)'}), '(scale=0.6, size=(n_fact,))\n', (2571, 2598), False, 'from numpy.random import normal\n')] |
from FoxySheep.generated.InputFormLexer import InputFormLexer
from antlr4 import InputStream, CommonTokenStream
from FoxySheep.generated.InputFormParser import InputFormParser
from FoxySheep.tree.pretty_printer import pretty_print_string
from FoxySheep.transform.if2py import input_form_to_python
import decimal
import math
import yaml
import os.path as osp
def get_srcdir():
filename = osp.normcase(osp.dirname(osp.abspath(__file__)))
return osp.realpath(filename)
srcdir = get_srcdir()
testdata_dir = osp.join(srcdir, "parse_expressions")
last_tree_str = ""
def parse_tree_fn(expr: str, show_tree_fn):
global last_tree_str
lexer = InputFormLexer(InputStream(expr))
parser = InputFormParser(CommonTokenStream(lexer))
tree = parser.prog()
last_tree_str = show_tree_fn(tree, parser.ruleNames)
return tree
pp_fn = lambda tree, rule_names: pretty_print_string(tree, rule_names, compact=True)
show_tests = True
out_results = [None]
try:
import numpy
except ImportError:
numpy = None
eval_namespace = {
"out_results": out_results,
"missing_modules": [math, decimal, numpy],
}
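# Namespace handed to eval() when executing each translated expression below.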
def do_test(input_base: str):
testdata_path = osp.join(testdata_dir, input_base)
with open(testdata_path, "r") as yaml_file:
test_data = yaml.load(yaml_file, Loader=yaml.FullLoader)
# print(test_data)
# translation_fn = input_form_to_python
for expr in test_data:
numpy_str = input_form_to_python(
expr, parse_tree_fn=parse_tree_fn, show_tree_fn=pp_fn, mode="numpy"
)
print(numpy_str)
assert eval(numpy_str, None, eval_namespace)
if numpy:
def test_foxy_to_numpy():
do_test("numpy.yaml")
if __name__ == "__main__":
test_foxy_to_numpy()
| [
"os.path.join",
"FoxySheep.tree.pretty_printer.pretty_print_string",
"yaml.load",
"os.path.realpath",
"antlr4.InputStream",
"antlr4.CommonTokenStream",
"os.path.abspath",
"FoxySheep.transform.if2py.input_form_to_python"
] | [((516, 553), 'os.path.join', 'osp.join', (['srcdir', '"""parse_expressions"""'], {}), "(srcdir, 'parse_expressions')\n", (524, 553), True, 'import os.path as osp\n'), ((454, 476), 'os.path.realpath', 'osp.realpath', (['filename'], {}), '(filename)\n', (466, 476), True, 'import os.path as osp\n'), ((879, 930), 'FoxySheep.tree.pretty_printer.pretty_print_string', 'pretty_print_string', (['tree', 'rule_names'], {'compact': '(True)'}), '(tree, rule_names, compact=True)\n', (898, 930), False, 'from FoxySheep.tree.pretty_printer import pretty_print_string\n'), ((1194, 1228), 'os.path.join', 'osp.join', (['testdata_dir', 'input_base'], {}), '(testdata_dir, input_base)\n', (1202, 1228), True, 'import os.path as osp\n'), ((672, 689), 'antlr4.InputStream', 'InputStream', (['expr'], {}), '(expr)\n', (683, 689), False, 'from antlr4 import InputStream, CommonTokenStream\n'), ((720, 744), 'antlr4.CommonTokenStream', 'CommonTokenStream', (['lexer'], {}), '(lexer)\n', (737, 744), False, 'from antlr4 import InputStream, CommonTokenStream\n'), ((1297, 1341), 'yaml.load', 'yaml.load', (['yaml_file'], {'Loader': 'yaml.FullLoader'}), '(yaml_file, Loader=yaml.FullLoader)\n', (1306, 1341), False, 'import yaml\n'), ((1461, 1554), 'FoxySheep.transform.if2py.input_form_to_python', 'input_form_to_python', (['expr'], {'parse_tree_fn': 'parse_tree_fn', 'show_tree_fn': 'pp_fn', 'mode': '"""numpy"""'}), "(expr, parse_tree_fn=parse_tree_fn, show_tree_fn=pp_fn,\n mode='numpy')\n", (1481, 1554), False, 'from FoxySheep.transform.if2py import input_form_to_python\n'), ((419, 440), 'os.path.abspath', 'osp.abspath', (['__file__'], {}), '(__file__)\n', (430, 440), True, 'import os.path as osp\n')] |
from flask import Flask
def create_app(test_config=None):
app = Flask(__name__)
from .routes import hello_world_bp
app.register_blueprint(hello_world_bp)
from .routes import dog_bp
app.register_blueprint(dog_bp)
return app | [
"flask.Flask"
] | [((69, 84), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (74, 84), False, 'from flask import Flask\n')] |
# Generated by Django 3.2.1 on 2021-06-16 22:08
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Paper',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=256, unique=True)),
('author', models.CharField(max_length=256)),
('conference', models.CharField(max_length=128)),
('year', models.IntegerField()),
('download_link', models.URLField(max_length=256)),
('abstract', models.TextField()),
],
options={
'verbose_name': 'paperinfo',
'verbose_name_plural': 'paperinfo',
'ordering': ['year', 'title'],
},
),
]
| [
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.BigAutoField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((301, 397), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (320, 397), False, 'from django.db import migrations, models\n'), ((422, 467), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'unique': '(True)'}), '(max_length=256, unique=True)\n', (438, 467), False, 'from django.db import migrations, models\n'), ((497, 529), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (513, 529), False, 'from django.db import migrations, models\n'), ((563, 595), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (579, 595), False, 'from django.db import migrations, models\n'), ((623, 644), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (642, 644), False, 'from django.db import migrations, models\n'), ((681, 712), 'django.db.models.URLField', 'models.URLField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (696, 712), False, 'from django.db import migrations, models\n'), ((744, 762), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (760, 762), False, 'from django.db import migrations, models\n')] |
import media
import fresh_tomatoes
if __name__ == "__main__":
#Create a few movie objects
toy_story = media.Movie("To<NAME>",
"toy story",
"http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg",
"https://www.youtube.com/watch?v=vwyZH85NQC4")
hunger_games = media.Movie("hunger game",
"hunger_games",
"http://img1.wikia.nocookie.net/__cb20150109223445/thehungergames/images/0/09/Mockingjay_part_1_poster_2.jpg",
"https://youtu.be/ZL_td1j3BQs")
se7en = media.Movie("Se7en",
"Two detectives, a rookie and a veteran, hunt a serial killer who uses the seven deadly sins as his modus operandi.",
"http://ia.media-imdb.com/images/M/MV5BMTQwNTU3MTE4NF5BMl5BanBnXkFtZTcwOTgxNDM2Mg@@._V1_SX214_AL_.jpg",
"https://youtu.be/J4YV2_TcCoE")
godFather = media.Movie("The Godfather",
"The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.",
"http://ia.media-imdb.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBnXkFtZTcwMDA5Mzg3OA@@._V1_SX214_AL_.jpg",
"https://youtu.be/sY1S34973zA")
movies = [toy_story, hunger_games, se7en, godFather]
fresh_tomatoes.open_movies_page(movies)
| [
"fresh_tomatoes.open_movies_page",
"media.Movie"
] | [((107, 261), 'media.Movie', 'media.Movie', (['"""To<NAME>"""', '"""toy story"""', '"""http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg"""', '"""https://www.youtube.com/watch?v=vwyZH85NQC4"""'], {}), "('To<NAME>', 'toy story',\n 'http://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg',\n 'https://www.youtube.com/watch?v=vwyZH85NQC4')\n", (118, 261), False, 'import media\n'), ((287, 481), 'media.Movie', 'media.Movie', (['"""hunger game"""', '"""hunger_games"""', '"""http://img1.wikia.nocookie.net/__cb20150109223445/thehungergames/images/0/09/Mockingjay_part_1_poster_2.jpg"""', '"""https://youtu.be/ZL_td1j3BQs"""'], {}), "('hunger game', 'hunger_games',\n 'http://img1.wikia.nocookie.net/__cb20150109223445/thehungergames/images/0/09/Mockingjay_part_1_poster_2.jpg'\n , 'https://youtu.be/ZL_td1j3BQs')\n", (298, 481), False, 'import media\n'), ((573, 865), 'media.Movie', 'media.Movie', (['"""Se7en"""', '"""Two detectives, a rookie and a veteran, hunt a serial killer who uses the seven deadly sins as his modus operandi."""', '"""http://ia.media-imdb.com/images/M/MV5BMTQwNTU3MTE4NF5BMl5BanBnXkFtZTcwOTgxNDM2Mg@@._V1_SX214_AL_.jpg"""', '"""https://youtu.be/J4YV2_TcCoE"""'], {}), "('Se7en',\n 'Two detectives, a rookie and a veteran, hunt a serial killer who uses the seven deadly sins as his modus operandi.'\n ,\n 'http://ia.media-imdb.com/images/M/MV5BMTQwNTU3MTE4NF5BMl5BanBnXkFtZTcwOTgxNDM2Mg@@._V1_SX214_AL_.jpg'\n , 'https://youtu.be/J4YV2_TcCoE')\n", (584, 865), False, 'import media\n'), ((929, 1230), 'media.Movie', 'media.Movie', (['"""The Godfather"""', '"""The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son."""', '"""http://ia.media-imdb.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBnXkFtZTcwMDA5Mzg3OA@@._V1_SX214_AL_.jpg"""', '"""https://youtu.be/sY1S34973zA"""'], {}), "('The Godfather',\n 'The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.'\n ,\n 'http://ia.media-imdb.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBnXkFtZTcwMDA5Mzg3OA@@._V1_SX214_AL_.jpg'\n , 'https://youtu.be/sY1S34973zA')\n", (940, 1230), False, 'import media\n'), ((1349, 1388), 'fresh_tomatoes.open_movies_page', 'fresh_tomatoes.open_movies_page', (['movies'], {}), '(movies)\n', (1380, 1388), False, 'import fresh_tomatoes\n')] |
from math import sqrt
from random import choice
def knn(k, neighbors, point):
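    #neighbors: list of [feature_vector, label]; point: [feature_vector, _]
    #returns the majority label among the k nearest (Euclidean) neighbors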
#distances
d = []
for n in neighbors:
soma = 0
for i in range(0,len(n[0])):
soma += (n[0][i]-point[0][i])**2
d.append([sqrt(soma), n[1]])
#sorting distances
flag=1
while(flag!=0):
flag=0
for i in range(0,len(d)-1):
if d[i][0]>d[i+1][0]:
aux = d[i]
d[i]=d[i+1]
d[i+1]=aux
flag += 1
#dictionary with k nearest neighbors
n_group = {}
for i in range(0,k):
if d[i][1] in n_group:
n_group[d[i][1]] += 1
else:
n_group[d[i][1]] = 1
bigger = max(n_group, key=n_group.get)
group_list = []
for c in n_group:
if n_group[c] == n_group[bigger]:
group_list.append(c)
if len(group_list) == 1:
return group_list[0]
else:
        #random tie-break when two or more groups share the top count
return choice(group_list)
'''
Example:
N = [[[0,1], 'a'], [[0,0], 'b'], [[1,1], 'c'], [[1,2], 'd'], [[2,2], 'e'], [[0,0], 'a'], [[0,0], 'b'], [[3,6], 'c'], [[9,5], 'd'], [[0,0.5], 'e']]
k = 3
x = [[0,0], ' ']
knn(k, N, x)
'''
| [
"random.choice",
"math.sqrt"
] | [((822, 840), 'random.choice', 'choice', (['group_list'], {}), '(group_list)\n', (828, 840), False, 'from random import choice\n'), ((213, 223), 'math.sqrt', 'sqrt', (['soma'], {}), '(soma)\n', (217, 223), False, 'from math import sqrt\n')] |
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from archiv.api_views import ArchResourceViewSet
from entities.api_views import *
from vocabs import api_views
router = routers.DefaultRouter()
router.register(r'geojson', GeoJsonViewSet, basename='places')
router.register(r'skoslabels', api_views.SkosLabelViewSet)
router.register(r'skosnamespaces', api_views.SkosNamespaceViewSet)
router.register(r'skosconceptschemes', api_views.SkosConceptSchemeViewSet)
router.register(r'skosconcepts', api_views.SkosConceptViewSet)
router.register(r'archresource', ArchResourceViewSet)
router.register(r'places', PlaceViewSet)
router.register(r'persons', PersonViewSet)
router.register(r'institutions', InstitutionViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^netviz/', include('netviz.urls', namespace="netviz")),
url(r'^wordcloud/', include('wordcloud.urls', namespace="wordcloud")),
url(r'^calheatmap/', include('calheatmap.urls', namespace="calheatmap")),
url(r'^admin/', admin.site.urls),
url(r'^archiv/', include('archiv.urls', namespace='archiv')),
url(r'^archiv-ac/', include('archiv.dal_urls', namespace='archiv-ac')),
url(r'^vocabs/', include('vocabs.urls', namespace='vocabs')),
url(r'^vocabs-ac/', include('vocabs.dal_urls', namespace='vocabs-ac')),
url(r'^entities/', include('entities.urls', namespace='entities')),
url(r'^entities-ac/', include('entities.dal_urls', namespace='entities-ac')),
url(r'^charts/', include('charts.urls', namespace='charts')),
url(r'^search/', include('haystack.urls')),
url(r'^transkribus/', include('transkribus.urls', namespace='transkribus')),
url(r'^', include('webpage.urls', namespace='webpage')),
]
| [
"django.conf.urls.include",
"django.conf.urls.url",
"rest_framework.routers.DefaultRouter"
] | [((231, 254), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (252, 254), False, 'from rest_framework import routers\n'), ((1139, 1170), 'django.conf.urls.url', 'url', (['"""^admin/"""', 'admin.site.urls'], {}), "('^admin/', admin.site.urls)\n", (1142, 1170), False, 'from django.conf.urls import url, include\n'), ((809, 829), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (816, 829), False, 'from django.conf.urls import url, include\n'), ((855, 913), 'django.conf.urls.include', 'include', (['"""rest_framework.urls"""'], {'namespace': '"""rest_framework"""'}), "('rest_framework.urls', namespace='rest_framework')\n", (862, 913), False, 'from django.conf.urls import url, include\n'), ((937, 979), 'django.conf.urls.include', 'include', (['"""netviz.urls"""'], {'namespace': '"""netviz"""'}), "('netviz.urls', namespace='netviz')\n", (944, 979), False, 'from django.conf.urls import url, include\n'), ((1006, 1054), 'django.conf.urls.include', 'include', (['"""wordcloud.urls"""'], {'namespace': '"""wordcloud"""'}), "('wordcloud.urls', namespace='wordcloud')\n", (1013, 1054), False, 'from django.conf.urls import url, include\n'), ((1082, 1132), 'django.conf.urls.include', 'include', (['"""calheatmap.urls"""'], {'namespace': '"""calheatmap"""'}), "('calheatmap.urls', namespace='calheatmap')\n", (1089, 1132), False, 'from django.conf.urls import url, include\n'), ((1194, 1236), 'django.conf.urls.include', 'include', (['"""archiv.urls"""'], {'namespace': '"""archiv"""'}), "('archiv.urls', namespace='archiv')\n", (1201, 1236), False, 'from django.conf.urls import url, include\n'), ((1263, 1312), 'django.conf.urls.include', 'include', (['"""archiv.dal_urls"""'], {'namespace': '"""archiv-ac"""'}), "('archiv.dal_urls', namespace='archiv-ac')\n", (1270, 1312), False, 'from django.conf.urls import url, include\n'), ((1336, 1378), 'django.conf.urls.include', 'include', (['"""vocabs.urls"""'], {'namespace': '"""vocabs"""'}), "('vocabs.urls', namespace='vocabs')\n", (1343, 1378), False, 'from django.conf.urls import url, include\n'), ((1405, 1454), 'django.conf.urls.include', 'include', (['"""vocabs.dal_urls"""'], {'namespace': '"""vocabs-ac"""'}), "('vocabs.dal_urls', namespace='vocabs-ac')\n", (1412, 1454), False, 'from django.conf.urls import url, include\n'), ((1480, 1526), 'django.conf.urls.include', 'include', (['"""entities.urls"""'], {'namespace': '"""entities"""'}), "('entities.urls', namespace='entities')\n", (1487, 1526), False, 'from django.conf.urls import url, include\n'), ((1555, 1608), 'django.conf.urls.include', 'include', (['"""entities.dal_urls"""'], {'namespace': '"""entities-ac"""'}), "('entities.dal_urls', namespace='entities-ac')\n", (1562, 1608), False, 'from django.conf.urls import url, include\n'), ((1632, 1674), 'django.conf.urls.include', 'include', (['"""charts.urls"""'], {'namespace': '"""charts"""'}), "('charts.urls', namespace='charts')\n", (1639, 1674), False, 'from django.conf.urls import url, include\n'), ((1698, 1722), 'django.conf.urls.include', 'include', (['"""haystack.urls"""'], {}), "('haystack.urls')\n", (1705, 1722), False, 'from django.conf.urls import url, include\n'), ((1751, 1803), 'django.conf.urls.include', 'include', (['"""transkribus.urls"""'], {'namespace': '"""transkribus"""'}), "('transkribus.urls', namespace='transkribus')\n", (1758, 1803), False, 'from django.conf.urls import url, include\n'), ((1820, 1864), 'django.conf.urls.include', 'include', (['"""webpage.urls"""'], {'namespace': 
'"""webpage"""'}), "('webpage.urls', namespace='webpage')\n", (1827, 1864), False, 'from django.conf.urls import url, include\n')] |
#! /usr/bin/env python
import rospy
from geometry_msgs.msg import Point, Quaternion, Pose
from traject_msgs.srv import PlanTolerancedTrajecory, PlanTolerancedTrajecoryRequest, PlanTolerancedTrajecoryResponse
import numpy as np
from traj_complete_ros.toppra_eef_vel_ct import retime, plot_plan
from moveit_commander import MoveGroupCommander, RobotTrajectory
if __name__ == "__main__":
rospy.init_node('send_path')
rospy.sleep(0.5)
get_traj_srv = rospy.ServiceProxy('/planTolerancedTrajecory', PlanTolerancedTrajecory)
rospy.sleep(1.0)
req = PlanTolerancedTrajecoryRequest()
req.header.frame_id = 'base_link'
rect_a = 0.2
rect_b = 0.2
res = 100
h_start = 0.3
h_end = 0.3
nr_points_a = int(rect_a * res)
nr_points_b = int(rect_b * res)
a = np.linspace(-0.5 * rect_a, 0.5 * rect_a, nr_points_a)
b = np.linspace(-0.5 * rect_b, 0.5 * rect_b, nr_points_b)
h = np.linspace(h_start, h_end, 2 * (nr_points_a + nr_points_b))
# rectangle starts in top left corner, center is in the middle
curve_points = np.zeros(shape=(2 * (nr_points_a + nr_points_b), 3))
curve_points[0:nr_points_a] = np.array(
[a, nr_points_a * [rect_b * 0.5], h[0:nr_points_a]]).transpose()
curve_points[nr_points_a:nr_points_a + nr_points_b] = np.array(
[nr_points_b * [rect_a * 0.5], -b, h[nr_points_a:nr_points_a + nr_points_b]]).transpose()
curve_points[nr_points_a + nr_points_b:2 * nr_points_a + nr_points_b] = np.array(
[-1.0 * a, nr_points_a * [rect_b * (-0.5)],
h[nr_points_a + nr_points_b:2 * nr_points_a + nr_points_b]]).transpose()
curve_points[2 * nr_points_a + nr_points_b:] = np.array(
[[-0.5 * rect_a] * nr_points_b, b, h[2 * nr_points_a + nr_points_b:]]).transpose()
# remove duplicate points from data
to_delete = np.where(np.linalg.norm(curve_points[1:] - curve_points[:-1], axis=1) <= 0.0001)
curve_points = np.delete(curve_points, to_delete, axis=0)
# curve_normals = np.delete(curve_normals, to_delete, axis=0)
print(curve_points)
req.poses = [Pose(position=Point(x,y,z), orientation=Quaternion(0,0,0,1)) for x,y,z in curve_points[:]]
req.base_to_path.translation.x = 0.4
req.base_to_path.translation.y = 0.0
req.base_to_path.translation.z = 0.3
req.base_to_path.rotation.w = 1.0
res = get_traj_srv.call(req)
mg = MoveGroupCommander('r1_arm')
plan = RobotTrajectory()
plan.joint_trajectory = res.traj
mg.go(plan.joint_trajectory.points[0].positions)
print(plan)
plan = retime(plan, cart_vel_limit=0.03)
mg.execute(plan)
rospy.sleep(2.0)
# send goal around a circle | [
"traj_complete_ros.toppra_eef_vel_ct.retime",
"moveit_commander.RobotTrajectory",
"rospy.init_node",
"numpy.delete",
"rospy.ServiceProxy",
"moveit_commander.MoveGroupCommander",
"numpy.linspace",
"numpy.zeros",
"numpy.array",
"geometry_msgs.msg.Point",
"geometry_msgs.msg.Quaternion",
"numpy.linalg.norm",
"traject_msgs.srv.PlanTolerancedTrajecoryRequest",
"rospy.sleep"
] | [((395, 423), 'rospy.init_node', 'rospy.init_node', (['"""send_path"""'], {}), "('send_path')\n", (410, 423), False, 'import rospy\n'), ((428, 444), 'rospy.sleep', 'rospy.sleep', (['(0.5)'], {}), '(0.5)\n', (439, 444), False, 'import rospy\n'), ((464, 535), 'rospy.ServiceProxy', 'rospy.ServiceProxy', (['"""/planTolerancedTrajecory"""', 'PlanTolerancedTrajecory'], {}), "('/planTolerancedTrajecory', PlanTolerancedTrajecory)\n", (482, 535), False, 'import rospy\n'), ((541, 557), 'rospy.sleep', 'rospy.sleep', (['(1.0)'], {}), '(1.0)\n', (552, 557), False, 'import rospy\n'), ((569, 601), 'traject_msgs.srv.PlanTolerancedTrajecoryRequest', 'PlanTolerancedTrajecoryRequest', ([], {}), '()\n', (599, 601), False, 'from traject_msgs.srv import PlanTolerancedTrajecory, PlanTolerancedTrajecoryRequest, PlanTolerancedTrajecoryResponse\n'), ((808, 861), 'numpy.linspace', 'np.linspace', (['(-0.5 * rect_a)', '(0.5 * rect_a)', 'nr_points_a'], {}), '(-0.5 * rect_a, 0.5 * rect_a, nr_points_a)\n', (819, 861), True, 'import numpy as np\n'), ((870, 923), 'numpy.linspace', 'np.linspace', (['(-0.5 * rect_b)', '(0.5 * rect_b)', 'nr_points_b'], {}), '(-0.5 * rect_b, 0.5 * rect_b, nr_points_b)\n', (881, 923), True, 'import numpy as np\n'), ((932, 992), 'numpy.linspace', 'np.linspace', (['h_start', 'h_end', '(2 * (nr_points_a + nr_points_b))'], {}), '(h_start, h_end, 2 * (nr_points_a + nr_points_b))\n', (943, 992), True, 'import numpy as np\n'), ((1080, 1132), 'numpy.zeros', 'np.zeros', ([], {'shape': '(2 * (nr_points_a + nr_points_b), 3)'}), '(shape=(2 * (nr_points_a + nr_points_b), 3))\n', (1088, 1132), True, 'import numpy as np\n'), ((1945, 1987), 'numpy.delete', 'np.delete', (['curve_points', 'to_delete'], {'axis': '(0)'}), '(curve_points, to_delete, axis=0)\n', (1954, 1987), True, 'import numpy as np\n'), ((2395, 2423), 'moveit_commander.MoveGroupCommander', 'MoveGroupCommander', (['"""r1_arm"""'], {}), "('r1_arm')\n", (2413, 2423), False, 'from moveit_commander import MoveGroupCommander, RobotTrajectory\n'), ((2436, 2453), 'moveit_commander.RobotTrajectory', 'RobotTrajectory', ([], {}), '()\n', (2451, 2453), False, 'from moveit_commander import MoveGroupCommander, RobotTrajectory\n'), ((2574, 2607), 'traj_complete_ros.toppra_eef_vel_ct.retime', 'retime', (['plan'], {'cart_vel_limit': '(0.03)'}), '(plan, cart_vel_limit=0.03)\n', (2580, 2607), False, 'from traj_complete_ros.toppra_eef_vel_ct import retime, plot_plan\n'), ((2633, 2649), 'rospy.sleep', 'rospy.sleep', (['(2.0)'], {}), '(2.0)\n', (2644, 2649), False, 'import rospy\n'), ((1167, 1228), 'numpy.array', 'np.array', (['[a, nr_points_a * [rect_b * 0.5], h[0:nr_points_a]]'], {}), '([a, nr_points_a * [rect_b * 0.5], h[0:nr_points_a]])\n', (1175, 1228), True, 'import numpy as np\n'), ((1308, 1398), 'numpy.array', 'np.array', (['[nr_points_b * [rect_a * 0.5], -b, h[nr_points_a:nr_points_a + nr_points_b]]'], {}), '([nr_points_b * [rect_a * 0.5], -b, h[nr_points_a:nr_points_a +\n nr_points_b]])\n', (1316, 1398), True, 'import numpy as np\n'), ((1492, 1607), 'numpy.array', 'np.array', (['[-1.0 * a, nr_points_a * [rect_b * -0.5], h[nr_points_a + nr_points_b:2 *\n nr_points_a + nr_points_b]]'], {}), '([-1.0 * a, nr_points_a * [rect_b * -0.5], h[nr_points_a +\n nr_points_b:2 * nr_points_a + nr_points_b]])\n', (1500, 1607), True, 'import numpy as np\n'), ((1687, 1766), 'numpy.array', 'np.array', (['[[-0.5 * rect_a] * nr_points_b, b, h[2 * nr_points_a + nr_points_b:]]'], {}), '([[-0.5 * rect_a] * nr_points_b, b, h[2 * nr_points_a + nr_points_b:]])\n', (1695, 1766), 
True, 'import numpy as np\n'), ((1854, 1914), 'numpy.linalg.norm', 'np.linalg.norm', (['(curve_points[1:] - curve_points[:-1])'], {'axis': '(1)'}), '(curve_points[1:] - curve_points[:-1], axis=1)\n', (1868, 1914), True, 'import numpy as np\n'), ((2113, 2127), 'geometry_msgs.msg.Point', 'Point', (['x', 'y', 'z'], {}), '(x, y, z)\n', (2118, 2127), False, 'from geometry_msgs.msg import Point, Quaternion, Pose\n'), ((2139, 2161), 'geometry_msgs.msg.Quaternion', 'Quaternion', (['(0)', '(0)', '(0)', '(1)'], {}), '(0, 0, 0, 1)\n', (2149, 2161), False, 'from geometry_msgs.msg import Point, Quaternion, Pose\n')] |
from data_storing.assets.common import Timespan
from utilities.common_methods import getDebugInfo
from utilities import log
def is_met(financial_statements, financial_key, percentage_improvement):
"""
function is_met
It scans through the financial statements passed as input to see if the financial key passed as input
increases by a percentage (passed as input) a year.
"""
try:
previous = 0
good_fundamentals_counter = 0
number_years = 0
complementary_percentage = 1 - percentage_improvement
if not financial_statements:
return False
# scanning through the financial statements.
for statement in financial_statements:
if statement.period_length == Timespan.annual:
                number_years += 1  # keeps track of how many annual statements have been seen.
globals_vars = {'statement': statement}
local_vars = {}
exec(f"value_financial_key = statement.{financial_key}", globals_vars, local_vars)
current = local_vars['value_financial_key']
if current: # if it is not None
current_minus_improvement = current * complementary_percentage
if previous < current_minus_improvement:
good_fundamentals_counter += 1
previous = current
if good_fundamentals_counter == number_years: # increased every year
return True
else:
return False
except Exception as e:
log.error(f"There is a problem in the code!: {e}\n{getDebugInfo()}")
| [
"utilities.common_methods.getDebugInfo"
] | [((1633, 1647), 'utilities.common_methods.getDebugInfo', 'getDebugInfo', ([], {}), '()\n', (1645, 1647), False, 'from utilities.common_methods import getDebugInfo\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 3 17:15:11 2020
@author: jisuk
"""
# %% import some modules
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist
import tensorflow as tf
import numpy as np
# %% NMIST dataset parameters
num_classes = 10 # total classes (0~9) digits
num_features = 784 # data features(img shape = 28 * 28)
# training parameters
learning_rate = 0.001
training_steps = 3000
batch_size = 256
display_step = 100
# network parameters
n_hidden_1 = 128
n_hidden_2 = 256
# %% prepare MNIST data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# convert to float32
x_train, x_test = np.array(x_train, np.float32), np.array(x_test, np.float32)
# flatten images to 1_D vector of 784 features (28*28)
x_train, x_test = x_train.reshape(
[-1, num_features]), x_test.reshape([-1, num_features])
# normalize images value from [0, 255] to [0, 1]
x_train, x_test = x_train/255.0, x_test/255.0
# %% Use tf.data API to shuffle and batch data
train_data = tf.data.Dataset.from_tensor_slices((x_train, y_train))
train_data = train_data.repeat().shuffle(5000).batch(batch_size).prefetch(1)
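# repeat() makes the dataset loop indefinitely, shuffle(5000) draws from a
# 5000-element buffer, batch() groups examples into batches of batch_size, and
# prefetch(1) overlaps data preparation with training.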
# %% store layers weight & bias
# a random value generator to initialize weights
random_normal = tf.initializers.RandomNormal()
weights = {
'h1': tf.Variable(random_normal([num_features, n_hidden_1])),
'h2': tf.Variable(random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(random_normal([n_hidden_2, num_classes]))
}
biases = {
'b1': tf.Variable(tf.zeros([n_hidden_1])),
'b2': tf.Variable(tf.zeros([n_hidden_2])),
'out': tf.Variable(tf.zeros([num_classes]))
}
# %% create model
def neural_net(x):
# Hidden fully connected layer with 128 neurons.
layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
# Apply sigmoid to layer_1 output for non-linearity.
layer_1 = tf.nn.sigmoid(layer_1)
# Hidden fully connected layer with 256 neurons.
layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
# Apply sigmoid to layer_2 output for non-linearity.
layer_2 = tf.nn.sigmoid(layer_2)
# Output fully connected layer with a neuron for each class.
out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
# Apply softmax to normalize the logits to a probability distribution.
return tf.nn.softmax(out_layer)
# stochastic gradient descent
optimizer = tf.optimizers.SGD(learning_rate)
# %% cross entropy loss function
def cross_entropy(y_pred, y_true):
    # encode label to a one-hot vector
y_true = tf.one_hot(y_true, depth=num_classes)
# clip prediction values to avoid log(0) error
y_pred = tf.clip_by_value(y_pred, 1e-9, 1.)
# compute cross-entropy
return tf.reduce_mean(-tf.reduce_sum(y_true * tf.math.log(y_pred)))
# accuracy function
def accuracy(y_pred, y_true):
# predicted class is the index of highest score in prediction vector (i.e. argmax)
correct_prediction = tf.equal(
tf.argmax(y_pred, 1), tf.cast(y_true, tf.int64))
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32), axis=-1)
# stochastic gradient descent optimizer.
optimizer = tf.optimizers.SGD(learning_rate)
# %% optimization process
def run_optimization(x, y):
# Wrap computation inside a GradientTape for automatic differentiation.
with tf.GradientTape() as g:
pred = neural_net(x)
loss = cross_entropy(pred, y)
# Variables to update, i.e. trainable variables.
trainable_variables = list(weights.values()) + list(biases.values())
# Compute gradients.
gradients = g.gradient(loss, trainable_variables)
# Update W and b following gradients.
optimizer.apply_gradients(zip(gradients, trainable_variables))
# %% Run training for the given number of steps.
for step, (batch_x, batch_y) in enumerate(train_data.take(training_steps), 1):
# run the optimization to update W and b values.
run_optimization(batch_x, batch_y)
if step % display_step == 0:
pred = neural_net(batch_x)
loss = cross_entropy(pred, batch_y)
acc = accuracy(pred, batch_y)
print("step: %i, loss: %f, accuracy: %f" % (step, loss, acc))
# %% test model on validation set.
pred = neural_net(x_test)
print("Test Accuracy: %f" % accuracy(pred, y_test))
# visualize predictions.
import matplotlib.pyplot as plt
# predict 5 images from validation set.
n_images = 5
test_images = x_test[:n_images]
predictions = neural_net(test_images)
# display image and model predictions
for i in range(n_images):
plt.imshow(np.reshape(test_images[i], [28, 28]), cmap='gray')
plt.show()
print("Model prediction: %i" % np.argmax(predictions.numpy()[i]))
| [
"tensorflow.one_hot",
"numpy.reshape",
"tensorflow.data.Dataset.from_tensor_slices",
"tensorflow.keras.datasets.mnist.load_data",
"tensorflow.math.log",
"tensorflow.optimizers.SGD",
"tensorflow.GradientTape",
"numpy.array",
"tensorflow.initializers.RandomNormal",
"tensorflow.nn.sigmoid",
"tensorflow.argmax",
"tensorflow.clip_by_value",
"tensorflow.nn.softmax",
"tensorflow.matmul",
"tensorflow.cast",
"tensorflow.zeros",
"matplotlib.pyplot.show"
] | [((655, 672), 'tensorflow.keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (670, 672), False, 'from tensorflow.keras.datasets import mnist\n'), ((1078, 1132), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(x_train, y_train)'], {}), '((x_train, y_train))\n', (1112, 1132), True, 'import tensorflow as tf\n'), ((1309, 1339), 'tensorflow.initializers.RandomNormal', 'tf.initializers.RandomNormal', ([], {}), '()\n', (1337, 1339), True, 'import tensorflow as tf\n'), ((2463, 2495), 'tensorflow.optimizers.SGD', 'tf.optimizers.SGD', (['learning_rate'], {}), '(learning_rate)\n', (2480, 2495), True, 'import tensorflow as tf\n'), ((3219, 3251), 'tensorflow.optimizers.SGD', 'tf.optimizers.SGD', (['learning_rate'], {}), '(learning_rate)\n', (3236, 3251), True, 'import tensorflow as tf\n'), ((712, 741), 'numpy.array', 'np.array', (['x_train', 'np.float32'], {}), '(x_train, np.float32)\n', (720, 741), True, 'import numpy as np\n'), ((743, 771), 'numpy.array', 'np.array', (['x_test', 'np.float32'], {}), '(x_test, np.float32)\n', (751, 771), True, 'import numpy as np\n'), ((1934, 1956), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['layer_1'], {}), '(layer_1)\n', (1947, 1956), True, 'import tensorflow as tf\n'), ((2152, 2174), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['layer_2'], {}), '(layer_2)\n', (2165, 2174), True, 'import tensorflow as tf\n'), ((2394, 2418), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['out_layer'], {}), '(out_layer)\n', (2407, 2418), True, 'import tensorflow as tf\n'), ((2619, 2656), 'tensorflow.one_hot', 'tf.one_hot', (['y_true'], {'depth': 'num_classes'}), '(y_true, depth=num_classes)\n', (2629, 2656), True, 'import tensorflow as tf\n'), ((2721, 2757), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['y_pred', '(1e-09)', '(1.0)'], {}), '(y_pred, 1e-09, 1.0)\n', (2737, 2757), True, 'import tensorflow as tf\n'), ((4675, 4685), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4683, 4685), True, 'import matplotlib.pyplot as plt\n'), ((1584, 1606), 'tensorflow.zeros', 'tf.zeros', (['[n_hidden_1]'], {}), '([n_hidden_1])\n', (1592, 1606), True, 'import tensorflow as tf\n'), ((1631, 1653), 'tensorflow.zeros', 'tf.zeros', (['[n_hidden_2]'], {}), '([n_hidden_2])\n', (1639, 1653), True, 'import tensorflow as tf\n'), ((1679, 1702), 'tensorflow.zeros', 'tf.zeros', (['[num_classes]'], {}), '([num_classes])\n', (1687, 1702), True, 'import tensorflow as tf\n'), ((1820, 1847), 'tensorflow.matmul', 'tf.matmul', (['x', "weights['h1']"], {}), "(x, weights['h1'])\n", (1829, 1847), True, 'import tensorflow as tf\n'), ((2032, 2065), 'tensorflow.matmul', 'tf.matmul', (['layer_1', "weights['h2']"], {}), "(layer_1, weights['h2'])\n", (2041, 2065), True, 'import tensorflow as tf\n'), ((2257, 2291), 'tensorflow.matmul', 'tf.matmul', (['layer_2', "weights['out']"], {}), "(layer_2, weights['out'])\n", (2266, 2291), True, 'import tensorflow as tf\n'), ((3039, 3059), 'tensorflow.argmax', 'tf.argmax', (['y_pred', '(1)'], {}), '(y_pred, 1)\n', (3048, 3059), True, 'import tensorflow as tf\n'), ((3061, 3086), 'tensorflow.cast', 'tf.cast', (['y_true', 'tf.int64'], {}), '(y_true, tf.int64)\n', (3068, 3086), True, 'import tensorflow as tf\n'), ((3114, 3153), 'tensorflow.cast', 'tf.cast', (['correct_prediction', 'tf.float32'], {}), '(correct_prediction, tf.float32)\n', (3121, 3153), True, 'import tensorflow as tf\n'), ((3393, 3410), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (3408, 3410), True, 'import tensorflow as tf\n'), 
((4620, 4656), 'numpy.reshape', 'np.reshape', (['test_images[i]', '[28, 28]'], {}), '(test_images[i], [28, 28])\n', (4630, 4656), True, 'import numpy as np\n'), ((2834, 2853), 'tensorflow.math.log', 'tf.math.log', (['y_pred'], {}), '(y_pred)\n', (2845, 2853), True, 'import tensorflow as tf\n')] |
# -*- coding: utf-8 -*-
"""
A useful tool to examine a series of raw2tif images for the FlamePyrometry code.
For more information on the FlamePyrometry code, please see [https://doi.org/10.1364/AO.58.002662].
The inverse Abel transformation is very sensitive to the symmetry of the flame and the uniformity of stacked images.
Thus, this code was developed to examine a series of raw2tif images from the BlackflyS camera.
Several flame images with a similar flame shape can then be selected for further processing in the FlamePyrometry code.
Note that several lines of this code are copied and modified from the original FlamePyrometry code.
The raw images from BlackflyS camera can be converted to Tif format images using ImageJ.
The Tif images need to be renamed as "0, 1, 2, 3..." using a renaming tool, many of which can be found on the Internet.
In addition, these Tif images must be placed in the "//Photos//Examination//Input//" folder.
Then, the variable "img_number" needs to be changed to the number of these images.
Note that the positions of HAB0 and the flame tip, and the flame width, must be given.
For each image, the standard deviation of the detected centre points (i.e., the centreline),
and the position of the flame tip can be saved in csv format in "//Photos//Examination//".
The demosaiced image of the flame, and the R, G, and B channels of this image, can also be saved.
The code can also save tecplot format files for all frames that store the Red, Green and Blue channels of the flame image.
Note that each tecplot file (*.dat) may be very large.
If you don't want to save the tecplot files, just change save_tecplot to 'False'.
Created on Sat Jun 26, 2021
@ zhangw106
@ <EMAIL>
"""
from skimage import io, exposure
import numpy as np
from os.path import abspath
from scipy import ndimage
import cv2
import matplotlib.pyplot as plt
import gc
# Number of images for examination
img_number = 3
# mm for each pixel of flame images
pixelmm = 1/38.5
# Save tecplot file. 'True' or 'False'
save_tecplot = True
# Parameters defining position of flame and cropped image size.
# Please use an uneven integer for flame_width.
HAB0 = 3634 # 1614 for '100H_BG7_exp1563'
HAB_tip = 1800 # 114 for '100H_BG7_exp1563'
flame_width = 501 # 401 for '100H_BG7_exp1563'
flame_height = HAB0 - HAB_tip
# It is possible to slightly rotate tilded flame images
degree = 0
# Threshold for the tip detection
thresh_grn = 2000
# Max. intensity count of the camera
scale = 65535
# Two arrays to save middle_std and tip for each frame
# Another array to save both the middle_std and tip
middle_std = np.zeros(img_number)
tip = np.zeros(img_number)
tif_exam = np.zeros((img_number, 3))
for k in range(0, img_number, 1):
filename = k
fname = abspath( '{0}{1}{2}'.format('Photos//Examination//Input//', filename, '.tif') )
ImDatBayer = io.imread(fname) # Row*Column = 2048*1536
ImDat = np.zeros(((len(ImDatBayer), len(ImDatBayer[0]), 3) ) ) # 2048*1536*3
# Demosaic the image. COLOR_BayerGB2RGB, COLOR_BayerGB2RGB_EA, COLOR_BayerGB2RGB_VNG.
ImDat = ndimage.rotate((cv2.cvtColor(ImDatBayer, cv2.COLOR_BayerGB2RGB_EA)), degree, reshape=False)
# It is possible to adjust the lightness of color image.
Img_color = exposure.adjust_gamma(ImDat, 1) # <1 means brighter, >1 means darker
del ImDatBayer
ImDatRed = ImDat[:,:,0] # 2048*1536
ImDatGrn = ImDat[:,:,1] # 2048*1536
ImDatBlu = ImDat[:,:,2] # 2048*1536
left = np.zeros([len(ImDatGrn)], dtype=int) # 2048
right = np.zeros([len(ImDatGrn)], dtype=int) # 2048
middle = np.zeros([len(ImDatGrn)]) # 2048
# Find left and right flame edge to calculate the flame centre at different pixel rows
gray = cv2.cvtColor(np.uint8(ImDat/scale*255), cv2.COLOR_RGB2GRAY) # 2048*1536
th_gray = cv2.adaptiveThreshold(gray,1,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,\
cv2.THRESH_BINARY,85,6) # 2048*1536
for i in range(0,len(ImDatGrn)):
left[i] = np.argmax(th_gray[i,:]<0.5)
right[i] = len(ImDatGrn[0]) - np.argmax(np.flip(th_gray[i,:],0)<0.5)- 1
# Remove the noisy top and bottom part of the flame
left[0:np.argmax(left[:]>0)+20] = 0
left[len(left) - np.argmax(np.flip(left[:])>0)-50:-1] = 0
middle = (left + right)/2 * (left!=0) * (right!=len(ImDatGrn[0])-1)
middle_std[k] = np.std(middle[middle!=0])
# Find the flame tip at flame centre
tip[k] = np.argmax(ImDatGrn[:,int(round(np.mean(middle[np.nonzero(middle)])))]>thresh_grn)
del ImDat, left, right, gray, th_gray,
    # Save img_number, middle_std and tip in the tif_exam array
tif_exam[k,0] = k
tif_exam[k,1] = middle_std[k]
tif_exam[k,2] = tip[k]
#-----------------------------------------------------
# Crop the flame image to desired size
#-----------------------------------------------------
middle_ave = int(np.average(middle[middle!=0]))
Img_color_crop = Img_color[(HAB0 - flame_height) : HAB0, int(middle_ave - flame_width/2) : int(middle_ave + flame_width/2), :]
ImDatRed_crop = ImDatRed[(HAB0 - flame_height) : HAB0, int(middle_ave - flame_width/2) : int(middle_ave + flame_width/2)]
ImDatGrn_crop = ImDatGrn[(HAB0 - flame_height) : HAB0, int(middle_ave - flame_width/2) : int(middle_ave + flame_width/2)]
ImDatBlu_crop = ImDatBlu[(HAB0 - flame_height) : HAB0, int(middle_ave - flame_width/2) : int(middle_ave + flame_width/2)]
del middle, Img_color, ImDatRed, ImDatGrn, ImDatBlu
ImDatRG = ImDatRed_crop / ImDatGrn_crop
ImDatRB = ImDatRed_crop / ImDatBlu_crop
ImDatBG = ImDatBlu_crop / ImDatGrn_crop
# Save the debayer color image and the R G B images
plt.figure()
plt.title('Color image')
plt.subplot(221)
plt.axis('off') # Hide both the x and y axises
plt.imshow(Img_color_crop/scale, vmin=0, vmax=scale)
plt.title('Color')
plt.subplot(222)
plt.axis('off')
plt.imshow(ImDatRed_crop, vmin=0, vmax=scale)
plt.title('Red')
plt.subplot(223)
plt.axis('off')
plt.imshow(ImDatGrn_crop, vmin=0, vmax=scale)
plt.title('Green')
plt.subplot(224)
plt.axis('off')
plt.imshow(ImDatBlu_crop, vmin=0, vmax=scale)
plt.title('Blue')
plt.subplots_adjust(hspace=None, wspace=-0.7)
# Path to save the figure, and save it.
fsave_color = abspath( '{0}{1}{2}'.format('Photos//Examination//Color_image-', str(k), '.png') )
plt.savefig(fsave_color, bbox_inches='tight', dpi=500)
#plt.draw()
    #plt.pause(3) # Figure will show for 3 seconds
#plt.cla() # Clear axis
#plt.clf() # clear figure
plt.close() # close figure
#-----------------------------------------------------
# Save Red, Green and Blue channels in tecplot format
#-----------------------------------------------------
    if save_tecplot:
# Get the shape of ImDatRed_crop
m = len(ImDatRed_crop) # Row
n = len(ImDatRed_crop[0]) # Column
        tecplot = np.zeros((m*n, 5))  # Create a matrix
for i in range(0, m): # Searching each row
for j in range(0, n): # Searching each column
tecplot[i*n+j, 0] = (j-(n-1)/2)*pixelmm/10 # Set the middle pixel as r=0 cm
tecplot[i*n+j, 1] = (m-i)*pixelmm/10 # Flip the image, cm
tecplot[i*n+j, 2] = ImDatRed_crop[i,j]
tecplot[i*n+j, 3] = ImDatGrn_crop[i,j]
tecplot[i*n+j, 4] = ImDatBlu_crop[i,j]
header_str = ('{0}{1}{2}{3}{4}{5}{6}{7}{8}'.format(' TITLE = "Flame-', str(filename), '" \n VARIABLES = "r (cm)", "HAB (cm)", "Red", "Green", "Blue" \n ZONE T = "Flame-', str(filename), '", I = ', str(n),', J = ', str(m), ', F = POINT'))
ImPathtecplot = ('{0}{1}{2}{3}'.format('Photos//Examination//', 'Color_channel_', filename, '-tecplot.dat'))
np.savetxt(ImPathtecplot, tecplot, delimiter=' ', header= header_str, comments='')
del tecplot
    print('{0}{1}{2}'.format('Frame ', str(k), ' has been examined.'))
del Img_color_crop, ImDatRed_crop, ImDatGrn_crop, ImDatBlu_crop
# Release Ram
gc.collect() # Release Ram
#-----------------------------------------------------
# Save data of flame centre and tip for all frames
#-----------------------------------------------------
header_str = ('NO.,STD,Tip')
path_exam = ('Photos/Examination/0_Flame_centre_and_tip.csv')
np.savetxt(path_exam, tif_exam, fmt='%3d,%1.3f,%4d', header= header_str, comments='')
print('Flame centre and tip detection finished.')
#-----------------------------------------------------
# Show figure of flame centre and tip, and save it
#-----------------------------------------------------
plt.figure()
plt.title('Centre standard deviation')
plt.xlabel("Image number")
plt.ylabel("Standard deviation")
plt.plot(middle_std, '*b')
plt.savefig('./Photos/Examination/1_Flame_centre.png', bbox_inches='tight', dpi=500)
if img_number != 1:
plt.draw()
plt.pause(3) # Figure will show for 3 seconds
plt.close() # Close figure and continue
print('Flame centre standard deviation saved.')
plt.figure()
plt.title('Flame tip')
plt.xlabel("Image number")
plt.ylabel("Flame tip")
plt.plot(tip, 'xr')
plt.savefig('./Photos/Examination/2_Flame_tip.png', bbox_inches='tight', dpi=500)
if img_number != 1:
plt.draw()
plt.pause(3) # Figure will show for 3 seconds
plt.close() # Close figure and continue
print('Flame tip saved.') | [
"numpy.uint8",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.imshow",
"numpy.flip",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.savefig",
"numpy.average",
"numpy.argmax",
"skimage.io.imread",
"numpy.savetxt",
"numpy.std",
"gc.collect",
"matplotlib.pyplot.title",
"matplotlib.pyplot.draw",
"matplotlib.pyplot.pause",
"cv2.cvtColor",
"numpy.nonzero",
"matplotlib.pyplot.subplots_adjust",
"skimage.exposure.adjust_gamma",
"numpy.zeros",
"matplotlib.pyplot.figure",
"cv2.adaptiveThreshold",
"matplotlib.pyplot.subplot"
] | [((2657, 2677), 'numpy.zeros', 'np.zeros', (['img_number'], {}), '(img_number)\n', (2665, 2677), True, 'import numpy as np\n'), ((2686, 2706), 'numpy.zeros', 'np.zeros', (['img_number'], {}), '(img_number)\n', (2694, 2706), True, 'import numpy as np\n'), ((2719, 2744), 'numpy.zeros', 'np.zeros', (['(img_number, 3)'], {}), '((img_number, 3))\n', (2727, 2744), True, 'import numpy as np\n'), ((8778, 8866), 'numpy.savetxt', 'np.savetxt', (['path_exam', 'tif_exam'], {'fmt': '"""%3d,%1.3f,%4d"""', 'header': 'header_str', 'comments': '""""""'}), "(path_exam, tif_exam, fmt='%3d,%1.3f,%4d', header=header_str,\n comments='')\n", (8788, 8866), True, 'import numpy as np\n'), ((9082, 9094), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9092, 9094), True, 'import matplotlib.pyplot as plt\n'), ((9096, 9134), 'matplotlib.pyplot.title', 'plt.title', (['"""Centre standard deviation"""'], {}), "('Centre standard deviation')\n", (9105, 9134), True, 'import matplotlib.pyplot as plt\n'), ((9136, 9162), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Image number"""'], {}), "('Image number')\n", (9146, 9162), True, 'import matplotlib.pyplot as plt\n'), ((9164, 9196), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Standard deviation"""'], {}), "('Standard deviation')\n", (9174, 9196), True, 'import matplotlib.pyplot as plt\n'), ((9198, 9224), 'matplotlib.pyplot.plot', 'plt.plot', (['middle_std', '"""*b"""'], {}), "(middle_std, '*b')\n", (9206, 9224), True, 'import matplotlib.pyplot as plt\n'), ((9226, 9314), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./Photos/Examination/1_Flame_centre.png"""'], {'bbox_inches': '"""tight"""', 'dpi': '(500)'}), "('./Photos/Examination/1_Flame_centre.png', bbox_inches='tight',\n dpi=500)\n", (9237, 9314), True, 'import matplotlib.pyplot as plt\n'), ((9401, 9412), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9410, 9412), True, 'import matplotlib.pyplot as plt\n'), ((9499, 9511), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9509, 9511), True, 'import matplotlib.pyplot as plt\n'), ((9513, 9535), 'matplotlib.pyplot.title', 'plt.title', (['"""Flame tip"""'], {}), "('Flame tip')\n", (9522, 9535), True, 'import matplotlib.pyplot as plt\n'), ((9537, 9563), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Image number"""'], {}), "('Image number')\n", (9547, 9563), True, 'import matplotlib.pyplot as plt\n'), ((9565, 9588), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Flame tip"""'], {}), "('Flame tip')\n", (9575, 9588), True, 'import matplotlib.pyplot as plt\n'), ((9590, 9609), 'matplotlib.pyplot.plot', 'plt.plot', (['tip', '"""xr"""'], {}), "(tip, 'xr')\n", (9598, 9609), True, 'import matplotlib.pyplot as plt\n'), ((9611, 9696), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""./Photos/Examination/2_Flame_tip.png"""'], {'bbox_inches': '"""tight"""', 'dpi': '(500)'}), "('./Photos/Examination/2_Flame_tip.png', bbox_inches='tight',\n dpi=500)\n", (9622, 9696), True, 'import matplotlib.pyplot as plt\n'), ((9783, 9794), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9792, 9794), True, 'import matplotlib.pyplot as plt\n'), ((2913, 2929), 'skimage.io.imread', 'io.imread', (['fname'], {}), '(fname)\n', (2922, 2929), False, 'from skimage import io, exposure\n'), ((3328, 3359), 'skimage.exposure.adjust_gamma', 'exposure.adjust_gamma', (['ImDat', '(1)'], {}), '(ImDat, 1)\n', (3349, 3359), False, 'from skimage import io, exposure\n'), ((3934, 4027), 'cv2.adaptiveThreshold', 'cv2.adaptiveThreshold', (['gray', '(1)', 
'cv2.ADAPTIVE_THRESH_GAUSSIAN_C', 'cv2.THRESH_BINARY', '(85)', '(6)'], {}), '(gray, 1, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.\n THRESH_BINARY, 85, 6)\n', (3955, 4027), False, 'import cv2\n'), ((4520, 4547), 'numpy.std', 'np.std', (['middle[middle != 0]'], {}), '(middle[middle != 0])\n', (4526, 4547), True, 'import numpy as np\n'), ((5890, 5902), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5900, 5902), True, 'import matplotlib.pyplot as plt\n'), ((5908, 5932), 'matplotlib.pyplot.title', 'plt.title', (['"""Color image"""'], {}), "('Color image')\n", (5917, 5932), True, 'import matplotlib.pyplot as plt\n'), ((5947, 5963), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(221)'], {}), '(221)\n', (5958, 5963), True, 'import matplotlib.pyplot as plt\n'), ((5969, 5984), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (5977, 5984), True, 'import matplotlib.pyplot as plt\n'), ((6022, 6076), 'matplotlib.pyplot.imshow', 'plt.imshow', (['(Img_color_crop / scale)'], {'vmin': '(0)', 'vmax': 'scale'}), '(Img_color_crop / scale, vmin=0, vmax=scale)\n', (6032, 6076), True, 'import matplotlib.pyplot as plt\n'), ((6080, 6098), 'matplotlib.pyplot.title', 'plt.title', (['"""Color"""'], {}), "('Color')\n", (6089, 6098), True, 'import matplotlib.pyplot as plt\n'), ((6110, 6126), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(222)'], {}), '(222)\n', (6121, 6126), True, 'import matplotlib.pyplot as plt\n'), ((6132, 6147), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6140, 6147), True, 'import matplotlib.pyplot as plt\n'), ((6155, 6200), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ImDatRed_crop'], {'vmin': '(0)', 'vmax': 'scale'}), '(ImDatRed_crop, vmin=0, vmax=scale)\n', (6165, 6200), True, 'import matplotlib.pyplot as plt\n'), ((6206, 6222), 'matplotlib.pyplot.title', 'plt.title', (['"""Red"""'], {}), "('Red')\n", (6215, 6222), True, 'import matplotlib.pyplot as plt\n'), ((6234, 6250), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(223)'], {}), '(223)\n', (6245, 6250), True, 'import matplotlib.pyplot as plt\n'), ((6256, 6271), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6264, 6271), True, 'import matplotlib.pyplot as plt\n'), ((6279, 6324), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ImDatGrn_crop'], {'vmin': '(0)', 'vmax': 'scale'}), '(ImDatGrn_crop, vmin=0, vmax=scale)\n', (6289, 6324), True, 'import matplotlib.pyplot as plt\n'), ((6330, 6348), 'matplotlib.pyplot.title', 'plt.title', (['"""Green"""'], {}), "('Green')\n", (6339, 6348), True, 'import matplotlib.pyplot as plt\n'), ((6360, 6376), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(224)'], {}), '(224)\n', (6371, 6376), True, 'import matplotlib.pyplot as plt\n'), ((6382, 6397), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6390, 6397), True, 'import matplotlib.pyplot as plt\n'), ((6405, 6450), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ImDatBlu_crop'], {'vmin': '(0)', 'vmax': 'scale'}), '(ImDatBlu_crop, vmin=0, vmax=scale)\n', (6415, 6450), True, 'import matplotlib.pyplot as plt\n'), ((6456, 6473), 'matplotlib.pyplot.title', 'plt.title', (['"""Blue"""'], {}), "('Blue')\n", (6465, 6473), True, 'import matplotlib.pyplot as plt\n'), ((6485, 6530), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': 'None', 'wspace': '(-0.7)'}), '(hspace=None, wspace=-0.7)\n', (6504, 6530), True, 'import matplotlib.pyplot as plt\n'), ((6689, 6743), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fsave_color'], {'bbox_inches': 
'"""tight"""', 'dpi': '(500)'}), "(fsave_color, bbox_inches='tight', dpi=500)\n", (6700, 6743), True, 'import matplotlib.pyplot as plt\n'), ((6903, 6914), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6912, 6914), True, 'import matplotlib.pyplot as plt\n'), ((8485, 8497), 'gc.collect', 'gc.collect', ([], {}), '()\n', (8495, 8497), False, 'import gc\n'), ((9337, 9347), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (9345, 9347), True, 'import matplotlib.pyplot as plt\n'), ((9353, 9365), 'matplotlib.pyplot.pause', 'plt.pause', (['(3)'], {}), '(3)\n', (9362, 9365), True, 'import matplotlib.pyplot as plt\n'), ((9719, 9729), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (9727, 9729), True, 'import matplotlib.pyplot as plt\n'), ((9735, 9747), 'matplotlib.pyplot.pause', 'plt.pause', (['(3)'], {}), '(3)\n', (9744, 9747), True, 'import matplotlib.pyplot as plt\n'), ((3171, 3221), 'cv2.cvtColor', 'cv2.cvtColor', (['ImDatBayer', 'cv2.COLOR_BayerGB2RGB_EA'], {}), '(ImDatBayer, cv2.COLOR_BayerGB2RGB_EA)\n', (3183, 3221), False, 'import cv2\n'), ((3851, 3880), 'numpy.uint8', 'np.uint8', (['(ImDat / scale * 255)'], {}), '(ImDat / scale * 255)\n', (3859, 3880), True, 'import numpy as np\n'), ((4148, 4178), 'numpy.argmax', 'np.argmax', (['(th_gray[i, :] < 0.5)'], {}), '(th_gray[i, :] < 0.5)\n', (4157, 4178), True, 'import numpy as np\n'), ((5078, 5109), 'numpy.average', 'np.average', (['middle[middle != 0]'], {}), '(middle[middle != 0])\n', (5088, 5109), True, 'import numpy as np\n'), ((7297, 7317), 'numpy.zeros', 'np.zeros', (['(m * n, 5)'], {}), '((m * n, 5))\n', (7305, 7317), True, 'import numpy as np\n'), ((8199, 8284), 'numpy.savetxt', 'np.savetxt', (['ImPathtecplot', 'tecplot'], {'delimiter': '""" """', 'header': 'header_str', 'comments': '""""""'}), "(ImPathtecplot, tecplot, delimiter=' ', header=header_str,\n comments='')\n", (8209, 8284), True, 'import numpy as np\n'), ((4332, 4354), 'numpy.argmax', 'np.argmax', (['(left[:] > 0)'], {}), '(left[:] > 0)\n', (4341, 4354), True, 'import numpy as np\n'), ((4225, 4250), 'numpy.flip', 'np.flip', (['th_gray[i, :]', '(0)'], {}), '(th_gray[i, :], 0)\n', (4232, 4250), True, 'import numpy as np\n'), ((4393, 4409), 'numpy.flip', 'np.flip', (['left[:]'], {}), '(left[:])\n', (4400, 4409), True, 'import numpy as np\n'), ((4651, 4669), 'numpy.nonzero', 'np.nonzero', (['middle'], {}), '(middle)\n', (4661, 4669), True, 'import numpy as np\n')] |
import asyncio
from logging import INFO, getLogger
import backoff
from requests_html import AsyncHTMLSession
from crawler_magazine.downloader import BaseDownloader
logger = getLogger()
logger.setLevel(INFO)
class AsyncDownloader(BaseDownloader):
def __init__(self, header: dict = None, proxy: dict = None):
super().__init__(header, proxy)
self.asession = AsyncHTMLSession()
def __del__(self):
loop = asyncio.get_event_loop()
loop.run_until_complete(self.asession.close())
@property
def cookies(self):
return self.asession.cookies
async def get(self, url, *args, **kwargs):
"""
Example:
payload = {'some': 'data'}
headers = {'content-type': 'application/json'}
params = {'key1': 'value1', 'key2': 'value2'}
"""
return await self.execute("get", url, *args, **kwargs)
async def post(self, url, *args, **kwargs):
"""
Example:
payload = {'some': 'data'}
headers = {'content-type': 'application/json'}
params = {'key1': 'value1', 'key2': 'value2'}
"""
return await self.execute("post", url, *args, **kwargs)
@backoff.on_exception(
backoff.expo, Exception, max_tries=3, max_time=30,
)
async def execute(
self,
method: str,
url,
headers: dict = None,
json: dict = None,
params: dict = None,
cookies=None,
payload=None,
files=None,
timeout: int = 60,
):
if method.lower() not in ["get", "post"]:
            raise ValueError('method must be "get" or "post", got: %s' % method)
content = {"headers": headers if headers else self.custom_header}
if payload:
content["data"] = payload
if json:
content["json"] = json
if params:
content["params"] = params
if cookies:
content["cookies"] = cookies
if files:
content["files"] = files
if timeout:
content["timeout"] = timeout
if self.proxies:
content["proxies"] = self.proxies
return await self.asession.request(method, url, **content)
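
# Example usage (a sketch; the URL is illustrative, and get() forwards the same
# keyword arguments that execute() accepts):
#   async def fetch():
#       downloader = AsyncDownloader()
#       return await downloader.get('https://example.com', params={'page': 1})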
| [
"logging.getLogger",
"requests_html.AsyncHTMLSession",
"asyncio.get_event_loop",
"backoff.on_exception"
] | [((176, 187), 'logging.getLogger', 'getLogger', ([], {}), '()\n', (185, 187), False, 'from logging import INFO, getLogger\n'), ((1217, 1288), 'backoff.on_exception', 'backoff.on_exception', (['backoff.expo', 'Exception'], {'max_tries': '(3)', 'max_time': '(30)'}), '(backoff.expo, Exception, max_tries=3, max_time=30)\n', (1237, 1288), False, 'import backoff\n'), ((380, 398), 'requests_html.AsyncHTMLSession', 'AsyncHTMLSession', ([], {}), '()\n', (396, 398), False, 'from requests_html import AsyncHTMLSession\n'), ((438, 462), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (460, 462), False, 'import asyncio\n')] |
# -*- coding: utf-8 -*-
"""
@author: <NAME>
bottom effect correction coefficients
"""
import numpy as np
from numpy import pi
def bottom_effect_correction(Poisson, Probe_dimension, Height, modelprobe,
indentationfull):
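    """
    Computes the bottom effect correction factor (BEC) and its speed variant for
    an elastic sample of finite thickness. For a spherical probe both are
    4th-order polynomials in the dimensionless ratio x = sqrt(R*indentation)/h
    (R = probe radius, h = sample height):
        BEC = 1 + D1*x + D2*x**2 + D3*x**3 + D4*x**4,
    with bonded-case coefficients D1..D4 taken from the papers cited below.
    """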
BEC = np.ones(len(indentationfull))
indentationfull[indentationfull < 0] = np.nan # to remove warnings in sqrt
if 0.45 <= Poisson <= 0.5: # coefficients from doi.org/10.1016/j.bpj.2018.05.012
if modelprobe == 'sphere': # sphere
Dpar1 = 1.133
Dpar2 = 1.497
Dpar3 = 1.469
Dpar4 = 0.755
R = Probe_dimension
h = Height
BEC = 1 + (Dpar1*(R*indentationfull)**(0.5)/h) +\
(Dpar2*((R*indentationfull)**(0.5)/h)**2) +\
(Dpar3*((R*indentationfull)**(0.5)/h)**3) +\
(Dpar4*((R*indentationfull)**(0.5)/h)**4)
BECspeed = 1 + 4/3*(Dpar1*(R*indentationfull)**(0.5)/h) +\
5/3*(Dpar2*((R*indentationfull)**(0.5)/h)**2) +\
2*(Dpar3*((R*indentationfull)**(0.5)/h)**3) +\
7/3*(Dpar4*((R*indentationfull)**(0.5)/h)**4)
elif modelprobe in ['pyramid', 'cylinder']:
            # in the current version of the script the correction is not yet implemented
            BEC = np.ones(len(indentationfull))
            BECspeed = np.ones(len(indentationfull))
            print('BEC is not yet available')
else:
if modelprobe == 'sphere':
# coefficients are from doi.org/10.1016/S0006-3495(02)75620-8
            alpha = -(1.2876-1.4678*Poisson+1.3442*Poisson**2)/(1-Poisson)  # bonded case
            bettaD = (0.6387-1.0277*Poisson+1.5164*Poisson**2)/(1-Poisson)  # bonded case
Dpar1 = 2*alpha/pi
Dpar2 = 4*(alpha/pi)**2
Dpar3 = (8/pi**3)*(alpha**3+(4*(pi**2)*bettaD/15))
Dpar4 = -(16*alpha/pi**4)*(alpha**3+(3*(pi**2)*bettaD/5))
            R = Probe_dimension
h = Height
BEC = 1 + (Dpar1*(R*indentationfull)**(0.5)/h) +\
(Dpar2*((R*indentationfull)**(0.5)/h)**2) +\
(Dpar3*((R*indentationfull)**(0.5)/h)**3) +\
(Dpar4*((R*indentationfull)**(0.5)/h)**4)
BECspeed = 1 + 4/3*(Dpar1*(R*indentationfull)**(0.5)/h) +\
5/3*(Dpar2*((R*indentationfull)**(0.5)/h)**2) +\
2*(Dpar3*((R*indentationfull)**(0.5)/h)**3) +\
7/3*(Dpar4*((R*indentationfull)**(0.5)/h)**4)
elif modelprobe in ['pyramid', 'cylinder']:
            # in the current version of the script the correction is not yet implemented
            BEC = np.ones(len(indentationfull))
            BECspeed = np.ones(len(indentationfull))
            print('BEC is not yet available')
return BEC, BECspeed
if __name__ == '__main__':
import matplotlib.pyplot as plt
Poisson = 0.5
Radius = 1000
Height = 1000
modelprobe = 'sphere'
indentationfull = np.linspace(0, 200, 200)
BEC = bottom_effect_correction(Poisson, Radius, Height, modelprobe, indentationfull)[0]
plt.plot(indentationfull, BEC)
| [
"numpy.linspace",
"matplotlib.pyplot.plot"
] | [((3154, 3178), 'numpy.linspace', 'np.linspace', (['(0)', '(200)', '(200)'], {}), '(0, 200, 200)\n', (3165, 3178), True, 'import numpy as np\n'), ((3277, 3307), 'matplotlib.pyplot.plot', 'plt.plot', (['indentationfull', 'BEC'], {}), '(indentationfull, BEC)\n', (3285, 3307), True, 'import matplotlib.pyplot as plt\n')] |
from time import time
from django.core.management.base import BaseCommand
from ...activepostersranking import build_active_posters_ranking
class Command(BaseCommand):
help = "Builds active posters ranking"
def handle(self, *args, **options):
self.stdout.write("\nBuilding active posters ranking...")
start_time = time()
build_active_posters_ranking()
end_time = time() - start_time
self.stdout.write("Finished after %.2fs" % end_time)
| [
"time.time"
] | [((343, 349), 'time.time', 'time', ([], {}), '()\n', (347, 349), False, 'from time import time\n'), ((408, 414), 'time.time', 'time', ([], {}), '()\n', (412, 414), False, 'from time import time\n')] |
from __future__ import annotations
from typing import Annotated, TYPE_CHECKING
from jsonclasses import jsonclass, linkedthru
if TYPE_CHECKING:
from tests.classes.linked_customer import LinkedCustomer
@jsonclass
class LinkedProduct:
name: str
customers: Annotated[list[LinkedCustomer], linkedthru('products')]
| [
"jsonclasses.linkedthru"
] | [((299, 321), 'jsonclasses.linkedthru', 'linkedthru', (['"""products"""'], {}), "('products')\n", (309, 321), False, 'from jsonclasses import jsonclass, linkedthru\n')] |
import time
AHT10_I2CADDR = 56
CalibrateCmd = b'\xE1\x08\x00'
NormalCmd = b'\xA8\x00\x00'
MeasureCmd = b'\xAC\x33\x00'
ResetCmd = b'\xBA'
class AHT10:
def __init__(self,address=AHT10_I2CADDR,i2c=None):
if i2c is None:
raise ValueError('An I2C object is required.')
self.i2c = i2c
self.address = address
self.reset()
self.raw_data = bytearray(6)
def readStatus(self, from_buffer=False):
if from_buffer:
status = self.raw_data[0]
else:
status = self.i2c.readfrom(self.address, 1)[0]
return status
def initiateMeasurement(self):
self.i2c.writeto(self.address, MeasureCmd)
def reset(self):
self.i2c.writeto(self.address, ResetCmd)
def calibrate(self):
self.i2c.writeto(self.address, CalibrateCmd)
def readRawData(self):
self.raw_data = self.i2c.readfrom(self.address, 6)
def convertHumidity(self):
raw_humidity = ((self.raw_data[1] << 16) | (self.raw_data[2] << 8) | self.raw_data[3]) >> 4
return raw_humidity * 100 / 1048576
def convertTemperature(self):
raw_temperature = ((self.raw_data[3] & 0x0F) << 16) | (self.raw_data[4] << 8) | self.raw_data[5]
return ((200 * raw_temperature) / 1048576) - 50
def readAndConvert(self):
self.initiateMeasurement()
time.sleep_ms(100)
self.readRawData()
hum = self.convertHumidity()
temp = self.convertTemperature()
return [hum, temp]
def statusCalibrated(self, from_buffer=False):
status = self.readStatus(from_buffer)
return self.bitIsSet(status,3)
def statusBusy(self, from_buffer=False):
status = self.readStatus(from_buffer)
return self.bitIsSet(status,7)
def statusMode(self, from_buffer=False):
status = self.readStatus(from_buffer)
if (self.bitIsSet(status,6)):
return 'CMD'
elif (self.bitIsSet(status,5)):
return 'CYC'
else:
return 'NOR'
def bitIsSet(self, byte, bit):
if (byte & (1<<bit) == 0 ):
return False
else:
return True
@property
def values(self):
""" human readable values """
h, t = self.readAndConvert()
return ("{:.2f}".format(t), "{:.2f}".format(h))
| [
"time.sleep_ms"
] | [((1377, 1395), 'time.sleep_ms', 'time.sleep_ms', (['(100)'], {}), '(100)\n', (1390, 1395), False, 'import time\n')] |
from functools import wraps
import os
from flask import (
Flask,
redirect,
request,
session,
url_for,
jsonify,
send_from_directory,
make_response,
render_template
)
from visualset.api import produce_playlist
from visualset.spotify_auth import authorize_url, access_token, refresh_if_needed
template_dir = os.path.realpath(os.path.join(
os.path.dirname(__file__), 'web/templates'
))
app = Flask(__name__, template_folder=template_dir)
app.config['SECRET_KEY'] = '<KEY>'
def spotify_login_required(func):
@wraps(func)
def decorator(*args, **kwargs):
current_token = session['spotify_token']
try:
new_token = refresh_if_needed(current_token, expired_minutes=10)
except ValueError as e:
return make_response(jsonify(dict(error=str(e))), 403)
else:
session['spotify_token'] = new_token
return func(*args, **kwargs)
return decorator
# @app.route('/')
# def index():
# return send_from_directory('web', 'index.html')
# @app.route('/js/<path:path>')
# def js(path):
# return send_from_directory('web', path)
@app.route('/api/spotify/authorize')
def authorize():
return redirect(authorize_url())
@app.route('/api/spotify/callback')
def callback():
error = request.args.get('error', '')
token = access_token(request.url) if not error else {}
session['spotify_token'] = token
return render_template('spotifycallback.html', token=token, error=error)
@app.route('/api/lines', methods=['POST'])
@spotify_login_required
def submit_line():
data = request.get_json()
playlist = produce_playlist(data, audio_attribute='energy')
return jsonify(playlist)
| [
"flask.render_template",
"flask.request.args.get",
"visualset.api.produce_playlist",
"visualset.spotify_auth.authorize_url",
"flask.Flask",
"functools.wraps",
"os.path.dirname",
"flask.request.get_json",
"visualset.spotify_auth.access_token",
"visualset.spotify_auth.refresh_if_needed",
"flask.jsonify"
] | [((432, 477), 'flask.Flask', 'Flask', (['__name__'], {'template_folder': 'template_dir'}), '(__name__, template_folder=template_dir)\n', (437, 477), False, 'from flask import Flask, redirect, request, session, url_for, jsonify, send_from_directory, make_response, render_template\n'), ((554, 565), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (559, 565), False, 'from functools import wraps\n'), ((1305, 1334), 'flask.request.args.get', 'request.args.get', (['"""error"""', '""""""'], {}), "('error', '')\n", (1321, 1334), False, 'from flask import Flask, redirect, request, session, url_for, jsonify, send_from_directory, make_response, render_template\n'), ((1442, 1507), 'flask.render_template', 'render_template', (['"""spotifycallback.html"""'], {'token': 'token', 'error': 'error'}), "('spotifycallback.html', token=token, error=error)\n", (1457, 1507), False, 'from flask import Flask, redirect, request, session, url_for, jsonify, send_from_directory, make_response, render_template\n'), ((1607, 1625), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1623, 1625), False, 'from flask import Flask, redirect, request, session, url_for, jsonify, send_from_directory, make_response, render_template\n'), ((1641, 1689), 'visualset.api.produce_playlist', 'produce_playlist', (['data'], {'audio_attribute': '"""energy"""'}), "(data, audio_attribute='energy')\n", (1657, 1689), False, 'from visualset.api import produce_playlist\n'), ((1701, 1718), 'flask.jsonify', 'jsonify', (['playlist'], {}), '(playlist)\n', (1708, 1718), False, 'from flask import Flask, redirect, request, session, url_for, jsonify, send_from_directory, make_response, render_template\n'), ((379, 404), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (394, 404), False, 'import os\n'), ((1222, 1237), 'visualset.spotify_auth.authorize_url', 'authorize_url', ([], {}), '()\n', (1235, 1237), False, 'from visualset.spotify_auth import authorize_url, access_token, refresh_if_needed\n'), ((1347, 1372), 'visualset.spotify_auth.access_token', 'access_token', (['request.url'], {}), '(request.url)\n', (1359, 1372), False, 'from visualset.spotify_auth import authorize_url, access_token, refresh_if_needed\n'), ((688, 740), 'visualset.spotify_auth.refresh_if_needed', 'refresh_if_needed', (['current_token'], {'expired_minutes': '(10)'}), '(current_token, expired_minutes=10)\n', (705, 740), False, 'from visualset.spotify_auth import authorize_url, access_token, refresh_if_needed\n')] |
from datetime import datetime, timedelta
import requests
class Market:
def __init__(self, client) -> None:
self.client = client
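
    # NOTE: the date defaults below are evaluated once at import time, not on
    # every call; pass start/to explicitly in long-running processes.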
    def historical_options(self, symbol,
                           start=datetime.strftime(datetime.today() - timedelta(days=1), '%Y-%m-%d'),
                           to=datetime.strftime(datetime.today() - timedelta(days=1), '%Y-%m-%d'),
                           token=None):
        if token is None:
token = self.client.get_token()
r = requests.get('%s/historical/options/%s/%s/%s' %
(self.url(), symbol, start, to), headers={'Access-Token': token})
return r.json()
def url(self):
return '%s%s' % (self.client.config['base_url'], 'market')
| [
"datetime.datetime.today",
"datetime.timedelta"
] | [((208, 224), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (222, 224), False, 'from datetime import datetime, timedelta\n'), ((227, 244), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (236, 244), False, 'from datetime import datetime, timedelta\n'), ((280, 296), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (294, 296), False, 'from datetime import datetime, timedelta\n'), ((299, 316), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (308, 316), False, 'from datetime import datetime, timedelta\n')] |
import os
import json
import click
import logging
import itertools
import pandas as pd
from tqdm import tqdm
from collections import OrderedDict
from more_itertools import ichunked
from twarc.expansions import flatten
log = logging.getLogger("twarc")
DEFAULT_TWEET_COLUMNS = """__twarc.retrieved_at
__twarc.url
__twarc.version
attachments.media
attachments.media_keys
attachments.poll.duration_minutes
attachments.poll.end_datetime
attachments.poll.id
attachments.poll.options
attachments.poll.voting_status
attachments.poll_ids
author.created_at
author.description
author.entities.description.cashtags
author.entities.description.hashtags
author.entities.description.mentions
author.entities.description.urls
author.entities.url.urls
author.id
author.location
author.name
author.pinned_tweet_id
author.profile_image_url
author.protected
author.public_metrics.followers_count
author.public_metrics.following_count
author.public_metrics.listed_count
author.public_metrics.tweet_count
author.url
author.username
author.verified
author.withheld.country_codes
author_id
context_annotations
conversation_id
created_at
entities.annotations
entities.cashtags
entities.hashtags
entities.mentions
entities.urls
geo.coordinates.coordinates
geo.coordinates.type
geo.country
geo.country_code
geo.full_name
geo.geo.bbox
geo.geo.type
geo.id
geo.name
geo.place_id
geo.place_type
id
in_reply_to_user.created_at
in_reply_to_user.description
in_reply_to_user.entities.description.cashtags
in_reply_to_user.entities.description.hashtags
in_reply_to_user.entities.description.mentions
in_reply_to_user.entities.description.urls
in_reply_to_user.entities.url.urls
in_reply_to_user.id
in_reply_to_user.location
in_reply_to_user.name
in_reply_to_user.pinned_tweet_id
in_reply_to_user.profile_image_url
in_reply_to_user.protected
in_reply_to_user.public_metrics.followers_count
in_reply_to_user.public_metrics.following_count
in_reply_to_user.public_metrics.listed_count
in_reply_to_user.public_metrics.tweet_count
in_reply_to_user.url
in_reply_to_user.username
in_reply_to_user.verified
in_reply_to_user.withheld.country_codes
in_reply_to_user_id
lang
possibly_sensitive
public_metrics.like_count
public_metrics.quote_count
public_metrics.reply_count
public_metrics.retweet_count
referenced_tweets
reply_settings
source
text
type
withheld.copyright
withheld.country_codes""".split(
"\n"
)
DEFAULT_USERS_COLUMNS = """__twarc.retrieved_at
__twarc.url
__twarc.version
created_at
description
entities.description.cashtags
entities.description.hashtags
entities.description.mentions
entities.description.urls
entities.url.urls
id
location
name
pinned_tweet_id
pinned_tweet
profile_image_url
protected
public_metrics.followers_count
public_metrics.following_count
public_metrics.listed_count
public_metrics.tweet_count
url
username
verified
withheld.country_codes""".split(
"\n"
)
class CSVConverter:
def __init__(
self,
infile,
outfile,
json_encode_all=False,
json_encode_lists=True,
json_encode_text=False,
inline_referenced_tweets=True,
inline_pinned_tweets=False,
allow_duplicates=False,
input_tweet_columns=True,
input_users_columns=False,
input_columns="",
output_columns="",
batch_size=5000,
):
self.infile = infile
self.outfile = outfile
self.json_encode_all = json_encode_all
self.json_encode_lists = json_encode_lists
self.json_encode_text = json_encode_text
self.inline_referenced_tweets = inline_referenced_tweets
self.inline_pinned_tweets = inline_pinned_tweets
self.allow_duplicates = allow_duplicates
self.batch_size = batch_size
self.dataset_ids = set()
self.std = infile.name == "<stdin>" or outfile.name == "<stdout>"
self.progress = tqdm(
unit="B",
unit_scale=True,
unit_divisor=1024,
total=os.stat(infile.name).st_size if not self.std else 1,
disable=self.std,
)
self.columns = list()
if input_tweet_columns:
self.columns.extend(DEFAULT_TWEET_COLUMNS)
if input_users_columns:
self.columns.extend(DEFAULT_USERS_COLUMNS)
if input_columns:
            self.columns.extend(input_columns.split(","))
self.output_columns = (
output_columns.split(",") if output_columns else self.columns
)
self.counts = {
"lines": 0,
"tweets": 0,
"referenced_tweets": 0,
"parse_errors": 0,
"duplicates": 0,
"rows": 0,
"input_columns": len(self.columns),
"output_columns": len(self.output_columns),
}
def _read_lines(self):
"""
        Generator for reading a file line by line. The progress bar is based on file size.
"""
line = self.infile.readline()
while line:
self.counts["lines"] = self.counts["lines"] + 1
if line.strip() != "":
try:
o = json.loads(line)
yield o
except Exception as ex:
self.counts["parse_errors"] = self.counts["parse_errors"] + 1
log.error(f"Error when trying to parse json: '{line}' {ex}")
if not self.std:
self.progress.update(self.infile.tell() - self.progress.n)
line = self.infile.readline()
def _handle_formats(self, batch):
"""
        Handle different types of JSON formats, generating 1 tweet at a time.
        A batch is a number of lines from a JSON file; these can be full pages
        of API responses or individual tweets.
"""
for item in batch:
# if it has a "data" key ensure data it is flattened
if "data" in item:
# flatten a list of tweets
if isinstance(item["data"], list):
for i in flatten(item)["data"]:
yield i
# flatten a single tweet, eg, from stream
else:
yield flatten(item)["data"]
else:
# this assumes the data is flattened
yield item
def _inline_referenced_tweets(self, tweet):
"""
Insert referenced tweets into the main CSV
"""
if "referenced_tweets" in tweet and self.inline_referenced_tweets:
for referenced_tweet in tweet["referenced_tweets"]:
# extract the referenced tweet as a new row
self.counts["referenced_tweets"] = self.counts["referenced_tweets"] + 1
yield referenced_tweet
# leave behind the reference, but not the full tweet
tweet["referenced_tweets"] = [
{"type": r["type"], "id": r["id"]} for r in tweet["referenced_tweets"]
]
# Deal with pinned tweets for user datasets:
# Todo: This is not fully implemented!
if self.inline_pinned_tweets:
if "pinned_tweet" in tweet:
                # extract the pinned tweet as a new row
                tweet["pinned_tweet"]["type"] = "pinned_tweet"
                self.counts["referenced_tweets"] = self.counts["referenced_tweets"] + 1
                yield tweet["pinned_tweet"]
# pinned_tweet_id remains:
tweet.pop("pinned_tweet")
yield tweet
def _process_tweets(self, tweets):
"""
Process a single tweet before adding it to the dataframe.
ToDo: Drop columns and dedupe etc here.
"""
for tweet in tweets:
# Order the fields in the json, because JSON key order isn't guaranteed.
# Needed so that different batches won't produce different ordered columns
json_keys = sorted(tweet.keys())
selected_field_order = list()
# Opinion: always put in id,created_at,text first, and then the rest
if "id" in json_keys:
selected_field_order.append(json_keys.pop(json_keys.index("id")))
if "created_at" in json_keys:
selected_field_order.append(
json_keys.pop(json_keys.index("created_at"))
)
if "text" in json_keys:
selected_field_order.append(json_keys.pop(json_keys.index("text")))
selected_field_order.extend(json_keys)
tweet = OrderedDict((k, tweet[k]) for k in selected_field_order)
self.counts["tweets"] = self.counts["tweets"] + 1
if tweet["id"] in self.dataset_ids:
self.counts["duplicates"] = self.counts["duplicates"] + 1
if self.allow_duplicates:
yield tweet
else:
if tweet["id"] not in self.dataset_ids:
yield tweet
self.dataset_ids.add(tweet["id"])
def _process_dataframe(self, _df):
# (Optional) json encode all
if self.json_encode_all:
_df = _df.applymap(json.dumps, na_action="ignore")
else:
# (Optional) text escape for any text fields
if self.json_encode_text:
_df = _df.applymap(
lambda x: json.dumps(x) if type(x) is str else x,
na_action="ignore",
)
else:
# Mandatory newline escape to prevent breaking csv format:
_df = _df.applymap(
lambda x: x.replace("\r", "").replace("\n", r"\n")
if type(x) is str
else x,
na_action="ignore",
)
# (Optional) json for lists
if self.json_encode_lists:
_df = _df.applymap(
lambda x: json.dumps(x) if pd.api.types.is_list_like(x) else x,
na_action="ignore",
)
return _df
def _process_batch(self, batch):
# (Optional) append referenced tweets as new rows
tweet_batch = itertools.chain.from_iterable(
self._process_tweets(self._inline_referenced_tweets(tweet))
for tweet in self._handle_formats(batch)
)
_df = pd.json_normalize([tweet for tweet in tweet_batch], errors="ignore")
# Check for mismatched columns
if len(_df.columns) > len(self.columns):
diff = set(_df.columns) - set(self.columns)
click.echo(
click.style(
f"💔 ERROR: Unexpected Data: \n\"{','.join(diff)}\"\n to fix, add these with --input-columns. Skipping entire batch of {len(_df)} tweets!",
fg="red",
),
err=True,
)
return pd.DataFrame(columns=self.columns)
_df = _df.reindex(columns=self.columns)
_df = self._process_dataframe(_df)
return _df
def _write_output(self, _df, first_batch):
"""
Write out the dataframe chunk by chunk
todo: take parameters from commandline for optional output formats.
"""
if first_batch:
mode = "w"
header = True
else:
mode = "a+"
header = False
self.counts["rows"] = self.counts["rows"] + len(_df)
_df.to_csv(
self.outfile,
mode=mode,
columns=self.output_columns,
index=False,
header=header,
) # todo: (Optional) arguments for to_csv
def process(self):
"""
Process a file containing JSON into a CSV
"""
# Flag for writing header & appending to CSV file
first_batch = True
for batch in ichunked(self._read_lines(), self.batch_size):
self._write_output(self._process_batch(batch), first_batch)
first_batch = False
self.progress.close()
@click.command()
@click.argument("infile", type=click.File("r"), default="-")
@click.argument("outfile", type=click.File("w"), default="-")
@click.option(
"--json-encode-all/--no-json-encode-all",
default=False,
help="JSON encode / escape all fields. Default: no",
)
@click.option(
"--json-encode-lists/--no-json-encode-lists",
default=True,
help="JSON encode / escape lists. Default: yes",
)
@click.option(
"--json-encode-text/--no-json-encode-text",
default=False,
help="JSON encode / escape text fields. Default: no",
)
@click.option(
"--inline-referenced-tweets/--no-inline-referenced-tweets",
default=True,
help="Output referenced tweets inline as separate rows. Default: yes",
)
@click.option(
"--inline-pinned-tweets/--no-pinned-tweets",
default=False,
help="If converting a user dataset, output pinned tweets inline as separate rows. Default: no",
)
@click.option(
"--allow-duplicates/--no-allow-duplicates",
default=False,
help="Remove duplicate tweets by ID. Default: yes",
)
@click.option(
"--input-tweet-columns/--no-input-tweet-columns",
default=True,
help="Use a default list of tweet column names in the input. Only modify this if you have processed the json yourself. Default: yes",
)
@click.option(
"--input-users-columns/--no-input-users-columns",
default=False,
help="Use a default list of user column names in the input. Only modify this if you have a dataset of users as opposed to tweets. Default: no",
)
@click.option(
"--input-columns",
default="",
help="Manually specify input columns. Comma separated string. Default is blank, no extra input columns",
)
@click.option(
"--output-columns",
default="",
help="Specify what columns to output in the CSV. Default is all input columns.",
)
@click.option(
"--batch-size",
type=int,
default=5000,
help="How many lines to process per chunk. Default is 5000",
)
@click.option(
"--show-stats/--no-show-stats",
default=True,
help="Show stats about the dataset on completion. Default is show. Always hidden if you're using stdin / stdout pipes.",
)
def csv(
infile,
outfile,
json_encode_all,
json_encode_lists,
json_encode_text,
inline_referenced_tweets,
inline_pinned_tweets,
allow_duplicates,
input_tweet_columns,
input_users_columns,
input_columns,
output_columns,
batch_size,
show_stats,
):
"""
Convert tweets to CSV.
"""
if infile.name == outfile.name:
click.echo(
click.style(
f"💔 Cannot convert files in-place, specify a different output file!",
fg="red",
),
err=True,
)
return
converter = CSVConverter(
infile,
outfile,
json_encode_all,
json_encode_lists,
json_encode_text,
inline_referenced_tweets,
inline_pinned_tweets,
allow_duplicates,
input_tweet_columns,
input_users_columns,
input_columns,
output_columns,
batch_size,
)
converter.process()
errors = (
click.style(
f"{converter.counts['parse_errors']} failed to parse. See twarc.log for details.\n",
fg="red",
)
if converter.counts["parse_errors"] > 0
else ""
)
if show_stats and outfile.name != "<stdout>":
click.echo(
f"\nℹ️\n"
+ f"Read {converter.counts['tweets']} tweets from {converter.counts['lines']} lines. \n"
+ f"{converter.counts['referenced_tweets']} were referenced tweets, {converter.counts['duplicates']} were duplicates.\n"
+ errors
+ f"Wrote {converter.counts['rows']} rows and wrote {converter.counts['output_columns']} of {converter.counts['input_columns']} input columns in the CSV.\n",
err=True,
)
| [
"logging.getLogger",
"collections.OrderedDict",
"json.loads",
"pandas.json_normalize",
"pandas.api.types.is_list_like",
"click.option",
"click.style",
"click.File",
"json.dumps",
"click.echo",
"twarc.expansions.flatten",
"pandas.DataFrame",
"os.stat",
"click.command"
] | [((225, 251), 'logging.getLogger', 'logging.getLogger', (['"""twarc"""'], {}), "('twarc')\n", (242, 251), False, 'import logging\n'), ((11998, 12013), 'click.command', 'click.command', ([], {}), '()\n', (12011, 12013), False, 'import click\n'), ((12138, 12265), 'click.option', 'click.option', (['"""--json-encode-all/--no-json-encode-all"""'], {'default': '(False)', 'help': '"""JSON encode / escape all fields. Default: no"""'}), "('--json-encode-all/--no-json-encode-all', default=False, help=\n 'JSON encode / escape all fields. Default: no')\n", (12150, 12265), False, 'import click\n'), ((12277, 12402), 'click.option', 'click.option', (['"""--json-encode-lists/--no-json-encode-lists"""'], {'default': '(True)', 'help': '"""JSON encode / escape lists. Default: yes"""'}), "('--json-encode-lists/--no-json-encode-lists', default=True,\n help='JSON encode / escape lists. Default: yes')\n", (12289, 12402), False, 'import click\n'), ((12415, 12544), 'click.option', 'click.option', (['"""--json-encode-text/--no-json-encode-text"""'], {'default': '(False)', 'help': '"""JSON encode / escape text fields. Default: no"""'}), "('--json-encode-text/--no-json-encode-text', default=False,\n help='JSON encode / escape text fields. Default: no')\n", (12427, 12544), False, 'import click\n'), ((12557, 12723), 'click.option', 'click.option', (['"""--inline-referenced-tweets/--no-inline-referenced-tweets"""'], {'default': '(True)', 'help': '"""Output referenced tweets inline as separate rows. Default: yes"""'}), "('--inline-referenced-tweets/--no-inline-referenced-tweets',\n default=True, help=\n 'Output referenced tweets inline as separate rows. Default: yes')\n", (12569, 12723), False, 'import click\n'), ((12731, 12913), 'click.option', 'click.option', (['"""--inline-pinned-tweets/--no-pinned-tweets"""'], {'default': '(False)', 'help': '"""If converting a user dataset, output pinned tweets inline as separate rows. Default: no"""'}), "('--inline-pinned-tweets/--no-pinned-tweets', default=False,\n help=\n 'If converting a user dataset, output pinned tweets inline as separate rows. Default: no'\n )\n", (12743, 12913), False, 'import click\n'), ((12916, 13043), 'click.option', 'click.option', (['"""--allow-duplicates/--no-allow-duplicates"""'], {'default': '(False)', 'help': '"""Remove duplicate tweets by ID. Default: yes"""'}), "('--allow-duplicates/--no-allow-duplicates', default=False,\n help='Remove duplicate tweets by ID. Default: yes')\n", (12928, 13043), False, 'import click\n'), ((13056, 13280), 'click.option', 'click.option', (['"""--input-tweet-columns/--no-input-tweet-columns"""'], {'default': '(True)', 'help': '"""Use a default list of tweet column names in the input. Only modify this if you have processed the json yourself. Default: yes"""'}), "('--input-tweet-columns/--no-input-tweet-columns', default=True,\n help=\n 'Use a default list of tweet column names in the input. Only modify this if you have processed the json yourself. Default: yes'\n )\n", (13068, 13280), False, 'import click\n'), ((13283, 13519), 'click.option', 'click.option', (['"""--input-users-columns/--no-input-users-columns"""'], {'default': '(False)', 'help': '"""Use a default list of user column names in the input. Only modify this if you have a dataset of users as opposed to tweets. Default: no"""'}), "('--input-users-columns/--no-input-users-columns', default=\n False, help=\n 'Use a default list of user column names in the input. Only modify this if you have a dataset of users as opposed to tweets. Default: no'\n    )\n", (13295, 13519), False, 'import click\n'), ((13521, 13679), 'click.option', 'click.option', (['"""--input-columns"""'], {'default': '""""""', 'help': '"""Manually specify input columns. Comma separated string. Default is blank, no extra input columns"""'}), "('--input-columns', default='', help=\n 'Manually specify input columns. Comma separated string. Default is blank, no extra input columns'\n )\n", (13533, 13679), False, 'import click\n'), ((13686, 13816), 'click.option', 'click.option', (['"""--output-columns"""'], {'default': '""""""', 'help': '"""Specify what columns to output in the CSV. Default is all input columns."""'}), "('--output-columns', default='', help=\n 'Specify what columns to output in the CSV. Default is all input columns.')\n", (13698, 13816), False, 'import click\n'), ((13828, 13946), 'click.option', 'click.option', (['"""--batch-size"""'], {'type': 'int', 'default': '(5000)', 'help': '"""How many lines to process per chunk. Default is 5000"""'}), "('--batch-size', type=int, default=5000, help=\n 'How many lines to process per chunk. Default is 5000')\n", (13840, 13946), False, 'import click\n'), ((13962, 14151), 'click.option', 'click.option', (['"""--show-stats/--no-show-stats"""'], {'default': '(True)', 'help': '"""Show stats about the dataset on completion. Default is show. Always hidden if you\'re using stdin / stdout pipes."""'}), '(\'--show-stats/--no-show-stats\', default=True, help=\n "Show stats about the dataset on completion. Default is show. Always hidden if you\'re using stdin / stdout pipes."\n )\n', (13974, 14151), False, 'import click\n'), ((10317, 10385), 'pandas.json_normalize', 'pd.json_normalize', (['[tweet for tweet in tweet_batch]'], {'errors': '"""ignore"""'}), "([tweet for tweet in tweet_batch], errors='ignore')\n", (10334, 10385), True, 'import pandas as pd\n'), ((15169, 15285), 'click.style', 'click.style', (['f"""{converter.counts[\'parse_errors\']} failed to parse. See twarc.log for details.\n"""'], {'fg': '"""red"""'}), '(\n f"{converter.counts[\'parse_errors\']} failed to parse. See twarc.log for details.\\n"\n , fg=\'red\')\n', (15180, 15285), False, 'import click\n'), ((15440, 15883), 'click.echo', 'click.echo', (['(f\'\\nℹ️\\n\' +\n f"""Read {converter.counts[\'tweets\']} tweets from {converter.counts[\'lines\']} lines. \n"""\n +\n f"""{converter.counts[\'referenced_tweets\']} were referenced tweets, {converter.counts[\'duplicates\']} were duplicates.\n"""\n + errors +\n f"""Wrote {converter.counts[\'rows\']} rows and wrote {converter.counts[\'output_columns\']} of {converter.counts[\'input_columns\']} input columns in the CSV.\n"""\n )'], {'err': '(True)'}), '(f\'\\nℹ️\\n\' +\n f"""Read {converter.counts[\'tweets\']} tweets from {converter.counts[\'lines\']} lines. \n"""\n +\n f"""{converter.counts[\'referenced_tweets\']} were referenced tweets, {converter.counts[\'duplicates\']} were duplicates.\n"""\n + errors +\n f"""Wrote {converter.counts[\'rows\']} rows and wrote {converter.counts[\'output_columns\']} of {converter.counts[\'input_columns\']} input columns in the CSV.\n"""\n , err=True)\n', (15450, 15883), False, 'import click\n'), ((12045, 12060), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (12055, 12060), False, 'import click\n'), ((12107, 12122), 'click.File', 'click.File', (['"""w"""'], {}), "('w')\n", (12117, 12122), False, 'import click\n'), ((8509, 8565), 'collections.OrderedDict', 'OrderedDict', (['((k, tweet[k]) for k in selected_field_order)'], {}), '((k, tweet[k]) for k in selected_field_order)\n', (8520, 8565), False, 'from collections import OrderedDict\n'), ((10851, 10885), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'self.columns'}), '(columns=self.columns)\n', (10863, 10885), True, 'import pandas as pd\n'), ((14571, 14671), 'click.style', 'click.style', (['f"""💔 Cannot convert files in-place, specify a different output file!"""'], {'fg': '"""red"""'}), "(\n f'💔 Cannot convert files in-place, specify a different output file!',\n fg='red')\n", (14582, 14671), False, 'import click\n'), ((5111, 5127), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (5121, 5127), False, 'import json\n'), ((3958, 3978), 'os.stat', 'os.stat', (['infile.name'], {}), '(infile.name)\n', (3965, 3978), False, 'import os\n'), ((6008, 6021), 'twarc.expansions.flatten', 'flatten', (['item'], {}), '(item)\n', (6015, 6021), False, 'from twarc.expansions import flatten\n'), ((6169, 6182), 'twarc.expansions.flatten', 'flatten', (['item'], {}), '(item)\n', (6176, 6182), False, 'from twarc.expansions import flatten\n'), ((9319, 9332), 'json.dumps', 'json.dumps', (['x'], {}), '(x)\n', (9329, 9332), False, 'import json\n'), ((9903, 9931), 'pandas.api.types.is_list_like', 'pd.api.types.is_list_like', (['x'], {}), '(x)\n', (9928, 9931), True, 'import pandas as pd\n'), ((9886, 9899), 'json.dumps', 'json.dumps', (['x'], {}), '(x)\n', (9896, 9899), False, 'import json\n')]
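The converter above streams newline-delimited JSON in fixed-size batches instead of loading everything at once. A minimal sketch of the same pattern, assuming more_itertools is installed (its ichunked is the batching helper the converter itself uses) and one JSON tweet per input line; column alignment is simplified away:
import json
import pandas as pd
from more_itertools import ichunked

def jsonl_to_csv(in_path, out_path, batch_size=5000):
    with open(in_path) as infile:
        first = True
        for batch in ichunked(infile, batch_size):
            records = [json.loads(line) for line in batch if line.strip()]
            df = pd.json_normalize(records)
            # Header only on the first chunk; later chunks are appended.
            # (The real converter also reindexes every chunk to a fixed
            # column list so the chunks stay aligned.)
            df.to_csv(out_path, mode="w" if first else "a", header=first, index=False)
            first = False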
from functools import lru_cache
from importlib import import_module
from mimetypes import guess_type
import os.path
import unicodedata
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils.encoding import force_str
from django.utils.http import urlquote
@lru_cache(maxsize=None)
def _get_sendfile():
backend = getattr(settings, 'SENDFILE_BACKEND', None)
if not backend:
raise ImproperlyConfigured('You must specify a value for SENDFILE_BACKEND')
module = import_module(backend)
return module.sendfile
def sendfile(request, filename, attachment=False, attachment_filename=None,
mimetype=None, encoding=None):
'''
    Create a response to send a file using the backend configured in SENDFILE_BACKEND.
Filename is the absolute path to the file to send.
If attachment is True the content-disposition header will be set accordingly.
This will typically prompt the user to download the file, rather
than view it. But even if False, the user may still be prompted, depending
on the browser capabilities and configuration.
The content-disposition filename depends on the value of attachment_filename:
None (default): Same as filename
False: No content-disposition filename
String: Value used as filename
If no mimetype or encoding are specified, then they will be guessed via the
filename (using the standard python mimetypes module)
'''
_sendfile = _get_sendfile()
if not os.path.exists(filename):
raise Http404('"%s" does not exist' % filename)
guessed_mimetype, guessed_encoding = guess_type(filename)
if mimetype is None:
if guessed_mimetype:
mimetype = guessed_mimetype
else:
mimetype = 'application/octet-stream'
response = _sendfile(request, filename, mimetype=mimetype)
# Suggest to view (inline) or download (attachment) the file
parts = ['attachment' if attachment else 'inline']
if attachment_filename is None:
attachment_filename = os.path.basename(filename)
if attachment_filename:
attachment_filename = force_str(attachment_filename)
ascii_filename = unicodedata.normalize('NFKD', attachment_filename)
ascii_filename = ascii_filename.encode('ascii', 'ignore').decode()
parts.append('filename="%s"' % ascii_filename)
if ascii_filename != attachment_filename:
quoted_filename = urlquote(attachment_filename)
parts.append('filename*=UTF-8\'\'%s' % quoted_filename)
response['Content-Disposition'] = '; '.join(parts)
response['Content-length'] = os.path.getsize(filename)
response['Content-Type'] = mimetype
if not encoding:
encoding = guessed_encoding
if encoding:
response['Content-Encoding'] = encoding
return response
| [
"importlib.import_module",
"django.utils.encoding.force_str",
"django.utils.http.urlquote",
"unicodedata.normalize",
"mimetypes.guess_type",
"functools.lru_cache",
"django.core.exceptions.ImproperlyConfigured",
"django.http.Http404"
] | [((343, 366), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (352, 366), False, 'from functools import lru_cache\n'), ((563, 585), 'importlib.import_module', 'import_module', (['backend'], {}), '(backend)\n', (576, 585), False, 'from importlib import import_module\n'), ((1687, 1707), 'mimetypes.guess_type', 'guess_type', (['filename'], {}), '(filename)\n', (1697, 1707), False, 'from mimetypes import guess_type\n'), ((480, 549), 'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (['"""You must specify a value for SENDFILE_BACKEND"""'], {}), "('You must specify a value for SENDFILE_BACKEND')\n", (500, 549), False, 'from django.core.exceptions import ImproperlyConfigured\n'), ((1603, 1644), 'django.http.Http404', 'Http404', (['(\'"%s" does not exist\' % filename)'], {}), '(\'"%s" does not exist\' % filename)\n', (1610, 1644), False, 'from django.http import Http404\n'), ((2204, 2234), 'django.utils.encoding.force_str', 'force_str', (['attachment_filename'], {}), '(attachment_filename)\n', (2213, 2234), False, 'from django.utils.encoding import force_str\n'), ((2260, 2310), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKD"""', 'attachment_filename'], {}), "('NFKD', attachment_filename)\n", (2281, 2310), False, 'import unicodedata\n'), ((2522, 2551), 'django.utils.http.urlquote', 'urlquote', (['attachment_filename'], {}), '(attachment_filename)\n', (2530, 2551), False, 'from django.utils.http import urlquote\n')] |
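A sketch of how the sendfile helper above is typically called from a view; the Document model and lookup are hypothetical, and SENDFILE_BACKEND must point at a module exposing a compatible sendfile callable:
# settings.py (assumed): SENDFILE_BACKEND = "myapp.sendfile_backends.xsendfile"
from django.contrib.auth.decorators import login_required
from django.shortcuts import get_object_or_404

@login_required
def download(request, pk):
    doc = get_object_or_404(Document, pk=pk, owner=request.user)  # hypothetical model
    return sendfile(request, doc.file.path, attachment=True,
                    attachment_filename="report.pdf")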
# coding=utf-8
"""
http://www.johnwittenauer.net/machine-learning-exercises-in-python-part-1/
"""
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from util.lib import computeCost, gradient_descent
path = os.path.join(os.getcwd(), "data", "ex1data1.txt")
data = pd.read_csv(path, header=None, names=['Population', 'Profit'])
print(data.head())
print(data.describe())
data.plot(kind='scatter', x="Population", y="Profit", figsize=(12, 8))
# plt.show()
# append a ones column to the front of the data set
data.insert(0, 'Ones', 1)
print(data.head())
# set X (training data) and y (target variable)
cols = data.shape[1]
X = data.iloc[:, 0:cols - 1]
y = data.iloc[:, cols - 1:cols]
# convert from data frames to numpy matrices
X = np.matrix(X.values)
y = np.matrix(y.values)
# theta = np.matrix(np.array([0,0]))
theta = np.matrix(np.zeros(X.shape[1]))
theta = theta.T
print(X.shape, theta.shape, y.shape)
error = computeCost(X, y, theta)
print("error:", error)
iters = 20000
g, cost, final_cost = gradient_descent(X, y, theta, 0.01, iters)
print(g)
print(final_cost)
fig, ax = plt.subplots(figsize=(12,8))
ax.plot(X[:,1], (g[0, 0] + (g[1, 0] * X[:,1])), 'r', label='Prediction')
ax.scatter(data.Population, data.Profit, label='Traning Data')
ax.legend(loc=2)
ax.set_xlabel('Population')
ax.set_ylabel('Profit')
ax.set_title('Predicted Profit vs. Population Size')
fig, ax = plt.subplots(figsize=(12,8))
ax.plot(np.arange(iters), cost, 'r')
ax.set_xlabel('Iterations')
ax.set_ylabel('Cost')
ax.set_title('Error vs. Training Epoch')
plt.show()
| [
"pandas.read_csv",
"util.lib.computeCost",
"os.getcwd",
"util.lib.gradient_descent",
"numpy.zeros",
"numpy.matrix",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((282, 344), 'pandas.read_csv', 'pd.read_csv', (['path'], {'header': 'None', 'names': "['Population', 'Profit']"}), "(path, header=None, names=['Population', 'Profit'])\n", (293, 344), True, 'import pandas as pd\n'), ((756, 775), 'numpy.matrix', 'np.matrix', (['X.values'], {}), '(X.values)\n', (765, 775), True, 'import numpy as np\n'), ((780, 799), 'numpy.matrix', 'np.matrix', (['y.values'], {}), '(y.values)\n', (789, 799), True, 'import numpy as np\n'), ((939, 963), 'util.lib.computeCost', 'computeCost', (['X', 'y', 'theta'], {}), '(X, y, theta)\n', (950, 963), False, 'from util.lib import computeCost, gradient_descent\n'), ((1025, 1067), 'util.lib.gradient_descent', 'gradient_descent', (['X', 'y', 'theta', '(0.01)', 'iters'], {}), '(X, y, theta, 0.01, iters)\n', (1041, 1067), False, 'from util.lib import computeCost, gradient_descent\n'), ((1107, 1136), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (1119, 1136), True, 'import matplotlib.pyplot as plt\n'), ((1407, 1436), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (1419, 1436), True, 'import matplotlib.pyplot as plt\n'), ((1564, 1574), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1572, 1574), True, 'import matplotlib.pyplot as plt\n'), ((240, 251), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (249, 251), False, 'import os\n'), ((855, 875), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (863, 875), True, 'import numpy as np\n'), ((1444, 1460), 'numpy.arange', 'np.arange', (['iters'], {}), '(iters)\n', (1453, 1460), True, 'import numpy as np\n')] |
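util.lib is not shown; a plausible sketch of computeCost and gradient_descent matching the call sites above (mean-squared-error cost, with the three return values being the fitted theta, the per-iteration cost history, and the final cost):
import numpy as np

def computeCost(X, y, theta):
    # J(theta) = 1 / (2m) * sum((X @ theta - y) ** 2)
    m = y.shape[0]
    err = X @ theta - y
    return float(err.T @ err) / (2 * m)

def gradient_descent(X, y, theta, alpha, iters):
    m = y.shape[0]
    cost = np.zeros(iters)
    for i in range(iters):
        theta = theta - (alpha / m) * (X.T @ (X @ theta - y))
        cost[i] = computeCost(X, y, theta)
    return theta, cost, cost[-1]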
import numpy as np
import time
if __name__ == '__main__':
primelist = np.array([2])
pcount = 1
pnum = 1
stime = time.time()
while pcount!=10001:
primefound = 0
pnum+=2
for i in range(pcount):
if pnum%primelist[i]==0:
break
elif primelist[i]>=np.sqrt(pnum):
primelist = np.append(primelist,pnum)
primefound = 1
break
if primefound == 1:
pcount+=1
print("10001st prime number is %s"%(pnum))
print("Time taken :: %.3f seconds"%(time.time()-stime)) | [
"numpy.append",
"numpy.array",
"numpy.sqrt",
"time.time"
] | [((76, 89), 'numpy.array', 'np.array', (['[2]'], {}), '([2])\n', (84, 89), True, 'import numpy as np\n'), ((124, 135), 'time.time', 'time.time', ([], {}), '()\n', (133, 135), False, 'import time\n'), ((487, 498), 'time.time', 'time.time', ([], {}), '()\n', (496, 498), False, 'import time\n'), ((278, 291), 'numpy.sqrt', 'np.sqrt', (['pnum'], {}), '(pnum)\n', (285, 291), True, 'import numpy as np\n'), ((310, 336), 'numpy.append', 'np.append', (['primelist', 'pnum'], {}), '(primelist, pnum)\n', (319, 336), True, 'import numpy as np\n')] |
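A plain-list variant of the same sqrt-bounded trial division; np.append copies the whole array on every insert, so a Python list is usually faster here (math.isqrt needs Python 3.8+):
import math

def nth_prime(n):
    primes = [2]
    candidate = 1
    while len(primes) < n:
        candidate += 2
        limit = math.isqrt(candidate)
        if all(candidate % p for p in primes if p <= limit):
            primes.append(candidate)
    return primes[-1]

print(nth_prime(10001))  # 104743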
import inviwopy
# import inviwopy.glm as glm
import numpy as np
import h5py
import time
from envisionpy.utils.exceptions import *
from .baseNetworks.LinePlotSubnetwork import LinePlotSubnetwork
class DensityOfStates(LinePlotSubnetwork):
'''
Manages a subnetwork for density of states (DoS) visualisation.
    Uses a modified LinePlotSubnetwork. Dataframes are not read directly from
    the HDF5 file but are processed before being sent to the line plot processor.
'''
def __init__(self, inviwoApp, hdf5_path, hdf5_outport, xpos=0, ypos=0):
LinePlotSubnetwork.__init__(self, inviwoApp, hdf5_path, hdf5_outport, xpos, ypos, False)
self.modify_network(hdf5_path, hdf5_outport, xpos, ypos)
self.totalEnabled = True
self.partialEnabled = True
self.set_title('DOS [1/(eV * unit cell)]')
@staticmethod
def valid_hdf5(hdf5_file):
return '/DOS' in hdf5_file and '/DOS/Partial' in hdf5_file and '/DOS/Total' in hdf5_file
def get_ui_data(self):
return []
def toggle_total(self, enable):
self.totalEnabled = enable
totalCollector = self.get_processor("TotalCollector")
collector = self.get_processor("Collector")
if enable:
self.network.addConnection(totalCollector.getOutport("dataframeOutport"), collector.getInport("dataframeMultiInport"))
else:
self.network.removeConnection(totalCollector.getOutport("dataframeOutport"), collector.getInport("dataframeMultiInport"))
def toggle_partial(self, enable):
self.partialEnabled = enable
partialCollector = self.get_processor("PartialCollector")
collector = self.get_processor("Collector")
if enable:
self.network.addConnection(partialCollector.getOutport("dataframeOutport"), collector.getInport("dataframeMultiInport"))
else:
self.network.removeConnection(partialCollector.getOutport("dataframeOutport"), collector.getInport("dataframeMultiInport"))
def get_n_partials(self):
return self.get_processor("Select Partial Band").intProperty.maxValue
def set_partial_selection(self, n):
self.get_processor("Select Partial Band").intProperty.value = n
def get_partial_selection(self):
return self.get_processor("Select Partial Band").intProperty.value
def modify_network(self, hdf5_path, hdf5_outport, xpos, ypos):
self.network.lock()
# Remove default hdf5 to function section.
self.remove_processor('ChildCollector')
self.remove_processor('h5ToFunction')
self.remove_processor('dataFrame')
pathSelection = self.get_processor('PathSelection')
with h5py.File(hdf5_path,"r") as h5:
total_list = []
for key in h5["/DOS/Total"].keys():
if key == "Energy": continue
total_list.append(key)
total_list.sort(key=lambda item: "".join(reversed(item)))
down_totals = [x for x in total_list if x.endswith("(dwn)")]
up_totals = [x for x in total_list if x.endswith("(up)")]
has_partial = "/DOS/Partial" in h5
if has_partial:
n_partials = len(h5['/DOS/Partial'])
partial_list = []
for key in h5["/DOS/Partial/0"]:
if key == "Energy": continue
partial_list.append(key)
partial_list.sort(key=lambda item: "".join(reversed(item)))
down_partials = [x for x in partial_list if x.endswith("(dwn)")]
up_partials = [x for x in partial_list if x.endswith("(up)")]
to_func_processors = []
# Setup totals
totalSelection = self.add_processor("org.inviwo.hdf5.PathSelection", "Select Total", xpos+7, ypos)
self.network.addConnection(pathSelection.getOutport('outport'), totalSelection.getInport('inport'))
totalCollector = self.add_processor("org.inviwo.DataFrameCollector", "TotalCollector", xpos+7, ypos+15)
xpos_tmp = xpos + 7
for n, key in enumerate(down_totals):
toFunction = self.add_processor("org.inviwo.HDF5ToFunction", key, xpos_tmp, ypos+6)
to_func_processors.append(toFunction)
self.network.addConnection(totalSelection.getOutport('outport'), toFunction.getInport('hdf5HandleFlatMultiInport'))
toFunction.yNamePrependParentsProperty.value = 1
addOperation = self.add_processor("org.inviwo.FunctionOperationNary", "Total Down Add {}".format(n), xpos_tmp, ypos+9)
self.network.addConnection(toFunction.getOutport("functionVectorOutport"), addOperation.getInport("functionFlatMultiInport"))
addOperation.operationProperty.value = 'add'
negateOperation = self.add_processor("org.inviwo.FunctionOperationUnary", "Total Down Negate {}".format(n), xpos_tmp, ypos+12)
self.network.addConnection(addOperation.getOutport("dataframeOutport"), negateOperation.getInport("dataframeInport"))
self.network.addConnection(negateOperation.getOutport("dataframeOutport"), totalCollector.getInport('dataframeMultiInport'))
negateOperation.operationProperty.value = 'negate'
n += 1
xpos_tmp += 7
for n, key in enumerate(up_totals):
toFunction = self.add_processor("org.inviwo.HDF5ToFunction", key, xpos_tmp, ypos+6)
to_func_processors.append(toFunction)
self.network.addConnection(totalSelection.getOutport('outport'), toFunction.getInport('hdf5HandleFlatMultiInport'))
toFunction.yNamePrependParentsProperty.value = 1
addOperation = self.add_processor("org.inviwo.FunctionOperationNary", "Total Up Add {}".format(n), xpos_tmp, ypos+9)
self.network.addConnection(toFunction.getOutport("functionVectorOutport"), addOperation.getInport("functionFlatMultiInport"))
self.network.addConnection(addOperation.getOutport("dataframeOutport"), totalCollector.getInport('dataframeMultiInport'))
addOperation.operationProperty.value = 'add'
n += 1
xpos_tmp += 7
# Setup partials
partialSelection = self.add_processor("org.inviwo.hdf5.PathSelection", "Select Partial", xpos_tmp, ypos)
partialBandSelection = self.add_processor("org.inviwo.HDF5PathSelectionInt", "Select Partial Band", xpos_tmp, ypos+3)
self.network.addConnection(pathSelection.getOutport('outport'), partialSelection.getInport('inport'))
self.network.addConnection(partialSelection.getOutport('outport'), partialBandSelection.getInport('hdf5HandleInport'))
partialBandSelection.intProperty.value = 0
partialBandSelection.intProperty.minValue = 0
partialBandSelection.intProperty.maxValue = n_partials
partialCollector = self.add_processor("org.inviwo.DataFrameCollector", "PartialCollector", xpos_tmp, ypos+15)
for n, key in enumerate(down_partials):
toFunction = self.add_processor("org.inviwo.HDF5ToFunction", key, xpos_tmp, ypos+6)
to_func_processors.append(toFunction)
self.network.addConnection(partialBandSelection.getOutport('hdf5HandleVectorOutport'), toFunction.getInport('hdf5HandleFlatMultiInport'))
toFunction.yNamePrependParentsProperty.value = 2
addOperation = self.add_processor("org.inviwo.FunctionOperationNary", "Partial Down Add {}".format(n), xpos_tmp, ypos+9)
self.network.addConnection(toFunction.getOutport("functionVectorOutport"), addOperation.getInport("functionFlatMultiInport"))
addOperation.operationProperty.value = 'add'
negateOperation = self.add_processor("org.inviwo.FunctionOperationUnary", "Partial Down Negate {}".format(n), xpos_tmp, ypos+12)
self.network.addConnection(addOperation.getOutport("dataframeOutport"), negateOperation.getInport("dataframeInport"))
self.network.addConnection(negateOperation.getOutport("dataframeOutport"), partialCollector.getInport('dataframeMultiInport'))
negateOperation.operationProperty.value = 'negate'
n += 1
xpos_tmp += 7
for n, key in enumerate(up_partials):
toFunction = self.add_processor("org.inviwo.HDF5ToFunction", key, xpos_tmp, ypos+6)
to_func_processors.append(toFunction)
self.network.addConnection(partialBandSelection.getOutport('hdf5HandleVectorOutport'), toFunction.getInport('hdf5HandleFlatMultiInport'))
toFunction.yNamePrependParentsProperty.value = 2
addOperation = self.add_processor("org.inviwo.FunctionOperationNary", "Partial Up Add {}".format(n), xpos_tmp, ypos+9)
self.network.addConnection(toFunction.getOutport("functionVectorOutport"), addOperation.getInport("functionFlatMultiInport"))
self.network.addConnection(addOperation.getOutport("dataframeOutport"), partialCollector.getInport('dataframeMultiInport'))
addOperation.operationProperty.value = 'add'
n += 1
xpos_tmp += 7
collector = self.add_processor("org.inviwo.DataFrameCollector", "Collector", xpos+7, ypos+18)
self.network.addConnection(totalCollector.getOutport("dataframeOutport"), collector.getInport('dataframeMultiInport'))
self.network.addConnection(partialCollector.getOutport("dataframeOutport"), collector.getInport('dataframeMultiInport'))
linePlot = self.get_processor("LinePlot")
self.network.addConnection(collector.getOutport("dataframeOutport"), linePlot.getInport('dataFrameInport'))
self.network.unlock()
# Set hdf5 selector paths
pathSelection.selection.value = '/DOS'
totalSelection.selection.value = '/Total'
partialSelection.selection.value = '/Partial'
# Set function paths.
self.network.lock() # Lock network for performence increase.
names = down_totals + up_totals + down_partials + up_partials
for i, toFunction in enumerate(to_func_processors):
toFunction.implicitXProperty.value = False
toFunction.xPathSelectionProperty.value = '/Energy'
toFunction.yPathSelectionProperty.value = '/{}'.format(names[i])
toFunction.xPathFreeze.value = True
toFunction.yPathFreeze.value = True
self.set_y_selection_type(2)
self.network.unlock() | [
"h5py.File"
] | [((2707, 2732), 'h5py.File', 'h5py.File', (['hdf5_path', '"""r"""'], {}), "(hdf5_path, 'r')\n", (2716, 2732), False, 'import h5py\n')] |
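A sketch of the HDF5 layout the subnetwork above expects, written with h5py; the group and dataset names mirror the checks in valid_hdf5 and modify_network, and the values are made-up test data:
import h5py
import numpy as np

with h5py.File("dos_demo.hdf5", "w") as h5:
    energy = np.linspace(-10.0, 10.0, 200)
    total = h5.create_group("DOS/Total")
    total["Energy"] = energy
    total["s (up)"] = np.random.rand(200)
    total["s (dwn)"] = np.random.rand(200)
    partial = h5.create_group("DOS/Partial/0")  # one numbered subgroup per atom
    partial["Energy"] = energy
    partial["p (up)"] = np.random.rand(200)
    partial["p (dwn)"] = np.random.rand(200)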
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 15 11:52:31 2019
@author: tgadfort
"""
import sys
import re
from datetime import timedelta
from playTypes import playtype
# create logger
import logging
module_logger = logging.getLogger('log.{0}'.format(__name__))
############################################################################################################
## Drive Class
############################################################################################################
class driveclass:
def __init__(self, headline, detail, possession, postdrivehomescore, postdriveawayscore, plays=None, text={}):
self.logger = logging.getLogger('log.{0}.{1}'.format(__name__, self.__class__))
self.ind = 6*" "
self.headline = headline
self.detail = detail
self.possession = possession
self.postdrivehomescore = postdrivehomescore
self.postdriveawayscore = postdriveawayscore
self.plays = plays
try:
self.headlineText = text.get("Headline")[0]
except:
self.headlineText = str(None)
try:
self.detailText = text.get("Detail")[0]
except:
self.detailText = str(None)
def setPlays(self, plays):
self.plays = plays
def getHeadlineText(self):
return self.headlineText
def getDetailText(self):
return self.detailText
def getSummaryText(self):
plays = self.detail.plays
yards = self.detail.yards
headline = self.headline
retval = "{0: <5}{1: <5}{2: <25}{3: <25}{4: <25}".format(plays, yards, headline, self.headlineText, self.detailText)
return retval
############################################################################################################
## Drive Detail Class
############################################################################################################
class drivedetailclass:
def __init__(self, plays, yards, gametime):
self.logger = logging.getLogger('log.{0}.{1}'.format(__name__, self.__class__))
self.ind = 6*" "
self.plays = plays
self.yards = yards
self.gametime = gametime
############################################################################################################
## Drive Summary Class
############################################################################################################
class drivesummary:
def __init__(self, drive, fieldMap):
self.logger = logging.getLogger('log.{0}.{1}'.format(__name__, self.__class__))
self.ind = 4*" "
self.name = "drivesummary"
self.headline = None
self.score = None
self.details = None
self.fullDrive = None
driveNo = drive.get('Drive')
if driveNo is None:
raise ValueError("No Drive in drive dict")
headline = drive.get('Headline')
if headline is None:
raise ValueError("No Headline in drive dict")
self.headlineText = headline
detail = drive.get('Detail')
if detail is None:
raise ValueError("No Detail in drive dict")
self.detailText = detail
possession = drive.get('Posession')
if possession is None:
raise ValueError("No Posession in drive dict")
data = drive.get('Data')
if data is None:
raise ValueError("No Data in drive dict")
###
### For whatever reason home/away scores are reversed on the webpage...
###
homescore = drive.get('AwayScore')
if homescore is None:
raise ValueError("No AwayScore in drive dict")
awayscore = drive.get('HomeScore')
if awayscore is None:
raise ValueError("No HomeScore in drive dict")
self.possession = self.parsePossession(possession, fieldMap)
self.headline = self.parseHeadline(headline)
self.detail = self.parseDetail(detail)
self.homescore = self.parseScore(homescore)
self.awayscore = self.parseScore(awayscore)
self.driveplays = data
self.logger.debug("{0}Drive Summary: [{1} - {2}] {3}".format(self.ind, self.awayscore, self.homescore, headline))
self.fullDrive = driveclass(headline=self.headline, detail=self.detail, possession=self.possession,
postdrivehomescore=self.homescore, postdriveawayscore=self.awayscore,
text={"Headline": self.headlineText, "Detail": self.detailText})
def getHeadline(self):
return self.headlineText
def getDetail(self):
return self.detailText
def getPostDriveScore(self):
return [self.awayscore, self.homescore]
def getDrivePlays(self):
return self.driveplays
def getFullDrive(self):
return self.fullDrive
def parsePossession(self, possession, fieldMap, debug=False):
if not isinstance(possession, list):
self.logger.error("Possession is not a list: {0}".format(possession))
if len(possession) != 1:
self.logger.error("Not one element in possession list: {0}".format(possession))
teamID = possession[0]
teamAbbrev = None
try:
teamAbbrev = fieldMap[teamID]
except:
self.logger.error("Could not find {0} in field map: {1}".format(teamID, fieldMap))
self.logger.debug("{0}Parsed Possession: {1}".format(self.ind, teamAbbrev))
return teamAbbrev
def parseHeadline(self, headline, debug=False):
play = None
if isinstance(headline, list):
if len(headline) >= 1:
pt = playtype()
play = pt.getPlay(headline[0]).name
else:
self.logger.error("Not one headline entry: {0}".format(headline))
else:
self.logger.error("Headline is not a list: {0}".format(headline))
self.logger.debug("{0}Parsed Headline: {1}".format(self.ind, play))
return play
def parseScore(self, score, debug=False):
if not isinstance(score, list):
self.logger.error("Could not determine score type: {0}".format(score))
if len(score) != 1:
self.logger.error("Not one detail entry: {0}".format(score))
scoredata = score[0]
try:
scoredata = int(scoredata)
except:
self.logger.error("Could not find an integer score for {0}".format(scoredata))
self.logger.debug("{0}Parsed Score: {1}".format(self.ind, scoredata))
return scoredata
def parseDetail(self, detail, debug=False):
if debug:
fname = sys._getframe().f_code.co_name
print("FUNC {0}".format(fname))
if not isinstance(detail, list):
raise ValueError("Could not determine detail play type: {0}".format(detail))
if len(detail) != 1:
raise ValueError("Not one detail entry: {0}".format(detail))
detaildata = detail[0]
        yards = r"(yards|yard|Yds|yds|Yd|yd)"
        plays = r"(play|plays)"
        num = r"([+-]?\d+)"  # an optional sign followed by digits
totalplays = None
totalyards = None
totalclock = None
m = re.search(r"{0}\s{1},\s{2}\s{3},\s{4}:{5}".format(num, plays, num, yards, num, num), detaildata)
if m is not None:
groups = m.groups()
totalplays = int(groups[0])
totalyards = int(groups[2])
totalclock = timedelta(minutes=int(groups[4]), seconds=int(groups[5]))
if totalplays is None and totalyards is None and totalclock is None:
m = re.search(r"{0}\s{1},\s{2}\s{3}".format(num, plays, num, yards), detaildata)
if m is not None:
groups = m.groups()
totalplays = int(groups[0])
totalyards = int(groups[2])
totalclock = timedelta(minutes=0, seconds=0)
if totalplays is None and totalyards is None and totalclock is None:
raise ValueError("Could not parse drive detail: {0}".format(detaildata))
drivedetail = drivedetailclass(plays=totalplays, yards=totalyards, gametime=totalclock)
return drivedetail
| [
"datetime.timedelta",
"sys._getframe",
"playTypes.playtype"
] | [((6126, 6136), 'playTypes.playtype', 'playtype', ([], {}), '()\n', (6134, 6136), False, 'from playTypes import playtype\n'), ((8496, 8527), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(0)', 'seconds': '(0)'}), '(minutes=0, seconds=0)\n', (8505, 8527), False, 'from datetime import timedelta\n'), ((7200, 7215), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (7213, 7215), False, 'import sys\n')] |
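The detail strings parseDetail handles look like "10 plays, 75 yards, 5:24"; a self-contained check of the same regex-and-timedelta idea:
import re
from datetime import timedelta

detail = "10 plays, 75 yards, 5:24"
m = re.search(r"([+-]?\d+)\s(?:plays|play),\s([+-]?\d+)\s(?:yards|yard|yds|yd),\s(\d+):(\d+)",
              detail, re.IGNORECASE)
if m:
    plays, yards = int(m.group(1)), int(m.group(2))
    clock = timedelta(minutes=int(m.group(3)), seconds=int(m.group(4)))
    print(plays, yards, clock)  # 10 75 0:05:24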
#!/usr/bin/env python
import io
import os
import re
import sys
from pathlib import Path
from setuptools import find_packages, setup
this_directory = Path(__file__).parent
long_description = (this_directory / "README.md").read_text()
version = ""
with io.open("django_react_templatetags/__init__.py", "r", encoding="utf8") as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE
).group(1)
setup(
name="django_react_templatetags",
version=version,
description=(
"This django library allows you to add React components into your django templates."
), # NOQA
long_description=long_description,
long_description_content_type="text/markdown",
author="Fröjd",
author_email="<EMAIL>",
url="https://github.com/frojd/django-react-templatetags",
packages=find_packages(exclude=("tests*", "example_django_react_templatetags")),
include_package_data=True,
install_requires=[
"Django>=3.2",
],
extras_require={
"ssr": ["requests"],
"hypernova": ["hypernova"],
},
tests_require=[
"Django>=3.2",
"requests",
],
license="MIT",
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Natural Language :: English",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Django",
"Framework :: Django :: 3.2",
"Framework :: Django :: 4.0",
"Topic :: Utilities",
"Programming Language :: JavaScript",
],
)
| [
"setuptools.find_packages",
"io.open",
"pathlib.Path"
] | [((152, 166), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (156, 166), False, 'from pathlib import Path\n'), ((255, 325), 'io.open', 'io.open', (['"""django_react_templatetags/__init__.py"""', '"""r"""'], {'encoding': '"""utf8"""'}), "('django_react_templatetags/__init__.py', 'r', encoding='utf8')\n", (262, 325), False, 'import io\n'), ((854, 924), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('tests*', 'example_django_react_templatetags')"}), "(exclude=('tests*', 'example_django_react_templatetags'))\n", (867, 924), False, 'from setuptools import find_packages, setup\n')] |
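The single-sourced version pattern in the setup script can be exercised in isolation; here StringIO stands in for the package's __init__.py:
import re
from io import StringIO

fake_init = StringIO('__version__ = "7.0.1"\n')
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
                    fake_init.read(), re.MULTILINE).group(1)
print(version)  # 7.0.1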
from loguru import logger
from utils import match_pkg
class digraph():
node: list
edge: list
def __init__(self):
self.node = []
self.edge = []
def _ignore_(self, name: str):
ignore_list = ["runtime", "internal",
"reflect", "math", "sync", "syscall", "fmt", "os", "time", "unicode", "strconv", "io", "sort"]
return match_pkg(name, ignore_list)
def add_node(self, node_name: str):
if node_name is None or not isinstance(node_name, str):
return False
tmp_name = node_name.strip()
if tmp_name == "":
return False
if self._ignore_(node_name):
return False
if tmp_name not in self.node:
logger.debug("add {} to node".format(tmp_name))
self.node.append(tmp_name)
return True
def add_edge(self, edge_tuple: tuple):
caller, callee = edge_tuple[0], edge_tuple[1]
if self.add_node(caller) and self.add_node(callee):
self.edge.append((caller, callee))
def print_graph(self):
print("node sum: {}".format(len(self.node)))
print("nodes: {}".format(self.node))
print("edge: {}".format(self.edge))
def load_dg_from_file(filename: str):
    """Load the file at the given path into memory.
    Within a session the file stays in memory until the session ends
    or another file is loaded.
    File content format: {{pkg.Caller}}\t{{pkg.Callee}}
"""
cdg = digraph()
with open(filename, "r") as f:
while True:
line = f.readline()
if not line:
break
line = line.replace('"', "")
            # split on any whitespace (the file is tab separated) and drop the newline
            tmp = line.split()
            if len(tmp) < 2:
                continue
            tuple_edge = (tmp[0], tmp[1])
cdg.add_edge(tuple_edge)
return cdg
| [
"utils.match_pkg"
] | [((389, 417), 'utils.match_pkg', 'match_pkg', (['name', 'ignore_list'], {}), '(name, ignore_list)\n', (398, 417), False, 'from utils import match_pkg\n')] |
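A quick usage sketch for the digraph above, assuming utils.match_pkg matches on the leading package segment (so the fmt callee is filtered by the ignore list):
cdg = digraph()
cdg.add_edge(("main.main", "pkg.Handler"))
cdg.add_edge(("pkg.Handler", "fmt.Println"))  # callee ignored, edge dropped
cdg.add_edge(("pkg.Handler", "db.Query"))
cdg.print_graph()
# node sum: 3
# nodes: ['main.main', 'pkg.Handler', 'db.Query']
# edge: [('main.main', 'pkg.Handler'), ('pkg.Handler', 'db.Query')]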
from django.db import models
from apps.users.models import User
# Create your models here.
class Person(models.Model):
id = models.AutoField(primary_key = True)
user_id = models.ForeignKey(User, on_delete=models.RESTRICT)
name = models.CharField(max_length = 200)
last_name = models.CharField(max_length = 200)
class Customer(models.Model):
id = models.AutoField(primary_key = True)
person_id = models.ForeignKey(Person, on_delete=models.RESTRICT)
is_active = models.BooleanField(default=False)
is_deleted = models.BooleanField(default=False)
class Purchase(models.Model):
id = models.AutoField(primary_key = True)
customer_id = models.ForeignKey(Customer, on_delete=models.RESTRICT)
iva = models.DecimalField(max_digits=5, decimal_places=2)
subtotal = models.DecimalField(max_digits=5, decimal_places=2)
total = models.DecimalField(max_digits=5, decimal_places=2)
class Category(models.Model):
id = models.AutoField(primary_key = True)
name = models.CharField(max_length = 200)
description = models.TextField()
class Product(models.Model):
id = models.AutoField(primary_key = True)
category_id = models.ForeignKey(Category, on_delete=models.RESTRICT)
name = models.CharField(max_length = 200)
description = models.TextField()
quantity = models.IntegerField()
price = models.DecimalField(max_digits=5, decimal_places=2)
class PurchaseProducts(models.Model):
id = models.AutoField(primary_key = True)
product_id = models.ForeignKey(Product, on_delete=models.RESTRICT)
purchase_id = models.ForeignKey(Purchase, on_delete=models.RESTRICT)
quantity = models.IntegerField() | [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((132, 166), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (148, 166), False, 'from django.db import models\n'), ((183, 233), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.RESTRICT'}), '(User, on_delete=models.RESTRICT)\n', (200, 233), False, 'from django.db import models\n'), ((244, 276), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (260, 276), False, 'from django.db import models\n'), ((294, 326), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (310, 326), False, 'from django.db import models\n'), ((370, 404), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (386, 404), False, 'from django.db import models\n'), ((422, 474), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Person'], {'on_delete': 'models.RESTRICT'}), '(Person, on_delete=models.RESTRICT)\n', (439, 474), False, 'from django.db import models\n'), ((490, 524), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (509, 524), False, 'from django.db import models\n'), ((541, 575), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (560, 575), False, 'from django.db import models\n'), ((617, 651), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (633, 651), False, 'from django.db import models\n'), ((671, 725), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Customer'], {'on_delete': 'models.RESTRICT'}), '(Customer, on_delete=models.RESTRICT)\n', (688, 725), False, 'from django.db import models\n'), ((735, 786), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (754, 786), False, 'from django.db import models\n'), ((801, 852), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (820, 852), False, 'from django.db import models\n'), ((864, 915), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (883, 915), False, 'from django.db import models\n'), ((957, 991), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (973, 991), False, 'from django.db import models\n'), ((1004, 1036), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1020, 1036), False, 'from django.db import models\n'), ((1056, 1074), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1072, 1074), False, 'from django.db import models\n'), ((1115, 1149), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1131, 1149), False, 'from django.db import models\n'), ((1169, 1223), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.RESTRICT'}), '(Category, on_delete=models.RESTRICT)\n', (1186, 1223), False, 'from django.db import models\n'), ((1234, 1266), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1250, 1266), False, 'from django.db import models\n'), ((1286, 1304), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1302, 1304), False, 'from django.db import models\n'), ((1319, 1340), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1338, 1340), False, 'from django.db import models\n'), ((1352, 1403), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(5)', 'decimal_places': '(2)'}), '(max_digits=5, decimal_places=2)\n', (1371, 1403), False, 'from django.db import models\n'), ((1453, 1487), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1469, 1487), False, 'from django.db import models\n'), ((1506, 1559), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Product'], {'on_delete': 'models.RESTRICT'}), '(Product, on_delete=models.RESTRICT)\n', (1523, 1559), False, 'from django.db import models\n'), ((1577, 1631), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Purchase'], {'on_delete': 'models.RESTRICT'}), '(Purchase, on_delete=models.RESTRICT)\n', (1594, 1631), False, 'from django.db import models\n'), ((1646, 1667), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1665, 1667), False, 'from django.db import models\n')]
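A hypothetical shell session against the models above; because the FK attributes are literally named user_id, person_id, and so on, the create kwargs take model instances under those names:
from decimal import Decimal

person = Person.objects.create(user_id=a_user, name="Ada", last_name="Lovelace")  # a_user: existing User
customer = Customer.objects.create(person_id=person, is_active=True)
purchase = Purchase.objects.create(customer_id=customer, iva=Decimal("16.00"),
                                    subtotal=Decimal("100.00"), total=Decimal("116.00"))
PurchaseProducts.objects.create(product_id=a_product, purchase_id=purchase, quantity=2)  # a_product: existing Product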
import os
import magic
from django.core.exceptions import ValidationError
def validate_user_photo_extension(value):
valid_mime_types = ['image/svg+xml', 'image/jpeg', 'image/png']
    file_mime_type = magic.from_buffer(value.read(1024), mime=True)
    value.seek(0)  # rewind so the file can still be read in full afterwards
if file_mime_type not in valid_mime_types:
raise ValidationError('Unsupported file type.')
    valid_file_extensions = ['.svg', '.jpg', '.jpeg', '.png']
ext = os.path.splitext(value.name)[1]
if ext.lower() not in valid_file_extensions:
raise ValidationError('Unacceptable file extension.')
| [
"os.path.splitext",
"django.core.exceptions.ValidationError"
] | [((314, 355), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Unsupported file type."""'], {}), "('Unsupported file type.')\n", (329, 355), False, 'from django.core.exceptions import ValidationError\n'), ((419, 447), 'os.path.splitext', 'os.path.splitext', (['value.name'], {}), '(value.name)\n', (435, 447), False, 'import os\n'), ((514, 561), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Unacceptable file extension."""'], {}), "('Unacceptable file extension.')\n", (529, 561), False, 'from django.core.exceptions import ValidationError\n')] |
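Wiring the validator above into a field — a sketch with a made-up model; note it runs when the model or form is validated (full_clean), not automatically on every save:
from django.db import models

class Profile(models.Model):
    photo = models.FileField(upload_to="photos/",
                             validators=[validate_user_photo_extension])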
import copy
max_students = int(input())
departments = {
"Biotech": [],
"Chemistry": [],
"Engineering": [],
"Mathematics": [],
"Physics": [],
}
applicants = []
with open("applicants.txt", "r") as apps:
for app in apps:
applicants.append(app.split())
applicants.sort(key=lambda x: (-float(x[2]), x[0], x[1]))
app_copy = copy.deepcopy(applicants)
choice = 7
bio = departments['Biotech']
chem = departments['Chemistry']
eng = departments['Engineering']
math = departments['Mathematics']
phys = departments['Physics']
while (len(bio) < max_students or len(chem) < max_students or len(eng) < max_students
       or len(math) < max_students or len(phys) < max_students) and choice < 10 and applicants:
if len(bio) < max_students:
bio1 = [x for x in sorted(app_copy, key=lambda x: (-max(((int(x[3]) + int(x[2])) / 2), int(x[6])), x[0], x[1]))
if x[choice] == 'Biotech'][:max_students - len(bio)]
bio.extend(bio1)
for student in bio:
if student in applicants:
applicants.remove(student)
app_copy = copy.deepcopy(applicants)
if len(chem) < max_students:
chem1 = [x for x in sorted(app_copy, key=lambda x: (-max(int(x[3]), int(x[6])), x[0], x[1]))
if x[choice] == 'Chemistry'][:max_students - len(chem)]
chem.extend(chem1)
for student in chem:
if student in applicants:
applicants.remove(student)
app_copy = copy.deepcopy(applicants)
if len(eng) < max_students:
eng1 = [x for x in sorted(app_copy, key=lambda x: (-max(((int(x[5]) + int(x[4])) / 2), int(x[6])), x[0], x[1]))
if x[choice] == 'Engineering'][:max_students - len(eng)]
eng.extend(eng1)
for student in eng:
if student in applicants:
applicants.remove(student)
app_copy = copy.deepcopy(applicants)
if len(math) < max_students:
math1 = [x for x in sorted(app_copy, key=lambda x: (-max(int(x[4]), int(x[6])), x[0], x[1]))
if x[choice] == 'Mathematics'][:max_students - len(math)]
math.extend(math1)
for student in math:
if student in applicants:
applicants.remove(student)
app_copy = copy.deepcopy(applicants)
if len(phys) < max_students:
phys1 = [x for x in sorted(app_copy, key=lambda x: (-max(((int(x[2]) + int(x[4])) / 2), int(x[6])), x[0], x[1]))
if x[choice] == 'Physics'][:max_students - len(phys)]
phys.extend(phys1)
for student in phys:
if student in applicants:
applicants.remove(student)
app_copy = copy.deepcopy(applicants)
choice += 1
for department in departments:
if department == 'Biotech':
departments[department].sort(key=lambda x: (-max(((int(x[3]) + int(x[2])) / 2), int(x[6])), x[0], x[1]))
score1 = 3
score2 = 2
elif department == 'Chemistry':
departments[department].sort(key=lambda x: (-max(int(x[3]), int(x[6])), x[0], x[1]))
score1 = 3
score2 = False
elif department == 'Engineering':
departments[department].sort(key=lambda x: (-max(((int(x[5]) + int(x[4])) / 2), int(x[6])), x[0], x[1]))
score1 = 5
score2 = 4
elif department == 'Mathematics':
departments[department].sort(key=lambda x: (-max(int(x[4]), int(x[6])), x[0], x[1]))
score1 = 4
score2 = False
else:
departments[department].sort(key=lambda x: (-max(((int(x[2]) + int(x[4])) / 2), int(x[6])), x[0], x[1]))
score1 = 2
score2 = 4
with open(f"{department.lower()}.txt", 'w') as write_file:
        for student in departments[department]:
            # score2 is False for single-exam departments; student[False] is then
            # student[0] (the first name), so int() raises ValueError below.
            try:
write_file.write(' '.join([
student[0], student[1],
str(max(((int(student[score1]) + int(student[score2])) / 2), float(student[6]))) + '\n']))
except ValueError:
write_file.write(
' '.join([student[0], student[1], str(max(float(student[score1]), float(student[6]))) + '\n']))
| [
"copy.deepcopy"
] | [((356, 381), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (369, 381), False, 'import copy\n'), ((1225, 1250), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (1238, 1250), False, 'import copy\n'), ((1618, 1643), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (1631, 1643), False, 'import copy\n'), ((2026, 2051), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (2039, 2051), False, 'import copy\n'), ((2421, 2446), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (2434, 2446), False, 'import copy\n'), ((2832, 2857), 'copy.deepcopy', 'copy.deepcopy', (['applicants'], {}), '(applicants)\n', (2845, 2857), False, 'import copy\n')] |
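All the rankings above lean on tuple sort keys: a negated score sorts descending while the name fields break ties ascending. In isolation:
apps = [["Kate", "Li", "90"], ["Ann", "Wu", "95"], ["Bob", "Xu", "95"]]
apps.sort(key=lambda x: (-float(x[2]), x[0], x[1]))
print(apps)  # [['Ann', 'Wu', '95'], ['Bob', 'Xu', '95'], ['Kate', 'Li', '90']]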
#!/usr/bin/env python
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import thread_utils
SIZE = 10
COUNT = 65535
def nothing():
pass
if __name__ == '__main__':
pool = thread_utils.Pool(worker_size=SIZE, daemon=True)
    futures = [pool.send(nothing) for _ in range(COUNT)]
pool.kill(block=True)
for f in futures:
f.receive()
| [
"os.path.abspath",
"thread_utils.Pool"
] | [((233, 281), 'thread_utils.Pool', 'thread_utils.Pool', ([], {'worker_size': 'SIZE', 'daemon': '(True)'}), '(worker_size=SIZE, daemon=True)\n', (250, 281), False, 'import thread_utils\n'), ((92, 117), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (107, 117), False, 'import os\n')] |
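thread_utils is a third-party pool; for comparison, the standard-library equivalent of the same submit-and-drain stress test:
from concurrent.futures import ThreadPoolExecutor

def nothing():
    pass

with ThreadPoolExecutor(max_workers=10) as pool:
    futures = [pool.submit(nothing) for _ in range(65535)]
for f in futures:
    f.result()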
import math
import datetime
import pytz
import pandas as pd
import csv
import urllib.request
import io
FORECAST_DATE = datetime.datetime.now(pytz.timezone('US/Pacific'))
FORECAST_DATE = FORECAST_DATE.replace(tzinfo=None)
for i in range(0, 8):
if FORECAST_DATE.weekday() == 6:
break
FORECAST_DATE -= datetime.timedelta(1)
# FIRST_WEEK is the first Saturday after forecast date.
FIRST_WEEK = FORECAST_DATE + datetime.timedelta(6)
# for i in range(0, 8):
# if FIRST_WEEK.weekday() == 5:
# break
# FIRST_WEEK += datetime.timedelta(1)
INPUT_FILENAME = "county_forecasts_quarantine_0.csv"
OUTPUT_FILENAME = FORECAST_DATE.strftime("%Y-%m-%d") + "-USC-SI_kJalpha.csv"
COLUMNS = ["forecast_date", "target", "target_end_date", "location", "type", "quantile", "value"]
ID_REGION_MAPPING = {}
def load_id_region_mapping():
"""
Return a mapping of <region id, region name>.
"""
MAPPING_CSV = "./locations.csv"
with open(MAPPING_CSV) as f:
reader = csv.reader(f)
id_region_mapping = {}
# Skip the header
next(reader)
for row in reader:
region_id = row[1]
region_name = row[2]
id_region_mapping[region_id] = region_name
return id_region_mapping
def load_truth_cumulative_cases():
dataset = {}
with open("county_data.csv") as f:
reader = csv.reader(f)
header = next(reader, None)
for row in reader:
region_id = row[1].strip().zfill(5)
if region_id not in ID_REGION_MAPPING:
continue
date = header[-1]
val = int(row[-1])
if date not in dataset:
dataset[date] = {}
dataset[date][region_id] = val
return dataset
def load_csv(input_filename):
"""
    Read our forecast reports and return a dictionary structured as <date_str, <region_id, value>>
e.g.
{
"2020-06-22": {
'10': 2000.0,
'11': 3000.0,
...
},
"2020-06-23": {
'10': 800.0,
'11': 900.0,
...
},
...
}
"""
dataset = {}
with open(input_filename) as f:
reader = csv.reader(f)
header = next(reader, None)
for i in range(2, len(header)):
date_str = header[i]
# Initialize the dataset entry on each date.
dataset[date_str] = {}
for row in reader:
region_id = row[1].strip().zfill(5)
# Skip the region if it is not listed in reichlab's region list.
if region_id not in ID_REGION_MAPPING:
continue
for i in range(2, len(header)):
date_str = header[i]
val = float(row[i])
if math.isnan(val) or val < 0:
val = 0
dataset[date_str][region_id] = val
return dataset
def generate_new_row(forecast_date, target, target_end_date,
location, type, quantile, value):
"""
Return a new row to be added to the pandas dataframe.
"""
new_row = {}
new_row["forecast_date"] = forecast_date
new_row["target"] = target
new_row["target_end_date"] = target_end_date
new_row["location"] = location
new_row["type"] = type
new_row["quantile"] = quantile
new_row["value"] = value
return new_row
def add_to_dataframe(dataframe, forecast, observed):
"""
Given a dataframe, forecast, and observed data,
add county level weekly incident cases predictions to the dataframe.
"""
# Write incident forecasts.
cum_week = 0
forecast_date_str = FORECAST_DATE.strftime("%Y-%m-%d")
for target_end_date_str in sorted(forecast.keys()):
target_end_date = datetime.datetime.strptime(target_end_date_str, "%Y-%m-%d")
# Terminate the loop after 8 weeks of forecasts.
if cum_week >= 8:
break
# Skip forecasts before the forecast date.
if target_end_date <= FORECAST_DATE:
continue
if (target_end_date_str == FIRST_WEEK.strftime("%Y-%m-%d")) or \
(target_end_date > FIRST_WEEK and target_end_date.weekday() == 5):
cum_week += 1
target = str(cum_week) + " wk ahead inc case"
last_week_date = target_end_date - datetime.timedelta(7)
last_week_date_str = last_week_date.strftime("%Y-%m-%d")
if last_week_date_str in observed:
for region_id in forecast[target_end_date_str].keys():
if region_id in observed[last_week_date_str]:
dataframe = dataframe.append(
generate_new_row(
forecast_date=forecast_date_str,
target=target,
target_end_date=target_end_date_str,
location=region_id,
type="point",
quantile="NA",
value=max(forecast[target_end_date_str][region_id]-observed[last_week_date_str][region_id], 0)
), ignore_index=True)
elif last_week_date_str in forecast:
for region_id in forecast[target_end_date_str].keys():
dataframe = dataframe.append(
generate_new_row(
forecast_date=forecast_date_str,
target=target,
target_end_date=target_end_date_str,
location=region_id,
type="point",
quantile="NA",
value=max(forecast[target_end_date_str][region_id]-forecast[last_week_date_str][region_id], 0)
), ignore_index=True)
return dataframe
# Main function
if __name__ == "__main__":
ID_REGION_MAPPING = load_id_region_mapping()
print("loading forecast...")
forecast = load_csv(INPUT_FILENAME)
observed = load_truth_cumulative_cases()
dataframe = pd.read_csv(OUTPUT_FILENAME, na_filter=False)
dataframe = add_to_dataframe(dataframe, forecast, observed)
print("writing files...")
dataframe.to_csv(OUTPUT_FILENAME, index=False)
print("done")
| [
"pytz.timezone",
"pandas.read_csv",
"datetime.datetime.strptime",
"datetime.timedelta",
"csv.reader",
"math.isnan"
] | [((142, 169), 'pytz.timezone', 'pytz.timezone', (['"""US/Pacific"""'], {}), "('US/Pacific')\n", (155, 169), False, 'import pytz\n'), ((316, 337), 'datetime.timedelta', 'datetime.timedelta', (['(1)'], {}), '(1)\n', (334, 337), False, 'import datetime\n'), ((423, 444), 'datetime.timedelta', 'datetime.timedelta', (['(6)'], {}), '(6)\n', (441, 444), False, 'import datetime\n'), ((6184, 6229), 'pandas.read_csv', 'pd.read_csv', (['OUTPUT_FILENAME'], {'na_filter': '(False)'}), '(OUTPUT_FILENAME, na_filter=False)\n', (6195, 6229), True, 'import pandas as pd\n'), ((998, 1011), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1008, 1011), False, 'import csv\n'), ((1382, 1395), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1392, 1395), False, 'import csv\n'), ((2236, 2249), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (2246, 2249), False, 'import csv\n'), ((3812, 3871), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['target_end_date_str', '"""%Y-%m-%d"""'], {}), "(target_end_date_str, '%Y-%m-%d')\n", (3838, 3871), False, 'import datetime\n'), ((4371, 4392), 'datetime.timedelta', 'datetime.timedelta', (['(7)'], {}), '(7)\n', (4389, 4392), False, 'import datetime\n'), ((2819, 2834), 'math.isnan', 'math.isnan', (['val'], {}), '(val)\n', (2829, 2834), False, 'import math\n')] |
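# --- Editor's sketch (not part of the original script above) ----------------
# Assuming the definitions above are in scope, this is the dict produced by
# generate_new_row() for one forecast row; the FIPS code "06037" is a
# hypothetical example value.
example_row = generate_new_row(
    forecast_date="2020-06-21", target="1 wk ahead inc case",
    target_end_date="2020-06-27", location="06037",
    type="point", quantile="NA", value=123.0)
# -> {'forecast_date': '2020-06-21', 'target': '1 wk ahead inc case',
#     'target_end_date': '2020-06-27', 'location': '06037',
#     'type': 'point', 'quantile': 'NA', 'value': 123.0}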
import gym
from gym import spaces
from gym import utils
import numpy as np
import logging
import numpy.random as rn
import math
from itertools import product
from scipy.sparse import load_npz
from scipy.sparse import hstack
from scipy.sparse import vstack
from scipy.sparse.coo import coo_matrix
import os
logger = logging.getLogger(__name__)
class SwedenWorldEnv(gym.Env, utils.EzPickle):
metadata = {'render.modes': ['human']}
def __init__(self):
self.basedir = "/home/user/basedir/" #fill in your path here.
self.discount = 0.99
self.sweden_dir = os.path.join(self.basedir,"mdp/metaborlange/newattempt2")
        self.destination = 7621  # Destination is in MATLAB indices, so remember to subtract indices at the right place
self.horizon = 100
self.incidence_smat_no_dummy = load_npz(os.path.join(self.sweden_dir, "incidence_no_dummy.npz"))
self.incidence_smat_dummy = load_npz(os.path.join(self.sweden_dir, "incidence_dummy.npz"))
self.incidence_smat = load_npz(os.path.join(self.sweden_dir, "incidence.npz"))
self.travel_time_smat = load_npz(os.path.join(self.sweden_dir, "travel_time.npz"))
self.turnangle_smat = load_npz(os.path.join(self.sweden_dir, "turn_angle.npz"))
self.uturn_smat = load_npz(os.path.join(self.sweden_dir, "u_turn.npz"))
self.lefturn_smat = load_npz(os.path.join(self.sweden_dir, "left_turn.npz"))
self.observation_smat = load_npz(os.path.join(self.sweden_dir, "observation.npz"))
self.gt_theta = np.array([-2., -1., -1., -20.])
self.N_ACTIONS = 6 # 0-4 correspond to turn angles from -3.14 to 3.14. Action 5 corresponds to reaching destination
self.N_ROADLINKS = self.travel_time_smat.shape[0]
# self.goal_reward = 10.
self.features = np.load(os.path.join(self.sweden_dir, "new_feat_data.npy"))
self.transition_probabilities = self.getTP()
self.state_debug = np.load(os.path.join(self.sweden_dir, "new_state_debug.npy"), allow_pickle=True).item()
self.rewards = np.load(os.path.join(self.sweden_dir, "virtual_rewards.npy"))
self.nodummy_states = np.array(np.load(os.path.join(self.sweden_dir, "nodummy_states.npy")))
self.gt_theta = np.array([-2., -1., -1., -20.])
self.N_STATES, self.N_FEATURES = np.shape(self.features)
self.viewer = None
self.server_process = None
self.server_port = None
self.state = None
self.obs = None
self.observation_space = spaces.Box(low=min(0., np.min(self.features)),
high=max(1., np.max(self.features)),
shape=(self.N_FEATURES,))
# Action space omits the Tackle/Catch actions, which are useful on defense
# self.action_space = spaces.Discrete(len(self.actions))
self.action_space = spaces.Box(low=0, high=self.N_ACTIONS,
shape=(1,))
self.reset()
print("init over")
def step(self, action):
#action = (np.round(action)).astype(np.int)[0]
#action2 = min(max(action, 0), self.N_ACTIONS)
action2 = np.floor(action).astype(np.int)[0]
action3 = min(max(action2, 0), self.N_ACTIONS-1)
if not(action3<self.N_ACTIONS):
print("This should not happen")
print(action,action2,action3)
obs, obsind, reward, done = self._take_action(action3)
return obs, reward, done, {}
def _take_action(self, action):
currentTp = self.transition_probabilities[action]
rowind = np.where(currentTp.row == self.state)[0]
assert (len(rowind) > 0)
next_state = currentTp.col[rowind].item()
self.state = next_state
self.obs = self.features[next_state]
reward = self.rewards[next_state]
if next_state == 20198:
done=True
else:
done=False
return self.obs, next_state, reward, done
def reset(self):
newState = np.random.choice(self.nodummy_states, 1).item()
newObs = self.features[newState]
self.state = newState
self.obs = newObs
return self.obs
def _render(self, mode='human', close=False):
""" Viewer only supports human mode currently. """
raise NotImplementedError
def getTP(self):
transitions = np.load(os.path.join(self.sweden_dir, "new_transitions.npy"), allow_pickle=True).item()
nstates, nfeatures = np.shape(self.features)
transition_dynamics = {}
for i in range(self.N_ACTIONS):
tpsparse = coo_matrix((transitions[i][2, :], (transitions[i][0, :], transitions[i][1, :])),
shape=(nstates, nstates))
tpdense = tpsparse.toarray()
assert (np.max(np.sum(tpdense, axis=1)) == 1. and np.min(np.sum(tpdense, axis=1)) == 1.)
transition_dynamics[i] = coo_matrix((transitions[i][2, :], (transitions[i][0, :], transitions[i][1, :])),
shape=(nstates, nstates))
return transition_dynamics
| [
"logging.getLogger",
"numpy.where",
"numpy.random.choice",
"os.path.join",
"numpy.floor",
"gym.spaces.Box",
"numpy.max",
"numpy.array",
"scipy.sparse.coo.coo_matrix",
"numpy.sum",
"numpy.min",
"numpy.shape"
] | [((316, 343), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (333, 343), False, 'import logging\n'), ((586, 644), 'os.path.join', 'os.path.join', (['self.basedir', '"""mdp/metaborlange/newattempt2"""'], {}), "(self.basedir, 'mdp/metaborlange/newattempt2')\n", (598, 644), False, 'import os\n'), ((1548, 1583), 'numpy.array', 'np.array', (['[-2.0, -1.0, -1.0, -20.0]'], {}), '([-2.0, -1.0, -1.0, -20.0])\n', (1556, 1583), True, 'import numpy as np\n'), ((2260, 2295), 'numpy.array', 'np.array', (['[-2.0, -1.0, -1.0, -20.0]'], {}), '([-2.0, -1.0, -1.0, -20.0])\n', (2268, 2295), True, 'import numpy as np\n'), ((2333, 2356), 'numpy.shape', 'np.shape', (['self.features'], {}), '(self.features)\n', (2341, 2356), True, 'import numpy as np\n'), ((2910, 2960), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': 'self.N_ACTIONS', 'shape': '(1,)'}), '(low=0, high=self.N_ACTIONS, shape=(1,))\n', (2920, 2960), False, 'from gym import spaces\n'), ((4534, 4557), 'numpy.shape', 'np.shape', (['self.features'], {}), '(self.features)\n', (4542, 4557), True, 'import numpy as np\n'), ((845, 900), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""incidence_no_dummy.npz"""'], {}), "(self.sweden_dir, 'incidence_no_dummy.npz')\n", (857, 900), False, 'import os\n'), ((947, 999), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""incidence_dummy.npz"""'], {}), "(self.sweden_dir, 'incidence_dummy.npz')\n", (959, 999), False, 'import os\n'), ((1040, 1086), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""incidence.npz"""'], {}), "(self.sweden_dir, 'incidence.npz')\n", (1052, 1086), False, 'import os\n'), ((1129, 1177), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""travel_time.npz"""'], {}), "(self.sweden_dir, 'travel_time.npz')\n", (1141, 1177), False, 'import os\n'), ((1218, 1265), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""turn_angle.npz"""'], {}), "(self.sweden_dir, 'turn_angle.npz')\n", (1230, 1265), False, 'import os\n'), ((1302, 1345), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""u_turn.npz"""'], {}), "(self.sweden_dir, 'u_turn.npz')\n", (1314, 1345), False, 'import os\n'), ((1384, 1430), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""left_turn.npz"""'], {}), "(self.sweden_dir, 'left_turn.npz')\n", (1396, 1430), False, 'import os\n'), ((1473, 1521), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""observation.npz"""'], {}), "(self.sweden_dir, 'observation.npz')\n", (1485, 1521), False, 'import os\n'), ((1830, 1880), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""new_feat_data.npy"""'], {}), "(self.sweden_dir, 'new_feat_data.npy')\n", (1842, 1880), False, 'import os\n'), ((2081, 2133), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""virtual_rewards.npy"""'], {}), "(self.sweden_dir, 'virtual_rewards.npy')\n", (2093, 2133), False, 'import os\n'), ((3635, 3672), 'numpy.where', 'np.where', (['(currentTp.row == self.state)'], {}), '(currentTp.row == self.state)\n', (3643, 3672), True, 'import numpy as np\n'), ((4654, 4764), 'scipy.sparse.coo.coo_matrix', 'coo_matrix', (['(transitions[i][2, :], (transitions[i][0, :], transitions[i][1, :]))'], {'shape': '(nstates, nstates)'}), '((transitions[i][2, :], (transitions[i][0, :], transitions[i][1,\n :])), shape=(nstates, nstates))\n', (4664, 4764), False, 'from scipy.sparse.coo import coo_matrix\n'), ((4974, 5084), 'scipy.sparse.coo.coo_matrix', 'coo_matrix', (['(transitions[i][2, :], (transitions[i][0, :], transitions[i][1, :]))'], {'shape': '(nstates, nstates)'}), '((transitions[i][2, :], (transitions[i][0, :], transitions[i][1,\n :])), shape=(nstates, nstates))\n', (4984, 5084), False, 'from scipy.sparse.coo import coo_matrix\n'), ((2182, 2233), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""nodummy_states.npy"""'], {}), "(self.sweden_dir, 'nodummy_states.npy')\n", (2194, 2233), False, 'import os\n'), ((4060, 4100), 'numpy.random.choice', 'np.random.choice', (['self.nodummy_states', '(1)'], {}), '(self.nodummy_states, 1)\n', (4076, 4100), True, 'import numpy as np\n'), ((1970, 2022), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""new_state_debug.npy"""'], {}), "(self.sweden_dir, 'new_state_debug.npy')\n", (1982, 2022), False, 'import os\n'), ((2559, 2580), 'numpy.min', 'np.min', (['self.features'], {}), '(self.features)\n', (2565, 2580), True, 'import numpy as np\n'), ((2640, 2661), 'numpy.max', 'np.max', (['self.features'], {}), '(self.features)\n', (2646, 2661), True, 'import numpy as np\n'), ((3205, 3221), 'numpy.floor', 'np.floor', (['action'], {}), '(action)\n', (3213, 3221), True, 'import numpy as np\n'), ((4425, 4477), 'os.path.join', 'os.path.join', (['self.sweden_dir', '"""new_transitions.npy"""'], {}), "(self.sweden_dir, 'new_transitions.npy')\n", (4437, 4477), False, 'import os\n'), ((4863, 4886), 'numpy.sum', 'np.sum', (['tpdense'], {'axis': '(1)'}), '(tpdense, axis=1)\n', (4869, 4886), True, 'import numpy as np\n'), ((4905, 4928), 'numpy.sum', 'np.sum', (['tpdense'], {'axis': '(1)'}), '(tpdense, axis=1)\n', (4911, 4928), True, 'import numpy as np\n')]
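# --- Editor's sketch (not part of the dataset record above) -----------------
# Minimal driving loop for SwedenWorldEnv, assuming the data files under
# `basedir` exist on the local machine. Actions are floats; step() floors them
# into one of the N_ACTIONS discrete choices.
env = SwedenWorldEnv()
obs = env.reset()
for _ in range(10):
    action = env.action_space.sample()
    obs, reward, done, info = env.step(action)
    if done:
        obs = env.reset()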
from __future__ import print_function
from urllib import urlencode
import treq
from twisted.web.http_headers import Headers
from oauthlib import oauth1
import settings
import config
__all__ = ["moefou", "moefm"]
def request(method, url, **kwargs):
oauth_tokens = settings.get("oauth_tokens")
if not oauth_tokens:
raise ValueError("OAuth token is unavailable")
if kwargs.get("params"):
encoded_params = urlencode(kwargs["params"])
url = "".join([url, "&" if "?" in url else "?", encoded_params])
del kwargs["params"]
client = oauth1.Client(
config.CONSUMER_KEY,
client_secret=config.CONSUMER_SECRET,
resource_owner_key=oauth_tokens["oauth_token"],
resource_owner_secret=oauth_tokens["oauth_token_secret"],
)
new_url, headers, data = client.sign(
url, method, body=kwargs.get("data"), headers=kwargs.get("headers"),
)
# Twisted doesn't support unicode...
new_url = new_url.encode("utf-8")
h = Headers({})
for k, v in headers.iteritems():
k = k.encode("utf-8")
if isinstance(v, basestring):
v = v.encode("utf-8")
h.addRawHeader(k, v)
else:
v = [x.encode("utf-8") for x in v]
h.setRawHeaders(k, v)
kwargs["headers"] = h
kwargs["data"] = data
defer = treq.request(method, new_url, **kwargs)
return defer.addCallback(treq.json_content)
class Api(object):
def __init__(self, root):
super(Api, self).__init__()
self.root = root
def get(self, path, params=None, **kwargs):
return request("GET", self.root + path, params=params, **kwargs)
def post(self, path, body=None, **kwargs):
return request("GET", self.root + path, body=body, **kwargs)
moefou = Api(config.MOEFOU_API_ROOT)
moefm = Api(config.MOEFM_API_ROOT)
if __name__ == '__main__':
from twisted.internet import reactor
from pprint import pprint
import sys
def done(obj):
pprint(obj)
reactor.stop()
def error(fail):
fail.printDetailedTraceback()
reactor.stop()
request("GET", sys.argv[1]).addCallbacks(done, error)
reactor.run()
| [
"twisted.web.http_headers.Headers",
"settings.get",
"twisted.internet.reactor.stop",
"oauthlib.oauth1.Client",
"treq.request",
"urllib.urlencode",
"twisted.internet.reactor.run",
"pprint.pprint"
] | [((273, 301), 'settings.get', 'settings.get', (['"""oauth_tokens"""'], {}), "('oauth_tokens')\n", (285, 301), False, 'import settings\n'), ((581, 768), 'oauthlib.oauth1.Client', 'oauth1.Client', (['config.CONSUMER_KEY'], {'client_secret': 'config.CONSUMER_SECRET', 'resource_owner_key': "oauth_tokens['oauth_token']", 'resource_owner_secret': "oauth_tokens['oauth_token_secret']"}), "(config.CONSUMER_KEY, client_secret=config.CONSUMER_SECRET,\n resource_owner_key=oauth_tokens['oauth_token'], resource_owner_secret=\n oauth_tokens['oauth_token_secret'])\n", (594, 768), False, 'from oauthlib import oauth1\n'), ((1012, 1023), 'twisted.web.http_headers.Headers', 'Headers', (['{}'], {}), '({})\n', (1019, 1023), False, 'from twisted.web.http_headers import Headers\n'), ((1357, 1396), 'treq.request', 'treq.request', (['method', 'new_url'], {}), '(method, new_url, **kwargs)\n', (1369, 1396), False, 'import treq\n'), ((2193, 2206), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (2204, 2206), False, 'from twisted.internet import reactor\n'), ((437, 464), 'urllib.urlencode', 'urlencode', (["kwargs['params']"], {}), "(kwargs['params'])\n", (446, 464), False, 'from urllib import urlencode\n'), ((2012, 2023), 'pprint.pprint', 'pprint', (['obj'], {}), '(obj)\n', (2018, 2023), False, 'from pprint import pprint\n'), ((2032, 2046), 'twisted.internet.reactor.stop', 'reactor.stop', ([], {}), '()\n', (2044, 2046), False, 'from twisted.internet import reactor\n'), ((2115, 2129), 'twisted.internet.reactor.stop', 'reactor.stop', ([], {}), '()\n', (2127, 2129), False, 'from twisted.internet import reactor\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# author: <NAME>, revised from https://github.com/picotech/picosdk-picovna-python-examples
# Feb 22, 2022
import win32com.client
import numpy as np
import matplotlib.pyplot as plt
class Smith():
def __init__(self,real,imag,logmag,phase,freq):
self.real = np.array(real)
self.imag = np.array(imag)
self.log_mag = np.array(logmag)
self.phase_rad = np.array(phase)
self.freqs = np.array(freq)
def get_picoVNA_smith(port='S21',f_min=0.3,f_max=8500,number_of_points=1001,power=0,bandwidth=1000,Average=1):
picoVNA = win32com.client.gencache.EnsureDispatch("PicoControl3.PicoVNA_3_2")
try:
findVNA = picoVNA.FND()
ans=picoVNA.LoadCal(r'C:\Users\ICET\Documents\Pico Technology\PicoVNA3\FacCal.cal')
freq_step = np.ceil((f_max-f_min)/number_of_points*1E5)/1E5
picoVNA.SetFreqPlan(f_min,freq_step,number_of_points,power,bandwidth)
picoVNA.SetEnhance('Aver',Average)
picoVNA.Measure('ALL');
raw_logmag = picoVNA.GetData(port,"logmag",0)
splitdata_logmag = raw_logmag.split(',')
freq = np.float64(np.array(splitdata_logmag))[: : 2]
logmag = np.float64(np.array(splitdata_logmag))[1 : : 2]
raw_real = picoVNA.GetData(port, "real", 0)
splitdata_real = raw_real.split(',')
real = np.float64(np.array(splitdata_real))[1 : : 2]
raw_imag = picoVNA.GetData(port, "imag", 0)
splitdata_imag = raw_imag.split(',')
imag = np.float64(np.array(splitdata_imag))[1:: 2]
raw_phase = picoVNA.GetData(port, "phase", 0)
splitdata_phase = raw_phase.split(',')
phase = np.float64(np.array(splitdata_phase))[1:: 2]
data = Smith(real,imag,logmag,phase,freq)
return data
finally:
picoVNA.CloseVNA()
'''
data = get_picoVNA_smith()
plt.plot(data.freqs, data.log_mag)
plt.ylabel("S21 LogMag")
plt.xlabel("Frequency")
plt.show()
'''
| [
"numpy.array",
"numpy.ceil"
] | [((314, 328), 'numpy.array', 'np.array', (['real'], {}), '(real)\n', (322, 328), True, 'import numpy as np\n'), ((349, 363), 'numpy.array', 'np.array', (['imag'], {}), '(imag)\n', (357, 363), True, 'import numpy as np\n'), ((387, 403), 'numpy.array', 'np.array', (['logmag'], {}), '(logmag)\n', (395, 403), True, 'import numpy as np\n'), ((429, 444), 'numpy.array', 'np.array', (['phase'], {}), '(phase)\n', (437, 444), True, 'import numpy as np\n'), ((466, 480), 'numpy.array', 'np.array', (['freq'], {}), '(freq)\n', (474, 480), True, 'import numpy as np\n'), ((829, 883), 'numpy.ceil', 'np.ceil', (['((f_max - f_min) / number_of_points * 100000.0)'], {}), '((f_max - f_min) / number_of_points * 100000.0)\n', (836, 883), True, 'import numpy as np\n'), ((1162, 1188), 'numpy.array', 'np.array', (['splitdata_logmag'], {}), '(splitdata_logmag)\n', (1170, 1188), True, 'import numpy as np\n'), ((1225, 1251), 'numpy.array', 'np.array', (['splitdata_logmag'], {}), '(splitdata_logmag)\n', (1233, 1251), True, 'import numpy as np\n'), ((1386, 1410), 'numpy.array', 'np.array', (['splitdata_real'], {}), '(splitdata_real)\n', (1394, 1410), True, 'import numpy as np\n'), ((1545, 1569), 'numpy.array', 'np.array', (['splitdata_imag'], {}), '(splitdata_imag)\n', (1553, 1569), True, 'import numpy as np\n'), ((1707, 1732), 'numpy.array', 'np.array', (['splitdata_phase'], {}), '(splitdata_phase)\n', (1715, 1732), True, 'import numpy as np\n')] |
"""
optoforce
A package which simplifies connecting to and reading from optoforce sensors
"""
import serial
import logging
from serial.tools.list_ports import comports
from typing import Generic, List, Optional, TypeVar
from . import status
# typing.Literal introduced in Python v3.8
try:
from typing import Literal # type: ignore
except ImportError:
from typing_extensions import Literal # type: ignore
from .reading import Reading16, Reading22, Reading34, read_16bytes, read_22bytes, read_34bytes
__version__ = '0.3.0'
__all__ = ['OptoForce16', 'OptoForce34', 'OptoForce22', 'status']
logger = logging.getLogger(__name__)
# constants from datasheet
OPTO_PARAMS = {
'baudrate': 1000_000,
'stopbits': serial.STOPBITS_ONE,
'parity': serial.PARITY_NONE,
'bytesize': serial.EIGHTBITS,
}
SPEED_MAPPING = {
'stop': 0,
1000: 1,
333: 3,
100: 10,
30: 33,
10: 100,
}
# must be one of these specific values:
SPEEDS = Literal['stop', 1000, 333, 100, 30, 10]
FILTER_MAPPING = {
'none': 0,
500: 1,
150: 2,
50: 3,
15: 4,
5: 5,
1.5: 6,
}
FILTERS = Literal['none', 500, 150, 50, 15, 5, 1.5]
def find_optoforce_port() -> str:
devices = [dev for dev in comports() if dev.description == 'OptoForce DAQ']
if len(devices) == 0:
raise RuntimeError(f"Couldn't find an OptoForce")
elif len(devices) == 1:
port = devices[0].device
assert port is not None
return port
else:
raise RuntimeError(f'Found more than one OptoForce: {devices}')
T = TypeVar("T")
class _OptoForce(Generic[T]):
# attributes which are filled in by the classes that inherit from this one
_expected_header: bytes
_packet_size: int
def _decoder(self, b: bytes) -> T: ...
def __init__(self,
port: Optional[str] = None,
speed_hz: SPEEDS = 100,
filter_hz: FILTERS = 15,
zero: bool = False):
if speed_hz not in SPEED_MAPPING:
raise KeyError(
f'speed_hz must be one of: {list(SPEED_MAPPING.keys())}. Got: {speed_hz}'
)
if filter_hz not in FILTER_MAPPING:
raise KeyError(
f'filter_hz must be one of: {list(FILTER_MAPPING.keys())}. Got: {filter_hz}'
)
self.speed = SPEED_MAPPING[speed_hz]
self.filter = FILTER_MAPPING[filter_hz]
self.zero = 255 if zero else 0
if port is None:
self.port = find_optoforce_port()
else:
self.port = port
def connect(self):
logger.info(f'connecting at port: {self.port}')
self.opt_ser = serial.Serial(self.port, **OPTO_PARAMS)
# write optoforce setup code
header = (170, 0, 50, 3)
checksum = sum(header) + self.speed + self.filter + self.zero
payload = (*header,
self.speed, self.filter, self.zero,
*divmod(checksum, 256))
logger.info(f'sending configuration bytes: {payload}')
self.opt_ser.write(payload)
def read(self, only_latest_data: bool) -> T:
"""
Read a packet from the serial buffer. If `only_latest_data` is True,
and there is more than one packet waiting in the buffer, flush the buffer
until there is only one packet left (the latest packet). Otherwise,
just read the next packet in the buffer, even if that packet is slightly
old.
"""
# opt_ser.in_waiting returns the number of bytes in the buffer
if only_latest_data and self.opt_ser.in_waiting > self._packet_size:
# flush input to make sure we don't read old data
self.opt_ser.reset_input_buffer()
        # Start by reading data from the input buffer until the header
        # `_expected_header` is found. This discards data until the start of a
        # packet is found.
self.opt_ser.read_until(self._expected_header)
logger.debug('received frame header')
# next, read the body of the packet
raw_data = self.opt_ser.read(
self._packet_size - len(self._expected_header)
)
# decode (deserialize) the bytes into regular Python data
return self._decoder(raw_data)
def read_all_packets_in_buffer(self) -> List[T]:
"""
Read all packets in the buffer. Note that the `count` attribute of a packet
can be used to tell when the packet was sent from the optoforce.
"""
data: List[T] = []
while self.opt_ser.in_waiting >= self._packet_size:
data.append(self.read(only_latest_data=False))
return data
def close(self):
if hasattr(self, 'opt_ser'):
self.opt_ser.close()
logger.info('closed connection')
def __enter__(self):
self.connect()
return self
def __exit__(self, *args):
self.close()
def __del__(self):
self.close()
##
class OptoForce16(_OptoForce[Reading16]):
_expected_header = bytes((170, 7, 8, 10))
_packet_size = 16
def _decoder(self, b): return read_16bytes(self._expected_header, b)
class OptoForce34(_OptoForce[Reading34]):
_expected_header = bytes((170, 7, 8, 28))
_packet_size = 34
def _decoder(self, b): return read_34bytes(self._expected_header, b)
class OptoForce22(_OptoForce[Reading22]):
_expected_header = bytes((170, 7, 8, 16))
_packet_size = 22
def _decoder(self, b): return read_22bytes(self._expected_header, b)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
logger.warning("This force sensor model hasn't been tested. "
"Please mention on the source repo how it went! "
"Also, the torques aren't scaled, since I don't have that datasheet!")
| [
"logging.getLogger",
"serial.tools.list_ports.comports",
"serial.Serial",
"typing.TypeVar"
] | [((616, 643), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (633, 643), False, 'import logging\n'), ((1571, 1583), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (1578, 1583), False, 'from typing import Generic, List, Optional, TypeVar\n'), ((2682, 2721), 'serial.Serial', 'serial.Serial', (['self.port'], {}), '(self.port, **OPTO_PARAMS)\n', (2695, 2721), False, 'import serial\n'), ((1233, 1243), 'serial.tools.list_ports.comports', 'comports', ([], {}), '()\n', (1241, 1243), False, 'from serial.tools.list_ports import comports\n')] |
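# --- Editor's sketch (not part of the package above) ------------------------
# Reading one sample through the context-manager support of _OptoForce; with
# port=None the sensor is auto-detected via find_optoforce_port().
from optoforce import OptoForce16
with OptoForce16(speed_hz=100, filter_hz=15, zero=False) as force:
    reading = force.read(only_latest_data=True)
    print(reading)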
from math import cos, sin, ceil, pi
peripheral_view = 114  # horizontal field of view, in degrees
def find_best_seat(width_of_screen, distance_between_screen_and_first_row):
    # distance = (width / 2) * cot(FOV / 2), with the angle converted to radians
    half_angle_rad = peripheral_view / 2 * pi / 180
    distance_from_screen = cot(half_angle_rad) * width_of_screen / 2
best_possible_seat = (distance_from_screen -
distance_between_screen_and_first_row)
return ceil(best_possible_seat)
def cot(angle):
return cos(angle)/sin(angle)
def run_example():
width_of_screen = 14
distance_between_screen_and_first_row = 1
best_seat = find_best_seat(width_of_screen,
distance_between_screen_and_first_row)
print('Best Seat Position: {}th row'.format(best_seat))
if __name__ == '__main__':
run_example()
| [
"math.cos",
"math.sin",
"math.ceil"
] | [((329, 353), 'math.ceil', 'ceil', (['best_possible_seat'], {}), '(best_possible_seat)\n', (333, 353), False, 'from math import cos, sin, ceil\n'), ((384, 394), 'math.cos', 'cos', (['angle'], {}), '(angle)\n', (387, 394), False, 'from math import cos, sin, ceil\n'), ((395, 405), 'math.sin', 'sin', (['angle'], {}), '(angle)\n', (398, 405), False, 'from math import cos, sin, ceil\n')] |
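# --- Editor's worked example for the corrected formula above ----------------
# With peripheral_view = 114 degrees and the run_example() values:
#   half-angle = 57 degrees ≈ 0.9948 rad, cot(0.9948) ≈ 0.6494
#   distance_from_screen ≈ 0.6494 * 14 / 2 ≈ 4.55
#   best seat = ceil(4.55 - 1) = ceil(3.55) = 4 -> "Best Seat Position: 4th row"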
# Authored by : gusdn3477
# Co-authored by : -
# Link : http://boj.kr/93afacc450454aedbd2b0d6667914846
import sys
def input():
return sys.stdin.readline().rstrip()
def binary_search(t):
start, end = 0, len(arr)-1
while start <= end:
mid = (start + end) // 2
if arr[mid] == t:
return 1
elif arr[mid] > t:
end = mid - 1
else:
start = mid + 1
return 0
T = int(input())
for i in range(T):
N = int(input())
arr = list(map(int, input().split()))
M = int(input())
arr2 = list(map(int, input().split()))
arr.sort()
for j in arr2:
print(binary_search(j)) | [
"sys.stdin.readline"
] | [((139, 159), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (157, 159), False, 'import sys\n')] |
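# --- Editor's worked trace (not part of the submission above) ---------------
# binary_search(7) with arr = [1, 3, 5, 7, 9]:
#   start=0, end=4 -> mid=2, arr[2]=5 < 7  -> start=3
#   start=3, end=4 -> mid=3, arr[3]=7 == 7 -> return 1
# A target absent from arr exhausts the window (start > end) and returns 0.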
import torch.nn as nn
from graphgallery.nn.layers.pytorch import (GATConv, SparseGATConv,
Sequential, activations)
class GAT(nn.Module):
def __init__(self,
in_features,
out_features,
hids=[8],
num_heads=[8],
acts=['elu'],
dropout=0.6,
bias=True):
super().__init__()
head = 1
conv = []
conv.append(nn.Dropout(dropout))
for hid, num_head, act in zip(hids, num_heads, acts):
conv.append(SparseGATConv(in_features * head,
hid,
attn_heads=num_head,
reduction='concat',
bias=bias))
conv.append(activations.get(act))
conv.append(nn.Dropout(dropout))
in_features = hid
head = num_head
conv.append(SparseGATConv(in_features * head,
out_features,
attn_heads=1,
reduction='average',
bias=bias))
conv = Sequential(*conv)
self.conv = conv
def forward(self, x, adj):
return self.conv(x, adj)
| [
"graphgallery.nn.layers.pytorch.SparseGATConv",
"torch.nn.Dropout",
"graphgallery.nn.layers.pytorch.activations.get",
"graphgallery.nn.layers.pytorch.Sequential"
] | [((1303, 1320), 'graphgallery.nn.layers.pytorch.Sequential', 'Sequential', (['*conv'], {}), '(*conv)\n', (1313, 1320), False, 'from graphgallery.nn.layers.pytorch import GATConv, SparseGATConv, Sequential, activations\n'), ((521, 540), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (531, 540), True, 'import torch.nn as nn\n'), ((1052, 1150), 'graphgallery.nn.layers.pytorch.SparseGATConv', 'SparseGATConv', (['(in_features * head)', 'out_features'], {'attn_heads': '(1)', 'reduction': '"""average"""', 'bias': 'bias'}), "(in_features * head, out_features, attn_heads=1, reduction=\n 'average', bias=bias)\n", (1065, 1150), False, 'from graphgallery.nn.layers.pytorch import GATConv, SparseGATConv, Sequential, activations\n'), ((630, 725), 'graphgallery.nn.layers.pytorch.SparseGATConv', 'SparseGATConv', (['(in_features * head)', 'hid'], {'attn_heads': 'num_head', 'reduction': '"""concat"""', 'bias': 'bias'}), "(in_features * head, hid, attn_heads=num_head, reduction=\n 'concat', bias=bias)\n", (643, 725), False, 'from graphgallery.nn.layers.pytorch import GATConv, SparseGATConv, Sequential, activations\n'), ((903, 923), 'graphgallery.nn.layers.pytorch.activations.get', 'activations.get', (['act'], {}), '(act)\n', (918, 923), False, 'from graphgallery.nn.layers.pytorch import GATConv, SparseGATConv, Sequential, activations\n'), ((950, 969), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (960, 969), True, 'import torch.nn as nn\n')] |
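# --- Editor's sketch (hypothetical sizes, not part of the module above) -----
# Instantiating the model with Cora-like dimensions; the forward pass expects
# a node-feature matrix `x` and a sparse adjacency `adj`.
model = GAT(in_features=1433, out_features=7,
            hids=[8], num_heads=[8], acts=['elu'], dropout=0.6)
# logits = model(x, adj)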
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Unit tests for create_assessment_report.py.
"""
import dateutil.parser
import urllib.request
import uuid
import boto3
from botocore.exceptions import ClientError
import pytest
from create_assessment_report import AuditReport
@pytest.mark.parametrize('inputs, outputs, error_code', [
(['bad-uuid'], (None, None), 'Nostub'),
(['f66e7fc4-baf1-4661-85db-f6ff6ee76630', 'bad-date'], (None, None), 'Nostub'),
(['f66e7fc4-baf1-4661-85db-f6ff6ee76630', '2021-01-01'],
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), dateutil.parser.parse('2021-01-01').date()),
None),
(['f66e7fc4-baf1-4661-85db-f6ff6ee76630', '2021-01-01'], (None, None), 'TestException'),
])
def test_get_input(make_stubber, monkeypatch, inputs, outputs, error_code):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
report = AuditReport(auditmanager_client)
monkeypatch.setattr('builtins.input', lambda x: inputs.pop(0))
if error_code != 'Nostub':
auditmanager_stubber.stub_get_assessment(inputs[0], error_code=error_code)
got_uuid, got_date = report.get_input()
assert got_uuid == outputs[0]
assert got_date == outputs[1]
@pytest.mark.parametrize('assessment_uuid, evidence_date, tokens, folders, stop_on_action, error_code', [
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), None, [None, None], [], None, None),
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), None, [None, None], [], 'stub_get_evidence_folders_by_assessment', 'TestException'),
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), None, [None, '1', None], [], None, None),
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), '2021-01-01', [None, None], [
{'id': f'id-{"1"*36}', 'name': '2021-01-01', 'assessmentReportSelectionCount': 1,
'totalEvidence': 1, 'controlId': f'ctl-{"1"*36}'}
], None, None),
(uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630'), '2021-01-01', [None, None], [
{'id': f'id-{"1" * 36}', 'name': '2021-01-01', 'assessmentReportSelectionCount': 2,
'totalEvidence': 1, 'controlId': f'ctl-{"1" * 36}'}
], None, None),
])
def test_clear_staging(
make_stubber, stub_runner, assessment_uuid, evidence_date, tokens, folders,
stop_on_action, error_code):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
report = AuditReport(auditmanager_client)
with stub_runner(error_code, stop_on_action) as runner:
for i_token in range(len(tokens) - 1):
runner.add(
auditmanager_stubber.stub_get_evidence_folders_by_assessment,
str(assessment_uuid), 1000, tokens[i_token:i_token+2], folders)
if len(folders) > 0:
if folders[0]['assessmentReportSelectionCount'] == folders[0]['totalEvidence']:
runner.add(
auditmanager_stubber.stub_disassociate_assessment_report_evidence_folder,
str(assessment_uuid), folders[0]['id'])
else:
evidence_id = f'ev-{"1"*36}'
runner.add(
auditmanager_stubber.stub_get_evidence_by_evidence_folder,
str(assessment_uuid), folders[0]['id'], 1000, [
{'id': evidence_id, 'assessmentReportSelection': 'Yes'}])
runner.add(
auditmanager_stubber.stub_batch_disassociate_assessment_report_evidence,
str(assessment_uuid), folders[0]['id'], [evidence_id])
if error_code is None:
got_folder_ids = report.clear_staging(assessment_uuid, evidence_date)
assert got_folder_ids == [folder['id'] for folder in folders]
else:
with pytest.raises(ClientError) as exc_info:
report.clear_staging(assessment_uuid, evidence_date)
assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_add_folder_to_staging(make_stubber, error_code):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
report = AuditReport(auditmanager_client)
assessment_uuid = uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630')
folder_id = f'folder-{"1"*36}'
auditmanager_stubber.stub_associate_assessment_report_evidence_folder(
str(assessment_uuid), folder_id, error_code=error_code)
if error_code is None:
report.add_folder_to_staging(assessment_uuid, [folder_id])
else:
with pytest.raises(ClientError) as exc_info:
report.add_folder_to_staging(assessment_uuid, [folder_id])
assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code, stop_on_action', [
(None, None),
('TestException', 'stub_create_assessment_report')])
def test_get_report(make_stubber, stub_runner, monkeypatch, error_code, stop_on_action):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
report = AuditReport(auditmanager_client)
assessment_uuid = uuid.UUID('f66e7fc4-baf1-4661-85db-f6ff6ee76630')
report_id = f'report-{"1"*36}'
report_url = 'https://example.com/test-report'
monkeypatch.setattr(urllib.request, 'urlretrieve', lambda x, y: None)
with stub_runner(error_code, stop_on_action) as runner:
runner.add(
auditmanager_stubber.stub_create_assessment_report, 'ReportViaScript',
'testing', str(assessment_uuid), report_id)
runner.add(auditmanager_stubber.stub_list_assessment_reports, [report_id])
runner.add(
auditmanager_stubber.stub_get_assessment_report_url, report_id,
str(assessment_uuid), report_url)
if error_code is None:
report.get_report(assessment_uuid)
else:
with pytest.raises(ClientError) as exc_info:
report.get_report(assessment_uuid)
assert exc_info.value.response['Error']['Code'] == error_code
| [
"uuid.UUID",
"boto3.client",
"create_assessment_report.AuditReport",
"pytest.mark.parametrize",
"pytest.raises"
] | [((4082, 4144), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""error_code"""', "[None, 'TestException']"], {}), "('error_code', [None, 'TestException'])\n", (4105, 4144), False, 'import pytest\n'), ((4914, 5040), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""error_code, stop_on_action"""', "[(None, None), ('TestException', 'stub_create_assessment_report')]"], {}), "('error_code, stop_on_action', [(None, None), (\n 'TestException', 'stub_create_assessment_report')])\n", (4937, 5040), False, 'import pytest\n'), ((900, 928), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (912, 928), False, 'import boto3\n'), ((1003, 1035), 'create_assessment_report.AuditReport', 'AuditReport', (['auditmanager_client'], {}), '(auditmanager_client)\n', (1014, 1035), False, 'from create_assessment_report import AuditReport\n'), ((2460, 2488), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (2472, 2488), False, 'import boto3\n'), ((2563, 2595), 'create_assessment_report.AuditReport', 'AuditReport', (['auditmanager_client'], {}), '(auditmanager_client)\n', (2574, 2595), False, 'from create_assessment_report import AuditReport\n'), ((4229, 4257), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (4241, 4257), False, 'import boto3\n'), ((4332, 4364), 'create_assessment_report.AuditReport', 'AuditReport', (['auditmanager_client'], {}), '(auditmanager_client)\n', (4343, 4364), False, 'from create_assessment_report import AuditReport\n'), ((4387, 4436), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (4396, 4436), False, 'import uuid\n'), ((5160, 5188), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (5172, 5188), False, 'import boto3\n'), ((5263, 5295), 'create_assessment_report.AuditReport', 'AuditReport', (['auditmanager_client'], {}), '(auditmanager_client)\n', (5274, 5295), False, 'from create_assessment_report import AuditReport\n'), ((5318, 5367), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (5327, 5367), False, 'import uuid\n'), ((3904, 3930), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (3917, 3930), False, 'import pytest\n'), ((1445, 1494), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (1454, 1494), False, 'import uuid\n'), ((1538, 1587), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (1547, 1587), False, 'import uuid\n'), ((1679, 1728), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (1688, 1728), False, 'import uuid\n'), ((1777, 1826), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (1786, 1826), False, 'import uuid\n'), ((2032, 2081), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (2041, 2081), False, 'import uuid\n'), ((4730, 4756), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (4743, 4756), False, 'import pytest\n'), ((6068, 6094), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (6081, 6094), False, 'import pytest\n'), ((594, 643), 'uuid.UUID', 'uuid.UUID', (['"""f66e7fc4-baf1-4661-85db-f6ff6ee76630"""'], {}), "('f66e7fc4-baf1-4661-85db-f6ff6ee76630')\n", (603, 643), False, 'import uuid\n')]
# -*- coding:utf-8 -*-
import hashlib
import json
from flask import Blueprint, session, request
from util import navigator
from util.common import getApiSingleData, getData, getParameter
userApi = Blueprint("userApi", __name__)
@userApi.route("/api/user/login", methods=['POST'])
def login():
result_data = {
"auth": False,
"message": ""
}
reqData = request.get_json()
if request.headers.getlist("X-Forward-FOR") :
ip = request.headers.getlist("X-Forward-FOR")
else :
ip = request.environ.get('REMOTE_ADDR')
param = {
'id' : getParameter(reqData, "id")
,'pwd' : getParameter(reqData, "<PASSWORD>")
,'loginIp' : ip
}
apiData = getApiSingleData("/login" ,param)
if "empId" in apiData:
result_data = {"auth": True,"message": ""}
session['empId'] = apiData['empId']
session['name'] = apiData['name']
session['position'] = apiData['position']
session['email'] = apiData['email']
session['phone'] = apiData['phone']
resultData = getData('/systemMng/authMenus' , {"empId" : apiData['empId']})
resultHighMenuList = resultData['resultHighMenuList']
subUrlAuthList = resultData['resultSubUrlList']
resultCommCodeList = resultData['resultCommCodeList']
menuItems = []
session['navigator'] = menuItems
for data in resultHighMenuList :
if data['level'] == '1' :
menuItems.append(data)
elif data['level'] == '2' :
subMenuArray = []
for menu in menuItems :
if data['parMenuId'] == menu['menuId']:
if "subMenu" in menu:
subMenuArray = menu["subMenu"]
subMenuArray.append(data)
menu['subMenu'] = subMenuArray
else :
subMenuArray = []
for highMenu in menuItems :
for secondMenu in highMenu["subMenu"]:
if data['parMenuId'] == secondMenu['menuId']:
if "subMenu" in secondMenu:
subMenuArray = secondMenu["subMenu"]
subMenuArray.append(data)
secondMenu['subMenu'] = subMenuArray
session['navigator'] = menuItems
session['subUrlAuthList'] = subUrlAuthList
session['commCodeList'] = resultCommCodeList
else :
result_data = apiData
return json.dumps(result_data)
@userApi.route("/api/user/passwordCheck", methods=['POST'])
def passwordCheck():
apiData = getApiSingleData("/login" ,{"id": session["email"],
"pwd" : getParameter(request.get_json() , "pwd"),
"loginIp" : request.environ.get('REMOTE_ADDR')
})
if "empId" in apiData:
return json.dumps({"data" : "1"})
else :
return json.dumps({"data" : "0"}) | [
"util.common.getData",
"util.common.getApiSingleData",
"util.common.getParameter",
"flask.request.environ.get",
"json.dumps",
"flask.request.headers.getlist",
"flask.request.get_json",
"flask.Blueprint"
] | [((200, 230), 'flask.Blueprint', 'Blueprint', (['"""userApi"""', '__name__'], {}), "('userApi', __name__)\n", (209, 230), False, 'from flask import Blueprint, session, request\n'), ((387, 405), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (403, 405), False, 'from flask import Blueprint, session, request\n'), ((413, 453), 'flask.request.headers.getlist', 'request.headers.getlist', (['"""X-Forward-FOR"""'], {}), "('X-Forward-FOR')\n", (436, 453), False, 'from flask import Blueprint, session, request\n'), ((742, 775), 'util.common.getApiSingleData', 'getApiSingleData', (['"""/login"""', 'param'], {}), "('/login', param)\n", (758, 775), False, 'from util.common import getApiSingleData, getData, getParameter\n'), ((2585, 2608), 'json.dumps', 'json.dumps', (['result_data'], {}), '(result_data)\n', (2595, 2608), False, 'import json\n'), ((469, 509), 'flask.request.headers.getlist', 'request.headers.getlist', (['"""X-Forward-FOR"""'], {}), "('X-Forward-FOR')\n", (492, 509), False, 'from flask import Blueprint, session, request\n'), ((534, 568), 'flask.request.environ.get', 'request.environ.get', (['"""REMOTE_ADDR"""'], {}), "('REMOTE_ADDR')\n", (553, 568), False, 'from flask import Blueprint, session, request\n'), ((604, 631), 'util.common.getParameter', 'getParameter', (['reqData', '"""id"""'], {}), "(reqData, 'id')\n", (616, 631), False, 'from util.common import getApiSingleData, getData, getParameter\n'), ((654, 689), 'util.common.getParameter', 'getParameter', (['reqData', '"""<PASSWORD>"""'], {}), "(reqData, '<PASSWORD>')\n", (666, 689), False, 'from util.common import getApiSingleData, getData, getParameter\n'), ((1105, 1165), 'util.common.getData', 'getData', (['"""/systemMng/authMenus"""', "{'empId': apiData['empId']}"], {}), "('/systemMng/authMenus', {'empId': apiData['empId']})\n", (1112, 1165), False, 'from util.common import getApiSingleData, getData, getParameter\n'), ((3030, 3055), 'json.dumps', 'json.dumps', (["{'data': '1'}"], {}), "({'data': '1'})\n", (3040, 3055), False, 'import json\n'), ((3083, 3108), 'json.dumps', 'json.dumps', (["{'data': '0'}"], {}), "({'data': '0'})\n", (3093, 3108), False, 'import json\n'), ((2908, 2942), 'flask.request.environ.get', 'request.environ.get', (['"""REMOTE_ADDR"""'], {}), "('REMOTE_ADDR')\n", (2927, 2942), False, 'from flask import Blueprint, session, request\n'), ((2825, 2843), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2841, 2843), False, 'from flask import Blueprint, session, request\n')] |
from basic_data_cleaning import BasicCleaning
# Read csv and clean data
#data_set = BasicCleaning.CleanData(path_to_data=r'C:\Users\ivangarrera\Desktop\T2.csv',var="Accelerometer")
data_set = BasicCleaning.CleanData(path_to_data=r'D:\Ing. Informatica\Cuarto\Machine Learning\T2.csv',var="Accelerometer")
#data_set.to_csv(r'C:\Users\ivangarrera\Desktop\T2_cleaned.csv',sep=',', index=False)
data_set.to_csv(r'D:\Ing. Informatica\Cuarto\Machine Learning\T2_cleaned_accelerometer.csv',sep=',', index=False)
| [
"basic_data_cleaning.BasicCleaning.CleanData"
] | [((194, 319), 'basic_data_cleaning.BasicCleaning.CleanData', 'BasicCleaning.CleanData', ([], {'path_to_data': '"""D:\\\\Ing. Informatica\\\\Cuarto\\\\Machine Learning\\\\T2.csv"""', 'var': '"""Accelerometer"""'}), "(path_to_data=\n 'D:\\\\Ing. Informatica\\\\Cuarto\\\\Machine Learning\\\\T2.csv', var=\n 'Accelerometer')\n", (217, 319), False, 'from basic_data_cleaning import BasicCleaning\n')] |
from __future__ import absolute_import
from functools import partial
import tensorflow as tf
from tensorflow_mean_average_precision.python.ops import greedy_assignment_ops
from tensorflow.keras.metrics import Metric, AUC
@tf.function
def _batch_greedy_assignment(similarity_true_pred, threshold):
return tf.map_fn(
partial(greedy_assignment_ops.greedy_assignment, threshold=threshold),
similarity_true_pred,
fn_output_signature=tf.bool)
class MeanAveragePrecision(Metric):
def __init__(self, thresholds, name=None):
super(MeanAveragePrecision, self).__init__(name=name)
self.thresholds = thresholds
self.ap_metrics = [
AUC(curve='pr', name='AveragePrecision @ %.3f' % threshold)
for threshold in self.thresholds
]
def reset_states(self):
ops = [metric.reset_states() for metric in self.ap_metrics]
return tf.group(ops, name='reset_states')
def result(self):
return tf.reduce_mean([metric.result() for metric in self.ap_metrics],
name='result')
def update_state(self, similarity_true_pred, scores_pred,
sample_weight=None):
# Sort predictions by scores
index = tf.argsort(scores_pred,
axis=-1, direction='DESCENDING')
similarity_true_pred = tf.gather(similarity_true_pred, index,
axis=-1, batch_dims=1)
scores_pred = tf.gather(scores_pred, index,
axis=-1, batch_dims=1)
# Update all metrices
ops = [metric.update_state(
_batch_greedy_assignment(similarity_true_pred, threshold),
scores_pred,
sample_weight)
for threshold, metric in zip(self.thresholds, self.ap_metrics)]
return tf.group(ops, name='update_state')
def get_config(self):
return {'thresholds': self.thresholds}
| [
"tensorflow.argsort",
"tensorflow.group",
"tensorflow.gather",
"functools.partial",
"tensorflow.keras.metrics.AUC"
] | [((331, 400), 'functools.partial', 'partial', (['greedy_assignment_ops.greedy_assignment'], {'threshold': 'threshold'}), '(greedy_assignment_ops.greedy_assignment, threshold=threshold)\n', (338, 400), False, 'from functools import partial\n'), ((923, 957), 'tensorflow.group', 'tf.group', (['ops'], {'name': '"""reset_states"""'}), "(ops, name='reset_states')\n", (931, 957), True, 'import tensorflow as tf\n'), ((1263, 1319), 'tensorflow.argsort', 'tf.argsort', (['scores_pred'], {'axis': '(-1)', 'direction': '"""DESCENDING"""'}), "(scores_pred, axis=-1, direction='DESCENDING')\n", (1273, 1319), True, 'import tensorflow as tf\n'), ((1378, 1439), 'tensorflow.gather', 'tf.gather', (['similarity_true_pred', 'index'], {'axis': '(-1)', 'batch_dims': '(1)'}), '(similarity_true_pred, index, axis=-1, batch_dims=1)\n', (1387, 1439), True, 'import tensorflow as tf\n'), ((1503, 1555), 'tensorflow.gather', 'tf.gather', (['scores_pred', 'index'], {'axis': '(-1)', 'batch_dims': '(1)'}), '(scores_pred, index, axis=-1, batch_dims=1)\n', (1512, 1555), True, 'import tensorflow as tf\n'), ((1869, 1903), 'tensorflow.group', 'tf.group', (['ops'], {'name': '"""update_state"""'}), "(ops, name='update_state')\n", (1877, 1903), True, 'import tensorflow as tf\n'), ((696, 755), 'tensorflow.keras.metrics.AUC', 'AUC', ([], {'curve': '"""pr"""', 'name': "('AveragePrecision @ %.3f' % threshold)"}), "(curve='pr', name='AveragePrecision @ %.3f' % threshold)\n", (699, 755), False, 'from tensorflow.keras.metrics import Metric, AUC\n')] |
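# --- Editor's sketch (assumed shapes, not part of the module above) ---------
# Driving the metric with random tensors; similarity_true_pred is assumed to
# be a per-sample [num_true, num_pred] similarity matrix and scores_pred the
# per-prediction confidences. The greedy-assignment custom op must be built
# for this to run.
import tensorflow as tf
metric = MeanAveragePrecision(thresholds=[0.5, 0.75])
similarity = tf.random.uniform((4, 2, 3))  # [batch, num_true, num_pred]
scores = tf.random.uniform((4, 3))      # [batch, num_pred]
metric.update_state(similarity, scores)
print(metric.result())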
# -*- coding:utf-8
from rigger_singleton.singleton import singleton
from rigger_plugin_framework.plugin_collector import PluginCollector
from rigger_plugin_framework.plugin import Plugin
from rigger_plugin_framework.plugin_installer import PluginInstaller
@singleton
class PluginManager:
__slots__ = (
"__raw_plugins",
"__plugin_instances",
"__plugin_type_map"
)
@staticmethod
def register(cls):
manager = PluginManager()
manager.register_plugin(cls)
@staticmethod
def raw_plugins():
"""
        Get all plugin types (prototypes).
:return:
"""
manager = PluginManager()
return manager.all_raw_plugins
@staticmethod
def plugins():
"""
        Get all plugin instances.
:return:
"""
manager = PluginManager()
return manager.all_plugins
@staticmethod
def get_plugin_names_by_type(t):
plugins = PluginManager().get_plugins_by_type(t)
ret = []
for plugin in plugins:
name = plugin.get_plugin_name()
ret.append(name)
return ret
@staticmethod
def start_plugins():
manager = PluginManager()
manager.launch_plugins()
@staticmethod
def pick_plugins(plugin_type, plugin_name=None):
manager = PluginManager()
return manager.get_plugins_by_type(plugin_type, plugin_name)
@staticmethod
def install(file_path, dest_dir):
"""
        Install the specified plugin package.
:param file_path:
:param dest_dir:
:return:
"""
PluginInstaller().install(file_path, dest_dir)
@staticmethod
def collect(path):
"""
        Discover plugins under the given path.
:param path:
:return:
"""
PluginCollector.collect(path)
@staticmethod
def remove_plugins():
"""
:return:
"""
manager = PluginManager()
manager.__plugin_instances = []
manager.__plugin_type_map = dict()
manager.__raw_plugins = []
def __init__(self):
self.__raw_plugins = []
self.__plugin_instances = []
self.__plugin_type_map = dict()
def register_plugin(self, cls):
"""
        Register a plugin class.
:param cls:
:return:
"""
if cls not in self.__raw_plugins:
self.__raw_plugins.append(cls)
def launch_plugins(self):
"""
        Start all plugins.
:return:
"""
insts = self.__plugin_instances
if len(insts) <= 0:
for plugin_cls in self.raw_plugins():
inst = plugin_cls()
insts.append(inst)
inst.on_start()
self.add_plugin_type(inst)
def stop_plugins(self):
"""
        Stop all plugins.
:return:
"""
insts = self.__plugin_instances
for inst in insts:
assert isinstance(inst, Plugin)
inst.on_stop()
self.__plugin_instances = []
def add_plugin_type(self, plugin):
"""
        Add the plugin to the type map.
:param plugin:
:return:
"""
plugins = self.get_plugins_by_type(plugin.get_plugin_type())
assert isinstance(plugins, list)
if plugin not in plugins:
plugins.append(plugin)
def get_plugins_by_type(self, plugin_type, plugin_name=None):
"""
        Get the list of plugin instances of the given type.
:param plugin_type:
:param plugin_name:
:return:
"""
plugin_types = self.__plugin_type_map.get(plugin_type)
if plugin_types is None:
self.__plugin_type_map[plugin_type] = []
return self.__plugin_type_map.get(plugin_type)
else:
if plugin_name is not None:
temp = []
for plugin in plugin_types:
if plugin.get_plugin_name() == plugin_name:
temp.append(plugin)
return temp
else:
return plugin_types
@property
def all_raw_plugins(self):
return self.__raw_plugins
@property
def all_plugins(self):
return self.__plugin_instances
| [
"rigger_plugin_framework.plugin_collector.PluginCollector.collect",
"rigger_plugin_framework.plugin_installer.PluginInstaller"
] | [((1745, 1774), 'rigger_plugin_framework.plugin_collector.PluginCollector.collect', 'PluginCollector.collect', (['path'], {}), '(path)\n', (1768, 1774), False, 'from rigger_plugin_framework.plugin_collector import PluginCollector\n'), ((1569, 1586), 'rigger_plugin_framework.plugin_installer.PluginInstaller', 'PluginInstaller', ([], {}), '()\n', (1584, 1586), False, 'from rigger_plugin_framework.plugin_installer import PluginInstaller\n')] |
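# --- Editor's sketch (hypothetical plugin, not part of the framework) -------
# Registering a Plugin subclass and fetching it by type; the hook names are
# the ones PluginManager itself calls (on_start, get_plugin_type, ...).
class HelloPlugin(Plugin):
    def get_plugin_type(self):
        return "greeter"
    def get_plugin_name(self):
        return "hello"
    def on_start(self):
        print("hello plugin started")
    def on_stop(self):
        pass
PluginManager.register(HelloPlugin)
PluginManager.start_plugins()  # instantiates each registered class and calls on_start()
greeters = PluginManager.pick_plugins("greeter", plugin_name="hello")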
from poop.hfdp.factory.pizzaaf.cheese import Cheese
from poop.hfdp.factory.pizzaaf.clams import Clams
from poop.hfdp.factory.pizzaaf.dough import Dough
from poop.hfdp.factory.pizzaaf.fresh_clams import FreshClams
from poop.hfdp.factory.pizzaaf.garlic import Garlic
from poop.hfdp.factory.pizzaaf.marinara_sauce import MarinaraSauce
from poop.hfdp.factory.pizzaaf.mushroom import Mushroom
from poop.hfdp.factory.pizzaaf.onion import Onion
from poop.hfdp.factory.pizzaaf.pepperoni import Pepperoni
from poop.hfdp.factory.pizzaaf.pizza_ingredient_factory import (
PizzaIngredientFactory,
)
from poop.hfdp.factory.pizzaaf.red_pepper import RedPepper
from poop.hfdp.factory.pizzaaf.reggiano_pizza import ReggianoCheese
from poop.hfdp.factory.pizzaaf.sauce import Sauce
from poop.hfdp.factory.pizzaaf.sliced_pepperoni import SlicedPepperoni
from poop.hfdp.factory.pizzaaf.thick_crust_dough import ThickCrustDough
from poop.hfdp.factory.pizzaaf.veggies import Veggies
class NYPizzaIngredientFactory(PizzaIngredientFactory):
def create_dough(self) -> Dough:
return ThickCrustDough()
def create_sauce(self) -> Sauce:
return MarinaraSauce()
def create_cheese(self) -> Cheese:
return ReggianoCheese()
def create_veggies(self) -> list[Veggies]:
return [Garlic(), Onion(), Mushroom(), RedPepper()]
def create_pepperoni(self) -> Pepperoni:
return SlicedPepperoni()
def create_clam(self) -> Clams:
return FreshClams()
| [
"poop.hfdp.factory.pizzaaf.garlic.Garlic",
"poop.hfdp.factory.pizzaaf.sliced_pepperoni.SlicedPepperoni",
"poop.hfdp.factory.pizzaaf.marinara_sauce.MarinaraSauce",
"poop.hfdp.factory.pizzaaf.mushroom.Mushroom",
"poop.hfdp.factory.pizzaaf.reggiano_pizza.ReggianoCheese",
"poop.hfdp.factory.pizzaaf.fresh_clams.FreshClams",
"poop.hfdp.factory.pizzaaf.thick_crust_dough.ThickCrustDough",
"poop.hfdp.factory.pizzaaf.onion.Onion",
"poop.hfdp.factory.pizzaaf.red_pepper.RedPepper"
] | [((1075, 1092), 'poop.hfdp.factory.pizzaaf.thick_crust_dough.ThickCrustDough', 'ThickCrustDough', ([], {}), '()\n', (1090, 1092), False, 'from poop.hfdp.factory.pizzaaf.thick_crust_dough import ThickCrustDough\n'), ((1146, 1161), 'poop.hfdp.factory.pizzaaf.marinara_sauce.MarinaraSauce', 'MarinaraSauce', ([], {}), '()\n', (1159, 1161), False, 'from poop.hfdp.factory.pizzaaf.marinara_sauce import MarinaraSauce\n'), ((1217, 1233), 'poop.hfdp.factory.pizzaaf.reggiano_pizza.ReggianoCheese', 'ReggianoCheese', ([], {}), '()\n', (1231, 1233), False, 'from poop.hfdp.factory.pizzaaf.reggiano_pizza import ReggianoCheese\n'), ((1403, 1420), 'poop.hfdp.factory.pizzaaf.sliced_pepperoni.SlicedPepperoni', 'SlicedPepperoni', ([], {}), '()\n', (1418, 1420), False, 'from poop.hfdp.factory.pizzaaf.sliced_pepperoni import SlicedPepperoni\n'), ((1473, 1485), 'poop.hfdp.factory.pizzaaf.fresh_clams.FreshClams', 'FreshClams', ([], {}), '()\n', (1483, 1485), False, 'from poop.hfdp.factory.pizzaaf.fresh_clams import FreshClams\n'), ((1298, 1306), 'poop.hfdp.factory.pizzaaf.garlic.Garlic', 'Garlic', ([], {}), '()\n', (1304, 1306), False, 'from poop.hfdp.factory.pizzaaf.garlic import Garlic\n'), ((1308, 1315), 'poop.hfdp.factory.pizzaaf.onion.Onion', 'Onion', ([], {}), '()\n', (1313, 1315), False, 'from poop.hfdp.factory.pizzaaf.onion import Onion\n'), ((1317, 1327), 'poop.hfdp.factory.pizzaaf.mushroom.Mushroom', 'Mushroom', ([], {}), '()\n', (1325, 1327), False, 'from poop.hfdp.factory.pizzaaf.mushroom import Mushroom\n'), ((1329, 1340), 'poop.hfdp.factory.pizzaaf.red_pepper.RedPepper', 'RedPepper', ([], {}), '()\n', (1338, 1340), False, 'from poop.hfdp.factory.pizzaaf.red_pepper import RedPepper\n')] |
__author__ = '<EMAIL>'
import unittest
from test import mock_test_data
from modules.steps.init_service_pipeline_data import InitServicePipelineData
from modules.util import data_defs, pipeline_data_utils
class TestInitServicePipelineData(unittest.TestCase):
def test_run(self):
test_data = mock_test_data.get_parsed_stack_content()
pipeline_data = {data_defs.STACK_FILE_PARSED_CONTENT: test_data}
step = InitServicePipelineData()
result = step.run_step(pipeline_data)
self.assertEqual(result[data_defs.SERVICES][0][data_defs.S_ENVIRONMENT], {})
self.assertEqual(result[data_defs.SERVICES][0][data_defs.S_NAME], 'web')
self.assertEqual(result[data_defs.SERVICES][0][data_defs.S_DEPLOY_LABELS], [])
self.assertEqual(result[data_defs.SERVICES][1][data_defs.S_NAME], 'api')
deploy_labels = [label.split('=') for label in
result[data_defs.SERVICES][1][data_defs.S_DEPLOY_LABELS]]
for name, value in deploy_labels:
if name == 'traefik.deploy' and value == 'true':
break
else:
            self.fail("Couldn't find traefik.deploy deploy label")
labels = pipeline_data_utils.get_labels(result[data_defs.SERVICES][1])
for name, value in labels:
if name == 'se.kth.slackChannels' and value == '#team-pipeline,#ita-ops':
break
else:
            self.fail("Couldn't find se.kth.slackChannels label")
| [
"test.mock_test_data.get_parsed_stack_content",
"modules.steps.init_service_pipeline_data.InitServicePipelineData",
"modules.util.pipeline_data_utils.get_labels"
] | [((305, 346), 'test.mock_test_data.get_parsed_stack_content', 'mock_test_data.get_parsed_stack_content', ([], {}), '()\n', (344, 346), False, 'from test import mock_test_data\n'), ((435, 460), 'modules.steps.init_service_pipeline_data.InitServicePipelineData', 'InitServicePipelineData', ([], {}), '()\n', (458, 460), False, 'from modules.steps.init_service_pipeline_data import InitServicePipelineData\n'), ((1201, 1262), 'modules.util.pipeline_data_utils.get_labels', 'pipeline_data_utils.get_labels', (['result[data_defs.SERVICES][1]'], {}), '(result[data_defs.SERVICES][1])\n', (1231, 1262), False, 'from modules.util import data_defs, pipeline_data_utils\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''
kerberos.py - A very light wrapper around gssapi
This package contains a class to read a kerberos principal
May 2015 (original implementation with krbV)
<NAME> (<EMAIL>)
Nov 2019 (implementation with gssapi for Python3)
<NAME> (<EMAIL>)
'''
# python 3 support
from __future__ import absolute_import, print_function, division
import gssapi
class Kerberos:
def __init__(self):
self.credentials = gssapi.Credentials(usage='initiate')
def user_principal(self):
return str(self.credentials.inquire().name)
| [
"gssapi.Credentials"
] | [((1528, 1564), 'gssapi.Credentials', 'gssapi.Credentials', ([], {'usage': '"""initiate"""'}), "(usage='initiate')\n", (1546, 1564), False, 'import gssapi\n')] |
import torch
import torch.nn.functional as F
from torch import nn, cuda
from torch.autograd import Variable
from torch.nn.modules.batchnorm import _BatchNorm
from torch.nn.parameter import Parameter
class Conv(nn.Module):
def __init__(self, in_ch, out_ch, K=3, S=1, P=1, D=1, activation=nn.ReLU(inplace=True)):
super(Conv, self).__init__()
if activation:
self.conv = nn.Sequential(
nn.Conv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D),
nn.InstanceNorm2d(out_ch),
activation
)
else:
self.conv = nn.Sequential(
nn.Conv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D),
nn.InstanceNorm2d(out_ch)
)
def forward(self, x):
x = self.conv(x)
return x
class PartialConv(nn.Module):
def __init__(self, in_ch, out_ch, K=3, S=1, P=1, D=1, activation=nn.LeakyReLU(inplace=True)):
super(PartialConv, self).__init__()
if activation:
self.conv = nn.Sequential(
PartialConv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D),
nn.InstanceNorm2d(out_ch),
activation)
else:
self.conv = nn.Sequential(
PartialConv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D),
nn.InstanceNorm2d(out_ch)
)
def forward(self, x):
x = self.conv(x)
return x
class SN_Conv(nn.Module):
def __init__(self, in_ch, out_ch, K=3, S=1, P=1, D=1, activation=nn.ReLU(inplace=True)):
super(SN_Conv, self).__init__()
self.SpectralNorm = torch.nn.utils.spectral_norm
if activation:
self.conv = nn.Sequential(
self.SpectralNorm(nn.Conv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)),
nn.InstanceNorm2d(out_ch),
activation
)
else:
self.conv = nn.Sequential(
self.SpectralNorm(nn.Conv2d(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)),
nn.InstanceNorm2d(out_ch)
)
def forward(self, x):
x = self.conv(x)
return x
class ResidualBlock(nn.Module):
def __init__(self, in_features, K=3, S=1, P=1, D=1, activation=nn.ReLU(inplace=True)):
super(ResidualBlock, self).__init__()
conv_block = [ Conv(in_features, in_features, K, S, P, D, activation=activation),
Conv(in_features, in_features, activation=False)]
self.conv_block = nn.Sequential(*conv_block)
def forward(self, x):
return x + self.conv_block(x)
class SN_ResidualBlock(nn.Module):
def __init__(self, in_features, K=3, S=1, P=1, D=1, activation=nn.LeakyReLU(inplace=True)):
super(SN_ResidualBlock, self).__init__()
conv_block = [ SN_Conv(in_features, in_features, K, S, P, D, activation=activation),
SN_Conv(in_features, in_features, activation=False)]
self.conv_block = nn.Sequential(*conv_block)
def forward(self, x):
return x + self.conv_block(x)
class Partial_ResidualBlock(nn.Module):
def __init__(self, in_features, K=3, S=1, P=1, D=1, activation=nn.ReLU(inplace=True)):
super(Partial_ResidualBlock, self).__init__()
conv_block = [ PartialConv(in_features, in_features, K, S, P, D, activation=activation),
PartialConv(in_features, in_features, activation=False)]
self.conv_block = nn.Sequential(*conv_block)
def forward(self, x):
return x + self.conv_block(x)
class PartialConv2d(nn.Conv2d):
def __init__(self, *args, **kwargs):
# whether the mask is multi-channel or not
if 'multi_channel' in kwargs:
self.multi_channel = kwargs['multi_channel']
kwargs.pop('multi_channel')
else:
self.multi_channel = False
if 'return_mask' in kwargs:
self.return_mask = kwargs['return_mask']
kwargs.pop('return_mask')
else:
self.return_mask = False
super(PartialConv2d, self).__init__(*args, **kwargs)
if self.multi_channel:
self.weight_maskUpdater = torch.ones(self.out_channels, self.in_channels, self.kernel_size[0],
self.kernel_size[1])
else:
self.weight_maskUpdater = torch.ones(1, 1, self.kernel_size[0], self.kernel_size[1])
self.slide_winsize = self.weight_maskUpdater.shape[1] * self.weight_maskUpdater.shape[2] * \
self.weight_maskUpdater.shape[3]
self.last_size = (None, None, None, None)
self.update_mask = None
self.mask_ratio = None
def forward(self, input, mask_in=None):
assert len(input.shape) == 4
if mask_in is not None or self.last_size != tuple(input.shape):
self.last_size = tuple(input.shape)
with torch.no_grad():
if self.weight_maskUpdater.type() != input.type():
self.weight_maskUpdater = self.weight_maskUpdater.to(input)
if mask_in is None:
# if mask is not provided, create a mask
if self.multi_channel:
mask = torch.ones(input.data.shape[0], input.data.shape[1], input.data.shape[2],
input.data.shape[3]).to(input)
else:
mask = torch.ones(1, 1, input.data.shape[2], input.data.shape[3]).to(input)
else:
mask = mask_in
self.update_mask = F.conv2d(mask, self.weight_maskUpdater, bias=None, stride=self.stride,
padding=self.padding, dilation=self.dilation, groups=1)
self.mask_ratio = self.slide_winsize / (self.update_mask + 1e-8)
# self.mask_ratio = torch.max(self.update_mask)/(self.update_mask + 1e-8)
self.update_mask = torch.clamp(self.update_mask, 0, 1)
self.mask_ratio = torch.mul(self.mask_ratio, self.update_mask)
# if self.update_mask.type() != input.type() or self.mask_ratio.type() != input.type():
# self.update_mask.to(input)
# self.mask_ratio.to(input)
raw_out = super(PartialConv2d, self).forward(torch.mul(input, mask) if mask_in is not None else input)
if self.bias is not None:
bias_view = self.bias.view(1, self.out_channels, 1, 1)
output = torch.mul(raw_out - bias_view, self.mask_ratio) + bias_view
output = torch.mul(output, self.update_mask)
else:
output = torch.mul(raw_out, self.mask_ratio)
if self.return_mask:
return output, self.update_mask
else:
return output
class double_conv(nn.Module):
'''(conv => BN => ReLU) * 2'''
def __init__(self, in_ch, out_ch):
super(double_conv, self).__init__()
self.conv = nn.Sequential(
nn.Conv2d(in_ch, out_ch, 3, padding=1),
nn.BatchNorm2d(out_ch),
nn.ReLU(inplace=True),
nn.Conv2d(out_ch, out_ch, 3, padding=1),
nn.BatchNorm2d(out_ch),
nn.ReLU(inplace=True)
)
def forward(self, x):
x = self.conv(x)
return x
class inconv(nn.Module):
def __init__(self, in_ch, out_ch):
super(inconv, self).__init__()
self.conv = double_conv(in_ch, out_ch)
def forward(self, x):
x = self.conv(x)
return x
class down(nn.Module):
def __init__(self, in_ch, out_ch):
super(down, self).__init__()
self.mpconv = nn.Sequential(
nn.MaxPool2d(2),
double_conv(in_ch, out_ch)
)
def forward(self, x):
x = self.mpconv(x)
return x
class up(nn.Module):
def __init__(self, in_ch, out_ch, bilinear=True):
super(up, self).__init__()
        # it would be a nice idea if the upsampling could be learned too,
        # but my machine does not have enough memory to handle all those weights
if bilinear:
self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)
else:
self.up = nn.ConvTranspose2d(in_ch // 2, in_ch // 2, 2, stride=2)
self.conv = double_conv(in_ch, out_ch)
def forward(self, x1, x2):
x1 = self.up(x1)
# input is CHW
diffY = x2.size()[2] - x1.size()[2]
diffX = x2.size()[3] - x1.size()[3]
x1 = F.pad(x1, (diffX // 2, diffX - diffX // 2,
diffY // 2, diffY - diffY // 2))
x = torch.cat([x2, x1], dim=1)
x = self.conv(x)
return x
class outconv(nn.Module):
def __init__(self, in_ch, out_ch):
super(outconv, self).__init__()
self.conv = nn.Conv2d(in_ch, out_ch, 1)
def forward(self, x):
x = self.conv(x)
return x
class _BatchInstanceNorm(_BatchNorm):
def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True):
super(_BatchInstanceNorm, self).__init__(num_features, eps, momentum, affine)
self.gate = Parameter(torch.Tensor(num_features))
self.gate.data.fill_(1)
setattr(self.gate, 'bin_gate', True)
def forward(self, input):
self._check_input_dim(input)
# Batch norm
if self.affine:
bn_w = self.weight * self.gate
else:
bn_w = self.gate
out_bn = F.batch_norm(
input, self.running_mean, self.running_var, bn_w, self.bias,
self.training, self.momentum, self.eps)
# Instance norm
b, c = input.size(0), input.size(1)
if self.affine:
in_w = self.weight * (1 - self.gate)
else:
in_w = 1 - self.gate
input = input.view(1, b * c, *input.size()[2:])
out_in = F.batch_norm(
input, None, None, None, None,
True, self.momentum, self.eps)
out_in = out_in.view(b, c, *input.size()[2:])
out_in.mul_(in_w[None, :, None, None])
return out_bn + out_in
class BatchInstanceNorm1d(_BatchInstanceNorm):
def _check_input_dim(self, input):
if input.dim() != 2 and input.dim() != 3:
raise ValueError('expected 2D or 3D input (got {}D input)'.format(input.dim()))
class BatchInstanceNorm2d(_BatchInstanceNorm):
def _check_input_dim(self, input):
if input.dim() != 4:
raise ValueError('expected 4D input (got {}D input)'.format(input.dim()))
class BatchInstanceNorm3d(_BatchInstanceNorm):
def _check_input_dim(self, input):
if input.dim() != 5:
raise ValueError('expected 5D input (got {}D input)'.format(input.dim())) | [
"torch.mul",
"torch.nn.ReLU",
"torch.nn.BatchNorm2d",
"torch.nn.functional.conv2d",
"torch.nn.LeakyReLU",
"torch.nn.Sequential",
"torch.Tensor",
"torch.nn.Conv2d",
"torch.nn.InstanceNorm2d",
"torch.nn.MaxPool2d",
"torch.nn.Upsample",
"torch.nn.functional.pad",
"torch.no_grad",
"torch.nn.functional.batch_norm",
"torch.nn.ConvTranspose2d",
"torch.clamp",
"torch.cat",
"torch.ones"
] | [((292, 313), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (299, 313), False, 'from torch import nn, cuda\n'), ((957, 983), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (969, 983), False, 'from torch import nn, cuda\n'), ((1624, 1645), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1631, 1645), False, 'from torch import nn, cuda\n'), ((2388, 2409), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2395, 2409), False, 'from torch import nn, cuda\n'), ((2651, 2677), 'torch.nn.Sequential', 'nn.Sequential', (['*conv_block'], {}), '(*conv_block)\n', (2664, 2677), False, 'from torch import nn, cuda\n'), ((2846, 2872), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2858, 2872), False, 'from torch import nn, cuda\n'), ((3122, 3148), 'torch.nn.Sequential', 'nn.Sequential', (['*conv_block'], {}), '(*conv_block)\n', (3135, 3148), False, 'from torch import nn, cuda\n'), ((3322, 3343), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3329, 3343), False, 'from torch import nn, cuda\n'), ((3606, 3632), 'torch.nn.Sequential', 'nn.Sequential', (['*conv_block'], {}), '(*conv_block)\n', (3619, 3632), False, 'from torch import nn, cuda\n'), ((8696, 8771), 'torch.nn.functional.pad', 'F.pad', (['x1', '(diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2)'], {}), '(x1, (diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2))\n', (8701, 8771), True, 'import torch.nn.functional as F\n'), ((8809, 8835), 'torch.cat', 'torch.cat', (['[x2, x1]'], {'dim': '(1)'}), '([x2, x1], dim=1)\n', (8818, 8835), False, 'import torch\n'), ((9005, 9032), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch', '(1)'], {}), '(in_ch, out_ch, 1)\n', (9014, 9032), False, 'from torch import nn, cuda\n'), ((9655, 9772), 'torch.nn.functional.batch_norm', 'F.batch_norm', (['input', 'self.running_mean', 'self.running_var', 'bn_w', 'self.bias', 'self.training', 'self.momentum', 'self.eps'], {}), '(input, self.running_mean, self.running_var, bn_w, self.bias,\n self.training, self.momentum, self.eps)\n', (9667, 9772), True, 'import torch.nn.functional as F\n'), ((10056, 10130), 'torch.nn.functional.batch_norm', 'F.batch_norm', (['input', 'None', 'None', 'None', 'None', '(True)', 'self.momentum', 'self.eps'], {}), '(input, None, None, None, None, True, self.momentum, self.eps)\n', (10068, 10130), True, 'import torch.nn.functional as F\n'), ((4323, 4417), 'torch.ones', 'torch.ones', (['self.out_channels', 'self.in_channels', 'self.kernel_size[0]', 'self.kernel_size[1]'], {}), '(self.out_channels, self.in_channels, self.kernel_size[0], self.\n kernel_size[1])\n', (4333, 4417), False, 'import torch\n'), ((4514, 4572), 'torch.ones', 'torch.ones', (['(1)', '(1)', 'self.kernel_size[0]', 'self.kernel_size[1]'], {}), '(1, 1, self.kernel_size[0], self.kernel_size[1])\n', (4524, 4572), False, 'import torch\n'), ((6760, 6795), 'torch.mul', 'torch.mul', (['output', 'self.update_mask'], {}), '(output, self.update_mask)\n', (6769, 6795), False, 'import torch\n'), ((6831, 6866), 'torch.mul', 'torch.mul', (['raw_out', 'self.mask_ratio'], {}), '(raw_out, self.mask_ratio)\n', (6840, 6866), False, 'import torch\n'), ((7179, 7217), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch', '(3)'], {'padding': '(1)'}), '(in_ch, out_ch, 3, padding=1)\n', (7188, 7217), False, 'from torch import nn, cuda\n'), ((7231, 7253), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_ch'], {}), '(out_ch)\n', (7245, 7253), False, 'from torch import nn, cuda\n'), ((7267, 7288), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (7274, 7288), False, 'from torch import nn, cuda\n'), ((7302, 7341), 'torch.nn.Conv2d', 'nn.Conv2d', (['out_ch', 'out_ch', '(3)'], {'padding': '(1)'}), '(out_ch, out_ch, 3, padding=1)\n', (7311, 7341), False, 'from torch import nn, cuda\n'), ((7355, 7377), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_ch'], {}), '(out_ch)\n', (7369, 7377), False, 'from torch import nn, cuda\n'), ((7391, 7412), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (7398, 7412), False, 'from torch import nn, cuda\n'), ((7863, 7878), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (7875, 7878), False, 'from torch import nn, cuda\n'), ((8308, 8372), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(scale_factor=2, mode='bilinear', align_corners=True)\n", (8319, 8372), False, 'from torch import nn, cuda\n'), ((8409, 8464), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(in_ch // 2)', '(in_ch // 2)', '(2)'], {'stride': '(2)'}), '(in_ch // 2, in_ch // 2, 2, stride=2)\n', (8427, 8464), False, 'from torch import nn, cuda\n'), ((9333, 9359), 'torch.Tensor', 'torch.Tensor', (['num_features'], {}), '(num_features)\n', (9345, 9359), False, 'import torch\n'), ((431, 503), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch'], {'kernel_size': 'K', 'stride': 'S', 'padding': 'P', 'dilation': 'D'}), '(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)\n', (440, 503), False, 'from torch import nn, cuda\n'), ((521, 546), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (538, 546), False, 'from torch import nn, cuda\n'), ((658, 730), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch'], {'kernel_size': 'K', 'stride': 'S', 'padding': 'P', 'dilation': 'D'}), '(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)\n', (667, 730), False, 'from torch import nn, cuda\n'), ((748, 773), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (765, 773), False, 'from torch import nn, cuda\n'), ((1202, 1227), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (1219, 1227), False, 'from torch import nn, cuda\n'), ((1420, 1445), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (1437, 1445), False, 'from torch import nn, cuda\n'), ((1932, 1957), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (1949, 1957), False, 'from torch import nn, cuda\n'), ((2178, 2203), 'torch.nn.InstanceNorm2d', 'nn.InstanceNorm2d', (['out_ch'], {}), '(out_ch)\n', (2195, 2203), False, 'from torch import nn, cuda\n'), ((5071, 5086), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5084, 5086), False, 'import torch\n'), ((5773, 5903), 'torch.nn.functional.conv2d', 'F.conv2d', (['mask', 'self.weight_maskUpdater'], {'bias': 'None', 'stride': 'self.stride', 'padding': 'self.padding', 'dilation': 'self.dilation', 'groups': '(1)'}), '(mask, self.weight_maskUpdater, bias=None, stride=self.stride,\n padding=self.padding, dilation=self.dilation, groups=1)\n', (5781, 5903), True, 'import torch.nn.functional as F\n'), ((6151, 6186), 'torch.clamp', 'torch.clamp', (['self.update_mask', '(0)', '(1)'], {}), '(self.update_mask, 0, 1)\n', (6162, 6186), False, 'import torch\n'), ((6221, 6265), 'torch.mul', 'torch.mul', (['self.mask_ratio', 'self.update_mask'], {}), '(self.mask_ratio, self.update_mask)\n', (6230, 6265), False, 'import torch\n'), ((6498, 6520), 'torch.mul', 'torch.mul', (['input', 'mask'], {}), '(input, mask)\n', (6507, 6520), False, 'import torch\n'), ((6679, 6726), 'torch.mul', 'torch.mul', (['(raw_out - bias_view)', 'self.mask_ratio'], {}), '(raw_out - bias_view, self.mask_ratio)\n', (6688, 6726), False, 'import torch\n'), ((1841, 1913), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch'], {'kernel_size': 'K', 'stride': 'S', 'padding': 'P', 'dilation': 'D'}), '(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)\n', (1850, 1913), False, 'from torch import nn, cuda\n'), ((2087, 2159), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_ch', 'out_ch'], {'kernel_size': 'K', 'stride': 'S', 'padding': 'P', 'dilation': 'D'}), '(in_ch, out_ch, kernel_size=K, stride=S, padding=P, dilation=D)\n', (2096, 2159), False, 'from torch import nn, cuda\n'), ((5407, 5505), 'torch.ones', 'torch.ones', (['input.data.shape[0]', 'input.data.shape[1]', 'input.data.shape[2]', 'input.data.shape[3]'], {}), '(input.data.shape[0], input.data.shape[1], input.data.shape[2],\n input.data.shape[3])\n', (5417, 5505), False, 'import torch\n'), ((5611, 5669), 'torch.ones', 'torch.ones', (['(1)', '(1)', 'input.data.shape[2]', 'input.data.shape[3]'], {}), '(1, 1, input.data.shape[2], input.data.shape[3])\n', (5621, 5669), False, 'import torch\n')]
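A minimal sketch exercising PartialConv2d from the module above; the shapes and the hole region are illustrative:

import torch
conv = PartialConv2d(3, 8, kernel_size=3, padding=1, return_mask=True)
x = torch.randn(1, 3, 64, 64)
mask = torch.ones(1, 1, 64, 64)      # single-channel mask (multi_channel=False)
mask[:, :, 16:32, 16:32] = 0        # simulate a hole in the input
out, updated_mask = conv(x, mask)   # masked output plus the propagated mask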
from django.utils import crypto
def generate_token(length=50):
return crypto.get_random_string(length=length)
| [
"django.utils.crypto.get_random_string"
] | [((76, 115), 'django.utils.crypto.get_random_string', 'crypto.get_random_string', ([], {'length': 'length'}), '(length=length)\n', (100, 115), False, 'from django.utils import crypto\n')] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def rats(path):
"""Rat treatment data from Mantel et al
Rat treatment data from Mantel et al. Three rats were chosen from each
of 100 litters, one of which was treated with a drug, and then all
followed for tumor incidence.
+-----------+----------------------------------------+
| litter: | litter number from 1 to 100 |
+-----------+----------------------------------------+
| rx: | treatment,(1=drug, 0=control) |
+-----------+----------------------------------------+
| time: | time to tumor or last follow-up |
+-----------+----------------------------------------+
| status: | event status, 1=tumor and 0=censored |
+-----------+----------------------------------------+
| sex: | male or female |
+-----------+----------------------------------------+
<NAME>, <NAME> and <NAME>. Mantel-Haenszel analyses of
litter-matched time to response data, with modifications for recovery of
interlitter information. Cancer Research, 37:3863-3868, 1977.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `rats.csv`.
Returns:
Tuple of np.ndarray `x_train` with 300 rows and 5 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'rats.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/survival/rats.csv'
maybe_download_and_extract(path, url,
save_file_name='rats.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
| [
"observations.util.maybe_download_and_extract",
"os.path.join",
"os.path.expanduser"
] | [((1648, 1672), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (1666, 1672), False, 'import os\n'), ((1815, 1893), 'observations.util.maybe_download_and_extract', 'maybe_download_and_extract', (['path', 'url'], {'save_file_name': '"""rats.csv"""', 'resume': '(False)'}), "(path, url, save_file_name='rats.csv', resume=False)\n", (1841, 1893), False, 'from observations.util import maybe_download_and_extract\n'), ((1978, 2006), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1990, 2006), False, 'import os\n'), ((1721, 1749), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1733, 1749), False, 'import os\n')] |
from gi.repository import Gtk
from gaphor import UML
from gaphor.core import gettext, transactional
from gaphor.diagram.interactions import MessageItem
from gaphor.diagram.propertypages import (
EditableTreeModel,
NamedItemPropertyPage,
PropertyPages,
create_hbox_label,
create_tree_view,
create_uml_combo,
)
@PropertyPages.register(MessageItem)
class MessagePropertyPage(NamedItemPropertyPage):
"""Property page for editing message items.
When message is on communication diagram, then additional messages can
be added. On sequence diagram sort of message can be changed.
"""
NAME_LABEL = gettext("Message")
MESSAGE_SORT = [
("Call", "synchCall"),
("Asynchronous", "asynchCall"),
("Signal", "asynchSignal"),
("Create", "createMessage"),
("Delete", "deleteMessage"),
("Reply", "reply"),
]
def construct(self):
page = super().construct()
item = self.item
subject = item.subject
if not subject:
return page
if not item.is_communication():
hbox = create_hbox_label(self, page, gettext("Message sort"))
sort_data = self.MESSAGE_SORT
lifeline = None
cinfo = item.canvas.get_connection(item.tail)
if cinfo:
lifeline = cinfo.connected
# disallow connecting two delete messages to a lifeline
if (
lifeline
and lifeline.is_destroyed
and subject.messageSort != "deleteMessage"
):
sort_data = list(sort_data)
assert sort_data[4][1] == "deleteMessage"
del sort_data[4]
combo = self.combo = create_uml_combo(
sort_data, self._on_message_sort_change
)
hbox.pack_start(combo, False, True, 0)
index = combo.get_model().get_index(subject.messageSort)
combo.set_active(index)
return page
@transactional
def _on_message_sort_change(self, combo):
"""Update message item's message sort information."""
combo = self.combo
ms = combo.get_model().get_value(combo.get_active())
item = self.item
subject = item.subject
lifeline = None
cinfo = item.canvas.get_connection(item.tail)
if cinfo:
lifeline = cinfo.connected
#
# allow only one delete message to connect to lifeline's lifetime
# destroyed status can be changed only by delete message itself
#
if lifeline:
if subject.messageSort == "deleteMessage" or not lifeline.is_destroyed:
is_destroyed = ms == "deleteMessage"
lifeline.is_destroyed = is_destroyed
                # TODO: is this required here?
lifeline.request_update()
subject.messageSort = ms
        # TODO: is this required here?
item.request_update()
| [
"gaphor.diagram.propertypages.PropertyPages.register",
"gaphor.core.gettext",
"gaphor.diagram.propertypages.create_uml_combo"
] | [((337, 372), 'gaphor.diagram.propertypages.PropertyPages.register', 'PropertyPages.register', (['MessageItem'], {}), '(MessageItem)\n', (359, 372), False, 'from gaphor.diagram.propertypages import EditableTreeModel, NamedItemPropertyPage, PropertyPages, create_hbox_label, create_tree_view, create_uml_combo\n'), ((639, 657), 'gaphor.core.gettext', 'gettext', (['"""Message"""'], {}), "('Message')\n", (646, 657), False, 'from gaphor.core import gettext, transactional\n'), ((1767, 1824), 'gaphor.diagram.propertypages.create_uml_combo', 'create_uml_combo', (['sort_data', 'self._on_message_sort_change'], {}), '(sort_data, self._on_message_sort_change)\n', (1783, 1824), False, 'from gaphor.diagram.propertypages import EditableTreeModel, NamedItemPropertyPage, PropertyPages, create_hbox_label, create_tree_view, create_uml_combo\n'), ((1152, 1175), 'gaphor.core.gettext', 'gettext', (['"""Message sort"""'], {}), "('Message sort')\n", (1159, 1175), False, 'from gaphor.core import gettext, transactional\n')] |
import Markers
import Modeler
import sys
class Integers(Markers.Markers):
name = "integers"
def __init__(self, spec):
super().__init__(spec)
def reset(self):
super().reset()
for g in self.g:
g.setNoise(0.25)
Modeler.modelers.append(Integers) | [
"Modeler.modelers.append"
] | [((261, 294), 'Modeler.modelers.append', 'Modeler.modelers.append', (['Integers'], {}), '(Integers)\n', (284, 294), False, 'import Modeler\n')] |
# -*- coding: utf-8 -*-
import unittest
from time import strptime
from datetime import datetime, timedelta
from pytz import timezone, UTC
from StringIO import StringIO
import os
import operator
import shutil
from trac.core import *
from trac.test import EnvironmentStub, Mock
from trac.web.api import Request
from irclogs.search import *
from irclogs.api import IRCChannelManager, IRCChannel
class RequestStub(object):
def __init__(self):
self.session = {'tz': 'UTC'}
class SearchTestCase(unittest.TestCase):
def setUp(self):
self.indexdir = os.tempnam()
self.env = EnvironmentStub()
self.config = self.env.config
self.config.set('irclogs', 'search_db_path', self.indexdir)
self.config.set('irclogs', 'last_index', None)
self.chmgr = IRCChannelManager(self.env)
def events(start, end):
self.assertTrue(start < end)
self.dt = start
dt = self.dt
delta = timedelta(seconds=1)
for i in range(0, 20):
yield {
'timestamp': dt,
'network': u'freenode',
'channel': u'#trac',
'nick': u'doki_pen',
'type': u'comment',
'comment': u'hello %d'%i
}
dt += delta
def fake_channels():
obj = IRCChannel(self.env)
obj.events_in_range = events
yield obj
def fake_channel(name):
obj = IRCChannel(self.env)
obj.events_in_range = events
return obj
self.chmgr.channels = fake_channels
self.chmgr.channel = fake_channel
self.out = WhooshIrcLogsIndex(self.env)
def tearDown(self):
shutil.rmtree(self.indexdir)
def _make_environ(self, scheme='http', server_name='example.org',
server_port=80, method='GET', script_name='/trac',
**kwargs):
environ = {'wsgi.url_scheme': scheme, 'wsgi.input': StringIO(''),
'REQUEST_METHOD': method, 'SERVER_NAME': server_name,
'SERVER_PORT': server_port, 'SCRIPT_NAME': script_name}
environ.update(kwargs)
return environ
def test_index_and_search(self):
self.out.update_index()
req = Request(self._make_environ(), None)
req.session = {'tz': 'UTC'}
req.perm = Mock(has_permission= lambda x: True)
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(20, len(results))
self.assertEqual(self.dt.hour, results[0][2].hour)
req.session = {'tz': 'America/New_York'}
req.perm = Mock(has_permission= lambda x: True)
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(20, len(results))
est = timezone('America/New_York')
expect_dt = est.normalize(self.dt.astimezone(est))
sorted_results = sorted(results, key=operator.itemgetter(2))
self.assertEqual(expect_dt, sorted_results[0][2])
def test_timezones(self):
self.out.config.set('irclogs', 'timezone', 'America/New_York')
self.out.update_index()
req = Request(self._make_environ(), None)
req.session = {'tz': 'America/New_York'}
req.perm = Mock(has_permission= lambda x: True)
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(20, len(results))
self.assertEqual((self.dt.hour-5+24)%24, results[0][2].hour)
def test_update(self):
self.out.update_index()
req = Request(self._make_environ(), None)
req.session = {'tz': 'UTC'}
req.perm = Mock(has_permission= lambda x: True)
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(20, len(results))
self.out.update_index()
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(40, len(results))
self.out.update_index()
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(60, len(results))
self.out.update_index()
results = [i for i in self.out.get_search_results(req, ('hello',), ('irclogs',))]
self.assertEqual(80, len(results))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(SearchTestCase, 'test'))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| [
"StringIO.StringIO",
"unittest.TestSuite",
"pytz.timezone",
"irclogs.api.IRCChannelManager",
"trac.test.EnvironmentStub",
"unittest.makeSuite",
"trac.test.Mock",
"irclogs.api.IRCChannel",
"shutil.rmtree",
"unittest.main",
"operator.itemgetter",
"datetime.timedelta",
"os.tempnam"
] | [((4485, 4505), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (4503, 4505), False, 'import unittest\n'), ((4617, 4651), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""suite"""'}), "(defaultTest='suite')\n", (4630, 4651), False, 'import unittest\n'), ((570, 582), 'os.tempnam', 'os.tempnam', ([], {}), '()\n', (580, 582), False, 'import os\n'), ((602, 619), 'trac.test.EnvironmentStub', 'EnvironmentStub', ([], {}), '()\n', (617, 619), False, 'from trac.test import EnvironmentStub, Mock\n'), ((802, 829), 'irclogs.api.IRCChannelManager', 'IRCChannelManager', (['self.env'], {}), '(self.env)\n', (819, 829), False, 'from irclogs.api import IRCChannelManager, IRCChannel\n'), ((1785, 1813), 'shutil.rmtree', 'shutil.rmtree', (['self.indexdir'], {}), '(self.indexdir)\n', (1798, 1813), False, 'import shutil\n'), ((2442, 2477), 'trac.test.Mock', 'Mock', ([], {'has_permission': '(lambda x: True)'}), '(has_permission=lambda x: True)\n', (2446, 2477), False, 'from trac.test import EnvironmentStub, Mock\n'), ((2739, 2774), 'trac.test.Mock', 'Mock', ([], {'has_permission': '(lambda x: True)'}), '(has_permission=lambda x: True)\n', (2743, 2774), False, 'from trac.test import EnvironmentStub, Mock\n'), ((2923, 2951), 'pytz.timezone', 'timezone', (['"""America/New_York"""'], {}), "('America/New_York')\n", (2931, 2951), False, 'from pytz import timezone, UTC\n'), ((3390, 3425), 'trac.test.Mock', 'Mock', ([], {'has_permission': '(lambda x: True)'}), '(has_permission=lambda x: True)\n', (3394, 3425), False, 'from trac.test import EnvironmentStub, Mock\n'), ((3794, 3829), 'trac.test.Mock', 'Mock', ([], {'has_permission': '(lambda x: True)'}), '(has_permission=lambda x: True)\n', (3798, 3829), False, 'from trac.test import EnvironmentStub, Mock\n'), ((4524, 4566), 'unittest.makeSuite', 'unittest.makeSuite', (['SearchTestCase', '"""test"""'], {}), "(SearchTestCase, 'test')\n", (4542, 4566), False, 'import unittest\n'), ((976, 996), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (985, 996), False, 'from datetime import datetime, timedelta\n'), ((1397, 1417), 'irclogs.api.IRCChannel', 'IRCChannel', (['self.env'], {}), '(self.env)\n', (1407, 1417), False, 'from irclogs.api import IRCChannelManager, IRCChannel\n'), ((1532, 1552), 'irclogs.api.IRCChannel', 'IRCChannel', (['self.env'], {}), '(self.env)\n', (1542, 1552), False, 'from irclogs.api import IRCChannelManager, IRCChannel\n'), ((2051, 2063), 'StringIO.StringIO', 'StringIO', (['""""""'], {}), "('')\n", (2059, 2063), False, 'from StringIO import StringIO\n'), ((3056, 3078), 'operator.itemgetter', 'operator.itemgetter', (['(2)'], {}), '(2)\n', (3075, 3078), False, 'import operator\n')] |
""""PC est magique - Custom request context"""
import functools
import flask
from flask import g
from flask_babel import _
import flask_login
from app.models import PCeen
from app.tools import utils, typing
def create_request_context() -> typing.RouteReturn | None:
"""Make checks about current request and define custom ``g`` properties.
Intended to be registered by :func:`before_request`.
Defines:
* :attr:`flask.g.logged_in` (default ``False``):
Shorthand for :attr:`flask_login.current_user.is_authenticated`.
* :attr:`flask.g.logged_in_user` (default ``None``):
Shorthand for :attr:`flask_login.current_user`. Warning: ignores
doas mechanism; please use :attr:`flask.g.pceen` instead.
* :attr:`flask.g.doas` (default ``False``):
If ``True``, the logged in user is a GRI that is doing an action
as another pceen.
* :attr:`flask.g.pceen` (default ``None``):
The pceen the request is made as: ``None`` if not
:attr:`flask.g.logged_in`, the controlled pceen if
:attr:`~flask.g.doas`, or :attr:`flask.g.logged_in_user`.
* :attr:`flask.g.is_gri` (default ``False``):
``True`` if the user is logged in and is a GRI.
"""
# Defaults
g.logged_in = False
g.logged_in_user = None
g.pceen = None
g.is_gri = False
g.doas = False
# Get user
current_user = typing.cast(
flask_login.AnonymousUserMixin | PCeen,
flask_login.current_user
)
g.logged_in = current_user.is_authenticated
if g.logged_in:
g.logged_in_user = typing.cast(PCeen, current_user)
g.pceen = g.logged_in_user # May be overridden later if doas
g.is_gri = g.pceen.is_gri
# Check doas
doas_id = flask.request.args.get("doas", "")
doas = PCeen.query.get(doas_id) if doas_id.isdigit() else None
if doas:
if g.is_gri:
g.pceen = doas
g.is_gri = g.pceen.is_gri
g.doas = True
else:
# Not authorized to do things as other pceens!
new_args = flask.request.args.copy()
del new_args["doas"]
return flask.redirect(flask.url_for(
flask.request.endpoint or "main.index", **new_args
))
# Check maintenance
if flask.current_app.config["MAINTENANCE"]:
if g.is_gri:
flask.flash(_("Le site est en mode maintenance : seuls les GRI "
"peuvent y accéder."), "warning")
else:
flask.abort(503) # 503 Service Unavailable
# All set!
return None
# Type variables for decorators below
_RP = typing.ParamSpec("_RP")
_Route = typing.Callable[_RP, typing.RouteReturn]
def logged_in_only(route: _Route) -> _Route:
"""Route function decorator to restrict route to logged in users.
Redirects user to "auth.auth_needed" if :attr:`flask.g.logged_in`
is ``False``.
Args:
route: The route function to restrict access to.
Returns:
The protected route.
"""
@functools.wraps(route)
def new_route(*args: _RP.args, **kwargs: _RP.kwargs) -> typing.RouteReturn:
if g.logged_in:
return route(*args, **kwargs)
else:
flask.flash(_("Veuillez vous authentifier pour accéder "
"à cette page."), "warning")
return utils.ensure_safe_redirect("auth.auth_needed")
return new_route
def gris_only(route: _Route) -> _Route:
"""Route function decorator to restrict route to logged in GRIs.
Aborts with a 403 if :attr:`flask.g.is_gri` is ``False``.
Args:
route: The route function to restrict access to.
Returns:
The protected route.
"""
@functools.wraps(route)
def new_route(*args: _RP.args, **kwargs: _RP.kwargs) -> typing.RouteReturn:
if g.is_gri:
return route(*args, **kwargs)
elif g.logged_in:
            flask.abort(403)  # 403 Forbidden
raise # never reached, just to tell the type checker
else:
flask.flash(_("Veuillez vous authentifier pour accéder "
"à cette page."), "warning")
return utils.ensure_safe_redirect("auth.login")
return new_route
| [
"flask_babel._",
"flask.request.args.get",
"app.tools.typing.ParamSpec",
"app.tools.typing.cast",
"app.models.PCeen.query.get",
"functools.wraps",
"flask.request.args.copy",
"flask.url_for",
"app.tools.utils.ensure_safe_redirect",
"flask.abort"
] | [((2704, 2727), 'app.tools.typing.ParamSpec', 'typing.ParamSpec', (['"""_RP"""'], {}), "('_RP')\n", (2720, 2727), False, 'from app.tools import utils, typing\n'), ((1441, 1518), 'app.tools.typing.cast', 'typing.cast', (['(flask_login.AnonymousUserMixin | PCeen)', 'flask_login.current_user'], {}), '(flask_login.AnonymousUserMixin | PCeen, flask_login.current_user)\n', (1452, 1518), False, 'from app.tools import utils, typing\n'), ((1810, 1844), 'flask.request.args.get', 'flask.request.args.get', (['"""doas"""', '""""""'], {}), "('doas', '')\n", (1832, 1844), False, 'import flask\n'), ((3108, 3130), 'functools.wraps', 'functools.wraps', (['route'], {}), '(route)\n', (3123, 3130), False, 'import functools\n'), ((3801, 3823), 'functools.wraps', 'functools.wraps', (['route'], {}), '(route)\n', (3816, 3823), False, 'import functools\n'), ((1636, 1668), 'app.tools.typing.cast', 'typing.cast', (['PCeen', 'current_user'], {}), '(PCeen, current_user)\n', (1647, 1668), False, 'from app.tools import utils, typing\n'), ((1856, 1880), 'app.models.PCeen.query.get', 'PCeen.query.get', (['doas_id'], {}), '(doas_id)\n', (1871, 1880), False, 'from app.models import PCeen\n'), ((2133, 2158), 'flask.request.args.copy', 'flask.request.args.copy', ([], {}), '()\n', (2156, 2158), False, 'import flask\n'), ((2580, 2596), 'flask.abort', 'flask.abort', (['(503)'], {}), '(503)\n', (2591, 2596), False, 'import flask\n'), ((3434, 3480), 'app.tools.utils.ensure_safe_redirect', 'utils.ensure_safe_redirect', (['"""auth.auth_needed"""'], {}), "('auth.auth_needed')\n", (3460, 3480), False, 'from app.tools import utils, typing\n'), ((2226, 2291), 'flask.url_for', 'flask.url_for', (["(flask.request.endpoint or 'main.index')"], {}), "(flask.request.endpoint or 'main.index', **new_args)\n", (2239, 2291), False, 'import flask\n'), ((2441, 2512), 'flask_babel._', '_', (['"""Le site est en mode maintenance : seuls les GRI peuvent y accéder."""'], {}), "('Le site est en mode maintenance : seuls les GRI peuvent y accéder.')\n", (2442, 2512), False, 'from flask_babel import _\n'), ((3315, 3373), 'flask_babel._', '_', (['"""Veuillez vous authentifier pour accéder à cette page."""'], {}), "('Veuillez vous authentifier pour accéder à cette page.')\n", (3316, 3373), False, 'from flask_babel import _\n'), ((4005, 4021), 'flask.abort', 'flask.abort', (['(403)'], {}), '(403)\n', (4016, 4021), False, 'import flask\n'), ((4270, 4310), 'app.tools.utils.ensure_safe_redirect', 'utils.ensure_safe_redirect', (['"""auth.login"""'], {}), "('auth.login')\n", (4296, 4310), False, 'from app.tools import utils, typing\n'), ((4151, 4209), 'flask_babel._', '_', (['"""Veuillez vous authentifier pour accéder à cette page."""'], {}), "('Veuillez vous authentifier pour accéder à cette page.')\n", (4152, 4209), False, 'from flask_babel import _\n')] |
import datetime
import io
import logging
import pathlib
import sys
from typing import Dict, Optional, Union
__all__ = [
"DatabaseHandler",
"make_log_table_definition",
"setup_database_logger",
"setup_file_logger",
"setup_stream_logger",
]
LOG_TABLE_DEFINITION = """CREATE TABLE {table_name} (
log_id INT NOT NULL IDENTITY
, date DATETIME NOT NULL
, logger VARCHAR(100) NULL
, module VARCHAR(100) NOT NULL
, func_name VARCHAR(100) NOT NULL
, line INT NULL
, level INT NOT NULL
, level_name VARCHAR(100) NOT NULL
, message VARCHAR(400) NULL
, traceback VARCHAR(4000) NULL
, CONSTRAINT {primary_key} PRIMARY KEY (log_id)
);"""
LOG_TABLE_MAP = {
"date": "created",
"logger": "name",
"module": "module",
"func_name": "funcName",
"line": "lineno",
"level": "levelno",
"level_name": "levelname",
"message": "message",
"traceback": "exc_info",
}
class DatabaseHandler(logging.Handler):
"""A logging library handler subclass that writes logging records to a
database table."""
record = logging.makeLogRecord({})
logging.Formatter().format(record)
default_mapping: Dict[str, str] = {k.lower(): k for k in record.__dict__}
del record
insert_query = "INSERT INTO {table} ({cols}) VALUES ({values});"
def __init__(
self, connection, table: str, mapping: Optional[Dict[str, str]] = None
):
"""Initialize handler.
:param connection: A DB API 2.0 compliant Connection object
:param table: Table name
:param mapping: Table column names and LogRecord object attributes
mapping (default: LOG_TABLE_MAP)
"""
super().__init__()
self.connection = connection
self.table = table
self.mapping = mapping or LOG_TABLE_MAP
diff = set(self.mapping.values()) - set(self.default_mapping.values())
if diff:
raise AttributeError(
"'%s' object has no attribute%s %s"
% (
logging.LogRecord.__name__,
"s" if len(diff) > 1 else "",
", ".join("'%s'" % x for x in sorted(diff)),
)
)
self.cursor = self.connection.cursor()
def emit(self, record: logging.LogRecord) -> None:
try:
self.format(record)
query = self.insert_query.format(
table=self.table,
cols=", ".join(self.mapping),
values=", ".join("?" for _ in range(len(self.mapping))),
)
params = []
param_type = Union[str, int, float, None, datetime.datetime]
for attr in self.mapping.values():
value = getattr(record, attr)
if attr == "created":
param: param_type = datetime.datetime.fromtimestamp(value)
elif attr in ("exc_info", "exc_text"):
if record.exc_info and any(record.exc_info):
param = "|".join(
logging.Formatter()
.formatException(record.exc_info)
.splitlines()
)
else:
param = None
elif isinstance(value, str) and value.strip() == "":
param = None
elif isinstance(
value, (str, int, float, type(None), datetime.datetime)
):
param = value
else:
param = str(value)
params.append(param)
self.cursor.execute(query, params)
self.cursor.commit()
except Exception:
import traceback
traceback.print_exc(file=sys.stderr)
def close(self) -> None:
self.cursor.close()
self.connection.close()
super().close()
def make_log_table_definition(table_name: str, primary_key: str) -> str:
"""Return default log table definition query.
:param table_name: Table name
:param primary_key: Primary key name
"""
return LOG_TABLE_DEFINITION.format(
table_name=table_name, primary_key=primary_key
)
def setup_database_logger(
connection,
table: str,
name: Optional[str] = None,
attrs: Optional[Dict[str, str]] = None,
level_logger: int = logging.DEBUG,
level_handler: int = logging.DEBUG,
) -> None:
"""Attach logger handler that writes to a database table with level
`level_handler` and set or update logging level `level_logger` for logger
`name`.
:param connection: A DB API 2.0 compliant Connection object
:param table: Table name
:param name: Logger name (default: root)
:param attrs: Table column names and log record attributes mapping
(default: LOG_TABLE_MAP)
:param level_logger: Logging level of the logger
:param level_handler: Logging level of the database handler
"""
log = logging.getLogger(name)
if level_logger is not None:
log.setLevel(level_logger)
handler = DatabaseHandler(
connection=connection,
table=table,
mapping=attrs,
)
handler.setLevel(level_handler)
log.addHandler(handler)
def setup_file_logger(
filename: Union[str, pathlib.Path],
name: Optional[str] = None,
mode: str = "a",
encoding: Optional[str] = None,
level_logger: int = logging.DEBUG,
level_handler: int = logging.DEBUG,
fmt: str = "%(asctime)s %(levelname)-8s %(message)s",
) -> None:
"""Attach logger handler that writes to a file with level `level_handler`
and set or update logging level `level_logger` for logger `name`.
:param filename: File name
:param name: Logger name (default: root)
:param mode: File mode
:param encoding: File encoding
:param level_logger: Logging level of the logger
:param level_handler: Logging level of the file handler
:param fmt: Format string for the file handler
"""
log = logging.getLogger(name)
if level_logger is not None:
log.setLevel(level_logger)
for h in log.handlers:
if isinstance(h, logging.FileHandler) and h.baseFilename == filename:
return
handler = logging.FileHandler(
filename=filename, mode=mode, encoding=encoding
)
handler.setLevel(level_handler)
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
log.addHandler(handler)
def setup_stream_logger(
name: Optional[str] = None,
stream: Optional[io.TextIOWrapper] = None,
level_logger: int = logging.DEBUG,
level_handler: int = logging.DEBUG,
fmt: str = "%(asctime)s %(levelname)-8s %(message)s",
) -> None:
"""Attach logger handler that writes to a stream with level `level_handler`
and set or update logging level `level_logger` for logger `name`.
:param name: Logger name (default: root)
:param stream: Stream object (default: sys.stderr)
:param level_logger: Logging level of the logger
:param level_handler: Logging level of the stream handler
:param fmt: Format string for the stream handler
"""
log = logging.getLogger(name)
if level_logger is not None:
log.setLevel(level_logger)
for h in log.handlers:
if isinstance(h, logging.StreamHandler):
return
handler = logging.StreamHandler(stream=stream)
handler.setLevel(level_handler)
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
log.addHandler(handler)
| [
"logging.getLogger",
"logging.StreamHandler",
"datetime.datetime.fromtimestamp",
"logging.Formatter",
"logging.FileHandler",
"logging.makeLogRecord",
"traceback.print_exc"
] | [((1198, 1223), 'logging.makeLogRecord', 'logging.makeLogRecord', (['{}'], {}), '({})\n', (1219, 1223), False, 'import logging\n'), ((5169, 5192), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (5186, 5192), False, 'import logging\n'), ((6211, 6234), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (6228, 6234), False, 'import logging\n'), ((6444, 6512), 'logging.FileHandler', 'logging.FileHandler', ([], {'filename': 'filename', 'mode': 'mode', 'encoding': 'encoding'}), '(filename=filename, mode=mode, encoding=encoding)\n', (6463, 6512), False, 'import logging\n'), ((6580, 6602), 'logging.Formatter', 'logging.Formatter', (['fmt'], {}), '(fmt)\n', (6597, 6602), False, 'import logging\n'), ((7359, 7382), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (7376, 7382), False, 'import logging\n'), ((7563, 7599), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'stream'}), '(stream=stream)\n', (7584, 7599), False, 'import logging\n'), ((7653, 7675), 'logging.Formatter', 'logging.Formatter', (['fmt'], {}), '(fmt)\n', (7670, 7675), False, 'import logging\n'), ((1228, 1247), 'logging.Formatter', 'logging.Formatter', ([], {}), '()\n', (1245, 1247), False, 'import logging\n'), ((3931, 3967), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stderr'}), '(file=sys.stderr)\n', (3950, 3967), False, 'import traceback\n'), ((2978, 3016), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['value'], {}), '(value)\n', (3009, 3016), False, 'import datetime\n'), ((3207, 3226), 'logging.Formatter', 'logging.Formatter', ([], {}), '()\n', (3224, 3226), False, 'import logging\n')] |
from string import Template
gdStr1 = """ .data
.align 2
.globl class_nameTab
.globl Main_protObj
.globl Int_protObj
.globl String_protObj
.globl bool_const0
.globl bool_const1
.globl _int_tag
.globl _bool_tag
.globl _string_tag
"""
gdTpl1 = Template("""
_int_tag:
.word $intTag
_bool_tag:
.word $boolTag
_string_tag:
.word $stringTag
""")
gdStr2 = """
.globl _MemMgr_INITIALIZER
_MemMgr_INITIALIZER:
.word _NoGC_Init
.globl _MemMgr_COLLECTOR
_MemMgr_COLLECTOR:
.word _NoGC_Collect
.globl _MemMgr_TEST
_MemMgr_TEST:
.word 0
"""
cTplInt = Template("""
.word -1
int_const$idx:
.word $tag
.word 4
.word Int_dispTab
.word $value
""")
cTplStr = Template("""
.word -1
str_const$idx:
.word $tag
.word $size
.word String_dispTab
.word int_const$sizeIdx
.ascii "$value"
.byte 0
.align 2
""")
boolStr = Template("""
.word -1
bool_const0:
.word $tag
.word 4
.word Bool_dispTab
.word 0
.word -1
bool_const1:
.word $tag
.word 4
.word Bool_dispTab
.word 1
""")
heapStr = """
.globl heap_start
heap_start:
.word 0
"""
textStr = """
.text
.globl Main_init
.globl Int_init
.globl String_init
.globl Bool_init
.globl Main.main
"""
############################## codegen 2
# Layout of the activation:
#
# prolog:
#
# 1. arguments in same order, on the stack
# 2. saved fp
# 3. saved s0 (self)
# 4. saved ra
# 5. locals
# 6. 4 empty bytes, new fp points here
# 7. top of the stack
#
# To address locals: n($fp) where n is an offset that lands on locals
# To address parameters: m($fp) where m is an offset that lands over the saved fp
#
# totalStack = (localsCount + 3)*4
# 3? $fp, $s0, $ra
# *4? count in words
# TODO: Define object inits?
# define: klass, method, ts=(3+locals)*4, fp=ts, s0=fp-4, ra=fp-8, locals
methodTpl_in = Template("""
${klass}.${method}:
addiu $$sp $$sp -$ts #inm: frame has $locals locals
sw $$fp ${fp}($$sp) #inm: save $$fp
sw $$s0 ${s0}($$sp) #inm: save $$s0 (self)
sw $$ra ${ra}($$sp) #inm: save $$ra
addiu $$fp $$sp 4 #inm: $$fp points to locals
move $$s0 $$a0 #inm: self to $$s0
""")
#define: ts=(3+locals)*4, fp=ts, s0=fp-4, ra=fp-8, formals, locals, everything=formals+locals
methodTpl_out = Template("""
lw $$fp ${ts}($$sp) #outm: restore $$fp
lw $$s0 ${s0}($$sp) #outm: restore $$s0 (self)
lw $$ra ${ra}($$sp) #outm: restore $$ra
#outm: Clean everything! restore sp, $formals from formals, $ts from local frame
addiu $$sp $$sp $everything
jr $$ra #outm: jump and make happy the callee
""")
# define: literal, value
litTpl = Template("""
la $$a0 $literal #literal, $value
""")
selfStr = """
move $a0 $s0 #self
"""
# In the letdecls, the expr is optional...
# If it is present, simply emit the expr and then put the value in $a0
#define: $address, $symbol
letdeclTpl1 = Template("""
$expr
sw $$a0 $address #letdecl: initial value of $symbol
""")
# If it is absent, emit the default of each type (String, Int, Bool), and void otherwise
#define: stringNulo, address, symbol
letdeclTpl2 = Template("""
la $$a0 $stringNulo #letdecl: string nulo
sw $$a0 $address #letdecl: String default value, $symbol
""")
#definir: intZero, address, symbol
letdeclTpl3 = Template("""
la $$a0 $intZero #letdecl: int zero
sw $$a0 $address #letdecl: Int default value, $symbol
""")
#define: boolFalse, address, symbol
letdeclTpl4 = Template("""
la $$a0 $boolFalse #letdecl: boolean false
sw $$a0 $address #letdecl: Boolean default value, $symbol
""")
#define: address, symbol
letdeclTpl5 = Template("""
    move $$a0 $$zero #letdecl: void
sw $$a0 $address #letdecl: object default value, $symbol
""")
#define: $address, $symbol, $klass
varTpl = Template("""
lw $$a0 $address #obj: load [$symbol], $klass
""")
negStr = """
jal Object.copy #neg
lw $t1 12($a0) #neg
neg $t1 $t1 #neg
sw $t1 12($a0) #neg
"""
# define: label
notTpl = Template("""
lw $$t1 12($$a0) #not
la $$a0 bool_const1 #not
beqz $$t1 $label #not
la $$a0 bool_const0 #not
$label:
""")
#define: left_subexp, right_subexp, op
arithTpl = Template("""
$left_subexp
sw $$a0 0($$sp) #arith: push left subexp into the stack
addiu $$sp $$sp -4 #arith
$right_subexp
jal Object.copy #arith: get a copy to store value on
lw $$s1 4($$sp) #arith: pop saved value from the stack to $$s1
addiu $$sp $$sp 4 #arith
lw $$t2 12($$s1) #arith: load in temp register
lw $$t1 12($$a0) #arith: load in temp register
$op $$t1 $$t2 $$t1 #arith: operate on them
sw $$t1 12($$a0) #arith: store result in copy
""")
#define: test_subexp, true_subexp, false_subexp, label_false, label_exit
ifTpl = Template("""
$test_subexp
lw $$t1 12($$a0) #if: get value from boolean
beqz $$t1 $label_false #if: jump if false
$true_subexp
b $label_exit #if: jump to endif
$label_false:
$false_subexp
$label_exit:
""")
#define: label_loop, label_exit, test_subexp, loop_subexp
whileTpl = Template("""
$label_loop:
$test_subexp
lw $$t1 12($$a0) #while: get value from boolean
beq $$t1 $$zero $label_exit #while: branch if false
$loop_subexp
b $label_loop #while: loop
$label_exit:
move $$a0 $$zero #while: must put void in $$a0
""")
#define: subexp, label_exit
isVoidTpl = Template("""
$subexp
move $$t1 $$a0 #isvoid: load self into $$t1
la $$a0 bool_const1 #isvoid: load true into $$a0
beqz $$t1 $label_exit #isvoid: exit if $$t1 zero (void)
la $$a0 bool_const0 #isvoid: otherwise, load false
$label_exit:
""")
#define: left_subexp, right_subexp, label_exit
leTpl = Template("""
$left_subexp
sw $$a0 0($$sp) #<: push left subexp into the stack
addiu $$sp $$sp -4 #<:
$right_subexp
lw $$s1 4($$sp) #<: pop saved value from the stack into $$s1
addiu $$sp $$sp 4 #<:
lw $$t1 12($$s1) #<: load temp values
lw $$t2 12($$a0) #<:
la $$a0 bool_const1 #<: load true
    ble $$t1 $$t2 $label_exit   #<: exit if less
la $$a0 bool_const0 #<: load false
$label_exit:
""")
#define: left_subexp, right_subexp, label_exit
# TODO: <= the same as above, but with blt instead of ble
letTpl = Template("""
""")
#define: left_subexp, right_subexp, label
# TODO: check beq, because $label may be missing
eqTpl = Template("""
$left_subexp
sw $$a0 0($$sp) #=: push left subexp into the stack
addiu $$sp $$sp -4 #=:
$right_subexp
lw $$s1 4($$sp) #=: pop saved value from the stack into $$s1
addiu $$sp $$sp 4 #=:
move $$t1 $$s1 #=: load objects (addresses) to compare
move $$t2 $$a0 #=:
la $$a0 bool_const1 #=: load true
beq $$t1 $$t2 $label #=: if identical (same address)
la $$a1 bool_const0 #=: load false
jal equality_test #=: the runtime will know...
$label:
""")
#define: exp
callParametersTpl = Template("""
$exp
sw $$a0 0($$sp) #call: push Param
addiu $$sp $$sp -4 #call:
""")
# There are 3 kinds of call, and $$a0 changes (the instance the method is called on)
# TODO: check the other kinds of CallStr1
# 1. method( params ... )
callStr1 = """
move $a0 $s0 #call: get self into $a0
"""
# 2. (object expr).method( params ... )
# 3. (object expr)@Klass.method( params ... )
#define: exp
callStr2 = Template("""
$exp
""")
#define: fileName, line, label
callTpl1 = Template("""
bne $$a0 $$zero $label #call: protect from dispatch to void
la $$a0 $fileName #call: constant object with name of the file
li $$t1 $line #call: line number
jal _dispatch_abort #call: message and die
$label:
""")
#define: off, method
callTpl_instance = Template("""
lw $$t1 8($$a0) #call: ptr to dispatch table
lw $$t1 $off($$t1) #call: method $method is at offset $offset
jalr $$t1
""")
#define: klass, off, method
callTpl_at = Template("""
la $$t1 ${klass}_dispTab #at: dispatch table for $klass
lw $$t1 $off($$t1) #at: method $method is at offset $off
jalr $$t1
""")
#define: expr, address, symbol
assignTpl = Template("""
$expr
sw $$a0 $address #assignment of $symbol
""")
#define: klass
newTpl_explicit = Template("""
 la $$a0 ${klass}_protObj #new: explicit name
 jal Object.copy #new: call copy
 jal ${klass}_init #new: call constructor
""")
newTpl_SELF_TYPE = """
 la $t1 class_objTab #new: self_type, go and find class
 lw $t2 0($s0) #new: load tag
 sll $t2 $t2 3 #new: mult by 8 (4 words x 2 places (prot, init))
 addu $t1 $t1 $t2 #new: add to base to find protObj
 move $s1 $t1 #new: keep in s1 to get _init
 lw $a0 0($t1) #new: put in $a0 so we can
 jal Object.copy #new: make a copy
 lw $t1 4($s1) #new: add 1 word to find _init
 jalr $t1 #new: call _init
"""
#define: test_expr, line, labelNotVoid
caseTpl_begin = Template("""
 $test_expr
 bne $$a0 $$zero $labelNotVoid #case: protect from case on void (abort)
 la $$a0 str_const0 #case: fileName
 li $$t1 $line #case: line number
 jal _case_abort2
$labelNotVoid:
 lw $$t1 0($$a0) #case: load obj tag
""")
#define: minChild, maxChild, nextLbl, name, address, symbol, exp, labelEnd
caseBranch = Template("""
blt $$t1 $minChild $nextLbl #case: $minChild, $name
bgt $$t1 $maxChild $nextLbl #case: $maxChild, $name
sw $$a0 $address #case: $symbol
$exp
b $labelEnd #case: go to end
$nextLbl:
""")
#define: labelEnd
caseTpl_end = Template("""
 jal _case_abort #case: default
$labelEnd:
""") | [
"string.Template"
] | [((297, 422), 'string.Template', 'Template', (['"""\n_int_tag:\n .word $intTag\n_bool_tag:\n .word $boolTag\n_string_tag:\n .word $stringTag\n"""'], {}), '(\n """\n_int_tag:\n .word $intTag\n_bool_tag:\n .word $boolTag\n_string_tag:\n .word $stringTag\n"""\n )\n', (305, 422), False, 'from string import Template\n'), ((646, 777), 'string.Template', 'Template', (['"""\n .word -1\nint_const$idx:\n .word $tag\n .word 4\n .word Int_dispTab\n .word $value\n"""'], {}), '(\n """\n .word -1\nint_const$idx:\n .word $tag\n .word 4\n .word Int_dispTab\n .word $value\n"""\n )\n', (654, 777), False, 'from string import Template\n'), ((779, 977), 'string.Template', 'Template', (['"""\n .word -1\nstr_const$idx:\n .word $tag\n .word $size\n .word String_dispTab\n .word int_const$sizeIdx\n .ascii "$value"\n .byte 0\n .align 2\n"""'], {}), '(\n """\n .word -1\nstr_const$idx:\n .word $tag\n .word $size\n .word String_dispTab\n .word int_const$sizeIdx\n .ascii "$value"\n .byte 0\n .align 2\n"""\n )\n', (787, 977), False, 'from string import Template\n'), ((979, 1202), 'string.Template', 'Template', (['"""\n .word -1\nbool_const0:\n .word $tag\n .word 4\n .word Bool_dispTab\n .word 0\n .word -1\nbool_const1:\n .word $tag\n .word 4\n .word Bool_dispTab\n .word 1\n"""'], {}), '(\n """\n .word -1\nbool_const0:\n .word $tag\n .word 4\n .word Bool_dispTab\n .word 0\n .word -1\nbool_const1:\n .word $tag\n .word 4\n .word Bool_dispTab\n .word 1\n"""\n )\n', (987, 1202), False, 'from string import Template\n'), ((2014, 2434), 'string.Template', 'Template', (['"""\n${klass}.${method}:\n addiu $$sp $$sp -$ts #inm: frame has $locals locals\n sw $$fp ${fp}($$sp) #inm: save $$fp\n sw $$s0 ${s0}($$sp) #inm: save $$s0 (self)\n sw $$ra ${ra}($$sp) #inm: save $$ra\n addiu $$fp $$sp 4 #inm: $$fp points to locals\n move $$s0 $$a0 #inm: self to $$s0\n"""'], {}), '(\n """\n${klass}.${method}:\n addiu $$sp $$sp -$ts #inm: frame has $locals locals\n sw $$fp ${fp}($$sp) #inm: save $$fp\n sw $$s0 ${s0}($$sp) #inm: save $$s0 (self)\n sw $$ra ${ra}($$sp) #inm: save $$ra\n addiu $$fp $$sp 4 #inm: $$fp points to locals\n move $$s0 $$a0 #inm: self to $$s0\n"""\n )\n', (2022, 2434), False, 'from string import Template\n'), ((2535, 2948), 'string.Template', 'Template', (['"""\n lw $$fp ${ts}($$sp) #outm: restore $$fp\n lw $$s0 ${s0}($$sp) #outm: restore $$s0 (self)\n lw $$ra ${ra}($$sp) #outm: restore $$ra\n#outm: Clean everything! restore sp, $formals from formals, $ts from local frame\n addiu $$sp $$sp $everything\n jr $$ra #outm: jump and make happy the callee\n"""'], {}), '(\n """\n lw $$fp ${ts}($$sp) #outm: restore $$fp\n lw $$s0 ${s0}($$sp) #outm: restore $$s0 (self)\n lw $$ra ${ra}($$sp) #outm: restore $$ra\n#outm: Clean everything! 
restore sp, $formals from formals, $ts from local frame\n addiu $$sp $$sp $everything\n jr $$ra #outm: jump and make happy the callee\n"""\n )\n', (2543, 2948), False, 'from string import Template\n'), ((2975, 3049), 'string.Template', 'Template', (['"""\n la $$a0 $literal #literal, $value\n"""'], {}), '("""\n la $$a0 $literal #literal, $value\n""")\n', (2983, 3049), False, 'from string import Template\n'), ((3266, 3379), 'string.Template', 'Template', (['"""\n $expr\n sw $$a0 $address #letdecl: initial value of $symbol\n"""'], {}), '(\n """\n $expr\n sw $$a0 $address #letdecl: initial value of $symbol\n"""\n )\n', (3274, 3379), False, 'from string import Template\n'), ((3508, 3677), 'string.Template', 'Template', (['"""\n la $$a0 $stringNulo #letdecl: string nulo\n sw $$a0 $address #letdecl: String default value, $symbol\n"""'], {}), '(\n """\n la $$a0 $stringNulo #letdecl: string nulo\n sw $$a0 $address #letdecl: String default value, $symbol\n"""\n )\n', (3516, 3677), False, 'from string import Template\n'), ((3717, 3880), 'string.Template', 'Template', (['"""\n la $$a0 $intZero #letdecl: int zero\n sw $$a0 $address #letdecl: Int default value, $symbol\n"""'], {}), '(\n """\n la $$a0 $intZero #letdecl: int zero\n sw $$a0 $address #letdecl: Int default value, $symbol\n"""\n )\n', (3725, 3880), False, 'from string import Template\n'), ((3922, 4094), 'string.Template', 'Template', (['"""\n la $$a0 $boolFalse #letdecl: boolean false\n sw $$a0 $address #letdecl: Boolean default value, $symbol\n"""'], {}), '(\n """\n la $$a0 $boolFalse #letdecl: boolean false\n sw $$a0 $address #letdecl: Boolean default value, $symbol\n"""\n )\n', (3930, 4094), False, 'from string import Template\n'), ((4125, 4287), 'string.Template', 'Template', (['"""\n la $$a0 $zero #letdecl: void\n sw $$a0 $address #letdecl: object default value, $symbol\n"""'], {}), '(\n """\n la $$a0 $zero #letdecl: void\n sw $$a0 $address #letdecl: object default value, $symbol\n"""\n )\n', (4133, 4287), False, 'from string import Template\n'), ((4324, 4421), 'string.Template', 'Template', (['"""\n lw $$a0 $address #obj: load [$symbol], $klass\n"""'], {}), '(\n """\n lw $$a0 $address #obj: load [$symbol], $klass\n"""\n )\n', (4332, 4421), False, 'from string import Template\n'), ((4637, 4852), 'string.Template', 'Template', (['"""\n lw $$t1 12($$a0) #not\n la $$a0 bool_const1 #not\n beqz $$t1 $label #not\n la $$a0 bool_const0 #not\n$label:\n"""'], {}), '(\n """\n lw $$t1 12($$a0) #not\n la $$a0 bool_const1 #not\n beqz $$t1 $label #not\n la $$a0 bool_const0 #not\n$label:\n"""\n )\n', (4645, 4852), False, 'from string import Template\n'), ((4893, 5558), 'string.Template', 'Template', (['"""\n$left_subexp\n sw $$a0 0($$sp) #arith: push left subexp into the stack\n addiu $$sp $$sp -4 #arith\n$right_subexp\n jal Object.copy #arith: get a copy to store value on\n lw $$s1 4($$sp) #arith: pop saved value from the stack to $$s1\n addiu $$sp $$sp 4 #arith\n lw $$t2 12($$s1) #arith: load in temp register\n lw $$t1 12($$a0) #arith: load in temp register\n $op $$t1 $$t2 $$t1 #arith: operate on them\n sw $$t1 12($$a0) #arith: store result in copy\n"""'], {}), '(\n """\n$left_subexp\n sw $$a0 0($$sp) #arith: push left subexp into the stack\n addiu $$sp $$sp -4 #arith\n$right_subexp\n jal Object.copy #arith: get a copy to store value on\n lw $$s1 4($$sp) #arith: pop saved value from the stack to $$s1\n addiu $$sp $$sp 4 #arith\n lw $$t2 12($$s1) #arith: load in temp register\n lw $$t1 12($$a0) #arith: load in temp register\n $op $$t1 $$t2 $$t1 #arith: 
operate on them\n sw $$t1 12($$a0) #arith: store result in copy\n"""\n )\n', (4901, 5558), False, 'from string import Template\n'), ((5632, 5912), 'string.Template', 'Template', (['"""\n$test_subexp\n lw $$t1 12($$a0) #if: get value from boolean\n beqz $$t1 $label_false #if: jump if false\n$true_subexp\n b $label_exit #if: jump to endif\n$label_false:\n$false_subexp\n$label_exit:\n"""'], {}), '(\n """\n$test_subexp\n lw $$t1 12($$a0) #if: get value from boolean\n beqz $$t1 $label_false #if: jump if false\n$true_subexp\n b $label_exit #if: jump to endif\n$label_false:\n$false_subexp\n$label_exit:\n"""\n )\n', (5640, 5912), False, 'from string import Template\n'), ((5974, 6327), 'string.Template', 'Template', (['"""\n$label_loop:\n$test_subexp\n lw $$t1 12($$a0) #while: get value from boolean\n beq $$t1 $$zero $label_exit #while: branch if false\n$loop_subexp\n b $label_loop #while: loop\n$label_exit:\n move $$a0 $$zero #while: must put void in $$a0\n"""'], {}), '(\n """\n$label_loop:\n$test_subexp\n lw $$t1 12($$a0) #while: get value from boolean\n beq $$t1 $$zero $label_exit #while: branch if false\n$loop_subexp\n b $label_loop #while: loop\n$label_exit:\n move $$a0 $$zero #while: must put void in $$a0\n"""\n )\n', (5982, 6327), False, 'from string import Template\n'), ((6352, 6712), 'string.Template', 'Template', (['"""\n$label_exit:\n$subexp\n move $$t1 $$a0 #isvoid: load self into $$t1\n la $$a0 bool_const1 #isvoid: load true into $$a0\n beqz $$t1 $label_exit #isvoid: exit if $$t1 zero (void)\n la $$a0 bool_const0 #isvoid: otherwise, load false\n$label_exit:\n"""'], {}), '(\n """\n$label_exit:\n$subexp\n move $$t1 $$a0 #isvoid: load self into $$t1\n la $$a0 bool_const1 #isvoid: load true into $$a0\n beqz $$t1 $label_exit #isvoid: exit if $$t1 zero (void)\n la $$a0 bool_const0 #isvoid: otherwise, load false\n$label_exit:\n"""\n )\n', (6360, 6712), False, 'from string import Template\n'), ((6760, 7393), 'string.Template', 'Template', (['"""\n$left_subexp\n sw $$a0 0($$sp) #<: push left subexp into the stack\n addiu $$sp $$sp -4 #<:\n\n$right_subexp\n lw $$s1 4($$sp) #<: pop saved value from the stack into $$s1\n addiu $$sp $$sp 4 #<:\n \n lw $$t1 12($$s1) #<: load temp values\n lw $$t2 12($$a0) #<:\n la $$a0 bool_const1 #<: load true\n ble $$t1 $$t2 #<: exit if less\n la $$a0 bool_const0 #<: load false\n$label_exit:\n"""'], {}), '(\n """\n$left_subexp\n sw $$a0 0($$sp) #<: push left subexp into the stack\n addiu $$sp $$sp -4 #<:\n\n$right_subexp\n lw $$s1 4($$sp) #<: pop saved value from the stack into $$s1\n addiu $$sp $$sp 4 #<:\n \n lw $$t1 12($$s1) #<: load temp values\n lw $$t2 12($$a0) #<:\n la $$a0 bool_const1 #<: load true\n ble $$t1 $$t2 #<: exit if less\n la $$a0 bool_const0 #<: load false\n$label_exit:\n"""\n )\n', (6768, 7393), False, 'from string import Template\n'), ((7496, 7510), 'string.Template', 'Template', (['"""\n"""'], {}), "('\\n')\n", (7504, 7510), False, 'from string import Template\n'), ((7614, 8358), 'string.Template', 'Template', (['"""\n$left_subexp\n sw $$a0 0($$sp) #=: push left subexp into the stack\n addiu $$sp $$sp -4 #=:\n$right_subexp\n lw $$s1 4($$sp) #=: pop saved value from the stack into $$s1\n addiu $$sp $$sp 4 #=:\n\n move $$t1 $$s1 #=: load objects (addresses) to compare\n move $$t2 $$a0 #=:\n \n la $$a0 bool_const1 #=: load true\n beq $$t1 $$t2 $label #=: if identical (same address)\n \n la $$a1 bool_const0 #=: load false\n jal equality_test #=: the runtime will know...\n$label:\n"""'], {}), '(\n """\n$left_subexp\n sw $$a0 0($$sp) #=: push left 
subexp into the stack\n addiu $$sp $$sp -4 #=:\n$right_subexp\n lw $$s1 4($$sp) #=: pop saved value from the stack into $$s1\n addiu $$sp $$sp 4 #=:\n\n move $$t1 $$s1 #=: load objects (addresses) to compare\n move $$t2 $$a0 #=:\n \n la $$a0 bool_const1 #=: load true\n beq $$t1 $$t2 $label #=: if identical (same address)\n \n la $$a1 bool_const0 #=: load false\n jal equality_test #=: the runtime will know...\n$label:\n"""\n )\n', (7622, 8358), False, 'from string import Template\n'), ((8384, 8529), 'string.Template', 'Template', (['"""\n$exp\n sw $$a0 0($$sp) #call: push Param\n addiu $$sp $$sp -4 #call:\n"""'], {}), '(\n """\n$exp\n sw $$a0 0($$sp) #call: push Param\n addiu $$sp $$sp -4 #call:\n"""\n )\n', (8392, 8529), False, 'from string import Template\n'), ((8874, 8898), 'string.Template', 'Template', (['"""\n $exp\n"""'], {}), "('\\n $exp\\n')\n", (8882, 8898), False, 'from string import Template\n'), ((8939, 9274), 'string.Template', 'Template', (['"""\n bne $$a0 $$zero $label #call: protect from dispatch to void\n la $$a0 $fileName #call: constant object with name of the file\n li $$t1 $line #call: line number\n jal _dispatch_abort #call: message and die\n$label:\n"""'], {}), '(\n """\n bne $$a0 $$zero $label #call: protect from dispatch to void\n la $$a0 $fileName #call: constant object with name of the file\n li $$t1 $line #call: line number\n jal _dispatch_abort #call: message and die\n$label:\n"""\n )\n', (8947, 9274), False, 'from string import Template\n'), ((9307, 9511), 'string.Template', 'Template', (['"""\n lw $$t1 8($$a0) #call: ptr to dispatch table\n lw $$t1 $off($$t1) #call: method $method is at offset $offset\n jalr $$t1\n"""'], {}), '(\n """\n lw $$t1 8($$a0) #call: ptr to dispatch table\n lw $$t1 $off($$t1) #call: method $method is at offset $offset\n jalr $$t1\n"""\n )\n', (9315, 9511), False, 'from string import Template\n'), ((9545, 9746), 'string.Template', 'Template', (['"""\n la $$t1 ${klass}_dispTab #at: dispatch table for $klass\n lw $$t1 $off($$t1) #at: method $method is at offset $off\n jalr $$t1\n"""'], {}), '(\n """\n la $$t1 ${klass}_dispTab #at: dispatch table for $klass\n lw $$t1 $off($$t1) #at: method $method is at offset $off\n jalr $$t1\n"""\n )\n', (9553, 9746), False, 'from string import Template\n'), ((9776, 9880), 'string.Template', 'Template', (['"""\n $expr\n sw $$a0 $address #assignment of $symbol\n"""'], {}), '(\n """\n $expr\n sw $$a0 $address #assignment of $symbol\n"""\n )\n', (9784, 9880), False, 'from string import Template\n'), ((9906, 10140), 'string.Template', 'Template', (['"""\n la $$a0 ${klass}_protObj #new: explicit name\n jal Object.copy #new: call copy\n jal ${klass}_init #new: call constructor\n }\n }\n"""'], {}), '(\n """\n la $$a0 ${klass}_protObj #new: explicit name\n jal Object.copy #new: call copy\n jal ${klass}_init #new: call constructor\n }\n }\n"""\n )\n', (9914, 10140), False, 'from string import Template\n'), ((10880, 11233), 'string.Template', 'Template', (['"""\n $test_expr\n bne $$a0 $$zero $labelVoid #case: protect from case on void (abort)\n la $$a0 str_const0 #case: fileName\n li $$t1 $line #case: line number\n jal _case_abort2\n$labelNotVoid:\n lw $$t1 0($$a0) #case: load obj tag\n"""'], {}), '(\n """\n $test_expr\n bne $$a0 $$zero $labelVoid #case: protect from case on void (abort)\n la $$a0 str_const0 #case: fileName\n li $$t1 $line #case: line number\n jal _case_abort2\n$labelNotVoid:\n lw $$t1 0($$a0) #case: load obj tag\n"""\n )\n', (10888, 11233), False, 'from string import Template\n'), ((11306, 
11604), 'string.Template', 'Template', (['"""\n blt $$t1 $minChild $nextLbl #case: $minChild, $name\n bgt $$t1 $maxChild $nextLbl #case: $maxChild, $name\n sw $$a0 $address #case: $symbol\n$exp\n b $labelEnd #case: go to end\n$nextLbl:\n"""'], {}), '(\n """\n blt $$t1 $minChild $nextLbl #case: $minChild, $name\n bgt $$t1 $maxChild $nextLbl #case: $maxChild, $name\n sw $$a0 $address #case: $symbol\n$exp\n b $labelEnd #case: go to end\n$nextLbl:\n"""\n )\n', (11314, 11604), False, 'from string import Template\n'), ((11629, 11705), 'string.Template', 'Template', (['"""\n jal _case_abort #case: default\n"""'], {}), '("""\n jal _case_abort #case: default\n""")\n', (11637, 11705), False, 'from string import Template\n')] |
from unittest.mock import patch
import pytest
from node.blockchain.facade import BlockchainFacade
from node.blockchain.inner_models import NodeDeclarationBlock
from node.blockchain.models import Block as ORMBlock
from node.blockchain.tests.factories.block import make_block
from node.blockchain.tests.factories.block_message.node_declaration import make_node_declaration_block_message
from node.blockchain.utils.blockchain_sync import sync_with_address, sync_with_node
@pytest.mark.django_db
@pytest.mark.usefixtures('rich_blockchain', 'force_smart_mocked_node_client')
def test_sync_with_node_mocked(self_node_declared, test_server_address):
with patch('node.blockchain.utils.blockchain_sync.sync_with_address', return_value=iter(((1, 1),))) as mock:
next(sync_with_node(self_node_declared))
mock.assert_called_once_with(test_server_address, to_block_number=6)
@pytest.mark.django_db
@pytest.mark.usefixtures('rich_blockchain', 'force_smart_mocked_node_client')
def test_sync_with_address(
self_node_declared, test_server_address, primary_validator_key_pair, regular_node, regular_node_key_pair
):
facade = BlockchainFacade.get_instance()
start_block_number = facade.get_next_block_number()
signing_key = primary_validator_key_pair.private
expected_blocks = []
def raw_block_generator(self, address, block_number_min, block_number_max):
assert block_number_max - block_number_min + 1 == 5
for expected_block_number in range(block_number_min, block_number_max + 1):
block = make_block(
make_node_declaration_block_message(regular_node, regular_node_key_pair, facade),
signing_key,
block_class=NodeDeclarationBlock
)
expected_blocks.append(block)
assert block.get_block_number() == expected_block_number
yield block.dict()
with patch('node.core.clients.node.NodeClient.yield_blocks_dict', new=raw_block_generator):
generator = sync_with_address(test_server_address, to_block_number=start_block_number + 4)
assert next(generator) == (start_block_number, 0.2)
assert next(generator) == (start_block_number + 1, 0.4)
assert next(generator) == (start_block_number + 2, 0.6)
assert next(generator) == (start_block_number + 3, 0.8)
assert next(generator) == (start_block_number + 4, 1)
with pytest.raises(StopIteration):
next(generator)
assert facade.get_next_block_number() == start_block_number + 5
actual_blocks = ORMBlock.objects.filter(_id__in=range(start_block_number, start_block_number + 5)).order_by('_id')
assert expected_blocks == [block.get_block() for block in actual_blocks]
| [
"node.blockchain.utils.blockchain_sync.sync_with_address",
"pytest.raises",
"pytest.mark.usefixtures",
"node.blockchain.facade.BlockchainFacade.get_instance",
"unittest.mock.patch",
"node.blockchain.tests.factories.block_message.node_declaration.make_node_declaration_block_message",
"node.blockchain.utils.blockchain_sync.sync_with_node"
] | [((497, 573), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""rich_blockchain"""', '"""force_smart_mocked_node_client"""'], {}), "('rich_blockchain', 'force_smart_mocked_node_client')\n", (520, 573), False, 'import pytest\n'), ((909, 985), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""rich_blockchain"""', '"""force_smart_mocked_node_client"""'], {}), "('rich_blockchain', 'force_smart_mocked_node_client')\n", (932, 985), False, 'import pytest\n'), ((1139, 1170), 'node.blockchain.facade.BlockchainFacade.get_instance', 'BlockchainFacade.get_instance', ([], {}), '()\n', (1168, 1170), False, 'from node.blockchain.facade import BlockchainFacade\n'), ((1905, 1995), 'unittest.mock.patch', 'patch', (['"""node.core.clients.node.NodeClient.yield_blocks_dict"""'], {'new': 'raw_block_generator'}), "('node.core.clients.node.NodeClient.yield_blocks_dict', new=\n raw_block_generator)\n", (1910, 1995), False, 'from unittest.mock import patch\n'), ((2012, 2090), 'node.blockchain.utils.blockchain_sync.sync_with_address', 'sync_with_address', (['test_server_address'], {'to_block_number': '(start_block_number + 4)'}), '(test_server_address, to_block_number=start_block_number + 4)\n', (2029, 2090), False, 'from node.blockchain.utils.blockchain_sync import sync_with_address, sync_with_node\n'), ((773, 807), 'node.blockchain.utils.blockchain_sync.sync_with_node', 'sync_with_node', (['self_node_declared'], {}), '(self_node_declared)\n', (787, 807), False, 'from node.blockchain.utils.blockchain_sync import sync_with_address, sync_with_node\n'), ((2418, 2446), 'pytest.raises', 'pytest.raises', (['StopIteration'], {}), '(StopIteration)\n', (2431, 2446), False, 'import pytest\n'), ((1579, 1664), 'node.blockchain.tests.factories.block_message.node_declaration.make_node_declaration_block_message', 'make_node_declaration_block_message', (['regular_node', 'regular_node_key_pair', 'facade'], {}), '(regular_node, regular_node_key_pair, facade\n )\n', (1614, 1664), False, 'from node.blockchain.tests.factories.block_message.node_declaration import make_node_declaration_block_message\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is a port of the Kepler wavelet search TPS machinery from MATLAB to Python.
The Kepler pipeline is open source and available here
https://github.com/nasa/kepler-pipeline
The original matlab code that this code is a modification of was released under
NASA Open Source Agreement v1.3 (NOSA v1.3)
NOSA requires the following statement for modifications to the original software
to appear prominently
Copyright 2017 United States Government as represented by the Administrator
of the National Aeronautics and Space Administration. All Rights Reserved.
NASA acknowledges the SETI Institute’s primary role in authoring and
producing the Kepler Data Processing Pipeline under Cooperative Agreement
Nos. NNA04CC63A, NNX07AD96A, NNX07AD98A, NNX11AI13A, NNX11AI14A, NNX13AD01A
& NNX13AD16A.
Portions of the Kepler Data Processing Pipeline software constitute modified
Matlab scripts and may be utilized only in a manner consistent with the
terms and conditions of the Mathworks Limited License, rather than the
terms of this Agreement. The Mathworks Limited License may be found
in the file MATHWORKS-LIMITED-LICENSE.docx.
Further NOSA license details for the original source code are available
in the included document
kep_wavelets_py-NASA-OPEN-SOURCE-AGREEMENT.doc
kep_wavelets.py is the only code in the TESS-ExoClass project
to which NOSA is applicable.
Modification from original matlab source to Python by
@author: <NAME> (MIT)
"""
import numpy as np
import matplotlib.pyplot as plt
from statsmodels import robust
import scipy.signal as sig
import scipy.io as sio
class waveletObject:
h0 = np.array([])
H = np.array([], dtype=np.complex)
G = np.array([], dtype=np.complex)
nBands = 0
def __init__(self, waveletlen, fluxTS, varwindow):
""" fluxTS must be a power of 2 """
self.fluxTS = np.array(fluxTS).flatten()
self.whiteningCoeffs = np.array([])
self.waveletCoeffs = np.array([])
# Build the wavelet and whitening coeffs
self.h0, tmp = daubcqf(waveletlen)
self.nBands = calcNBands(waveletlen, len(fluxTS))
self.H, self.G = self.set_filter_bank()
self.waveletCoeffs = self.overcomplete_wavelet_transform()
self.whiteningCoeffs = self.set_whitening_coefficients(varwindow)
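        # Illustrative construction (numbers mirror the __main__ demo below): for a
        # length-2048 flux series, waveletObject(12, fluxTS, 30) builds
        # calcNBands(12, 2048) == 9 filter bands plus the whitening coefficients.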
# Set the filter banks
def set_filter_bank(self):
wavObj = self
nSamples = len(wavObj.fluxTS)
filterLength = len(wavObj.h0)
nBands = wavObj.nBands
# construct the 4 basis vectors
# matlab assumed h0 is 1,filterLength
h0 = np.reshape(wavObj.h0, (filterLength,1))
h1 = np.flipud(h0) * np.reshape(np.power(-1, np.arange(0,filterLength)), (filterLength,1))
g0 = np.flipud(h0)
g1 = np.flipud(h1)
# construct the FFT of each of the vectors, with appropriate padding -- note that here we
# explicitly show which are low-pass and which are high-pass
HL = np.fft.fft(h0.flatten(), nSamples)
HH = np.fft.fft(h1.flatten(), nSamples)
GL = np.fft.fft(g0.flatten(), nSamples)
GH = np.fft.fft(g1.flatten(), nSamples)
#np.save('sfb_HLR', np.real(HL))
#np.save('sfb_HLI', np.imag(HL))
#np.save('sfb_HHR', np.real(HH))
#np.save('sfb_HHI', np.imag(HH))
#np.save('sfb_GLR', np.real(GL))
#np.save('sfb_GLI', np.imag(GL))
#np.save('sfb_GHR', np.real(GL))
#np.save('sfb_GHI', np.imag(GL))
# define the filters
wavObj.G = np.zeros((nSamples, nBands), dtype=np.complex)
wavObj.H = np.zeros((nSamples, nBands), dtype=np.complex)
# define 2 vectors which will hold product of low-pass filters
GLProduct = np.ones((nSamples,), dtype=np.complex)
HLProduct = np.ones((nSamples,), dtype=np.complex)
# Loop over bands
for iBand in range(0,nBands):
#on the last band, the GH and HH vectors have to be set to one, since the lowest band
#sees only low-pass filters all the way down
if iBand == nBands -1:
HH = np.ones((nSamples,))
GH = np.ones((nSamples,))
wavObj.G[:,iBand] = GH * GLProduct
wavObj.H[:,iBand] = HH * HLProduct
# Increment the products of the low-pass filters
GLProduct = GLProduct * GL
HLProduct = HLProduct * HL
# convert the elemental filters to the next band down in freq
tmp = GL[0::2]
GL = np.append(tmp, tmp)
tmp = HL[0::2]
HL = np.append(tmp, tmp)
tmp = GH[0::2]
GH = np.append(tmp, tmp)
tmp = HH[0::2]
HH = np.append(tmp, tmp)
# print("hello world")
#np.save('sfb_HR',np.real(wavObj.H))
#np.save('sfb_HI',np.imag(wavObj.H))
#np.save('sfb_GR',np.real(wavObj.G))
#np.save('sfb_GI',np.imag(wavObj.G))
return wavObj.H, wavObj.G
def overcomplete_wavelet_transform(self, usets=None):
wavObj = self
nBands = wavObj.nBands
filterLength = len(wavObj.h0)
nSamples = len(wavObj.fluxTS)
default = False
if usets is None:
default = True
usets = wavObj.fluxTS
if not len(usets) == nSamples:
print("Warning the input time series to owt is not the same as was used to setup wavelet object!!!")
waveletCoefficients = -1.0 * np.ones((nSamples, nBands))
#% construct the FFT of the initial vector and repmat it to the # of bands
Xoneband = np.reshape(np.fft.fft(usets, axis=0), (nSamples,1))
#if default:
#np.save('owt_usets', usets)
#np.save('owt_XonebandR',np.real(Xoneband))
#np.save('owt_XonebandI',np.imag(Xoneband))
X = np.tile(Xoneband, (1,nBands))
#% the wavelet expansion is ALMOST just the IFFT of X multiplied by H ...
waveletCoefficients = np.real(np.fft.ifft(X * wavObj.H, axis=0))
#if default:
# np.save('owt_wc1',waveletCoefficients)
# Except for some circshifts
for iBand in range(nBands):
shiftIndex = np.min([iBand+1, nBands-1])
nShift = filterLength*np.int(np.power(2, shiftIndex-1)) - np.int(np.power(2, shiftIndex-1))
waveletCoefficients[:,iBand] = np.roll(waveletCoefficients[:,iBand], -nShift)
# print("hello world")
#if default:
# np.save('owt_wc2', waveletCoefficients)
return waveletCoefficients
def set_whitening_coefficients(self, varWindow, usewavc=None):
wavObj = self
nBands = wavObj.nBands
nSamples = len(wavObj.fluxTS)
if usewavc is None:
usewavc = wavObj.waveletCoeffs
whitec = np.zeros_like(usewavc)
for iBand in range(nBands):
if iBand == nBands-1:
subtractMedianFlag = True
else:
subtractMedianFlag = False
decimationFactor = int(np.power(2, iBand))
whitec[:,iBand] = np.power(self.moving_circular_mad(usewavc[:,iBand], \
varWindow*decimationFactor, subtractMedianFlag), -2.0)
#np.save('swc_whitec1', whitec)
#% Look for bands that have excessively large whitening coefficients and
#% set them to something reasonable if they do
waveletSupportBuffer = 50
outlierSigmaMultiplier = 6.0 #% 6.0 may need to become a module parameter and tuned
#% an impulse has support of 2*2^iBand so multiply by buffer to be safe
waveletSupportInCadences = (waveletSupportBuffer * 2* np.power(2, np.arange(1,nBands+1))).astype(np.int)
suspectBandIndicator = waveletSupportInCadences >= nSamples
meanWhiteningCoefficients = np.mean(whitec,axis=0)
overallMeanWhiteningCoefficients = np.median(meanWhiteningCoefficients[np.logical_not(suspectBandIndicator)].flatten())
stdWhiteningCoefficients = robust.mad(meanWhiteningCoefficients[np.logical_not(suspectBandIndicator)])
badBands = (meanWhiteningCoefficients-overallMeanWhiteningCoefficients) > outlierSigmaMultiplier*stdWhiteningCoefficients
idxBad = np.where((badBands) & (suspectBandIndicator))[0]
if len(idxBad)>0:
for i in idxBad:
whitec[:,i] = overallMeanWhiteningCoefficients
#np.save('swc_whitec2', whitec)
return whitec
def moving_circular_mad(self, vec, window, subMedian=True):
madValues = np.zeros_like(vec)
# do a circular extension
nSamples = len(vec)
if window < nSamples-2:
# window should be odd for sig.medfilt
if np.mod(window,2)==0:
window = window+1
vecCirc = np.insert(vec, 0, vec[nSamples-window:])
vecCirc = np.append(vecCirc, vec[0:window])
nSamplesCirc = len(vecCirc)
##% if median subtracted is desired, compute the median; otherwise, set equal to zero
if subMedian:
medianValue = sig.medfilt(vecCirc, window)
else:
medianValue = np.zeros((nSamplesCirc,))
tmp = np.abs(vecCirc-medianValue)
# medfilt2d is much faster than medfilt based upon
#https://gist.github.com/f0k/2f8402e4dfb6974bfcf1
madValuesCirc = sig.medfilt2d(tmp.reshape(1,-1), (1, window))[0]
# madValuesCirc = sig.medfilt(np.abs(vecCirc-medianValue), window)
# How about convolve for moving mean
# This is much faster than medfilt2, but the signal is supprresed
# because mean is less robust to the signal, thus the CDPP
# spikes around the signal
#madValuesCirc = np.convolve(tmp, np.ones((window,))/window, mode='full')
madValues = madValuesCirc[window:nSamplesCirc-window]
madValues = madValues / 0.6745
else:
if subMedian:
medianValue = np.median(vec)
else:
medianValue = 0.0
madValues = madValues + np.median(np.abs(vec-medianValue))
madValues = madValues / 0.6745
return madValues
def calcNBands(wN, fN):
return int(np.log2(fN) - np.int(np.floor(np.log2(wN))) + 1)
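# Worked example (illustrative values): calcNBands(12, 2048)
#   == int(log2(2048) - floor(log2(12)) + 1) == int(11 - 3 + 1) == 9 bands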
def daubcqf(N):
"""literal translation of daubcqf.m of Kepler pipeline"""
if np.mod(N,2) == 1:
print("No Daubechies filter exists for ODD length {0:d}".format(N))
K = int(N/2)
a = 1
p = 1
q = 1
h0 = np.array([1.0, 1.0])
for j in range(1,K):
a = -a*0.25*(j+K-1)/j
h0 = np.insert(h0, 0, 0.0) + np.append(h0, 0.0)
negp = -p
p = np.insert(negp, 0, 0) + np.append(p, 0)
negp = -p
p = np.insert(negp, 0, 0) + np.append(p, 0)
zqz = np.insert(q, 0, 0.0)
zqz = np.append(zqz, 0.0)
q = zqz + a*p
q = np.sort(np.roots(q))
qt = q[0:K-1]
h0 = np.convolve(h0, np.real(np.poly(qt)))
h0 = np.sqrt(2.0)*h0/np.sum(h0) # normalize to sqrt(2)
    if np.abs(np.sum(np.power(h0, 2)) - 1.0) > 1.0e-4:
print("Numerically unstable Daubechies for this value of N {0:d}".format(N))
h1 = np.rot90(np.reshape(h0, (len(h0),1)), 2).flatten()
    h1[0:N:2] = -h1[0:N:2]  # MATLAB h1(1:2:N) is start:step:stop; Python slices are start:stop:step
return h0, h1
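# Sanity check (well-known Daubechies-4 values): daubcqf(4) yields
# h0 ~ [0.4830, 0.8365, 0.2241, -0.1294], whose squared entries sum to 1;
# h1 is the order-reversed copy with every other sign flipped.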
def set_trial_transit_pulse(duration):
    """duration is the transit search width in cadences (integer);
    build a box-shaped trial pulse."""
    trial_pulse = np.zeros(duration + 1)
    trial_pulse[0:duration] = -0.5
    trial_pulse[-1] = 0.0
    return trial_pulse
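# e.g. set_trial_transit_pulse(3) -> array([-0.5, -0.5, -0.5, 0.0]): a 3-cadence box pulse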
def compute_statistic_time_series(wavObj, searchLen, trial_pulse):
whtC = wavObj.whiteningCoeffs
x = wavObj.waveletCoeffs
nSamples = x.shape[0]
nBands = x.shape[1]
shiftLength = np.int(np.fix(searchLen/2.0)) + 1
#% zero pad the pulse so its the same length as x
full_trial_pulse = np.zeros((nSamples,))
full_trial_pulse[0:len(trial_pulse)] = trial_pulse
s = wavObj.overcomplete_wavelet_transform(full_trial_pulse)
#np.save('csts_s', s)
corrTS = np.zeros((nSamples,))
normTS = np.zeros((nSamples,))
for iBand in range(0,nBands-1):
factorOfTwo = np.power(2.0, -np.min([iBand+1, nBands-1]))
SNRi = circfilt(np.flip(s[:,iBand]*s[:,iBand], 0), whtC[:,iBand])
Li = circfilt(np.flip(s[:,iBand], 0), x[:,iBand]*whtC[:,iBand])
SNRi = np.roll(SNRi, shiftLength)
Li = np.roll(Li, shiftLength)
normTS = normTS + SNRi*factorOfTwo
corrTS = corrTS + Li*factorOfTwo
if iBand == nBands-2:
normTS = np.sqrt(normTS)
#np.save('csts_normTS', normTS)
#np.save('csts_corrTS', corrTS)
return normTS, corrTS
def circfilt(vec1, vec2):
nLength = len(vec2)
X = np.fft.fft(vec2)
H = np.fft.fft(vec1, nLength)
y = np.real(np.fft.ifft(H*X))
return y
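# circfilt evaluates the circular convolution of vec1 and vec2 via the FFT convolution
# theorem; e.g. circfilt(np.ones(4) / 4.0, x) is a circular 4-point moving average of x.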
if __name__ == '__main__':
waveletLen = 12
nTrials = 200
depth = 6.0
durat = 3
sesMax = np.zeros((nTrials,))
for i in range(nTrials):
if np.mod(i,10) == 0:
print("{0:d}".format(i))
fluxTS = np.random.randn(2048,1)
# fluxTS = np.load('test_ts.npy')
oFluxTS = fluxTS
fluxTS[1024:1024+durat] = fluxTS[1024:1024+durat] - depth
# plt.plot(fluxTS, '.')
# plt.show()
searchLen = durat
varianceFilterFactor = 10
varianceFilterWindow = searchLen * varianceFilterFactor
wavObj = waveletObject(waveletLen, fluxTS, varianceFilterWindow)
trial_pulse = set_trial_transit_pulse(searchLen)
normTS, corrTS = compute_statistic_time_series(wavObj, searchLen, trial_pulse)
sesMax[i] = np.max(corrTS/normTS)
# plt.plot(corrTS/normTS, '.')
# plt.show()
# plt.plot(1.0/normTS, '.')
# plt.show()
print("Mean: {0:f} std: {1:f}".format(np.mean(sesMax), np.std(sesMax)))
print("Expect Mean: {0:f}".format(depth*np.sqrt(durat)))
# matin = sio.loadmat('test_tps_matlabout.mat')
# matCorrTS = matin['corrTS'].flatten()
# matNormTS = matin['normTS'].flatten()
# matSES = matCorrTS/matNormTS.flatten()
# ses = corrTS/normTS
# plt.plot(matSES, '.')
# plt.plot(ses, '.')
# plt.show()
# plt.plot(matSES-ses,'.')
# plt.show()
#
| [
"numpy.poly",
"numpy.sqrt",
"numpy.logical_not",
"numpy.roots",
"numpy.array",
"numpy.mod",
"numpy.arange",
"numpy.mean",
"numpy.flip",
"numpy.reshape",
"numpy.where",
"numpy.fix",
"numpy.fft.fft",
"numpy.max",
"numpy.min",
"numpy.tile",
"numpy.abs",
"numpy.ones",
"numpy.flipud",
"numpy.std",
"numpy.log2",
"numpy.fft.ifft",
"numpy.random.randn",
"numpy.insert",
"numpy.median",
"numpy.roll",
"numpy.power",
"numpy.append",
"numpy.sum",
"numpy.zeros",
"scipy.signal.medfilt",
"numpy.zeros_like"
] | [((1675, 1687), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1683, 1687), True, 'import numpy as np\n'), ((1696, 1726), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.complex'}), '([], dtype=np.complex)\n', (1704, 1726), True, 'import numpy as np\n'), ((1735, 1765), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.complex'}), '([], dtype=np.complex)\n', (1743, 1765), True, 'import numpy as np\n'), ((10686, 10706), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (10694, 10706), True, 'import numpy as np\n'), ((11616, 11638), 'numpy.zeros', 'np.zeros', (['(duration + 1)'], {}), '(duration + 1)\n', (11624, 11638), True, 'import numpy as np\n'), ((12039, 12060), 'numpy.zeros', 'np.zeros', (['(nSamples,)'], {}), '((nSamples,))\n', (12047, 12060), True, 'import numpy as np\n'), ((12219, 12240), 'numpy.zeros', 'np.zeros', (['(nSamples,)'], {}), '((nSamples,))\n', (12227, 12240), True, 'import numpy as np\n'), ((12254, 12275), 'numpy.zeros', 'np.zeros', (['(nSamples,)'], {}), '((nSamples,))\n', (12262, 12275), True, 'import numpy as np\n'), ((12924, 12940), 'numpy.fft.fft', 'np.fft.fft', (['vec2'], {}), '(vec2)\n', (12934, 12940), True, 'import numpy as np\n'), ((12949, 12974), 'numpy.fft.fft', 'np.fft.fft', (['vec1', 'nLength'], {}), '(vec1, nLength)\n', (12959, 12974), True, 'import numpy as np\n'), ((13135, 13155), 'numpy.zeros', 'np.zeros', (['(nTrials,)'], {}), '((nTrials,))\n', (13143, 13155), True, 'import numpy as np\n'), ((1960, 1972), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1968, 1972), True, 'import numpy as np\n'), ((2002, 2014), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2010, 2014), True, 'import numpy as np\n'), ((2670, 2710), 'numpy.reshape', 'np.reshape', (['wavObj.h0', '(filterLength, 1)'], {}), '(wavObj.h0, (filterLength, 1))\n', (2680, 2710), True, 'import numpy as np\n'), ((2822, 2835), 'numpy.flipud', 'np.flipud', (['h0'], {}), '(h0)\n', (2831, 2835), True, 'import numpy as np\n'), ((2849, 2862), 'numpy.flipud', 'np.flipud', (['h1'], {}), '(h1)\n', (2858, 2862), True, 'import numpy as np\n'), ((3599, 3645), 'numpy.zeros', 'np.zeros', (['(nSamples, nBands)'], {'dtype': 'np.complex'}), '((nSamples, nBands), dtype=np.complex)\n', (3607, 3645), True, 'import numpy as np\n'), ((3665, 3711), 'numpy.zeros', 'np.zeros', (['(nSamples, nBands)'], {'dtype': 'np.complex'}), '((nSamples, nBands), dtype=np.complex)\n', (3673, 3711), True, 'import numpy as np\n'), ((3812, 3850), 'numpy.ones', 'np.ones', (['(nSamples,)'], {'dtype': 'np.complex'}), '((nSamples,), dtype=np.complex)\n', (3819, 3850), True, 'import numpy as np\n'), ((3871, 3909), 'numpy.ones', 'np.ones', (['(nSamples,)'], {'dtype': 'np.complex'}), '((nSamples,), dtype=np.complex)\n', (3878, 3909), True, 'import numpy as np\n'), ((5925, 5955), 'numpy.tile', 'np.tile', (['Xoneband', '(1, nBands)'], {}), '(Xoneband, (1, nBands))\n', (5932, 5955), True, 'import numpy as np\n'), ((6900, 6922), 'numpy.zeros_like', 'np.zeros_like', (['usewavc'], {}), '(usewavc)\n', (6913, 6922), True, 'import numpy as np\n'), ((7922, 7945), 'numpy.mean', 'np.mean', (['whitec'], {'axis': '(0)'}), '(whitec, axis=0)\n', (7929, 7945), True, 'import numpy as np\n'), ((8650, 8668), 'numpy.zeros_like', 'np.zeros_like', (['vec'], {}), '(vec)\n', (8663, 8668), True, 'import numpy as np\n'), ((10536, 10548), 'numpy.mod', 'np.mod', (['N', '(2)'], {}), '(N, 2)\n', (10542, 10548), True, 'import numpy as np\n'), ((10977, 10997), 'numpy.insert', 'np.insert', (['q', '(0)', '(0.0)'], {}), '(q, 0, 0.0)\n', (10986, 
10997), True, 'import numpy as np\n'), ((11012, 11031), 'numpy.append', 'np.append', (['zqz', '(0.0)'], {}), '(zqz, 0.0)\n', (11021, 11031), True, 'import numpy as np\n'), ((11080, 11091), 'numpy.roots', 'np.roots', (['q'], {}), '(q)\n', (11088, 11091), True, 'import numpy as np\n'), ((11183, 11193), 'numpy.sum', 'np.sum', (['h0'], {}), '(h0)\n', (11189, 11193), True, 'import numpy as np\n'), ((12543, 12569), 'numpy.roll', 'np.roll', (['SNRi', 'shiftLength'], {}), '(SNRi, shiftLength)\n', (12550, 12569), True, 'import numpy as np\n'), ((12583, 12607), 'numpy.roll', 'np.roll', (['Li', 'shiftLength'], {}), '(Li, shiftLength)\n', (12590, 12607), True, 'import numpy as np\n'), ((12991, 13009), 'numpy.fft.ifft', 'np.fft.ifft', (['(H * X)'], {}), '(H * X)\n', (13002, 13009), True, 'import numpy as np\n'), ((13269, 13293), 'numpy.random.randn', 'np.random.randn', (['(2048)', '(1)'], {}), '(2048, 1)\n', (13284, 13293), True, 'import numpy as np\n'), ((13842, 13865), 'numpy.max', 'np.max', (['(corrTS / normTS)'], {}), '(corrTS / normTS)\n', (13848, 13865), True, 'import numpy as np\n'), ((2723, 2736), 'numpy.flipud', 'np.flipud', (['h0'], {}), '(h0)\n', (2732, 2736), True, 'import numpy as np\n'), ((4600, 4619), 'numpy.append', 'np.append', (['tmp', 'tmp'], {}), '(tmp, tmp)\n', (4609, 4619), True, 'import numpy as np\n'), ((4664, 4683), 'numpy.append', 'np.append', (['tmp', 'tmp'], {}), '(tmp, tmp)\n', (4673, 4683), True, 'import numpy as np\n'), ((4728, 4747), 'numpy.append', 'np.append', (['tmp', 'tmp'], {}), '(tmp, tmp)\n', (4737, 4747), True, 'import numpy as np\n'), ((4792, 4811), 'numpy.append', 'np.append', (['tmp', 'tmp'], {}), '(tmp, tmp)\n', (4801, 4811), True, 'import numpy as np\n'), ((5557, 5584), 'numpy.ones', 'np.ones', (['(nSamples, nBands)'], {}), '((nSamples, nBands))\n', (5564, 5584), True, 'import numpy as np\n'), ((5698, 5723), 'numpy.fft.fft', 'np.fft.fft', (['usets'], {'axis': '(0)'}), '(usets, axis=0)\n', (5708, 5723), True, 'import numpy as np\n'), ((6076, 6109), 'numpy.fft.ifft', 'np.fft.ifft', (['(X * wavObj.H)'], {'axis': '(0)'}), '(X * wavObj.H, axis=0)\n', (6087, 6109), True, 'import numpy as np\n'), ((6282, 6313), 'numpy.min', 'np.min', (['[iBand + 1, nBands - 1]'], {}), '([iBand + 1, nBands - 1])\n', (6288, 6313), True, 'import numpy as np\n'), ((6457, 6504), 'numpy.roll', 'np.roll', (['waveletCoefficients[:, iBand]', '(-nShift)'], {}), '(waveletCoefficients[:, iBand], -nShift)\n', (6464, 6504), True, 'import numpy as np\n'), ((8332, 8373), 'numpy.where', 'np.where', (['(badBands & suspectBandIndicator)'], {}), '(badBands & suspectBandIndicator)\n', (8340, 8373), True, 'import numpy as np\n'), ((8914, 8956), 'numpy.insert', 'np.insert', (['vec', '(0)', 'vec[nSamples - window:]'], {}), '(vec, 0, vec[nSamples - window:])\n', (8923, 8956), True, 'import numpy as np\n'), ((8977, 9010), 'numpy.append', 'np.append', (['vecCirc', 'vec[0:window]'], {}), '(vecCirc, vec[0:window])\n', (8986, 9010), True, 'import numpy as np\n'), ((9330, 9359), 'numpy.abs', 'np.abs', (['(vecCirc - medianValue)'], {}), '(vecCirc - medianValue)\n', (9336, 9359), True, 'import numpy as np\n'), ((10780, 10801), 'numpy.insert', 'np.insert', (['h0', '(0)', '(0.0)'], {}), '(h0, 0, 0.0)\n', (10789, 10801), True, 'import numpy as np\n'), ((10804, 10822), 'numpy.append', 'np.append', (['h0', '(0.0)'], {}), '(h0, 0.0)\n', (10813, 10822), True, 'import numpy as np\n'), ((10853, 10874), 'numpy.insert', 'np.insert', (['negp', '(0)', '(0)'], {}), '(negp, 0, 0)\n', (10862, 10874), True, 'import numpy as 
np\n'), ((10877, 10892), 'numpy.append', 'np.append', (['p', '(0)'], {}), '(p, 0)\n', (10886, 10892), True, 'import numpy as np\n'), ((10923, 10944), 'numpy.insert', 'np.insert', (['negp', '(0)', '(0)'], {}), '(negp, 0, 0)\n', (10932, 10944), True, 'import numpy as np\n'), ((10947, 10962), 'numpy.append', 'np.append', (['p', '(0)'], {}), '(p, 0)\n', (10956, 10962), True, 'import numpy as np\n'), ((11144, 11155), 'numpy.poly', 'np.poly', (['qt'], {}), '(qt)\n', (11151, 11155), True, 'import numpy as np\n'), ((11167, 11179), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (11174, 11179), True, 'import numpy as np\n'), ((11935, 11958), 'numpy.fix', 'np.fix', (['(searchLen / 2.0)'], {}), '(searchLen / 2.0)\n', (11941, 11958), True, 'import numpy as np\n'), ((12406, 12443), 'numpy.flip', 'np.flip', (['(s[:, iBand] * s[:, iBand])', '(0)'], {}), '(s[:, iBand] * s[:, iBand], 0)\n', (12413, 12443), True, 'import numpy as np\n'), ((12478, 12501), 'numpy.flip', 'np.flip', (['s[:, iBand]', '(0)'], {}), '(s[:, iBand], 0)\n', (12485, 12501), True, 'import numpy as np\n'), ((12743, 12758), 'numpy.sqrt', 'np.sqrt', (['normTS'], {}), '(normTS)\n', (12750, 12758), True, 'import numpy as np\n'), ((13196, 13209), 'numpy.mod', 'np.mod', (['i', '(10)'], {}), '(i, 10)\n', (13202, 13209), True, 'import numpy as np\n'), ((14020, 14035), 'numpy.mean', 'np.mean', (['sesMax'], {}), '(sesMax)\n', (14027, 14035), True, 'import numpy as np\n'), ((14037, 14051), 'numpy.std', 'np.std', (['sesMax'], {}), '(sesMax)\n', (14043, 14051), True, 'import numpy as np\n'), ((1902, 1918), 'numpy.array', 'np.array', (['fluxTS'], {}), '(fluxTS)\n', (1910, 1918), True, 'import numpy as np\n'), ((4186, 4206), 'numpy.ones', 'np.ones', (['(nSamples,)'], {}), '((nSamples,))\n', (4193, 4206), True, 'import numpy as np\n'), ((4228, 4248), 'numpy.ones', 'np.ones', (['(nSamples,)'], {}), '((nSamples,))\n', (4235, 4248), True, 'import numpy as np\n'), ((7131, 7149), 'numpy.power', 'np.power', (['(2)', 'iBand'], {}), '(2, iBand)\n', (7139, 7149), True, 'import numpy as np\n'), ((8146, 8182), 'numpy.logical_not', 'np.logical_not', (['suspectBandIndicator'], {}), '(suspectBandIndicator)\n', (8160, 8182), True, 'import numpy as np\n'), ((8837, 8854), 'numpy.mod', 'np.mod', (['window', '(2)'], {}), '(window, 2)\n', (8843, 8854), True, 'import numpy as np\n'), ((9209, 9237), 'scipy.signal.medfilt', 'sig.medfilt', (['vecCirc', 'window'], {}), '(vecCirc, window)\n', (9220, 9237), True, 'import scipy.signal as sig\n'), ((9286, 9311), 'numpy.zeros', 'np.zeros', (['(nSamplesCirc,)'], {}), '((nSamplesCirc,))\n', (9294, 9311), True, 'import numpy as np\n'), ((10142, 10156), 'numpy.median', 'np.median', (['vec'], {}), '(vec)\n', (10151, 10156), True, 'import numpy as np\n'), ((10400, 10411), 'numpy.log2', 'np.log2', (['fN'], {}), '(fN)\n', (10407, 10411), True, 'import numpy as np\n'), ((12353, 12384), 'numpy.min', 'np.min', (['[iBand + 1, nBands - 1]'], {}), '([iBand + 1, nBands - 1])\n', (12359, 12384), True, 'import numpy as np\n'), ((14098, 14112), 'numpy.sqrt', 'np.sqrt', (['durat'], {}), '(durat)\n', (14105, 14112), True, 'import numpy as np\n'), ((2763, 2789), 'numpy.arange', 'np.arange', (['(0)', 'filterLength'], {}), '(0, filterLength)\n', (2772, 2789), True, 'import numpy as np\n'), ((6387, 6414), 'numpy.power', 'np.power', (['(2)', '(shiftIndex - 1)'], {}), '(2, shiftIndex - 1)\n', (6395, 6414), True, 'import numpy as np\n'), ((10255, 10280), 'numpy.abs', 'np.abs', (['(vec - medianValue)'], {}), '(vec - medianValue)\n', (10261, 10280), True, 
'import numpy as np\n'), ((11239, 11254), 'numpy.power', 'np.power', (['h0', '(2)'], {}), '(h0, 2)\n', (11247, 11254), True, 'import numpy as np\n'), ((6351, 6378), 'numpy.power', 'np.power', (['(2)', '(shiftIndex - 1)'], {}), '(2, shiftIndex - 1)\n', (6359, 6378), True, 'import numpy as np\n'), ((7778, 7802), 'numpy.arange', 'np.arange', (['(1)', '(nBands + 1)'], {}), '(1, nBands + 1)\n', (7787, 7802), True, 'import numpy as np\n'), ((8025, 8061), 'numpy.logical_not', 'np.logical_not', (['suspectBandIndicator'], {}), '(suspectBandIndicator)\n', (8039, 8061), True, 'import numpy as np\n'), ((10430, 10441), 'numpy.log2', 'np.log2', (['wN'], {}), '(wN)\n', (10437, 10441), True, 'import numpy as np\n')] |
import tensorflow as tf
from typing import *
class ImageDataset:
"""A simple wrapper around the tf.data.Dataset class.
"""
def __init__(self, dataset) -> None:
self.data = dataset
@classmethod
def from_pattern(cls, file_pattern: str):
return cls(tf.data.Dataset.list_files(file_pattern))
@classmethod
def from_files(cls, file_list: List[str]):
return cls(tf.data.Dataset.from_tensor_slices(file_list))
@staticmethod
def decode(path, new_height: int, new_width: int) -> Tuple[tf.Tensor, str]:
# load the raw data from the file as a string
img = tf.io.read_file(path)
# convert the compressed string to a 3D uint8 tensor
img = tf.io.decode_jpeg(img, channels=3)
# Resize to destination size
x = tf.image.resize_with_pad(img, new_height, new_width)
return x, path
def prepare(self, height: int, width: int, batch_size: int):
self.data = self.data.map(lambda x: ImageDataset.decode(x, height, width),
num_parallel_calls = tf.data.AUTOTUNE)\
.cache()\
.batch(batch_size)\
.prefetch(buffer_size=tf.data.AUTOTUNE)
return self
# Usage:
# height = 240
# width = 240
# batch_size = 128
# ds = ImageDataset.from_pattern("image_directory/*.jpg")
# ds = ds.prepare(height, width, batch_size)  # the wrapper itself has no .map();
# # prepare() applies the decode/cache/batch/prefetch chain and returns self,
# # after which batches of (image, path) pairs can be iterated via ds.data.
"tensorflow.data.Dataset.from_tensor_slices",
"tensorflow.io.read_file",
"tensorflow.image.resize_with_pad",
"tensorflow.data.Dataset.list_files",
"tensorflow.io.decode_jpeg"
] | [((628, 649), 'tensorflow.io.read_file', 'tf.io.read_file', (['path'], {}), '(path)\n', (643, 649), True, 'import tensorflow as tf\n'), ((726, 760), 'tensorflow.io.decode_jpeg', 'tf.io.decode_jpeg', (['img'], {'channels': '(3)'}), '(img, channels=3)\n', (743, 760), True, 'import tensorflow as tf\n'), ((811, 863), 'tensorflow.image.resize_with_pad', 'tf.image.resize_with_pad', (['img', 'new_height', 'new_width'], {}), '(img, new_height, new_width)\n', (835, 863), True, 'import tensorflow as tf\n'), ((286, 326), 'tensorflow.data.Dataset.list_files', 'tf.data.Dataset.list_files', (['file_pattern'], {}), '(file_pattern)\n', (312, 326), True, 'import tensorflow as tf\n'), ((413, 458), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['file_list'], {}), '(file_list)\n', (447, 458), True, 'import tensorflow as tf\n')] |
from networkx import Graph
import random
import itertools
import dynetworkx as dnx
def __enumerate_subgraphs(g, size_k):
"""Enumerate all size_k connected subgraph of static graph g.
Parameters
----------
g: static graph to take sub-graphs from
size_k: size of sub-graphs
Returns
-------
an iterator for all size_k sub-graphs of g
"""
for v in g.nodes():
v_extension = set(filter(lambda x: x > v, g.neighbors(v)))
yield from __extend_subgraph({v}, v_extension, v, g, size_k)
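# Illustrative use (hypothetical graph): on the path graph g = Graph([(1, 2), (2, 3), (3, 4)]),
# list(__enumerate_subgraphs(g, size_k=3)) yields the two connected triples
# {1, 2, 3} and {2, 3, 4} as subgraph views.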
def __extend_subgraph(v_subgraph, v_extension, v, g, size_k):
"""A recursive helper function for __enumerate_subgraphs() to enumerate all size_k connected sub-graphs
Parameters
----------
v_subgraph: current set of nodes belong to a sub-graph
v_extension: current set of possible nodes to extend v_subgraph
v: starting node of the subgraph
g: static graph to take sub-graphs from
size_k: size of sub-graphs
Returns
-------
an iterator for all size_k sub-graphs of g with v as the starting node
"""
if len(v_subgraph) == size_k:
yield g.subgraph(v_subgraph)
else:
while len(v_extension) != 0:
w = random.choice(tuple(v_extension))
v_extension.remove(w)
v2_extension = v_extension.copy().union(set(filter(lambda x: x > v,
set(g.neighbors(w)) - v_subgraph)))
yield from __extend_subgraph(v_subgraph.copy().union({w}), v2_extension, v, g, size_k)
def count_temporal_motif(G, sequence, delta, get_count_dict=False):
"""Count all temporal motifs.
Parameters
----------
G : the graph to count temporal motif from. This function only supports ImpulseDiGraph
sequence: a sequence of edges specifying the order of the motif. For example ((1,2), (2,3), (2,1)) means
1 -> 2 then 2 -> 3 then 2 -> 1. Note: The motif has to be connected.
delta: time window that specifies the maximum time limit that all edges in a motif must occur within.
get_count_dict: if True, return the motif count dictionary, which provides greater detail about which
motifs appear in a certain type of motif. If False, only returns the total count of all motifs of that type.
Returns
-------
count dictionary or total motif count
Examples
--------
>>> G = dnx.ImpulseDiGraph()
>>> G.add_edge(1, 2, 30)
>>> G.add_edge(3, 2, 30)
>>> G.add_edge(4, 2, 30)
>>> G.add_edge(2, 5, 32)
>>> G.add_edge(2, 5, 33)
>>> dnx.count_temporal_motif(G, ((1, 2), (2, 3), (2, 3)), 3)
3
>>> dnx.count_temporal_motif(G, ((1, 2), (2, 3), (2, 3)), 3, get_count_dict=True)
{(1, 2, 2, 5, 2, 5): 1, (4, 2, 2, 5, 2, 5): 1, (3, 2, 2, 5, 2, 5): 1}
"""
if not isinstance(G, dnx.ImpulseDiGraph):
raise TypeError('This function only supports ImpulseDiGraph')
total_counts = dict()
# this is used later for checking matching sequences
node_sequence = tuple(node for edge in sequence for node in edge)
g = Graph(G.to_networkx_graph())
static_motif = Graph()
static_motif.add_edges_from(sequence)
for sub in __enumerate_subgraphs(g, size_k=len(static_motif.nodes())):
        # A pre-check on whether the nodes in sub can contain the motif would help speed
        # this up; nx.is_isomorphic() is not usable here because it drops a lot of
        # potential subgraphs.
counts = dict()
edges = list()
for u, v in itertools.combinations(sub.nodes(), 2):
edges.extend(G.edges(u, v))
edges.extend(G.edges(v, u))
# Motifs with self-loops won't be duplicated when iterating through subgraphs
for u in sub.nodes():
edges.extend(G.edges(u, u))
edges = sorted(edges, key=lambda x: x[2])
# Count all possible sequences from edges of the static subgraph
start = 0
end = 0
while end < len(edges):
while edges[start][2] + delta < edges[end][2]:
# combine all edges having the same timestamps to decrement counts
tmp_time = edges[start][2]
same_time_edges = list()
while edges[start][2] == tmp_time:
same_time_edges.append(edges[start][0:2])
start += 1
if start >= len(edges):
break
__decrement_counts(same_time_edges, len(sequence), counts)
# combine all edges having the same timestamps to increment counts
tmp_time = edges[end][2]
same_time_edges = list()
while edges[end][2] == tmp_time:
same_time_edges.append(edges[end][0:2])
end += 1
if end >= len(edges):
break
__increment_counts(same_time_edges, len(sequence), counts)
# Extract out count for sequences that are isomorphic to the temporal motifs
for keys in sorted(counts.keys()):
if len(keys) / 2 == len(sequence):
if counts[keys] == 0:
continue
node_map = dict()
isomorphic = True
# check matching sequences (node sequence vs key)
for n in range(len(node_sequence)):
if node_map.get(node_sequence[n]):
if node_map[node_sequence[n]] == keys[n]:
continue
else:
isomorphic = False
break
else:
if not keys[n] in node_map.values():
node_map[node_sequence[n]] = keys[n]
else:
isomorphic = False
break
if isomorphic:
total_counts[keys] = counts[keys]
if get_count_dict:
return total_counts
else:
return sum(total_counts.values())
def __decrement_counts(edges, motif_length, counts):
"""Decrement motif counts when removing edges.
Any potential orders of edges appearing at the same timestamp are ignored
(for example: when timestamp resolution is too high and edges that may happen one after another are combined
into 1 timestamp)
Parameters
----------
edges: list of edges having the same timestamp
motif_length: length of motif
counts: a dictionary containing counts of all motifs
Returns
-------
None
"""
suffixes = sorted(counts.keys(), key=len)
for e in edges:
counts[e] -= 1
for suffix in suffixes:
if len(suffix) / 2 < motif_length - 1:
for e in edges:
if counts.get(e + suffix):
counts[e + suffix] -= counts[suffix]
def __increment_counts(edges, motif_length, counts):
"""Increment motif counts when adding edges.
Any potential orders of edges appearing at the same timestamp are ignored
(for example: when timestamp resolution is too high and edges that may happen one after another are combined
into 1 timestamp)
Parameters
----------
edges: list of edges having the same timestamp
motif_length: length of motif
counts: a dictionary containing counts of all motifs
Returns
-------
None
"""
prefixes = sorted(counts.keys(), key=len, reverse=True)
for prefix in prefixes:
if len(prefix) / 2 < motif_length:
for e in edges:
if counts.get(prefix + e) is None:
counts[prefix + e] = 0
counts[prefix + e] += counts[prefix]
for e in edges:
if counts.get(e) is None:
counts[e] = 0
counts[e] += 1
| [
"networkx.Graph"
] | [((3152, 3159), 'networkx.Graph', 'Graph', ([], {}), '()\n', (3157, 3159), False, 'from networkx import Graph\n')] |
"""Part Two.
Now, take the signal you got on wire a, override wire b to that signal, and
reset the other wires (including wire a). What new signal is ultimately
provided to wire a?
"""
from src.year2015.day07a import get_value
from src.year2015.day07a import HDict
from src.year2015.day07a import process_data
def solve(task):
r"""Process task data to compute 'a' value after 'b' overriding.
Function recursively processes task data to compute wire 'a' value
after wire 'b' signal overriding.
Args:
task: "NOT dq -> dr\nkg OR kf -> kh..."
Returns:
int: wire 'a' signal value
"""
commands = process_data(task)
wires = HDict()
for command in commands:
wires[command.output] = command
wires["b"] = get_value(wires["a"], wires)
return get_value(wires["a"], wires)
| [
"src.year2015.day07a.process_data",
"src.year2015.day07a.get_value",
"src.year2015.day07a.HDict"
] | [((642, 660), 'src.year2015.day07a.process_data', 'process_data', (['task'], {}), '(task)\n', (654, 660), False, 'from src.year2015.day07a import process_data\n'), ((673, 680), 'src.year2015.day07a.HDict', 'HDict', ([], {}), '()\n', (678, 680), False, 'from src.year2015.day07a import HDict\n'), ((769, 797), 'src.year2015.day07a.get_value', 'get_value', (["wires['a']", 'wires'], {}), "(wires['a'], wires)\n", (778, 797), False, 'from src.year2015.day07a import get_value\n'), ((810, 838), 'src.year2015.day07a.get_value', 'get_value', (["wires['a']", 'wires'], {}), "(wires['a'], wires)\n", (819, 838), False, 'from src.year2015.day07a import get_value\n')] |
# _ \ _) | | _) |
# | | | __| __ \ | | __ \ | / __|
# __ < | ( | | | | | | | < \__ \
# _| \_\ _| \___| _| |_| _____| _| _| _| _|\_\ ____/
import warnings
from ._richlinks import Richlinks
warnings.filterwarnings("ignore", category=UserWarning, module='bs4')
| [
"warnings.filterwarnings"
] | [((259, 328), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'UserWarning', 'module': '"""bs4"""'}), "('ignore', category=UserWarning, module='bs4')\n", (282, 328), False, 'import warnings\n')] |