code (string, 20-1.05M chars) | apis (sequence) | extract_api (string, 75-5.24M chars)
---|---|---|
'''
Context:
Given:
linkedlist
Objective:
delete middle node
Definitions:
middle node = any node but the head and tail node
Assumptions:
linkedlist has > 2 nodes
middle node to delete exists in linkedlist
middle node is not the head nor the tail node
Constraints:
only have access to the middle node
access = only node I am given
Example:
linkedlist = a->b->c->d->e->f
result => linkedlist = a->b->d->e->f
Flow:
copy middle_node.next.data into middle_node
middle_node.next = middle_node.next.next
Performance:
Time = O(1)
constant time to copy data and shift pointers
Space = O(1)
constant auxiliary space (the linkedlist itself occupies O(N))
'''
from impl.linkedlist import LinkedList, Node
def delete_middle_node(target_node):
    # Only the target (middle) node is accessible, so copy the next node's
    # data into it and unlink the next node instead of re-linking a prior node.
    target_node.data = target_node.next.data
    target_node.next = target_node.next.next

def main():
    linkedlist = LinkedList()
    numbers = [1, 2, 4, 6, 1, 6, 8, 5]
    linkedlist.initialise_from_array(numbers)
    print("before delete")
    linkedlist.toString()
    # Find a node holding the target value; the tail is skipped because this
    # technique cannot delete the last node.
    target_node = None
    target_node_value = 6
    current_node = linkedlist.head
    while current_node.next is not None:
        if current_node.data == target_node_value:
            target_node = current_node
        current_node = current_node.next
    delete_middle_node(target_node)
    print("after delete")
    linkedlist.toString()
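# Effect on the docstring's example (a->b->c->d->e->f): calling
# delete_middle_node on node c copies d's data into c and unlinks the
# original d, leaving a->b->d->e->f.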
main() | [
"impl.linkedlist.LinkedList"
] | [((904, 916), 'impl.linkedlist.LinkedList', 'LinkedList', ([], {}), '()\n', (914, 916), False, 'from impl.linkedlist import LinkedList, Node\n')] |
#-----------------------------------------------------------------------------
# Copyright (c) 2014-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
from PyInstaller.utils.hooks import collect_data_files
# Hook tested with scikit-image (skimage) 0.9.3 on Mac OS 10.9 and Windows 7
# 64-bit
hiddenimports = ['skimage.draw.draw',
'skimage._shared.geometry',
'skimage._shared.transform',
'skimage.filters.rank.core_cy']
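# Note: hiddenimports lists submodules that PyInstaller's static analysis
# cannot detect (compiled/dynamically imported skimage pieces), so they are
# bundled explicitly; collect_data_files below gathers skimage's non-code
# data files.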
datas = collect_data_files('skimage')
| [
"PyInstaller.utils.hooks.collect_data_files"
] | [((836, 865), 'PyInstaller.utils.hooks.collect_data_files', 'collect_data_files', (['"""skimage"""'], {}), "('skimage')\n", (854, 865), False, 'from PyInstaller.utils.hooks import collect_data_files\n')] |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
This program manages the data file enunciados.csv
author: <NAME>
email: <EMAIL>
last edited: March 2018
"""
__author__ = '<NAME>'
__version__ = "1.0"
import pandas as pd
from PyQt5.QtCore import pyqtSignal, QEvent, QObject
from PyQt5 import QtCore, uic, QtWidgets
from PyQt5.QtCore import (QTranslator, QLibraryInfo, QLocale)
from PyQt5.QtWidgets import QMessageBox, QDesktopWidget
QTCREATORFILE = "enunciados_ui.ui"  # UI file name goes here.
Ui_MainWindow, QtBaseClass = uic.loadUiType(QTCREATORFILE)
def actualizaModelo(self):
model = PandasModel(self.ui.data)
self.ui.tableView.setModel(model)
self.ui.tableView.update()
def clickable(widget):
class Filter(QObject):
clicked = pyqtSignal()
def eventFilter(self, obj, event):
if obj == widget:
if event.type() == QEvent.MouseButtonRelease:
if obj.rect().contains(event.pos()):
self.clicked.emit()
# The developer can opt for .emit(obj) to get the object within the slot.
return True
return False
filter = Filter(widget)
widget.installEventFilter(filter)
return filter.clicked
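# Usage sketch (the real connections are made in Modelo.__init__ below):
#     clickable(some_widget).connect(handler)
# Note: emitting an argument, e.g. self.clicked.emit(obj), would also require
# declaring the signal with a type, e.g. clicked = pyqtSignal(object).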
class PandasModel(QtCore.QAbstractTableModel):
def __init__(self, data, parent=None):
QtCore.QAbstractTableModel.__init__(self, parent)
self._data = data
def rowCount(self, parent=None):
return len(self._data.values)
def columnCount(self, parent=None):
return self._data.columns.size
def data(self, index, role=QtCore.Qt.DisplayRole):
if index.isValid():
if role == QtCore.Qt.DisplayRole:
return QtCore.QVariant(str(
self._data.values[index.row()][index.column()]))
return QtCore.QVariant()
class Modelo(QtWidgets.QMainWindow, Ui_MainWindow):
def __init__(self):
super(Modelo, self).__init__()
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.center()
self.setWindowTitle('Gestor de Enunciados')
self.ui.data = pd.read_csv('enunciados124.csv', sep=';', encoding="ISO-8859-1")
self.ui.sel = 0
clickable(self.ui.txtEnunc).connect(self.limpiartxtenunc)
clickable(self.ui.txtMod).connect(self.limpiartxtmod)
clickable(self.ui.txtListVarIndep).connect(self.limpiartxtlistvarindep)
clickable(self.ui.txtListVarDep).connect(self.limpiartxtlistvardep)
clickable(self.ui.txtRef).connect(self.limpiartxtref)
clickable(self.ui.txtPag).connect(self.limpiartxtpag)
clickable(self.ui.txtNro).connect(self.limpiartxtnro)
self.ui.btnAgregar.clicked.connect(self.agregardatos)
self.ui.btnModificar.clicked.connect(self.modificardatos)
self.ui.btnEliminar.clicked.connect(self.eliminardatos)
self.ui.tableView.clicked.connect(self.click_table)
actualizaModelo(self)
self.ui.btnAgregar.setEnabled(False)
self.ui.btnModificar.setEnabled(False)
self.ui.btnEliminar.setEnabled(False)
print(self.ui.txtEnunc.toPlainText())
def limpiartxtenunc(self):
print(self.ui.txtEnunc.toPlainText())
if self.ui.txtEnunc.toPlainText() == 'Enunciado':
self.ui.txtEnunc.clear()
def limpiartxtmod(self):
if self.ui.txtMod.toPlainText() == 'Modelo':
self.ui.txtMod.clear()
def limpiartxtlistvarindep(self):
if self.ui.txtListVarIndep.toPlainText() == 'Lista de Variables Independientes':
self.ui.txtListVarIndep.clear()
def limpiartxtlistvardep(self):
if self.ui.txtListVarDep.toPlainText() == 'Lista de Variables Dependientes':
self.ui.txtListVarDep.clear()
def limpiartxtref(self):
if self.ui.txtRef.toPlainText() == 'Referencia':
self.ui.txtRef.clear()
def limpiartxtpag(self):
if self.ui.txtPag.text() == 'Página':
self.ui.txtPag.clear()
def limpiartxtnro(self):
print(self.ui.txtNro.text())
if self.ui.txtNro.text() == 'Número del Ejercicio':
self.ui.txtNro.clear()
self.ui.btnAgregar.setEnabled(True)
def agregardatos(self):
self.ui.data = self.ui.data.append({'enunc':self.ui.txtEnunc.toPlainText(),
'mod':self.ui.txtMod.toPlainText(),
'listvarindep':self.ui.txtListVarIndep.toPlainText(),
'listvardep':self.ui.txtListVarDep.toPlainText(),
'ref':self.ui.txtRef.toPlainText(),
                                            'pag':self.ui.txtPag.text(),
                                            'nro':self.ui.txtNro.text(),
}, ignore_index=True)
actualizaModelo(self)
self.ui.data.to_csv('modelos.csv', sep=';', encoding="ISO-8859-1", header=True, index=False)
self.ui.btnAgregar.setEnabled(False)
self.ui.txtEnunc.setPlainText('Enunciado')
self.ui.txtMod.setPlainText('Modelo')
self.ui.txtListVarIndep.setPlainText('Lista de Variables Independientes')
self.ui.txtListVarDep.setPlainText('Lista de Variables Dependientes')
self.ui.txtRef.setPlainText('Referencia')
self.ui.txtPag.setText('Página')
self.ui.txtNro.setText('Número del Ejercicio')
self.ui.txtMod.setFocus()
def center(self):
qr = self.frameGeometry()
cp = QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Mensaje', "¿Esta seguro de salir?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
def click_table(self):
index = self.ui.tableView.selectedIndexes()[0]
self.ui.sel = index.row()
self.ui.txtEnunc.setPlainText(self.ui.data['enunc'][index.row()])
self.ui.txtMod.setPlainText(self.ui.data['mod'][index.row()])
self.ui.txtListVarIndep.setPlainText(self.ui.data['listvarindep'][index.row()])
self.ui.txtListVarDep.setPlainText(self.ui.data['listvardep'][index.row()])
self.ui.txtRef.setPlainText(self.ui.data['ref'][index.row()])
self.ui.txtPag.setText(self.ui.data['pag'][index.row()])
self.ui.txtNro.setText(self.ui.data['nro'][index.row()])
self.ui.btnModificar.setEnabled(True)
self.ui.btnEliminar.setEnabled(True)
def eliminardatos(self):
self.ui.data = self.ui.data.drop(self.ui.sel)
actualizaModelo(self)
self.ui.data.to_csv('modelos.csv', sep=';', encoding="ISO-8859-1", header=True, index=False)
self.ui.btnAgregar.setEnabled(True)
self.ui.btnModificar.setEnabled(False)
self.ui.btnEliminar.setEnabled(False)
self.ui.txtEnunc.clear()
self.ui.txtMod.clear()
self.ui.txtListVarIndep.clear()
self.ui.txtListVarDep.clear()
self.ui.txtRef.clear()
self.ui.txtPag.clear()
self.ui.txtNro.clear()
        self.ui.txtEnunc.setFocus()
def getCSV(self):
'''
        This function opens the CSV file
'''
filePath, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Open file', '/home')
if filePath != "":
print("Dirección", filePath) #Opcional imprimir la dirección del archivo
self.ui.df = pd.read_csv(str(filePath))
def modificardatos(self):
        self.ui.data.loc[self.ui.sel, 'enunc'] = self.ui.txtEnunc.toPlainText()
self.ui.data.loc[self.ui.sel, 'mod'] = self.ui.txtMod.toPlainText()
self.ui.data.loc[self.ui.sel, 'listvarindep'] = self.ui.txtListVarIndep.toPlainText()
self.ui.data.loc[self.ui.sel, 'listvardep'] = self.ui.txtListVarDep.toPlainText()
self.ui.data.loc[self.ui.sel, 'ref'] = self.ui.txtRef.toPlainText()
        self.ui.data.loc[self.ui.sel, 'pag'] = self.ui.txtPag.text()
        self.ui.data.loc[self.ui.sel, 'nro'] = self.ui.txtNro.text()
actualizaModelo(self)
self.ui.data.to_csv('modelos.csv', sep=';', encoding="ISO-8859-1", header=True, index=False)
self.ui.btnAgregar.setEnabled(True)
self.ui.btnModificar.setEnabled(False)
self.ui.btnEliminar.setEnabled(False)
self.ui.txtEnunc.clear()
self.ui.txtMod.clear()
self.ui.txtListVarIndep.clear()
self.ui.txtListVarDep.clear()
self.ui.txtRef.clear()
self.ui.txtPag.clear()
self.ui.txtNro.clear()
self.ui.txtEnunc.setFocus()
if __name__ == '__main__':
import sys
APP = QtWidgets.QApplication(sys.argv)
QT_TRADUCTOR = QTranslator()
QT_TRADUCTOR.load("qtbase_" + QLocale.system().name(),
QLibraryInfo.location(QLibraryInfo.TranslationsPath))
APP.installTranslator(QT_TRADUCTOR)
WINDOW = Modelo()
WINDOW.show()
sys.exit(APP.exec_())
| [
"PyQt5.QtCore.QAbstractTableModel.__init__",
"PyQt5.QtCore.pyqtSignal",
"pandas.read_csv",
"PyQt5.QtCore.QVariant",
"PyQt5.uic.loadUiType",
"PyQt5.QtWidgets.QMessageBox.question",
"PyQt5.QtCore.QLocale.system",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtCore.QLibraryInfo.location",
"PyQt5.QtCore.QTranslator",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"PyQt5.QtWidgets.QDesktopWidget"
] | [((536, 565), 'PyQt5.uic.loadUiType', 'uic.loadUiType', (['QTCREATORFILE'], {}), '(QTCREATORFILE)\n', (550, 565), False, 'from PyQt5 import QtCore, uic, QtWidgets\n'), ((8911, 8943), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (8933, 8943), False, 'from PyQt5 import QtCore, uic, QtWidgets\n'), ((8964, 8977), 'PyQt5.QtCore.QTranslator', 'QTranslator', ([], {}), '()\n', (8975, 8977), False, 'from PyQt5.QtCore import QTranslator, QLibraryInfo, QLocale\n'), ((775, 787), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', ([], {}), '()\n', (785, 787), False, 'from PyQt5.QtCore import pyqtSignal, QEvent, QObject\n'), ((1379, 1428), 'PyQt5.QtCore.QAbstractTableModel.__init__', 'QtCore.QAbstractTableModel.__init__', (['self', 'parent'], {}), '(self, parent)\n', (1414, 1428), False, 'from PyQt5 import QtCore, uic, QtWidgets\n'), ((1869, 1886), 'PyQt5.QtCore.QVariant', 'QtCore.QVariant', ([], {}), '()\n', (1884, 1886), False, 'from PyQt5 import QtCore, uic, QtWidgets\n'), ((2168, 2232), 'pandas.read_csv', 'pd.read_csv', (['"""enunciados124.csv"""'], {'sep': '""";"""', 'encoding': '"""ISO-8859-1"""'}), "('enunciados124.csv', sep=';', encoding='ISO-8859-1')\n", (2179, 2232), True, 'import pandas as pd\n'), ((5789, 5907), 'PyQt5.QtWidgets.QMessageBox.question', 'QMessageBox.question', (['self', '"""Mensaje"""', '"""¿Esta seguro de salir?"""', '(QMessageBox.Yes | QMessageBox.No)', 'QMessageBox.No'], {}), "(self, 'Mensaje', '¿Esta seguro de salir?', QMessageBox\n .Yes | QMessageBox.No, QMessageBox.No)\n", (5809, 5907), False, 'from PyQt5.QtWidgets import QMessageBox, QDesktopWidget\n'), ((7493, 7558), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QtWidgets.QFileDialog.getOpenFileName', (['self', '"""Open file"""', '"""/home"""'], {}), "(self, 'Open file', '/home')\n", (7530, 7558), False, 'from PyQt5 import QtCore, uic, QtWidgets\n'), ((9059, 9111), 'PyQt5.QtCore.QLibraryInfo.location', 'QLibraryInfo.location', (['QLibraryInfo.TranslationsPath'], {}), '(QLibraryInfo.TranslationsPath)\n', (9080, 9111), False, 'from PyQt5.QtCore import QTranslator, QLibraryInfo, QLocale\n'), ((9012, 9028), 'PyQt5.QtCore.QLocale.system', 'QLocale.system', ([], {}), '()\n', (9026, 9028), False, 'from PyQt5.QtCore import QTranslator, QLibraryInfo, QLocale\n'), ((5634, 5650), 'PyQt5.QtWidgets.QDesktopWidget', 'QDesktopWidget', ([], {}), '()\n', (5648, 5650), False, 'from PyQt5.QtWidgets import QMessageBox, QDesktopWidget\n')] |
import datetime
def current_time_string():
timing = str(datetime.datetime.now())
current_time = timing.split()[1]
return "[" + current_time + "] "
| [
"datetime.datetime.now"
] | [((62, 85), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (83, 85), False, 'import datetime\n')] |
import torch
import sys
from torch.utils import data
from torch.utils.data.sampler import SubsetRandomSampler
from torch.utils.tensorboard import SummaryWriter
from torch.cuda.amp import autocast,GradScaler
import torchvision as tv
import torch.nn as nn
import matplotlib.pyplot as plt
import subprocess
import os
import numpy as np
from datetime import datetime
import argparse
import zipfile
from pytorch_msssim import ssim, ms_ssim, SSIM, MS_SSIM
from nets.XLFMNet import XLFMNet
import utils.pytorch_shot_noise as pytorch_shot_noise
from utils.XLFMDataset import XLFMDatasetFull
from utils.misc_utils import *
# Arguments
parser = argparse.ArgumentParser()
parser.add_argument('--data_folder', nargs='?', default= '')
parser.add_argument('--data_folder_test', nargs='?', default='')
parser.add_argument('--lenslet_file', nargs='?', default= "lenslet_centers_python.txt")
parser.add_argument('--files_to_store', nargs='+', default=['mainTrainXLFMNet.py','mainTrainSLNet.py','mainCreateDataset.py','utils/XLFMDataset.py','utils/misc_utils.py','nets/extra_nets.py','nets/XLFMNet.py','nets/SLNet.py'])
parser.add_argument('--psf_file', nargs='?', default= "PSF_2.5um_processed.mat")
parser.add_argument('--prefix', nargs='?', default= "fishy")
parser.add_argument('--checkpoint', nargs='?', default= "")
parser.add_argument('--checkpoint_XLFMNet', nargs='?', default= "")
parser.add_argument('--checkpoint_SLNet', nargs='?', default="")
parser.add_argument('--images_to_use', nargs='+', type=int, default=list(range(0,50,1)))
parser.add_argument('--images_to_use_test', nargs='+', type=int, default=list(range(0,10,1)))
parser.add_argument('--batch_size', type=int, default=2)
parser.add_argument('--max_epochs', type=int, default=501)
parser.add_argument('--validation_split', type=float, default=0.1)
parser.add_argument('--eval_every', type=int, default=25)
parser.add_argument('--shuffle_dataset', type=int, default=1)
parser.add_argument('--learning_rate', type=float, default=0.0001)
parser.add_argument('--use_bias', type=int, default=0)
parser.add_argument('--use_random_shifts', nargs='+', type=int, default=0, help='Randomize the temporal shifts to use? 0 or 1')
# Noise arguments
parser.add_argument('--add_noise', type=int, default=0, help='Apply noise to images? 0 or 1')
parser.add_argument('--signal_power_max', type=float, default=30**2, help='Max signal value to control signal to noise ratio when applying noise.')
parser.add_argument('--signal_power_min', type=float, default=60**2, help='Min signal value to control signal to noise ratio when applying noise.')
parser.add_argument('--norm_type', type=float, default=1, help='Normalization type, see the normalize_type function for more info.')
parser.add_argument('--dark_current', type=float, default=106, help='Dark current value of camera.')
parser.add_argument('--dark_current_sparse', type=float, default=0, help='Dark current value of camera.')
parser.add_argument('--use_sparse', type=int, default=1)
parser.add_argument('--use_img_loss', type=float, default=1.0)
parser.add_argument('--unet_depth', type=int, default=2)
parser.add_argument('--unet_wf', type=int, default=7)
parser.add_argument('--unet_drop_out', type=float, default=0)
parser.add_argument('--output_path', nargs='?', default='')
parser.add_argument('--main_gpu', nargs='+', type=int, default=[1])
parser.add_argument('--gpu_repro', nargs='+', type=int, default=[])
parser.add_argument('--n_split', type=int, default=20)
debug = False
n_threads = 0
args = parser.parse_args()
if len(args.main_gpu)>0:
device = "cuda:" + str(args.main_gpu[0])
device_repro = "cuda:" + str(args.main_gpu[0]+1)
else:
device = "cuda"
device_repro = "cuda"
args.main_gpu = [1]
args.gpu_repro = [1]
if len(args.gpu_repro)==0:
device_repro = "cpu"
else:
device_repro = "cuda:" + str(args.gpu_repro[0])
if n_threads!=0:
torch.set_num_threads(n_threads)
torch.manual_seed(261290)
# Load previous checkpoints
if len(args.checkpoint_XLFMNet)>0:
checkpoint_XLFMNet = torch.load(args.checkpoint_XLFMNet, map_location=device)
args_deconv = checkpoint_XLFMNet['args']
args.unet_depth = args_deconv.unet_depth
args.unet_wf = args_deconv.unet_wf
if len(args.checkpoint_SLNet)>0:
checkpoint_SL = torch.load(args.checkpoint_SLNet, map_location=device)
argsSLNet = checkpoint_SL['args']
args.temporal_shifts = checkpoint_SL['args'].temporal_shifts
# If there is no output_path specified, write with the dataset and SLNet training
if len(args.output_path)==0:
head, tail = os.path.split(args.checkpoint_SLNet)
args.output_path = head
# Get commit number
label = subprocess.check_output(["git", "describe", "--always"]).strip()
save_folder = args.output_path + '/XLFMNet_train__' + datetime.now().strftime('%Y_%m_%d__%H:%M:%S') + '__' + str(label) + '_commit__' + args.prefix
# Get size of the volume
subimage_shape = argsSLNet.subimage_shape
# if args.train_who==2:
# args.n_frames = 1
dataset = XLFMDatasetFull(args.data_folder, args.lenslet_file, subimage_shape, img_shape=[2160,2160],
images_to_use=args.images_to_use, divisor=1, isTiff=True, n_frames_net=argsSLNet.n_frames, lenslets_offset=0,
load_all=True, load_vols=True, load_sparse=True, temporal_shifts=args.temporal_shifts, use_random_shifts=args.use_random_shifts, eval_video=False)
dataset_test = XLFMDatasetFull(args.data_folder_test, args.lenslet_file, subimage_shape, img_shape=[2160,2160],
images_to_use=args.images_to_use_test, divisor=1, isTiff=True, n_frames_net=argsSLNet.n_frames, lenslets_offset=0,
load_all=True, load_vols=True, load_sparse=True, temporal_shifts=args.temporal_shifts, use_random_shifts=args.use_random_shifts, eval_video=False)
n_depths = dataset.get_n_depths()
args.output_shape = subimage_shape + [n_depths]
# Get normalization values
max_images,max_images_sparse,max_volumes = dataset.get_max()
if args.use_sparse:
# Use statistics of sparse images
mean_imgs,std_images,mean_imgs_sparse,std_images_sparse,mean_vols,std_vols = dataset.get_statistics()
else:
mean_imgs,std_images,mean_vols,std_vols = dataset.get_statistics()
n_lenslets = dataset.len_lenslets()
# Creating data indices for training and validation splits:
dataset_size = len(dataset)
indices = list(range(dataset_size))
split = int(np.ceil(args.validation_split * dataset_size))
if args.shuffle_dataset :
# np.random.seed(261290)
np.random.shuffle(indices)
train_indices, val_indices = indices[split:], indices[:split]
# Create dataloaders
train_sampler = SubsetRandomSampler(train_indices)
valid_sampler = SubsetRandomSampler(val_indices)
data_loaders = \
{'train' : \
data.DataLoader(dataset, batch_size=args.batch_size,
sampler=train_sampler, pin_memory=False, num_workers=n_threads), \
'val' : \
data.DataLoader(dataset, batch_size=args.batch_size,
sampler=valid_sampler, pin_memory=False, num_workers=n_threads), \
'test' : \
data.DataLoader(dataset_test, batch_size=1, pin_memory=False, num_workers=n_threads, shuffle=True)
}
def init_weights(m):
if type(m) == nn.Conv2d or type(m) == nn.Conv3d or type(m) == nn.ConvTranspose2d:
torch.nn.init.xavier_uniform(m.weight)
# torch.nn.init.kaiming_uniform_(m.weight,a=math.sqrt(2))
# m.weight.data *= 20
# m.weight.data = m.weight.data.abs()
# m.bias.data.fill_(0.01)
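# Note: torch.nn.init.xavier_uniform is deprecated in current PyTorch
# releases; the in-place torch.nn.init.xavier_uniform_(m.weight) is the
# supported form.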
unet_settings = {'depth':args.unet_depth, 'wf':args.unet_wf, 'drop_out':args.unet_drop_out}
args.unet_settings = unet_settings
# Create net
net = XLFMNet(n_lenslets, args.output_shape, n_temporal_frames=dataset.n_frames, dataset=dataset, use_bias=args.use_bias, unet_settings=unet_settings).to(device)
net.apply(init_weights)
# Trainable parameters
# mean_imgs = mean_imgs_sparse
# std_images = std_images_sparse
trainable_params = [{'params': net.deconv.parameters()}]
params = sum([np.prod(p.size()) for p in net.parameters()])
# Normalization statistics
stats = {'norm_type':args.norm_type, 'norm_type_img':args.norm_type, 'mean_imgs':mean_imgs, 'std_images':std_images, 'max_images':max_images,
'mean_vols':mean_vols, 'std_vols':std_vols, 'max_vols':max_volumes}
# Create loss function and optimizer
loss = nn.MSELoss()
if args.use_img_loss>0:
loss_img = nn.MSELoss()
# reuse the gaussian kernel with SSIM & MS_SSIM.
ssim_module = SSIM(data_range=1, size_average=True, channel=n_lenslets).to(device_repro)
optimizer = torch.optim.Adam(trainable_params, lr=args.learning_rate)
# timers
start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)
# create gradient scaler for mixed precision training
scaler = GradScaler()
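# autocast/GradScaler implement PyTorch automatic mixed precision. The
# canonical pattern, which the training loop below follows, is roughly:
#     with autocast():
#         loss = criterion(net(x), y)   # placeholder names, not this script's
#     scaler.scale(loss).backward()
#     scaler.step(optimizer)
#     scaler.update()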
start_epoch = 0
if len(args.checkpoint_XLFMNet)>0:
net.load_state_dict(checkpoint_XLFMNet['model_state_dict'], strict=False)
optimizer.load_state_dict(checkpoint_XLFMNet['optimizer_state_dict'])
start_epoch = checkpoint_XLFMNet['epoch']-1
save_folder += '_C'
if len(args.checkpoint_SLNet)>0 and dataset.n_frames>1:
net.tempConv.load_state_dict(checkpoint_SL['model_state_dict'])
stats_SLNet = checkpoint_SL['statistics']
stats['norm_type_img'] = checkpoint_SL['args'].norm_type
stats['mean_imgs'] = stats_SLNet[0]
stats['std_images'] = stats_SLNet[1]
else:
net.tempConv = None
# Create summary writer to log stuff
if debug is False:
writer = SummaryWriter(log_dir=save_folder)
writer.add_text('arguments',str(vars(args)),0)
writer.flush()
writer.add_scalar('params/', params)
# Store files
zf = zipfile.ZipFile(save_folder + "/files.zip", "w")
for ff in args.files_to_store:
zf.write(ff)
zf.close()
import time
if len(args.gpu_repro)>0:
S = time.time()
# Load PSF and compute OTF
n_split = args.n_split
if debug:
n_split=60
OTF,psf_shape = load_PSF_OTF(args.psf_file, args.output_shape, n_depths=n_depths, n_split=n_split, device="cpu")
OTF = OTF.to(device)
gc.collect()
torch.cuda.empty_cache()
E = time.time()
print(E - S)
gc.collect()
torch.cuda.empty_cache()
OTF_options = {'OTF':OTF,
'psf_shape':psf_shape,
'dataset':dataset,
'n_split':n_split,
'loss_img':loss_img}
net.OTF_options = OTF_options
# Update normalization stats for SLNet inside network
net.stats = stats
if len(args.main_gpu)>1:
net = nn.DataParallel(net, args.main_gpu, args.main_gpu[0])
print("Let's use", torch.cuda.device_count(), "GPUs!")
lr = args.learning_rate
# Loop over epochs
for epoch in range(start_epoch, args.max_epochs):
for curr_train_stage in ['train','val','test']:
# Grab current data_loader
curr_loader = data_loaders[curr_train_stage]
curr_loader_len = curr_loader.sampler.num_samples if curr_train_stage=='test' else len(curr_loader.batch_sampler.sampler.indices)
if curr_train_stage=='train':
net.train()
net.tempConv.eval()
torch.set_grad_enabled(True)
if curr_train_stage=='val' or curr_train_stage=='test':
if epoch%args.eval_every!=0:
continue
net.eval()
torch.set_grad_enabled(False)
# Store loss
mean_volume_loss = 0
max_grad = 0
mean_psnr = 0
mean_time = 0
mean_repro = 0
mean_repro_ssim = 0
# Training
for ix,(curr_img_stack, local_volumes) in enumerate(curr_loader):
# If empty or nan in volumes, don't use these for training
if curr_img_stack.float().sum()==0 or torch.isnan(curr_img_stack.float().max()):
continue
# Normalize volumes if ill posed
if local_volumes.float().max()>=20000:
local_volumes = local_volumes.float()
local_volumes = local_volumes / local_volumes.max() * 4500.0
local_volumes = local_volumes.half()
# curr_img_stack returns both the dense and the sparse images, here we only need the sparse.
if net.tempConv is None:
assert len(curr_img_stack.shape)>=5, "If sparse is used curr_img_stack should contain both images, dense and sparse stacked in the last dim."
curr_img_sparse = curr_img_stack[...,-1].clone().to(device)
curr_img_stack = curr_img_stack[...,-1].clone().to(device)
else:
curr_img_sparse = curr_img_stack[...,-1].clone().to(device)
curr_img_stack = curr_img_stack[...,0].clone().to(device)
curr_img_stack = curr_img_stack.half()
curr_img_stack -= args.dark_current
curr_img_stack = F.relu(curr_img_stack).detach()
if args.add_noise==1 and curr_train_stage!='test':
curr_max = curr_img_stack.max()
# Update new signal power
signal_power = (args.signal_power_min + (args.signal_power_max-args.signal_power_min) * torch.rand(1)).item()
curr_img_stack = signal_power/curr_max * curr_img_stack
# Add noise
curr_img_stack = pytorch_shot_noise.add_camera_noise(curr_img_stack)
curr_img_stack = curr_img_stack.float().to(device)
local_volumes = local_volumes.half().to(device)
            # if conversion to half precision messed up the volumes, continue
if torch.isinf(local_volumes.max()):
curr_loader_len -= local_volumes.shape[0]
continue
# Images are already normalized from mainCreateDataset.py
# curr_img_stack, local_volumes = normalize_type(curr_img_stack, local_volumes, args.norm_type, mean_imgs, std_images, mean_vols, std_vols, max_images, max_volumes)
_, local_volumes = normalize_type(curr_img_stack, local_volumes, stats['norm_type'], stats['mean_imgs'], stats['std_images'], stats['mean_vols'], stats['std_vols'], stats['max_images'], stats['max_vols'])
curr_img_stack, _ = normalize_type(curr_img_stack, local_volumes, stats['norm_type_img'], stats['mean_imgs'], stats['std_images'], stats['mean_vols'], stats['std_vols'], stats['max_images'], stats['max_vols'])
# curr_img_sparse, _ = normalize_type(curr_img_sparse, local_volumes, args.norm_type, mean_imgs_sparse, std_images_sparse, mean_vols, std_vols, max_images, max_volumes)
start.record()
if curr_train_stage=='train':
net.zero_grad()
optimizer.zero_grad()
#
with autocast():
# Run batch of predicted images in discriminator
prediction,sparse_prediction = net(curr_img_stack)
if not all([prediction.shape[i] == subimage_shape[i-2] for i in range(2,4)]):
diffY = (subimage_shape[0] - prediction.size()[2])
diffX = (subimage_shape[1] - prediction.size()[3])
prediction = F.pad(prediction, (diffX // 2, diffX - diffX // 2,
diffY // 2, diffY - diffY // 2))
# Use only first sparse image, corresponding to the selected volume.
if net_get_params(net).n_frames>1:
curr_img_sparse = curr_img_sparse[:,0,...].unsqueeze(1)
# Extract lenslet images
curr_img_sparse = dataset.extract_views(curr_img_sparse, dataset.lenslet_coords, dataset.subimage_shape)[:,0,...]
# curr_img_sparse, _ = normalize_type(curr_img_sparse, local_volumes, args.norm_type, mean_imgs_sparse, std_images_sparse, mean_vols, std_vols, max_images, max_volumes, inverse=True)
volume_loss = loss(local_volumes, prediction)
if curr_train_stage=='test' and len(args.gpu_repro)>0:
with torch.no_grad():
reproj_loss, reproj,curr_views,_ = reprojection_loss(sparse_prediction, prediction.float(), OTF, psf_shape, dataset, n_split, device_repro)
mean_repro += reproj_loss.item()
mean_repro_ssim += ssim_module((sparse_prediction/sparse_prediction.max()).to(device_repro).float(), (reproj/reproj.max()).float().to(device_repro)).cpu().item()
mean_volume_loss += volume_loss.mean().detach().item()
if curr_train_stage=='train':
scaler.scale(volume_loss).backward()
scaler.step(optimizer)
scaler.update()
# Record training time
end.record()
torch.cuda.synchronize()
end_time = start.elapsed_time(end)
mean_time += end_time
# detach tensors
local_volumes = local_volumes.detach().cpu().float()
prediction = prediction.detach().cpu().float()
curr_img_sparse = curr_img_sparse.detach()
curr_img_stack = curr_img_stack.detach()
# Normalize back
# curr_img_stack, local_volumes = normalize_type(curr_img_stack, local_volumes, args.norm_type, mean_imgs, std_images, mean_vols, std_vols, max_images, max_volumes, inverse=True)
# _, prediction = normalize_type(curr_img_stack, prediction, args.norm_type, mean_imgs, std_images, mean_vols, std_vols, max_images, max_volumes, inverse=True)
if torch.isinf(torch.tensor(mean_volume_loss)):
print('inf')
curr_img_sparse /= curr_img_sparse.max()
local_volumes -= local_volumes.min()
prediction -= prediction.min()
prediction /= max_volumes
local_volumes /= max_volumes
curr_img_stack -= curr_img_stack.min()
curr_img_stack /= curr_img_stack.max()
# mean_psnr += psnr(local_volumes.detach(), prediction.detach())
mean_volume_loss /= curr_loader_len
mean_psnr = 20 * torch.log10(max_volumes / torch.sqrt(torch.tensor(mean_volume_loss))) #/= curr_loader_len
mean_time /= curr_loader_len
mean_repro /= curr_loader_len
mean_repro_ssim /= curr_loader_len
# if epoch%args.eval_every==0:
# plt.imshow(volume_2_projections(prediction.permute(0,2,3,1).unsqueeze(1))[0,0,...].float().detach().cpu().numpy())
# plt.show()
if epoch%args.eval_every==0:
# plot_param_grads(writer, net, epoch, curr_train_stage+'_')
# plt.subplot(1,3,1)
# plt.imshow(curr_views[0,10,...].cpu().detach().numpy())
# plt.subplot(1,3,2)
# plt.imshow(reproj[0,10,...].cpu().detach().numpy())
# plt.subplot(1,3,3)
# plt.imshow((curr_views-reproj)[0,10,...].abs().cpu().detach().float().numpy())
# plt.title(str(image_loss))
# plt.show()
if local_volumes.shape == prediction.shape:
writer.add_image('max_GT_'+curr_train_stage, tv.utils.make_grid(volume_2_projections(local_volumes.permute(0,2,3,1).unsqueeze(1))[0,...], normalize=True, scale_each=True), epoch)
writer.add_image('sum_GT_'+curr_train_stage, tv.utils.make_grid(volume_2_projections(local_volumes.permute(0,2,3,1).unsqueeze(1), proj_type=torch.sum)[0,...], normalize=True, scale_each=True), epoch)
writer.add_image('max_prediction_'+curr_train_stage, tv.utils.make_grid(volume_2_projections(prediction.permute(0,2,3,1).unsqueeze(1))[0,...], normalize=True, scale_each=True), epoch)
writer.add_image('sum_prediction_'+curr_train_stage, tv.utils.make_grid(volume_2_projections(prediction.permute(0,2,3,1).unsqueeze(1), proj_type=torch.sum)[0,...], normalize=True, scale_each=True), epoch)
# input_noisy_grid = tv.utils.make_grid(curr_img_stack[0,0,...].float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
sparse_prediction = sparse_prediction- sparse_prediction.min()
sparse_prediction /= sparse_prediction.max()
input_intermediate_sparse_grid = tv.utils.make_grid(sparse_prediction[0,10,...].float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
input_GT_sparse_grid = tv.utils.make_grid(curr_img_sparse[0,10,...].float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
if curr_train_stage=='test' and len(args.gpu_repro)>0:
repro_grid = tv.utils.make_grid(reproj[0,...].sum(0).float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
writer.add_image('reproj_'+curr_train_stage, repro_grid, epoch)
repro_grid = tv.utils.make_grid(curr_img_sparse[0,...].sum(0).float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
writer.add_image('reproj_GT_'+curr_train_stage, repro_grid, epoch)
repro_grid = tv.utils.make_grid((curr_views-reproj)[0,10,...].abs().float().unsqueeze(0).cpu().data.detach(), normalize=True, scale_each=False)
writer.add_image('reproj_error_'+curr_train_stage, repro_grid, epoch)
writer.add_scalar('reproj/ssim/'+curr_train_stage, mean_repro_ssim, epoch)
writer.add_scalar('reproj/Loss/'+curr_train_stage, mean_repro, epoch)
# volGTHist,volPredHist,inputHist = compute_histograms(local_volumes[0,...].float(), prediction[0,...].float(), curr_img_stack[0,...].float())
# writer.add_image('input_noisy_'+curr_train_stage, input_noisy_grid, epoch)
writer.add_image('image_intermediate_sparse'+curr_train_stage, input_intermediate_sparse_grid, epoch)
writer.add_image('image_intermediate_sparse_GT'+curr_train_stage, input_GT_sparse_grid, epoch)
writer.add_scalar('Loss/'+curr_train_stage, mean_volume_loss, epoch)
writer.add_scalar('psnr/'+curr_train_stage, mean_psnr, epoch)
writer.add_scalar('times/'+curr_train_stage, mean_time, epoch)
# writer.add_scalar('grads/'+curr_train_stage, max_grad, epoch)
writer.add_scalar('lr/'+curr_train_stage, lr, epoch)
# if epoch%2==0:
print(str(epoch) + ' ' + curr_train_stage + " loss: " + str(mean_volume_loss) + " time: " + str(mean_time))
if os.path.isfile(main_folder+'exit_file.txt'):
torch.cuda.empty_cache()
sys.exit(0)
if epoch%25==0:
torch.save({
'epoch': epoch,
'args' : args,
'args_SLNet' : argsSLNet,
'statistics' : stats,
'model_state_dict': net_get_params(net).state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'scaler_state_dict' : scaler.state_dict(),
'loss': mean_volume_loss},
save_folder + '/model_')#+str(epoch))
if epoch%50==0:
torch.save({
'epoch': epoch,
'args' : args,
'args_SLNet' : argsSLNet,
'statistics' : stats,
'model_state_dict': net_get_params(net).state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'scaler_state_dict' : scaler.state_dict(),
'loss': mean_volume_loss},
save_folder + '/model_'+str(epoch))
| [
"zipfile.ZipFile",
"torch.cuda.device_count",
"utils.pytorch_shot_noise.add_camera_noise",
"torch.cuda.synchronize",
"torch.nn.MSELoss",
"torch.nn.init.xavier_uniform",
"sys.exit",
"torch.utils.tensorboard.SummaryWriter",
"pytorch_msssim.SSIM",
"torch.cuda.amp.GradScaler",
"argparse.ArgumentParser",
"nets.XLFMNet.XLFMNet",
"torch.set_num_threads",
"os.path.split",
"torch.cuda.amp.autocast",
"torch.utils.data.sampler.SubsetRandomSampler",
"torch.cuda.Event",
"numpy.ceil",
"subprocess.check_output",
"os.path.isfile",
"torch.no_grad",
"time.time",
"torch.cuda.empty_cache",
"torch.manual_seed",
"torch.optim.Adam",
"torch.load",
"utils.XLFMDataset.XLFMDatasetFull",
"torch.nn.DataParallel",
"torch.tensor",
"datetime.datetime.now",
"torch.utils.data.DataLoader",
"torch.set_grad_enabled",
"torch.rand",
"numpy.random.shuffle"
] | [((637, 662), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (660, 662), False, 'import argparse\n'), ((3919, 3944), 'torch.manual_seed', 'torch.manual_seed', (['(261290)'], {}), '(261290)\n', (3936, 3944), False, 'import torch\n'), ((4999, 5370), 'utils.XLFMDataset.XLFMDatasetFull', 'XLFMDatasetFull', (['args.data_folder', 'args.lenslet_file', 'subimage_shape'], {'img_shape': '[2160, 2160]', 'images_to_use': 'args.images_to_use', 'divisor': '(1)', 'isTiff': '(True)', 'n_frames_net': 'argsSLNet.n_frames', 'lenslets_offset': '(0)', 'load_all': '(True)', 'load_vols': '(True)', 'load_sparse': '(True)', 'temporal_shifts': 'args.temporal_shifts', 'use_random_shifts': 'args.use_random_shifts', 'eval_video': '(False)'}), '(args.data_folder, args.lenslet_file, subimage_shape,\n img_shape=[2160, 2160], images_to_use=args.images_to_use, divisor=1,\n isTiff=True, n_frames_net=argsSLNet.n_frames, lenslets_offset=0,\n load_all=True, load_vols=True, load_sparse=True, temporal_shifts=args.\n temporal_shifts, use_random_shifts=args.use_random_shifts, eval_video=False\n )\n', (5014, 5370), False, 'from utils.XLFMDataset import XLFMDatasetFull\n'), ((5389, 5771), 'utils.XLFMDataset.XLFMDatasetFull', 'XLFMDatasetFull', (['args.data_folder_test', 'args.lenslet_file', 'subimage_shape'], {'img_shape': '[2160, 2160]', 'images_to_use': 'args.images_to_use_test', 'divisor': '(1)', 'isTiff': '(True)', 'n_frames_net': 'argsSLNet.n_frames', 'lenslets_offset': '(0)', 'load_all': '(True)', 'load_vols': '(True)', 'load_sparse': '(True)', 'temporal_shifts': 'args.temporal_shifts', 'use_random_shifts': 'args.use_random_shifts', 'eval_video': '(False)'}), '(args.data_folder_test, args.lenslet_file, subimage_shape,\n img_shape=[2160, 2160], images_to_use=args.images_to_use_test, divisor=\n 1, isTiff=True, n_frames_net=argsSLNet.n_frames, lenslets_offset=0,\n load_all=True, load_vols=True, load_sparse=True, temporal_shifts=args.\n temporal_shifts, use_random_shifts=args.use_random_shifts, eval_video=False\n )\n', (5404, 5771), False, 'from utils.XLFMDataset import XLFMDatasetFull\n'), ((6598, 6632), 'torch.utils.data.sampler.SubsetRandomSampler', 'SubsetRandomSampler', (['train_indices'], {}), '(train_indices)\n', (6617, 6632), False, 'from torch.utils.data.sampler import SubsetRandomSampler\n'), ((6649, 6681), 'torch.utils.data.sampler.SubsetRandomSampler', 'SubsetRandomSampler', (['val_indices'], {}), '(val_indices)\n', (6668, 6681), False, 'from torch.utils.data.sampler import SubsetRandomSampler\n'), ((8359, 8371), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (8369, 8371), True, 'import torch.nn as nn\n'), ((8577, 8634), 'torch.optim.Adam', 'torch.optim.Adam', (['trainable_params'], {'lr': 'args.learning_rate'}), '(trainable_params, lr=args.learning_rate)\n', (8593, 8634), False, 'import torch\n'), ((8653, 8689), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (8669, 8689), False, 'import torch\n'), ((8696, 8732), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (8712, 8732), False, 'import torch\n'), ((8797, 8809), 'torch.cuda.amp.GradScaler', 'GradScaler', ([], {}), '()\n', (8807, 8809), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((3886, 3918), 'torch.set_num_threads', 'torch.set_num_threads', (['n_threads'], {}), '(n_threads)\n', (3907, 3918), False, 'import torch\n'), ((4035, 4091), 'torch.load', 'torch.load', (['args.checkpoint_XLFMNet'], {'map_location': 
'device'}), '(args.checkpoint_XLFMNet, map_location=device)\n', (4045, 4091), False, 'import torch\n'), ((4276, 4330), 'torch.load', 'torch.load', (['args.checkpoint_SLNet'], {'map_location': 'device'}), '(args.checkpoint_SLNet, map_location=device)\n', (4286, 4330), False, 'import torch\n'), ((4563, 4599), 'os.path.split', 'os.path.split', (['args.checkpoint_SLNet'], {}), '(args.checkpoint_SLNet)\n', (4576, 4599), False, 'import os\n'), ((6364, 6409), 'numpy.ceil', 'np.ceil', (['(args.validation_split * dataset_size)'], {}), '(args.validation_split * dataset_size)\n', (6371, 6409), True, 'import numpy as np\n'), ((6472, 6498), 'numpy.random.shuffle', 'np.random.shuffle', (['indices'], {}), '(indices)\n', (6489, 6498), True, 'import numpy as np\n'), ((6729, 6849), 'torch.utils.data.DataLoader', 'data.DataLoader', (['dataset'], {'batch_size': 'args.batch_size', 'sampler': 'train_sampler', 'pin_memory': '(False)', 'num_workers': 'n_threads'}), '(dataset, batch_size=args.batch_size, sampler=train_sampler,\n pin_memory=False, num_workers=n_threads)\n', (6744, 6849), False, 'from torch.utils import data\n'), ((6910, 7030), 'torch.utils.data.DataLoader', 'data.DataLoader', (['dataset'], {'batch_size': 'args.batch_size', 'sampler': 'valid_sampler', 'pin_memory': '(False)', 'num_workers': 'n_threads'}), '(dataset, batch_size=args.batch_size, sampler=valid_sampler,\n pin_memory=False, num_workers=n_threads)\n', (6925, 7030), False, 'from torch.utils import data\n'), ((7094, 7197), 'torch.utils.data.DataLoader', 'data.DataLoader', (['dataset_test'], {'batch_size': '(1)', 'pin_memory': '(False)', 'num_workers': 'n_threads', 'shuffle': '(True)'}), '(dataset_test, batch_size=1, pin_memory=False, num_workers=\n n_threads, shuffle=True)\n', (7109, 7197), False, 'from torch.utils import data\n'), ((8411, 8423), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (8421, 8423), True, 'import torch.nn as nn\n'), ((9500, 9534), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'save_folder'}), '(log_dir=save_folder)\n', (9513, 9534), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((9674, 9722), 'zipfile.ZipFile', 'zipfile.ZipFile', (["(save_folder + '/files.zip')", '"""w"""'], {}), "(save_folder + '/files.zip', 'w')\n", (9689, 9722), False, 'import zipfile\n'), ((9842, 9853), 'time.time', 'time.time', ([], {}), '()\n', (9851, 9853), False, 'import time\n'), ((10108, 10132), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (10130, 10132), False, 'import torch\n'), ((10141, 10152), 'time.time', 'time.time', ([], {}), '()\n', (10150, 10152), False, 'import time\n'), ((10192, 10216), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (10214, 10216), False, 'import torch\n'), ((10556, 10609), 'torch.nn.DataParallel', 'nn.DataParallel', (['net', 'args.main_gpu', 'args.main_gpu[0]'], {}), '(net, args.main_gpu, args.main_gpu[0])\n', (10571, 10609), True, 'import torch.nn as nn\n'), ((4657, 4713), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'describe', '--always']"], {}), "(['git', 'describe', '--always'])\n", (4680, 4713), False, 'import subprocess\n'), ((7315, 7353), 'torch.nn.init.xavier_uniform', 'torch.nn.init.xavier_uniform', (['m.weight'], {}), '(m.weight)\n', (7343, 7353), False, 'import torch\n'), ((7680, 7828), 'nets.XLFMNet.XLFMNet', 'XLFMNet', (['n_lenslets', 'args.output_shape'], {'n_temporal_frames': 'dataset.n_frames', 'dataset': 'dataset', 'use_bias': 'args.use_bias', 'unet_settings': 
'unet_settings'}), '(n_lenslets, args.output_shape, n_temporal_frames=dataset.n_frames,\n dataset=dataset, use_bias=args.use_bias, unet_settings=unet_settings)\n', (7687, 7828), False, 'from nets.XLFMNet import XLFMNet\n'), ((8489, 8546), 'pytorch_msssim.SSIM', 'SSIM', ([], {'data_range': '(1)', 'size_average': '(True)', 'channel': 'n_lenslets'}), '(data_range=1, size_average=True, channel=n_lenslets)\n', (8493, 8546), False, 'from pytorch_msssim import ssim, ms_ssim, SSIM, MS_SSIM\n'), ((10633, 10658), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (10656, 10658), False, 'import torch\n'), ((22621, 22666), 'os.path.isfile', 'os.path.isfile', (["(main_folder + 'exit_file.txt')"], {}), "(main_folder + 'exit_file.txt')\n", (22635, 22666), False, 'import os\n'), ((11148, 11176), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(True)'], {}), '(True)\n', (11170, 11176), False, 'import torch\n'), ((11342, 11371), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(False)'], {}), '(False)\n', (11364, 11371), False, 'import torch\n'), ((16860, 16884), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (16882, 16884), False, 'import torch\n'), ((22678, 22702), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (22700, 22702), False, 'import torch\n'), ((22715, 22726), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (22723, 22726), False, 'import sys\n'), ((13313, 13364), 'utils.pytorch_shot_noise.add_camera_noise', 'pytorch_shot_noise.add_camera_noise', (['curr_img_stack'], {}), '(curr_img_stack)\n', (13348, 13364), True, 'import utils.pytorch_shot_noise as pytorch_shot_noise\n'), ((14791, 14801), 'torch.cuda.amp.autocast', 'autocast', ([], {}), '()\n', (14799, 14801), False, 'from torch.cuda.amp import autocast, GradScaler\n'), ((17663, 17693), 'torch.tensor', 'torch.tensor', (['mean_volume_loss'], {}), '(mean_volume_loss)\n', (17675, 17693), False, 'import torch\n'), ((16099, 16114), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (16112, 16114), False, 'import torch\n'), ((18248, 18278), 'torch.tensor', 'torch.tensor', (['mean_volume_loss'], {}), '(mean_volume_loss)\n', (18260, 18278), False, 'import torch\n'), ((4776, 4790), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4788, 4790), False, 'from datetime import datetime\n'), ((13158, 13171), 'torch.rand', 'torch.rand', (['(1)'], {}), '(1)\n', (13168, 13171), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-11-08 05:57
from __future__ import unicode_literals
import datetime
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import re
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='EmailVerification',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
('sent', models.DateTimeField(null=True, verbose_name='sent')),
('key', models.CharField(max_length=64, unique=True, verbose_name='key')),
],
options={
'verbose_name': 'email confirmation',
'verbose_name_plural': 'email confirmations',
},
),
migrations.CreateModel(
name='Feedback',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('remembered_password', models.BooleanField()),
('message', models.TextField(max_length=500)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='LoginAttempt',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=50)),
('entry', models.CharField(max_length=30)),
('keystroke', models.CharField(max_length=50)),
('keystroke_intervals', models.CharField(max_length=200, validators=[django.core.validators.RegexValidator(re.compile('^\\d+(?:\\,\\d+)*\\Z'), code='invalid', message='Enter only digits separated by commas.')])),
('user_agent', models.CharField(max_length=30)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PasswordRequest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=30)),
('last_login', models.DateTimeField(default=datetime.datetime(2017, 1, 1, 0, 0), verbose_name='last_login')),
('login_days', models.PositiveIntegerField(default=0)),
('best_timing', models.DecimalField(decimal_places=3, default=None, max_digits=8, null=True)),
('last_timing', models.DecimalField(decimal_places=3, default=None, max_digits=8, null=True)),
('verified', models.BooleanField(default=False, verbose_name='verified')),
('pseudonym', models.CharField(max_length=15, null=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='emailverification',
name='user_profile',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='account.UserProfile'),
),
]
| [
"datetime.datetime",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"re.compile",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((397, 454), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (428, 454), False, 'from django.db import migrations, models\n'), ((4174, 4267), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""account.UserProfile"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'account.UserProfile')\n", (4191, 4267), False, 'from django.db import migrations, models\n'), ((596, 689), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (612, 689), False, 'from django.db import migrations, models\n'), ((716, 795), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""created"""'}), "(default=django.utils.timezone.now, verbose_name='created')\n", (736, 795), False, 'from django.db import migrations, models\n'), ((823, 875), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'verbose_name': '"""sent"""'}), "(null=True, verbose_name='sent')\n", (843, 875), False, 'from django.db import migrations, models\n'), ((902, 966), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)', 'unique': '(True)', 'verbose_name': '"""key"""'}), "(max_length=64, unique=True, verbose_name='key')\n", (918, 966), False, 'from django.db import migrations, models\n'), ((1253, 1346), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1269, 1346), False, 'from django.db import migrations, models\n'), ((1385, 1406), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1404, 1406), False, 'from django.db import migrations, models\n'), ((1437, 1469), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (1453, 1469), False, 'from django.db import migrations, models\n'), ((1502, 1541), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1522, 1541), False, 'from django.db import migrations, models\n'), ((1569, 1665), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1586, 1665), False, 'from django.db import migrations, models\n'), ((1798, 1891), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1814, 1891), False, 'from django.db import migrations, models\n'), ((1919, 1950), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1935, 1950), False, 'from django.db import migrations, models\n'), ((1979, 2010), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (1995, 2010), False, 'from django.db import migrations, models\n'), ((2043, 2074), 'django.db.models.CharField', 
'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (2059, 2074), False, 'from django.db import migrations, models\n'), ((2337, 2368), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (2353, 2368), False, 'from django.db import migrations, models\n'), ((2401, 2440), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2421, 2440), False, 'from django.db import migrations, models\n'), ((2468, 2587), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to=settings.AUTH_USER_MODEL)\n', (2485, 2587), False, 'from django.db import migrations, models\n'), ((2723, 2816), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2739, 2816), False, 'from django.db import migrations, models\n'), ((2845, 2884), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2865, 2884), False, 'from django.db import migrations, models\n'), ((2912, 3008), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (2929, 3008), False, 'from django.db import migrations, models\n'), ((3140, 3233), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3156, 3233), False, 'from django.db import migrations, models\n'), ((3261, 3292), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (3277, 3292), False, 'from django.db import migrations, models\n'), ((3452, 3490), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)'}), '(default=0)\n', (3479, 3490), False, 'from django.db import migrations, models\n'), ((3525, 3601), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'default': 'None', 'max_digits': '(8)', 'null': '(True)'}), '(decimal_places=3, default=None, max_digits=8, null=True)\n', (3544, 3601), False, 'from django.db import migrations, models\n'), ((3636, 3712), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(3)', 'default': 'None', 'max_digits': '(8)', 'null': '(True)'}), '(decimal_places=3, default=None, max_digits=8, null=True)\n', (3655, 3712), False, 'from django.db import migrations, models\n'), ((3744, 3803), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""verified"""'}), "(default=False, verbose_name='verified')\n", (3763, 3803), False, 'from django.db import migrations, models\n'), ((3836, 3878), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(15)', 'null': '(True)'}), '(max_length=15, null=True)\n', (3852, 3878), False, 'from django.db import migrations, models\n'), ((3906, 4026), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], 
{'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""profile"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='profile', to=settings.AUTH_USER_MODEL)\n", (3923, 4026), False, 'from django.db import migrations, models\n'), ((3355, 3390), 'datetime.datetime', 'datetime.datetime', (['(2017)', '(1)', '(1)', '(0)', '(0)'], {}), '(2017, 1, 1, 0, 0)\n', (3372, 3390), False, 'import datetime\n'), ((2200, 2234), 're.compile', 're.compile', (['"""^\\\\d+(?:\\\\,\\\\d+)*\\\\Z"""'], {}), "('^\\\\d+(?:\\\\,\\\\d+)*\\\\Z')\n", (2210, 2234), False, 'import re\n')] |
#solution_python1_chapter10_regex.py
import re
cc = "1234-5678-1234-5678"
regex = re.compile(r'''(
(\d{4}) #match 4 digits
(\s*|\-*) #matches either a white space \s or a dash
(\d{4})
(\s*|\-*)
(\d{4})
)''',re.VERBOSE)
#validation: does it even look like a credit card?
m = regex.match(cc)
if not m:
    print("Sorry, that's not a credit card number")
else:
    # if it is, we'll make a substitution of the first 12 numbers
    fixed = re.sub(m.group(), "****-****-****", cc)
    print("Success. Changed {} \n\tto the much safer {}.".format(cc, fixed))
| [
"re.compile"
] | [((84, 241), 're.compile', 're.compile', (['"""(\n(\\\\d{4}) #match 4 digits\n(\\\\s*|\\\\-*) #matches either a white space \\\\s or a dash\n(\\\\d{4})\n(\\\\s*|\\\\-*)\n(\\\\d{4})\n\n)"""', 're.VERBOSE'], {}), '(\n """(\n(\\\\d{4}) #match 4 digits\n(\\\\s*|\\\\-*) #matches either a white space \\\\s or a dash\n(\\\\d{4})\n(\\\\s*|\\\\-*)\n(\\\\d{4})\n\n)"""\n , re.VERBOSE)\n', (94, 241), False, 'import re\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Authors: <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
"""Main class for sosia."""
from warnings import warn
from sosia.classes import Scientist
from sosia.processing import find_matches, inform_matches,\
maybe_add_source_names, search_group_from_sources
from sosia.utils import accepts, custom_print
class Original(Scientist):
@property
def matches(self):
"""List of Scopus IDs or list of namedtuples representing matches
of the original scientist in the treatment year.
Notes
-----
Property is initiated via .find_matches().
"""
try:
return self._matches
except AttributeError:
return None
@property
def search_group(self):
"""The set of authors that might be matches to the scientist. The
set contains the intersection of all authors publishing in the
treatment year as well as authors publishing around the year of first
publication. Some authors with too many publications in the
treatment year and authors having published too early are removed.
Notes
-----
Property is initiated via .define_search_group().
"""
try:
return self._search_group
except AttributeError:
return None
@property
def search_sources(self):
"""The set of sources (journals, books) comparable to the sources
the scientist published in until the treatment year.
        A source is comparable if it belongs to the scientist's main field
but not to fields alien to the scientist, and if the types of the
sources are the same as the types of the sources in the scientist's
main field where she published in.
Notes
-----
Property is initiated via .define_search_sources().
"""
try:
return self._search_sources
except AttributeError:
return None
@search_sources.setter
@accepts((set, list, tuple))
def search_sources(self, val):
self._search_sources = maybe_add_source_names(val, self.source_names)
def __init__(self, scientist, treatment_year, first_year_margin=2,
pub_margin=0.2, cits_margin=0.2, coauth_margin=0.2,
affiliations=None, period=None, first_year_search="ID",
eids=None, refresh=False, sql_fname=None):
"""Representation of a scientist for whom to find a control scientist.
Parameters
----------
scientist : str, int or list of str or int
Scopus Author ID, or list of Scopus Author IDs, of the scientist
to find a control scientist for.
treatment_year : str or numeric
Year of the event. Control scientist will be matched on trends and
characteristics of the original scientist up to this year.
first_year_margin : numeric (optional, default=2)
Number of years by which the search for authors publishing around
the year of the original scientist's year of first publication
            should be extended in both directions.
pub_margin : numeric (optional, default=0.2)
The left and right margin for the number of publications to match
possible matches and the scientist on. If the value is a float,
it is interpreted as percentage of the scientists number of
publications and the resulting value is rounded up. If the value
is an integer it is interpreted as fixed number of publications.
cits_margin : numeric (optional, default=0.2)
The left and right margin for the number of citations to match
possible matches and the scientist on. If the value is a float,
it is interpreted as percentage of the scientists number of
publications and the resulting value is rounded up. If the value
is an integer it is interpreted as fixed number of citations.
coauth_margin : numeric (optional, default=0.2)
The left and right margin for the number of coauthors to match
possible matches and the scientist on. If the value is a float,
it is interpreted as percentage of the scientists number of
coauthors and the resulting value is rounded up. If the value
is an integer it is interpreted as fixed number of coauthors.
affiliations : list (optional, default=None)
A list of Scopus affiliation IDs. If provided, sosia conditions
the match procedure on affiliation with these IDs in the
treatment year.
period: int (optional, default=None)
An additional period prior to the publication year on which to
match scientists.
Note: If the value is larger than the publication range, period
sets back to None.
first_year_search: str (optional, default="ID")
How to determine characteristics of possible control scientists
in the first year of publication. Mode "ID" uses Scopus Author
IDs only. Mode "name" will select relevant profiles based on
their surname and first name but only when "period" is not None.
Select this mode to counter potential incompleteness of
author profiles.
eids : list (optional, default=None)
A list of scopus EIDs of the publications of the scientist you
want to find a control for. If it is provided, the scientist
properties and the control group are set based on this list of
publications, instead of the list of publications obtained from
the Scopus Author ID.
refresh : boolean (optional, default=False)
Whether to refresh cached results (if they exist) or not. If int
is passed, results will be refreshed if they are older than
that value in number of days.
sql_fname : str (optional, default=None)
The path of the SQLite database to connect to. If None, will use
the path specified in config.ini.
"""
# Internal checks
if not isinstance(first_year_margin, (int, float)):
raise Exception("Argument first_year_margin must be float or integer.")
if not isinstance(pub_margin, (int, float)):
raise Exception("Argument pub_margin must be float or integer.")
if not isinstance(coauth_margin, (int, float)):
raise Exception("Argument coauth_margin must be float or integer.")
if first_year_search not in ("ID", "name"):
raise Exception("Argument first_year_search must be either ID or name.")
if first_year_search == "name" and not period:
first_year_search = "ID"
text = "Argument first_year_search set to ID: Argument period "\
"must not be None"
warn(text)
# Variables
if not isinstance(scientist, list):
scientist = [scientist]
self.identifier = [str(auth_id) for auth_id in scientist]
self.treatment_year = int(treatment_year)
self.first_year_margin = first_year_margin
self.pub_margin = pub_margin
self.cits_margin = cits_margin
self.coauth_margin = coauth_margin
self.period = period
self.first_year_name_search = first_year_search == "name"
self.eids = eids
if isinstance(affiliations, (int, str)):
affiliations = [affiliations]
if affiliations:
affiliations = [int(a) for a in affiliations]
self.search_affiliations = affiliations
self.refresh = refresh
self.sql_fname = sql_fname
# Instantiate superclass to load private variables
Scientist.__init__(self, self.identifier, treatment_year, refresh=refresh,
period=period, sql_fname=self.sql_fname)
def define_search_group(self, stacked=False, verbose=False, refresh=False):
"""Define search_group.
Parameters
----------
stacked : bool (optional, default=False)
Whether to combine searches in few queries or not. Cached
            files will most likely not be reusable. Set to True if you
query in distinct fields or you want to minimize API key usage.
verbose : bool (optional, default=False)
Whether to report on the progress of the process.
refresh : bool (optional, default=False)
Whether to refresh cached results (if they exist) or not.
"""
# Checks
if not self.search_sources:
text = "No search sources defined. Please run "\
".define_search_sources() first."
raise Exception(text)
# Query journals
params = {"original": self, "stacked": stacked,
"refresh": refresh, "verbose": verbose}
search_group = search_group_from_sources(**params)
# Remove own IDs and coauthors
search_group -= set(self.identifier)
search_group -= {str(i) for i in self.coauthors}
# Finalize
self._search_group = sorted(search_group)
text = f"Found {len(search_group):,} authors for search_group"
custom_print(text, verbose)
return self
def define_search_sources(self, verbose=False):
"""Define .search_sources.
Within the list of search sources sosia will search for matching
scientists. A search source is of the same main field as
the original scientist, the same types (journal, conference
proceeding, etc.), and must not be related to fields alien to the
original scientist.
Parameters
----------
verbose : bool (optional, default=False)
Whether to report on the progress of the process.
"""
df = self.field_source
# Sources in scientist's main field
same_field = df["asjc"] == self.main_field[0]
# Types of Scientist's sources
own_source_ids, _ = zip(*self.sources)
same_sources = df["source_id"].isin(own_source_ids)
main_types = df[same_sources]["type"].unique()
same_type = df["type"].isin(main_types)
# Select source IDs
selected_ids = df[same_field & same_type]["source_id"].unique()
selected = df[df["source_id"].isin(selected_ids)].copy()
selected["asjc"] = selected["asjc"].astype(int).astype(str) + " "
grouped = (selected.groupby("source_id")
.sum()["asjc"]
.to_frame())
# Deselect sources with alien fields
grouped["asjc"] = grouped["asjc"].astype(str).str.split().apply(set)
fields = set(str(f) for f in self.fields)
no_alien_field = grouped["asjc"].apply(lambda s: len(s - fields) == 0)
grouped = grouped[no_alien_field]
# Add source names
sources = set((s, self.source_names.get(s)) for s in grouped.index)
# Add own sources
sources.update(self.sources)
# Finalize
self._search_sources = sorted(sources)
text = f"Found {len(sources):,} sources matching main field "\
f"{self.main_field[0]} and source type(s) {'; '.join(main_types)}"
custom_print(text, verbose)
return self
def find_matches(self, stacked=False, verbose=False, refresh=False):
"""Find matches within search_group based on four criteria:
1. Started publishing in about the same year
2. Has about the same number of publications in the treatment year
3. Has about the same number of coauthors in the treatment year
4. Has about the same number of citations in the treatment year
5. Works in the same field as the scientist's main field
Parameters
----------
stacked : bool (optional, default=False)
Whether to combine searches in few queries or not. Cached
files will most likely not be reusable. Set to True if you
query in distinct fields or you want to minimize API key usage.
verbose : bool (optional, default=False)
Whether to report on the progress of the process.
refresh : bool (optional, default=False)
Whether to refresh cached results (if they exist) or not. If int
is passed and stacked=False, results will be refreshed if they are
older than that value in number of days.
Notes
-----
Matches are available through property `.matches`.
"""
# Checks
if not self.search_group:
text = "No search group defined. Please run "\
".define_search_group() first."
raise Exception(text)
if not isinstance(refresh, bool) and stacked:
refresh = False
warn("refresh parameter must be boolean when stacked=True. "
"Continuing with refresh=False.")
# Find matches
matches = find_matches(self, stacked, verbose, refresh)
text = f"Found {len(matches):,} author(s) matching all criteria"
custom_print(text, verbose)
self._matches = sorted([str(auth_id) for auth_id in matches])
def inform_matches(self, fields=None, verbose=False, refresh=False,
stop_words=None, **tfidf_kwds):
"""Add information to matches to aid in selection process.
Parameters
----------
fields : iterable (optional, default=None)
Which information to provide. Allowed values are "first_year",
"num_coauthors", "num_publications", "num_citations", "country",
"language", "reference_sim", "abstract_sim". If None, will
use all available fields.
verbose : bool (optional, default=False)
Whether to report on the progress of the process.
refresh : bool (optional, default=False)
Whether to refresh cached results (if they exist) or not. If int
is passed and stacked=False, results will be refreshed if they are
older than that value in number of days.
stop_words : list (optional, default=None)
A list of words that should be filtered in the analysis of
abstracts. If None uses the list of English stopwords
by nltk, augmented with numbers and interpunctuation.
tfidf_kwds : keywords
Parameters to pass to TfidfVectorizer from the sklearn package
for abstract vectorization. Not used when `information=False` or
or when "abstract_sim" is not in `information`. See
https://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.TfidfVectorizer.html
for possible values.
Notes
-----
Matches including corresponding information are available through
property `.matches`.
Raises
------
        ValueError
If fields contains invalid keywords.
"""
# Checks
if not self._matches:
text = "No matches defined. Please run .find_matches() first."
raise Exception(text)
allowed_fields = ["first_name", "surname", "first_year",
"num_coauthors", "num_publications", "num_citations",
"num_coauthors_period", "num_publications_period",
"num_citations_period", "subjects", "country",
"affiliation_id", "affiliation", "language",
"reference_sim", "abstract_sim"]
if fields:
invalid = [x for x in fields if x not in allowed_fields]
if invalid:
text = "Parameter fields contains invalid keywords: " +\
", ".join(invalid)
raise ValueError(text)
else:
fields = allowed_fields
custom_print("Providing additional information...", verbose)
matches = inform_matches(self, fields, stop_words, verbose, refresh,
**tfidf_kwds)
self._matches = matches
| [
"sosia.classes.Scientist.__init__",
"sosia.utils.custom_print",
"sosia.utils.accepts",
"sosia.processing.search_group_from_sources",
"sosia.processing.inform_matches",
"warnings.warn",
"sosia.processing.maybe_add_source_names",
"sosia.processing.find_matches"
] | [((2055, 2082), 'sosia.utils.accepts', 'accepts', (['(set, list, tuple)'], {}), '((set, list, tuple))\n', (2062, 2082), False, 'from sosia.utils import accepts, custom_print\n'), ((2149, 2195), 'sosia.processing.maybe_add_source_names', 'maybe_add_source_names', (['val', 'self.source_names'], {}), '(val, self.source_names)\n', (2171, 2195), False, 'from sosia.processing import find_matches, inform_matches, maybe_add_source_names, search_group_from_sources\n'), ((7931, 8050), 'sosia.classes.Scientist.__init__', 'Scientist.__init__', (['self', 'self.identifier', 'treatment_year'], {'refresh': 'refresh', 'period': 'period', 'sql_fname': 'self.sql_fname'}), '(self, self.identifier, treatment_year, refresh=refresh,\n period=period, sql_fname=self.sql_fname)\n', (7949, 8050), False, 'from sosia.classes import Scientist\n'), ((9103, 9138), 'sosia.processing.search_group_from_sources', 'search_group_from_sources', ([], {}), '(**params)\n', (9128, 9138), False, 'from sosia.processing import find_matches, inform_matches, maybe_add_source_names, search_group_from_sources\n'), ((9430, 9457), 'sosia.utils.custom_print', 'custom_print', (['text', 'verbose'], {}), '(text, verbose)\n', (9442, 9457), False, 'from sosia.utils import accepts, custom_print\n'), ((11472, 11499), 'sosia.utils.custom_print', 'custom_print', (['text', 'verbose'], {}), '(text, verbose)\n', (11484, 11499), False, 'from sosia.utils import accepts, custom_print\n'), ((13222, 13267), 'sosia.processing.find_matches', 'find_matches', (['self', 'stacked', 'verbose', 'refresh'], {}), '(self, stacked, verbose, refresh)\n', (13234, 13267), False, 'from sosia.processing import find_matches, inform_matches, maybe_add_source_names, search_group_from_sources\n'), ((13349, 13376), 'sosia.utils.custom_print', 'custom_print', (['text', 'verbose'], {}), '(text, verbose)\n', (13361, 13376), False, 'from sosia.utils import accepts, custom_print\n'), ((16164, 16224), 'sosia.utils.custom_print', 'custom_print', (['"""Providing additional information..."""', 'verbose'], {}), "('Providing additional information...', verbose)\n", (16176, 16224), False, 'from sosia.utils import accepts, custom_print\n'), ((16243, 16315), 'sosia.processing.inform_matches', 'inform_matches', (['self', 'fields', 'stop_words', 'verbose', 'refresh'], {}), '(self, fields, stop_words, verbose, refresh, **tfidf_kwds)\n', (16257, 16315), False, 'from sosia.processing import find_matches, inform_matches, maybe_add_source_names, search_group_from_sources\n'), ((7057, 7067), 'warnings.warn', 'warn', (['text'], {}), '(text)\n', (7061, 7067), False, 'from warnings import warn\n'), ((13067, 13169), 'warnings.warn', 'warn', (['"""refresh parameter must be boolean when stacked=True. Continuing with refresh=False."""'], {}), "(\n 'refresh parameter must be boolean when stacked=True. Continuing with refresh=False.'\n )\n", (13071, 13169), False, 'from warnings import warn\n')] |
"""
Role tests
"""
import os
import pytest
from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize('name', [
('vsftpd'),
('db5.3-util'),
])
def test_installed_packages(host, name):
"""
Test if packages installed
"""
assert host.package(name).is_installed
def test_service(host):
"""
Test service state
"""
service = host.service('vsftpd')
assert service.is_enabled
# if host.system_info.codename in ['jessie', 'xenial']:
if host.file('/etc/init.d/vsftpd').exists:
assert 'is running' in host.check_output('/etc/init.d/vsftpd status')
else:
assert service.is_running
def test_process(host):
"""
Test process state
"""
assert len(host.process.filter(comm='vsftpd')) == 1
def test_socket(host):
"""
Test ports
"""
assert host.socket('tcp://127.0.0.1:21').is_listening
def test_user(host):
"""
Test ftp user exists
"""
ftp_user = host.user('ftp')
assert ftp_user.exists
assert ftp_user.shell in ['/usr/sbin/nologin', '/bin/false']
| [
"testinfra.utils.ansible_runner.AnsibleRunner",
"pytest.mark.parametrize"
] | [((198, 255), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name"""', "['vsftpd', 'db5.3-util']"], {}), "('name', ['vsftpd', 'db5.3-util'])\n", (221, 255), False, 'import pytest\n'), ((120, 172), 'testinfra.utils.ansible_runner.AnsibleRunner', 'AnsibleRunner', (["os.environ['MOLECULE_INVENTORY_FILE']"], {}), "(os.environ['MOLECULE_INVENTORY_FILE'])\n", (133, 172), False, 'from testinfra.utils.ansible_runner import AnsibleRunner\n')] |
from regolith.helpers.basehelper import SoutHelperBase
from regolith.fsclient import _id_key
from regolith.tools import (
all_docs_from_collection,
get_pi_id,
get_person_contact
)
TARGET_COLL = "presentations"
HELPER_TARGET = "l_abstract"
def subparser(subpi):
subpi.add_argument(
"run",
help='run the lister. To see allowed optional arguments, type '
'"regolith helper l_abstract"')
subpi.add_argument(
"-a",
"--author",
help='authors group ID(single argument only) to use to find '
'presentation abstract')
subpi.add_argument(
"-y",
"--year",
help='start or end year of the presentation (single argument only) to '
'use to find presentation')
subpi.add_argument(
"-l",
"--loc_inst",
help='location of presentation, either a fragment of an institution, '
'country, city, state, or university. If an institution is entered,'
'the search will be for seminars or colloquiums, otherwise the '
'search will be for all other meetings')
subpi.add_argument(
"-t",
"--title",
help='fragment of the title of the abstract or talk to use to '
'filter presentations')
return subpi
class AbstractListerHelper(SoutHelperBase):
"""Helper for finding and listing abstracts from the presentations.yml file
"""
# btype must be the same as helper target in helper.py
btype = HELPER_TARGET
needed_dbs = [f'{TARGET_COLL}', 'people', 'contacts']
def construct_global_ctx(self):
"""Constructs the global context"""
super().construct_global_ctx()
gtx = self.gtx
rc = self.rc
if "groups" in self.needed_dbs:
rc.pi_id = get_pi_id(rc)
rc.coll = f"{TARGET_COLL}"
try:
if not rc.database:
rc.database = rc.databases[0]
except BaseException:
pass
colls = [
sorted(
all_docs_from_collection(rc.client, collname), key=_id_key
)
for collname in self.needed_dbs
]
for db, coll in zip(self.needed_dbs, colls):
gtx[db] = coll
gtx["all_docs_from_collection"] = all_docs_from_collection
gtx["float"] = float
gtx["str"] = str
gtx["zip"] = zip
def sout(self):
rc = self.rc
presentations = self.gtx["presentations"]
SEMINAR_TYPES = ['seminar', 'colloquium']
filtered_title, filtered_authors, filtered_years, filtered_inst, filtered_loc = ([] for i in range(5))
if (not rc.author) and (not rc.year) and (not rc.loc_inst) and (not rc.title):
return None
if rc.title:
filtered_title = [presentation for presentation in presentations
if rc.title.casefold() in presentation.get('title').casefold()]
if rc.author:
filtered_authors = [presentation for presentation in presentations
if rc.author in presentation.get('authors')]
if rc.year:
filtered_years = [presentation for presentation in presentations
if int(rc.year) == presentation.get('begin_year', 'begin_date')
or int(rc.year) == presentation.get('end_year', 'end_date')]
if rc.loc_inst:
filtered_inst = [presentation for presentation in presentations
if presentation.get('type') in SEMINAR_TYPES and
rc.loc_inst.casefold() in presentation.get('institution').casefold()]
filtered_loc = [presentation for presentation in presentations
if rc.loc_inst.casefold() in presentation.get('location', 'institution').casefold()
and rc.loc_inst.casefold() not in presentation.get('institution').casefold()]
filtered_presentations_by_args = [filtered_inst, filtered_years, filtered_title,
filtered_authors, filtered_loc]
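        # keep only talks that appear in every non-empty filter (intersection of the applied criteria)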
nonempty_filtered_presentations_by_args = [filtered_presentations
for filtered_presentations in filtered_presentations_by_args
if filtered_presentations]
filtered_presentations = [talk for presentations in nonempty_filtered_presentations_by_args
for talk in presentations
if all(talk in presentations
for presentations in nonempty_filtered_presentations_by_args)]
flat_filtered_presentations = list({talk['_id']: talk for talk in filtered_presentations}.values())
for presentation in flat_filtered_presentations:
print("---------------------------------------")
print(f"Title: {presentation.get('title')}\n")
author_list = [author
if not get_person_contact(author, self.gtx["people"], self.gtx["contacts"])
else get_person_contact(author, self.gtx["people"], self.gtx["contacts"]).get('name')
for author in presentation.get('authors')]
print(", ".join(author_list))
print(f"\nAbstract: {presentation.get('abstract')}")
return
| [
"regolith.tools.get_pi_id",
"regolith.tools.get_person_contact",
"regolith.tools.all_docs_from_collection"
] | [((1815, 1828), 'regolith.tools.get_pi_id', 'get_pi_id', (['rc'], {}), '(rc)\n', (1824, 1828), False, 'from regolith.tools import all_docs_from_collection, get_pi_id, get_person_contact\n'), ((2056, 2101), 'regolith.tools.all_docs_from_collection', 'all_docs_from_collection', (['rc.client', 'collname'], {}), '(rc.client, collname)\n', (2080, 2101), False, 'from regolith.tools import all_docs_from_collection, get_pi_id, get_person_contact\n'), ((5106, 5174), 'regolith.tools.get_person_contact', 'get_person_contact', (['author', "self.gtx['people']", "self.gtx['contacts']"], {}), "(author, self.gtx['people'], self.gtx['contacts'])\n", (5124, 5174), False, 'from regolith.tools import all_docs_from_collection, get_pi_id, get_person_contact\n'), ((5207, 5275), 'regolith.tools.get_person_contact', 'get_person_contact', (['author', "self.gtx['people']", "self.gtx['contacts']"], {}), "(author, self.gtx['people'], self.gtx['contacts'])\n", (5225, 5275), False, 'from regolith.tools import all_docs_from_collection, get_pi_id, get_person_contact\n')] |
"""
## NAME
Arrays.py
## VERSION
[1.0]
## AUTHOR
<NAME> <<EMAIL>>
## DATE
[25/10/2021]
## GITHUB LINK
https://github.com/zara-ms/python_class/tree/master/Tareas-BioPython
"""
import numpy as np
# Create structured arrays specifying the data types
produccion = np.array([('Gen1', 30, 5), ('Gen2', 30, 11), ('Gen3', 30, 4), ('Gen4', 30, 2),
('Gen1', 35, 3), ('Gen2', 35, 7), ('Gen3', 35, 9), ('Gen4', 35, 6)],
dtype=[('gen', (np.str_, 5)), ('temperatura', np.int32), ('produccion', np.int32)])
inductor = np.array([('Gen1', 3.5), ('Gen2', 5), ('Gem3', 7), ('Gen4', 4.3)],
dtype=[('gen', np.str_, 5), ('costo', np.float64)])
costo = np.array([('Gen1', 30, 0.7), ('Gen2', 30, 0.45454545), ('Gen3', 30, 1.75), ('Gen4', 30, 2.15),
('Gen1', 35, 1.16666667), ('Gen2', 35, 0.71428571), ('Gen3', 35, 0.77777778),
('Gen4', 35, 0.71666667)],
dtype=[('gen', (np.str_, 5)), ('temperatura', np.int32), ('costo por 1g/L', np.float64)])
| [
"numpy.array"
] | [((287, 531), 'numpy.array', 'np.array', (["[('Gen1', 30, 5), ('Gen2', 30, 11), ('Gen3', 30, 4), ('Gen4', 30, 2), (\n 'Gen1', 35, 3), ('Gen2', 35, 7), ('Gen3', 35, 9), ('Gen4', 35, 6)]"], {'dtype': "[('gen', (np.str_, 5)), ('temperatura', np.int32), ('produccion', np.int32)]"}), "([('Gen1', 30, 5), ('Gen2', 30, 11), ('Gen3', 30, 4), ('Gen4', 30, \n 2), ('Gen1', 35, 3), ('Gen2', 35, 7), ('Gen3', 35, 9), ('Gen4', 35, 6)],\n dtype=[('gen', (np.str_, 5)), ('temperatura', np.int32), ('produccion',\n np.int32)])\n", (295, 531), True, 'import numpy as np\n'), ((576, 699), 'numpy.array', 'np.array', (["[('Gen1', 3.5), ('Gen2', 5), ('Gem3', 7), ('Gen4', 4.3)]"], {'dtype': "[('gen', np.str_, 5), ('costo', np.float64)]"}), "([('Gen1', 3.5), ('Gen2', 5), ('Gem3', 7), ('Gen4', 4.3)], dtype=[(\n 'gen', np.str_, 5), ('costo', np.float64)])\n", (584, 699), True, 'import numpy as np\n'), ((724, 1026), 'numpy.array', 'np.array', (["[('Gen1', 30, 0.7), ('Gen2', 30, 0.45454545), ('Gen3', 30, 1.75), ('Gen4', \n 30, 2.15), ('Gen1', 35, 1.16666667), ('Gen2', 35, 0.71428571), ('Gen3',\n 35, 0.77777778), ('Gen4', 35, 0.71666667)]"], {'dtype': "[('gen', (np.str_, 5)), ('temperatura', np.int32), ('costo por 1g/L', np.\n float64)]"}), "([('Gen1', 30, 0.7), ('Gen2', 30, 0.45454545), ('Gen3', 30, 1.75),\n ('Gen4', 30, 2.15), ('Gen1', 35, 1.16666667), ('Gen2', 35, 0.71428571),\n ('Gen3', 35, 0.77777778), ('Gen4', 35, 0.71666667)], dtype=[('gen', (np\n .str_, 5)), ('temperatura', np.int32), ('costo por 1g/L', np.float64)])\n", (732, 1026), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 25 09:46:47 2019
@author: yujijun
Description: This is a script to convert files from bigWig to BED format in kraken.
We can choose a different chromosome and resolution to generate different output files.
Input:
/homes/jjyu/higlass_projects/Input/DNase_bw_v1/*
Output:
/homes/jjyu/higlass_projects/Output/1308_manysample
"""
import pyBigWig
import numpy as np
import pandas as pd
import math
import datetime
import sys
#--------------hyperparameter--------------------
basic_path = '/homes/jjyu/higlass_projects'
Input_bw_path = '%s/Input/DNase_bw_v1' %basic_path
Output_path = '%s/Output/' %basic_path
filter_data = pd.read_csv('%s/Input/04-Homo_sapiens_ca_DNase_QC(906)_non-info-tissue_FRiP0p25(543).csv' % basic_path,sep=",")
seletedsample = pd.read_csv("%s/Input/seletedsample_recluster(222).txt" %basic_path, sep='\t',header=None, index_col=None)
allchrinfo = pd.read_csv("%s/Input/chromInfo.txt.allchr" %basic_path, sep='\t',header=None, index_col=None)
resolution = 1000
#------------------------------------------------
#all bw name list:
sample_list = seletedsample.iloc[:,0].tolist()
sample_ID = [sample.split(',')[3] for sample in sample_list]
bw_name = [sampleid + "_treat.bw" for sampleid in sample_ID]
#all chr name list:
allchrname_list = allchrinfo.iloc[0:24,0].tolist()
allchrname_length = allchrinfo.iloc[0:24,1].tolist()
#define a normalization function
def normalization(x):
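    # scale every bin by the overall mean so the normalized values average to 1 across the chromosome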
Whole_mean = sum(x)/len(x)
new_series = pd.Series(x)/Whole_mean
new_list = new_series.tolist()
return(new_list)
#test
bw_name = bw_name[0:222]
sample_list = sample_list[0:222]
print(int(sys.argv[1]))
allchrname_list = [[allchrname] for allchrname in allchrname_list] #this is just to run chromosomes one by one
allchrname_list = allchrname_list[int(sys.argv[1])]
allchrname_length = [[i] for i in allchrname_length]
allchrname_length = allchrname_length[int(sys.argv[1])]
for a in range(len(allchrname_list)):
starttime = datetime.datetime.now()
chromo = allchrname_list[0]
#Firsthalf
length_chromo = allchrname_length[0]
#Firsthalf_empty
df_Firsthalf_empty = pd.DataFrame(columns=["chr_name", "start", "end"])
df_Firsthalf_empty.chr_name = [chromo] * math.ceil(length_chromo/resolution)
df_Firsthalf_empty.start = np.arange(0,length_chromo,resolution)
df_Firsthalf_empty.end = np.append(np.arange(resolution,length_chromo,resolution),length_chromo)
#lasthalf_empty
df_lasthalf_empty = pd.DataFrame(columns = sample_list)
for b in range(len(bw_name)):
bw_file = bw_name[b]
print("This is %s.th file:%s in %s" %(b+1,bw_file,chromo))
#input_folder = 'dataset' + str(filter_data.DatsetId[i])
input_file = '%s/%s' %(Input_bw_path, bw_file)
try:
bw = pyBigWig.open(input_file)
except RuntimeError:
print("This is a error file.")
continue
if (chromo in bw.chroms().keys()):
chromo_values = bw.values(chromo,0,bw.chroms(chromo))
chromo_values_array = np.array(chromo_values)
chromo_values_array = np.nan_to_num(chromo_values_array)
chromo_values = chromo_values_array.tolist()
#print(chromo_values)
chromo_values_split16 = [chromo_values[i:i + resolution] for i in range(0, len(chromo_values), resolution)]
#chromo_values_split16_mean = [np.mean(x) for x in chromo_values_split16]
def listmean(x):
x_mean = np.mean(x)
return(x_mean)
chromo_values_split16_mean = list(map(listmean,chromo_values_split16))
chromo_values_split16_mean_norm = normalization(chromo_values_split16_mean) #add speed
#print(sum(chromo_values_split16_mean_norm))
#print(chromo_values_split16_mean_norm[0:10])
df_lasthalf_empty.iloc[:,b] = chromo_values_split16_mean_norm
bw.close()
print("\t")
else:
print("%s is not in the %s" %(chromo, bw))
df_lasthalf_empty.iloc[:,b] = [0]*math.ceil(length_chromo/resolution)
bw.close()
print("\t")
#concat two dataframe
df_merge = pd.concat([df_Firsthalf_empty, df_lasthalf_empty], axis=1)
print("This is the shape of %s matrix: %s" %(chromo,df_merge.shape))
df_merge.to_csv('%s/bedfile_1000_222_%s.csv' %(Output_path,chromo), sep='\t',index=False, header=True)
endtime = datetime.datetime.now()
print (endtime - starttime)
| [
"pandas.Series",
"numpy.mean",
"math.ceil",
"pyBigWig.open",
"pandas.read_csv",
"numpy.nan_to_num",
"datetime.datetime.now",
"numpy.array",
"pandas.DataFrame",
"pandas.concat",
"numpy.arange"
] | [((694, 816), 'pandas.read_csv', 'pd.read_csv', (["('%s/Input/04-Homo_sapiens_ca_DNase_QC(906)_non-info-tissue_FRiP0p25(543).csv'\n % basic_path)"], {'sep': '""","""'}), "(\n '%s/Input/04-Homo_sapiens_ca_DNase_QC(906)_non-info-tissue_FRiP0p25(543).csv'\n % basic_path, sep=',')\n", (705, 816), True, 'import pandas as pd\n'), ((822, 935), 'pandas.read_csv', 'pd.read_csv', (["('%s/Input/seletedsample_recluster(222).txt' % basic_path)"], {'sep': '"""\t"""', 'header': 'None', 'index_col': 'None'}), "('%s/Input/seletedsample_recluster(222).txt' % basic_path, sep=\n '\\t', header=None, index_col=None)\n", (833, 935), True, 'import pandas as pd\n'), ((942, 1043), 'pandas.read_csv', 'pd.read_csv', (["('%s/Input/chromInfo.txt.allchr' % basic_path)"], {'sep': '"""\t"""', 'header': 'None', 'index_col': 'None'}), "('%s/Input/chromInfo.txt.allchr' % basic_path, sep='\\t', header=\n None, index_col=None)\n", (953, 1043), True, 'import pandas as pd\n'), ((2011, 2034), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2032, 2034), False, 'import datetime\n'), ((2169, 2219), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['chr_name', 'start', 'end']"}), "(columns=['chr_name', 'start', 'end'])\n", (2181, 2219), True, 'import pandas as pd\n'), ((2332, 2371), 'numpy.arange', 'np.arange', (['(0)', 'length_chromo', 'resolution'], {}), '(0, length_chromo, resolution)\n', (2341, 2371), True, 'import numpy as np\n'), ((2515, 2548), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'sample_list'}), '(columns=sample_list)\n', (2527, 2548), True, 'import pandas as pd\n'), ((4244, 4302), 'pandas.concat', 'pd.concat', (['[df_Firsthalf_empty, df_lasthalf_empty]'], {'axis': '(1)'}), '([df_Firsthalf_empty, df_lasthalf_empty], axis=1)\n', (4253, 4302), True, 'import pandas as pd\n'), ((4497, 4520), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4518, 4520), False, 'import datetime\n'), ((1522, 1534), 'pandas.Series', 'pd.Series', (['x'], {}), '(x)\n', (1531, 1534), True, 'import pandas as pd\n'), ((2265, 2302), 'math.ceil', 'math.ceil', (['(length_chromo / resolution)'], {}), '(length_chromo / resolution)\n', (2274, 2302), False, 'import math\n'), ((2409, 2457), 'numpy.arange', 'np.arange', (['resolution', 'length_chromo', 'resolution'], {}), '(resolution, length_chromo, resolution)\n', (2418, 2457), True, 'import numpy as np\n'), ((2831, 2856), 'pyBigWig.open', 'pyBigWig.open', (['input_file'], {}), '(input_file)\n', (2844, 2856), False, 'import pyBigWig\n'), ((3093, 3116), 'numpy.array', 'np.array', (['chromo_values'], {}), '(chromo_values)\n', (3101, 3116), True, 'import numpy as np\n'), ((3151, 3185), 'numpy.nan_to_num', 'np.nan_to_num', (['chromo_values_array'], {}), '(chromo_values_array)\n', (3164, 3185), True, 'import numpy as np\n'), ((3537, 3547), 'numpy.mean', 'np.mean', (['x'], {}), '(x)\n', (3544, 3547), True, 'import numpy as np\n'), ((4112, 4149), 'math.ceil', 'math.ceil', (['(length_chromo / resolution)'], {}), '(length_chromo / resolution)\n', (4121, 4149), False, 'import math\n')] |
import traceback
from datetime import datetime
def print_graphql_exception(error):
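    # append a timestamp and the full traceback to the configured GraphQL error log file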
from tornado.options import options as opts
with open(opts.graphql_error_log_file, 'a') as file:
file.write(f"Date: {datetime.now().isoformat()}\n")
traceback.print_exception(None, error, error.__traceback__, file=file) | [
"datetime.datetime.now",
"traceback.print_exception"
] | [((259, 329), 'traceback.print_exception', 'traceback.print_exception', (['None', 'error', 'error.__traceback__'], {'file': 'file'}), '(None, error, error.__traceback__, file=file)\n', (284, 329), False, 'import traceback\n'), ((219, 233), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (231, 233), False, 'from datetime import datetime\n')] |
from matplotlib import pyplot as plt
import sys
import numpy as np
from collections import defaultdict
import pandas as pd
import os
transpose = {
'Googlenet': 'output/coco/transpose_cv/TP_Googlenet_256x192_d256_h1024_enc4_mh8/TP_Googlenet_256x192_d256_h1024_enc4_mh8_2022-01-09-00-29_train.log',
'InceptionV3': 'output/coco/transpose_cv/TP_InceptionV3_256x192_d256_h1024_enc4_mh8/TP_InceptionV3_256x192_d256_h1024_enc4_mh8_2022-01-09-00-29_train.log',
'LinearProjection': 'output/coco/transpose_cv/TP_LinearProjection_256x192_d256_h1024_enc4_mh8/TP_LinearProjection_256x192_d256_h1024_enc4_mh8_2022-01-12-00-24_train.log',
'MobileNetV1': 'output/coco/transpose_cv/TP_M_256x192_d256_h1024_enc3_mh8_M1/TP_M_256x192_d256_h1024_enc3_mh8_M1_2021-12-19-16-18_train.log',
'MobileNetV2': 'output/coco/transpose_cv/TP_M_256x192_d256_h1024_enc3_mh8_M2/TP_M_256x192_d256_h1024_enc3_mh8_M2_2021-12-15-23-02_train.log',
'MobileNetV3Small': 'output/coco/transpose_cv/TP_M_256x192_d256_h1024_enc3_mh8_M3L/TP_M_256x192_d256_h1024_enc3_mh8_M3L_2021-12-15-23-04_train.log',
'MobileNetV3Large': 'output/coco/transpose_cv/TP_M_256x192_d256_h1024_enc3_mh8_M3S/TP_M_256x192_d256_h1024_enc3_mh8_M3S_2021-12-15-23-03_train.log',
'ShuffleNetV2': 'output/coco/transpose_cv/TP_ShuffleNetV2_256x192_d256_h1024_enc4_mh8/TP_ShuffleNetV2_256x192_d256_h1024_enc4_mh8_2022-01-09-00-33_train.log',
'SqueezeNet': 'output/coco/transpose_cv/TP_SqueezeNet_256x192_d256_h1024_enc4_mh8/TP_SqueezeNet_256x192_d256_h1024_enc4_mh8_2022-01-09-00-31_train.log',
'Xception': 'output/coco/transpose_cv/TP_Xception_256x192_d256_h1024_enc4_mh8/TP_Xception_256x192_d256_h1024_enc4_mh8_2022-01-09-00-33_train.log',
'HRNet': 'output/coco/transpose_h/TP_H_w48_256x192_stage3_1_4_d96_h192_relu_enc3_mh1/TP_H_w48_256x192_stage3_1_4_d96_h192_relu_enc3_mh1_2021-12-30-15-07_train.log',
'ResNet50': 'output/coco/transpose_r/TP_R_256x192_d256_h1024_enc3_mh8/TP_R_256x192_d256_h1024_enc3_mh8_2021-12-30-15-05_train.log',
}
swin_transpose = {
'Googlenet': 'output/coco/swin_transpose/STP_Googlenet_256x192_d256_h1024_enc4_mh8/STP_Googlenet_256x192_d256_h1024_enc4_mh8_2022-01-16-07-01_train.log',
'InceptionV3': 'output/coco/swin_transpose/STP_InceptionV3_256x192_d256_h1024_enc4_mh8/STP_InceptionV3_256x192_d256_h1024_enc4_mh8_2022-01-15-09-56_train.log',
'LinearProjection': 'output/coco/swin_transpose/STP_LinearProjection_256x192_d256_h1024_enc4_mh8/STP_LinearProjection_256x192_d256_h1024_enc4_mh8_2022-01-16-10-09_train.log',
'MobileNetV1': 'output/coco/swin_transpose/STP_M_256x192_d256_h1024_enc3_mh8_M1/STP_M_256x192_d256_h1024_enc3_mh8_M1_2022-01-12-21-14_train.log',
'MobileNetV2': 'output/coco/swin_transpose/STP_M_256x192_d256_h1024_enc3_mh8_M2/STP_M_256x192_d256_h1024_enc3_mh8_M2_2022-01-12-21-14_train.log',
'MobileNetV3Small': 'output/coco/swin_transpose/STP_M_256x192_d256_h1024_enc3_mh8_M3S/STP_M_256x192_d256_h1024_enc3_mh8_M3S_2022-01-12-21-15_train.log',
'MobileNetV3Large': 'output/coco/swin_transpose/STP_M_256x192_d256_h1024_enc3_mh8_M3L/STP_M_256x192_d256_h1024_enc3_mh8_M3L_2022-01-12-21-15_train.log',
'ShuffleNetV2': 'output/coco/swin_transpose/STP_ShuffleNetV2_256x192_d256_h1024_enc4_mh8/STP_ShuffleNetV2_256x192_d256_h1024_enc4_mh8_2022-01-16-10-13_train.log',
'SqueezeNet': 'output/coco/swin_transpose/STP_SqueezeNet_256x192_d256_h1024_enc4_mh8/STP_SqueezeNet_256x192_d256_h1024_enc4_mh8_2022-01-15-10-01_train.log',
'Xception': 'output/coco/swin_transpose/STP_Xception_256x192_d256_h1024_enc4_mh8/STP_Xception_256x192_d256_h1024_enc4_mh8_2022-01-15-09-57_train.log',
'HRNet': 'output/coco/swin_transposeH/STP_H_w48_256x192_stage3_1_4_d96_h192_relu_enc3_mh1/STP_H_w48_256x192_stage3_1_4_d96_h192_relu_enc3_mh1_2022-01-16-07-28_train.log',
'ResNet50': 'output/coco/swin_transpose/STP_ResNet50_256x192_d256_h1024_enc4_mh8/STP_ResNet50_256x192_d256_h1024_enc4_mh8_2022-01-16-07-04_train.log',
}
tags = ['AP', 'Ap .5', 'AP .75', 'AP (M)', 'AP (L)', 'AR', 'AR .5', 'AR .75', 'AR (M)', 'AR (L)']
result = {}
def plot_figure(which):
plt_tags = {
'AP': 'b', 'AP (M)': 'r', 'AR': 'k', 'AR (M)': 'g'
}
plt_backbones = {
'MobileNetV1': 'b',
'MobileNetV3Large': 'k',
'ResNet50': 'r',
'Xception': 'g',
'HRNet': 'grey',
'LinearProjection': 'orange'
}
save_dir = f'output/images/{which}'
os.makedirs(save_dir, exist_ok=True)
print(f'=========== begin plot {which}, save to {save_dir} ==============')
for k in result:
plt.figure()
for tag in plt_tags:
plt.plot(result[k][tag], label=tag, color=plt_tags[tag])
plt.xlabel('epoch', fontsize=30)
plt.xticks([0, 40, 80, 120], fontsize=30)
plt.yticks(np.arange(0, 1, 0.2), fontsize=30)
# plt.legend(fontsize=20)
plt.savefig(f'{save_dir}/{k}.pdf', bbox_inches='tight', pad_inches=0)
for tag in plt_tags:
plt.figure()
for k in plt_backbones:
plt.plot(result[k][tag], label=k, color=plt_backbones[k])
plt.xlabel('epoch', fontsize=30)
plt.xticks([0, 40, 80, 120], fontsize=30)
plt.yticks(np.arange(0, 1, 0.2), fontsize=30)
# plt.legend(fontsize=20)
plt.savefig(f'{save_dir}/{tag}.pdf', bbox_inches='tight', pad_inches=0)
print(f'=========== finish plot {which}, save to {save_dir} ==============')
for which in ['transpose', 'swin_transpose']:
result = {}
if which == 'transpose':
files = transpose
else:
files = swin_transpose
for k, v in files.items():
speeds = []
d = defaultdict(list)
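        # parse the training log: collect samples/s throughput from progress lines and per-epoch COCO metrics from the result tables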
with open(v) as f:
for line in f:
if all([i in line for i in ['Epoch', 'Time', 'Speed', 'samples/s Data', 'Loss', 'Accuracy']]):
speeds.append(float(line.split('Speed')[1].split()[0].strip()))
if f'| {v.split("/")[2]} |' in line and len(d[tags[0]]) < 120:
line = line.strip().split('|')[:-1]
for i in range(-1, -len(tags)-1, -1):
d[tags[i]].append(float(line[i].strip()))
os.makedirs(f'output/data/{which}', exist_ok=True)
pd.DataFrame(d).to_csv(f'output/data/{which}/{k}.csv', index=None)
result[k] = d
# print(k, f'{np.mean(speeds[1:]):.1f}', *[d[t][-1] for t in ['AP', 'AP (M)', 'AR', 'AR (M)']], sep=' & ') # skip the first value to avoid cold start
plot_figure(which)
print()
tp_res = {}
for k, v in transpose.items():
speeds = []
d = defaultdict(list)
with open(v) as f:
for line in f:
if all([i in line for i in ['Epoch', 'Time', 'Speed', 'samples/s Data', 'Loss', 'Accuracy']]):
speeds.append(float(line.split('Speed')[1].split()[0].strip()))
if f'| {v.split("/")[2]} |' in line and len(d[tags[0]]) < 120:
line = line.strip().split('|')[:-1]
for i in range(-1, -len(tags)-1, -1):
d[tags[i]].append(float(line[i].strip()))
d['speed'] = np.mean(speeds[1:])
tp_res[k] = d
stp_res = {}
for k, v in swin_transpose.items():
speeds = []
d = defaultdict(list)
with open(v) as f:
for line in f:
if all([i in line for i in ['Epoch', 'Time', 'Speed', 'samples/s Data', 'Loss', 'Accuracy']]):
speeds.append(float(line.split('Speed')[1].split()[0].strip()))
if f'| {v.split("/")[2]} |' in line and len(d[tags[0]]) < 120:
line = line.strip().split('|')[:-1]
for i in range(-1, -len(tags)-1, -1):
d[tags[i]].append(float(line[i].strip()))
d['speed'] = np.mean(speeds[1:])
stp_res[k] = d
print(f'=========== begin draw table: impact of backbone ================')
for k in stp_res:
tp, stp = tp_res[k], stp_res[k]
print(k, f'{tp["speed"]:.1f}', *[f'{tp[t][-1]:.3f}' for t in ['AP', 'AP (M)', 'AR', 'AR (M)']], r'\\', sep=' & ')
print(f'=========== finish draw table: impact of backbone ================')
print()
print(f'=========== begin draw table: impact of transformer ================')
for k in stp_res:
tp, stp = tp_res[k], stp_res[k]
print(k, f'{stp["speed"]:.1f}({stp["speed"] - tp["speed"]:.1f})', *[f'{stp[t][-1]:.3f}({stp[t][-1]-tp[t][-1]:.3f})' for t in ['AP', 'AP (M)', 'AR', 'AR (M)']], r'\\', sep=' & ')
print(f'=========== finish draw table: impact of transformer ================')
| [
"numpy.mean",
"matplotlib.pyplot.savefig",
"os.makedirs",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"collections.defaultdict",
"pandas.DataFrame",
"numpy.arange"
] | [((4432, 4468), 'os.makedirs', 'os.makedirs', (['save_dir'], {'exist_ok': '(True)'}), '(save_dir, exist_ok=True)\n', (4443, 4468), False, 'import os\n'), ((6621, 6638), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6632, 6638), False, 'from collections import defaultdict\n'), ((7132, 7151), 'numpy.mean', 'np.mean', (['speeds[1:]'], {}), '(speeds[1:])\n', (7139, 7151), True, 'import numpy as np\n'), ((7244, 7261), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (7255, 7261), False, 'from collections import defaultdict\n'), ((7755, 7774), 'numpy.mean', 'np.mean', (['speeds[1:]'], {}), '(speeds[1:])\n', (7762, 7774), True, 'import numpy as np\n'), ((4579, 4591), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4589, 4591), True, 'from matplotlib import pyplot as plt\n'), ((4698, 4730), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {'fontsize': '(30)'}), "('epoch', fontsize=30)\n", (4708, 4730), True, 'from matplotlib import pyplot as plt\n'), ((4739, 4780), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 40, 80, 120]'], {'fontsize': '(30)'}), '([0, 40, 80, 120], fontsize=30)\n', (4749, 4780), True, 'from matplotlib import pyplot as plt\n'), ((4877, 4946), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{save_dir}/{k}.pdf"""'], {'bbox_inches': '"""tight"""', 'pad_inches': '(0)'}), "(f'{save_dir}/{k}.pdf', bbox_inches='tight', pad_inches=0)\n", (4888, 4946), True, 'from matplotlib import pyplot as plt\n'), ((4985, 4997), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4995, 4997), True, 'from matplotlib import pyplot as plt\n'), ((5108, 5140), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {'fontsize': '(30)'}), "('epoch', fontsize=30)\n", (5118, 5140), True, 'from matplotlib import pyplot as plt\n'), ((5149, 5190), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 40, 80, 120]'], {'fontsize': '(30)'}), '([0, 40, 80, 120], fontsize=30)\n', (5159, 5190), True, 'from matplotlib import pyplot as plt\n'), ((5287, 5358), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{save_dir}/{tag}.pdf"""'], {'bbox_inches': '"""tight"""', 'pad_inches': '(0)'}), "(f'{save_dir}/{tag}.pdf', bbox_inches='tight', pad_inches=0)\n", (5298, 5358), True, 'from matplotlib import pyplot as plt\n'), ((5676, 5693), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5687, 5693), False, 'from collections import defaultdict\n'), ((6210, 6260), 'os.makedirs', 'os.makedirs', (['f"""output/data/{which}"""'], {'exist_ok': '(True)'}), "(f'output/data/{which}', exist_ok=True)\n", (6221, 6260), False, 'import os\n'), ((4633, 4689), 'matplotlib.pyplot.plot', 'plt.plot', (['result[k][tag]'], {'label': 'tag', 'color': 'plt_tags[tag]'}), '(result[k][tag], label=tag, color=plt_tags[tag])\n', (4641, 4689), True, 'from matplotlib import pyplot as plt\n'), ((4800, 4820), 'numpy.arange', 'np.arange', (['(0)', '(1)', '(0.2)'], {}), '(0, 1, 0.2)\n', (4809, 4820), True, 'import numpy as np\n'), ((5042, 5099), 'matplotlib.pyplot.plot', 'plt.plot', (['result[k][tag]'], {'label': 'k', 'color': 'plt_backbones[k]'}), '(result[k][tag], label=k, color=plt_backbones[k])\n', (5050, 5099), True, 'from matplotlib import pyplot as plt\n'), ((5210, 5230), 'numpy.arange', 'np.arange', (['(0)', '(1)', '(0.2)'], {}), '(0, 1, 0.2)\n', (5219, 5230), True, 'import numpy as np\n'), ((6269, 6284), 'pandas.DataFrame', 'pd.DataFrame', (['d'], {}), '(d)\n', (6281, 6284), True, 'import pandas as pd\n')] |
# Copyright (c) 2018 <NAME>, <NAME>.
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License 2.0 which is available at
# http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
# which is available at https://www.apache.org/licenses/LICENSE-2.0.
#
# SPDX-License-Identifier: Apache-2.0
#
# Contributors: <NAME>, <NAME>
'''Haskell-style MVars using threading primitives.'''
import contextlib
import threading
import typing
T = typing.TypeVar('T')
class MVar(typing.Generic[T]):
'''
Haskell-style MVars.
At its core functionality are methods `take()`, `get()`, and `put()`,
which closely reflect their haskell counterparts.
This implementation also adds a few more methods which enable a more
pythonic style of programming.
In particular, `wait()` and `locked()` by supporting the context manager protocol
allow you to perform atomic operations on the MVar using `with` syntax.
Using `with` syntax on MVar itself is equivalent to using `locked()` without arguments.
It is safe to nest `with` syntax on the same MVar (it uses a recursive lock).
For most of the functions, the documentation indicates whether they are blocking or not;
this refers to the underlying recursive lock.
'''
__lock: threading.RLock
__condition: threading.Condition
__full: bool
__value: typing.Optional[T]
def __init__(self):
self.__lock = threading.RLock()
self.__condition = threading.Condition(lock=self.__lock)
self.__full = False
self.__value = None
def __enter__(self) -> typing.Tuple[bool, typing.Optional[T]]:
self.__lock.__enter__()
return (self.__full, self.__value)
def __exit__(self, *args):
return self.__lock.__exit__(*args)
def is_full(self) -> bool:
'''Whether the value is there (non-blocking)'''
return self.__full
def is_empty(self) -> bool:
'''Inverse of 'is_full' (non-blocking)'''
return not self.__full
def get(self, timeout: typing.Optional[float] = None) -> T:
'''
Wait for the value and get it without emptying the MVar (blocking).
Does not cause other waiting threads to wake up.
'''
with self.__lock:
if not self.__condition.wait_for(self.is_full, timeout=timeout):
raise TimeoutError('MVar.get: timeout')
return self.__value
def take(self, timeout: typing.Optional[float] = None) -> T:
'''
Wait for the value and take it (blocking).
Empties the MVar and wakes up all waiting threads.
'''
with self.__lock:
if not self.__condition.wait_for(self.is_full, timeout=timeout):
raise TimeoutError('MVar.take: timeout')
v = self.__value
self.__value = None
self.__full = False
self.__condition.notify_all()
return v
def take_nowait(self) -> T:
'''
Try to take the value without waiting (blocking).
Empties the MVar and wakes up all waiting threads if the MVar was full.
Otherwise, throws the ValueError.
'''
with self.__lock:
if self.is_empty():
raise ValueError("MVar.take_nowait: empty")
v = self.__value
self.__value = None
self.__full = False
self.__condition.notify_all()
return v
def put(self, value: T, timeout: typing.Optional[float] = None) -> None:
'''
Wait for MVar to become empty and put a value in there (blocking).
Wakes up all waiting threads.
'''
with self.__lock:
if not self.__condition.wait_for(self.is_empty, timeout=timeout):
raise TimeoutError('MVar.put: timeout')
self.__value = value
self.__full = True
self.__condition.notify_all()
@contextlib.contextmanager
def wait(self, timeout: typing.Optional[float] = None) -> bool:
'''
Wait for any changes in the state, or for the timeout event (blocking).
Returns whether it's awaken within the specified time.
'''
with self.__lock:
yield self.__condition.wait(timeout=timeout)
@contextlib.contextmanager
def locked(self, timeout: typing.Optional[float] = None) -> typing.Tuple[bool, typing.Optional[T]]:
'''
Use the MVar's lock independently of the MVar's state.
Returns a tuple (is_full, Optional[value]).
Note, the type of the value `T` may itself be optional,
that is why this method returns the tuple with the explicit indication of whether it's full.
'''
if self.__lock.acquire(True, **({'timeout': timeout} if timeout is not None else {})):
try:
yield (self.__full, self.__value)
finally:
self.__lock.release()
else:
raise TimeoutError('MVar.locked: timeout')
def notify_all(self) -> None:
'''Low-level function: wakeup all threads waiting for this MVar (blocking).'''
with self.__lock:
self.__condition.notify_all()
| [
"threading.Condition",
"threading.RLock",
"typing.TypeVar"
] | [((617, 636), 'typing.TypeVar', 'typing.TypeVar', (['"""T"""'], {}), "('T')\n", (631, 636), False, 'import typing\n'), ((1591, 1608), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (1606, 1608), False, 'import threading\n'), ((1636, 1673), 'threading.Condition', 'threading.Condition', ([], {'lock': 'self.__lock'}), '(lock=self.__lock)\n', (1655, 1673), False, 'import threading\n')] |
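# A minimal usage sketch for the MVar class above, assuming it is importable from a
# module named `mvar` (the module name is an assumption). One thread blocks in take()
# until another thread put()s a value, as described in the class docstring.
import threading
from mvar import MVar

box = MVar()

def consumer():
    # take() blocks until a producer puts a value, then empties the MVar
    print("got", box.take())

t = threading.Thread(target=consumer)
t.start()
box.put(42)   # fills the MVar and wakes the waiting consumer
t.join()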
# -*- coding: utf-8 -*-
import os
import csv
pathM =r"C:\Users\<NAME>as\Desktop\nofakes project\FALdetector-master\val\modified"
path =r"C:\Users\<NAME>as\Desktop\nofakes project\FALdetector-master\val\original"
with open('train.csv', 'w', newline='') as csvfile: #Loop through the directories and add all files matching *.png to the files list
files = []
for r,d,f in os.walk(path):
for _file in f:
if '.png' in _file:
files.append(_file)
writer = csv.writer(csvfile, delimiter=',') #Create a writer from csv module
    for f in files: #label the original images
        t=0 #0 marks an unmodified (original) image
        writer.writerow([f, t]) #write the row to train.csv
files = []
for r,d,f in os.walk(pathM):
for _file in f:
if '.png' in _file:
files.append(_file)
writer = csv.writer(csvfile, delimiter=',') #Create a writer from csv module
    for f in files: #label the modified images
        t=1 #1 marks a modified (fake) image
        writer.writerow([f, t]) #write the row to train.csv
| [
"csv.writer",
"os.walk"
] | [((372, 385), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (379, 385), False, 'import os\n'), ((502, 536), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (512, 536), False, 'import csv\n'), ((831, 845), 'os.walk', 'os.walk', (['pathM'], {}), '(pathM)\n', (838, 845), False, 'import os\n'), ((962, 996), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (972, 996), False, 'import csv\n')] |
from time import time
import requests
import asyncio
urls = ["https://www.naver.com", "http://www.google.com", "https://www.nytimes.com", "https://www.mlb.com", "https://www.kakaocorp.com"]
async def fetch(url):
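    # run the blocking requests.get in the default thread-pool executor so it does not block the event loop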
    response = await loop.run_in_executor(None, requests.get, url) # use run_in_executor
page = response.text
return "{0} Bytes".format(len(page))
async def main():
futures = [asyncio.ensure_future(fetch(url)) for url in urls]
    # build a list of task (future) objects
    result = await asyncio.gather(*futures) # gather all the results at once
print(result)
begin = time()
loop = asyncio.get_event_loop() # get the event loop
loop.run_until_complete(main()) # wait until main finishes
loop.close() # close the event loop
end = time()
print('Execution time: {0:.3f} s'.format(end - begin))
"asyncio.get_event_loop",
"time.time",
"asyncio.gather"
] | [((570, 576), 'time.time', 'time', ([], {}), '()\n', (574, 576), False, 'from time import time\n'), ((584, 608), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (606, 608), False, 'import asyncio\n'), ((708, 714), 'time.time', 'time', ([], {}), '()\n', (712, 714), False, 'from time import time\n'), ((501, 525), 'asyncio.gather', 'asyncio.gather', (['*futures'], {}), '(*futures)\n', (515, 525), False, 'import asyncio\n')] |
""" square root birch """
import time
import random
def main():
""" this program sexes up numbie wumbies """
x = int(input("Number: "))#Takes the number you want to square root
y = int(input("To which Number: "))#And to which power you want the root to be
z = random.randint(5, 10)#gets a random number
while True:
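        # nudge z by 0.0001 up or down until z**y equals x to 5 decimal places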
if round((z ** y), 5) == round(x, 5):#If the number works print it and end
z = round(z, 4)
z = str(z)
print(z.rstrip(z[:-4]))
return
elif round((z ** y), 5) <= round(x, 5):#if its to small add a small amount
z += .0001
print(z, end="\r")
elif round((z ** y), 5) >= round(x, 5):#if its to big subtract small amount
z -= .0001
print(z, end="\r")
time.sleep(.001)
if __name__ == "__main__":
main()
| [
"random.randint",
"time.sleep"
] | [((278, 299), 'random.randint', 'random.randint', (['(5)', '(10)'], {}), '(5, 10)\n', (292, 299), False, 'import random\n'), ((816, 833), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (826, 833), False, 'import time\n')] |
# pylint: disable=missing-docstring
from __future__ import absolute_import, division, print_function, unicode_literals
from resdk.analysis import differential_expressions
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestExpressions(BaseResdkFunctionalTest):
def test_cuffdiff(self):
collection_1 = self.res.collection.create(name='Test collection 1')
collection_2 = self.res.collection.create(name='Test collection 2')
# pylint: disable=unbalanced-tuple-unpacking
cuffquant_1, cuffquant_2, cuffquant_3, cuffquant_4 = self.get_cuffquants(4, collection_1)
self.get_cuffquants(1, collection_1)
cuffquant_6, cuffquant_7, cuffquant_8, cuffquant_9 = self.get_cuffquants(4, collection_2)
cuffquant_10, cuffquant_11 = self.get_cuffquants(2, collection_2)
# pylint: enable=unbalanced-tuple-unpacking
gff = self.get_gtf()
relation = collection_1.create_compare_relation(
samples=[
cuffquant_1.sample,
cuffquant_2.sample,
cuffquant_3.sample,
cuffquant_4.sample,
],
positions=['control', 'control', 'case', 'case'],
label='case-control'
)
collection_2.create_compare_relation(
samples=[
cuffquant_6.sample,
cuffquant_7.sample,
cuffquant_8.sample,
cuffquant_9.sample,
],
positions=['control', 'control', 'case', 'case'],
label='case-control'
)
collection_2.create_compare_relation(
samples=[
cuffquant_6.sample,
cuffquant_7.sample,
cuffquant_10.sample,
cuffquant_11.sample,
],
positions=['control', 'control', 'case', 'case'],
label='case-control'
)
samples = [
cuffquant_1.sample,
cuffquant_2.sample,
cuffquant_3.sample,
cuffquant_4.sample,
]
# Run cuffdiff on a collection (one sample not in any relation)
cuffdiff = collection_1.run_cuffdiff(annotation=gff)
self.assertEqual(len(cuffdiff), 1)
self.assertEqual(cuffdiff[0].input['case'], [cuffquant_3.id, cuffquant_4.id])
self.assertEqual(cuffdiff[0].input['control'], [cuffquant_1.id, cuffquant_2.id])
self.assertEqual(cuffdiff[0].input['annotation'], gff.id)
# Run cuffdiff on a relation
cuffdiff = relation.run_cuffdiff(annotation=gff)
self.assertEqual(len(cuffdiff), 1)
self.assertEqual(cuffdiff[0].input['case'], [cuffquant_3.id, cuffquant_4.id])
self.assertEqual(cuffdiff[0].input['control'], [cuffquant_1.id, cuffquant_2.id])
self.assertEqual(cuffdiff[0].input['annotation'], gff.id)
# Run cuffdiff on a list of samples
cuffdiff = differential_expressions.cuffdiff(samples, annotation=gff)
self.assertEqual(len(cuffdiff), 1)
self.assertEqual(cuffdiff[0].input['case'], [cuffquant_3.id, cuffquant_4.id])
self.assertEqual(cuffdiff[0].input['control'], [cuffquant_1.id, cuffquant_2.id])
self.assertEqual(cuffdiff[0].input['annotation'], gff.id)
# Run cuffdiff on a collection (two different relation)
cuffdiff = collection_2.run_cuffdiff(annotation=gff)
self.assertEqual(len(cuffdiff), 2)
self.assertEqual(cuffdiff[0].input['case'], [cuffquant_8.id, cuffquant_9.id])
self.assertEqual(cuffdiff[0].input['control'], [cuffquant_6.id, cuffquant_7.id])
self.assertEqual(cuffdiff[1].input['case'], [cuffquant_10.id, cuffquant_11.id])
self.assertEqual(cuffdiff[1].input['control'], [cuffquant_6.id, cuffquant_7.id])
self.assertEqual(cuffdiff[0].input['annotation'], gff.id)
self.assertEqual(cuffdiff[1].input['annotation'], gff.id)
| [
"resdk.analysis.differential_expressions.cuffdiff"
] | [((2950, 3008), 'resdk.analysis.differential_expressions.cuffdiff', 'differential_expressions.cuffdiff', (['samples'], {'annotation': 'gff'}), '(samples, annotation=gff)\n', (2983, 3008), False, 'from resdk.analysis import differential_expressions\n')] |
import os
from flask import Flask, request
from flask_restful import Resource, reqparse
from com.medicom.health.diabetes.services.file_handler import FileHandler
class UserFileuploadscontroller(Resource):
def get(self):
return ''
def post(self):
print("UserFileuploadscontroller post called")
FileHandler().save(request)
return "file successfully saved" | [
"com.medicom.health.diabetes.services.file_handler.FileHandler"
] | [((328, 341), 'com.medicom.health.diabetes.services.file_handler.FileHandler', 'FileHandler', ([], {}), '()\n', (339, 341), False, 'from com.medicom.health.diabetes.services.file_handler import FileHandler\n')] |
from pyrsistent import freeze, m
from testtools import TestCase
from testtools.matchers import Contains, Equals, Is, MatchesListwise
from testtools.twistedsupport import succeeded
from twisted.python.failure import Failure
from fugue.adapters.nevow import nevow_adapter_resource
from fugue.interceptors import before, handler
from fugue.interceptors.nevow import NEVOW_REQUEST
from fugue.test.interceptors.test_nevow import fake_nevow_request
from fugue.test.util import depends_on
from fugue.util import constantly
def ok(body, headers=m(), status=200):
return m(status=status, body=body, headers=freeze(headers))
class NevowAdapterResourceTests(TestCase):
"""
Tests for `nevow_adapter_resource`.
"""
@depends_on('nevow')
def test_leaf(self):
"""
The resource identifies as a leaf.
"""
resource = nevow_adapter_resource()
self.assertThat(
resource.locateChild(None, None),
MatchesListwise([
Is(resource),
Equals(())]))
@depends_on('nevow')
def test_nevow_request(self):
"""
        Rendering the resource returns a successful deferred and inserts a
`NEVOW_REQUEST` value into the context.
"""
def _spy(res):
def _spy_inner(context):
res.append(context)
return context
return before(_spy_inner)
requests = []
resource = nevow_adapter_resource([_spy(requests)])
req = fake_nevow_request()
self.assertThat(
resource.renderHTTP(req),
succeeded(Equals(b'')))
self.assertThat(
requests,
MatchesListwise([Contains(NEVOW_REQUEST)]))
@depends_on('nevow')
def test_body_status(self):
"""
Write a response body and status to the Nevow request.
"""
resource = nevow_adapter_resource(
[handler(lambda _: ok(b'Hello world!', status=201))])
req = fake_nevow_request()
self.assertThat(
resource.renderHTTP(req),
succeeded(Equals(b'')))
self.assertThat(
req.code,
Equals(201))
req.channel.transport.written.seek(0)
self.assertThat(
req.channel.transport.written.read(),
Contains(b'Hello world!'))
@depends_on('nevow')
def test_response_headers(self):
"""
Write response headers to the Nevow request.
"""
resource = nevow_adapter_resource(
[handler(lambda _: ok(b'', headers={b'X-Foo': [b'foo'],
b'X-Bar': b'bar'}))])
req = fake_nevow_request()
self.assertThat(
resource.renderHTTP(req),
succeeded(Equals(b'')))
self.assertThat(
req.responseHeaders.getRawHeaders(b'X-Foo'),
Equals([b'foo']))
self.assertThat(
req.responseHeaders.getRawHeaders(b'X-Bar'),
Equals([b'bar']))
@depends_on('nevow')
def test_error(self):
"""
If an exception is unhandled, set the response body and status
accordingly.
"""
f = Failure(RuntimeError('Nope'))
resource = nevow_adapter_resource([before(constantly(f))])
req = fake_nevow_request()
self.assertThat(
resource.renderHTTP(req),
succeeded(Equals(b'')))
req.channel.transport.written.seek(0)
self.assertThat(
req.channel.transport.written.read(),
Contains(b'Internal server error: exception'))
self.assertThat(
req.code,
Equals(500))
| [
"pyrsistent.freeze",
"testtools.matchers.Is",
"pyrsistent.m",
"fugue.util.constantly",
"fugue.interceptors.before",
"testtools.matchers.Equals",
"fugue.adapters.nevow.nevow_adapter_resource",
"testtools.matchers.Contains",
"fugue.test.interceptors.test_nevow.fake_nevow_request",
"fugue.test.util.depends_on"
] | [((540, 543), 'pyrsistent.m', 'm', ([], {}), '()\n', (541, 543), False, 'from pyrsistent import freeze, m\n'), ((728, 747), 'fugue.test.util.depends_on', 'depends_on', (['"""nevow"""'], {}), "('nevow')\n", (738, 747), False, 'from fugue.test.util import depends_on\n'), ((1051, 1070), 'fugue.test.util.depends_on', 'depends_on', (['"""nevow"""'], {}), "('nevow')\n", (1061, 1070), False, 'from fugue.test.util import depends_on\n'), ((1744, 1763), 'fugue.test.util.depends_on', 'depends_on', (['"""nevow"""'], {}), "('nevow')\n", (1754, 1763), False, 'from fugue.test.util import depends_on\n'), ((2364, 2383), 'fugue.test.util.depends_on', 'depends_on', (['"""nevow"""'], {}), "('nevow')\n", (2374, 2383), False, 'from fugue.test.util import depends_on\n'), ((3043, 3062), 'fugue.test.util.depends_on', 'depends_on', (['"""nevow"""'], {}), "('nevow')\n", (3053, 3062), False, 'from fugue.test.util import depends_on\n'), ((859, 883), 'fugue.adapters.nevow.nevow_adapter_resource', 'nevow_adapter_resource', ([], {}), '()\n', (881, 883), False, 'from fugue.adapters.nevow import nevow_adapter_resource\n'), ((1515, 1535), 'fugue.test.interceptors.test_nevow.fake_nevow_request', 'fake_nevow_request', ([], {}), '()\n', (1533, 1535), False, 'from fugue.test.interceptors.test_nevow import fake_nevow_request\n'), ((2006, 2026), 'fugue.test.interceptors.test_nevow.fake_nevow_request', 'fake_nevow_request', ([], {}), '()\n', (2024, 2026), False, 'from fugue.test.interceptors.test_nevow import fake_nevow_request\n'), ((2693, 2713), 'fugue.test.interceptors.test_nevow.fake_nevow_request', 'fake_nevow_request', ([], {}), '()\n', (2711, 2713), False, 'from fugue.test.interceptors.test_nevow import fake_nevow_request\n'), ((3328, 3348), 'fugue.test.interceptors.test_nevow.fake_nevow_request', 'fake_nevow_request', ([], {}), '()\n', (3346, 3348), False, 'from fugue.test.interceptors.test_nevow import fake_nevow_request\n'), ((605, 620), 'pyrsistent.freeze', 'freeze', (['headers'], {}), '(headers)\n', (611, 620), False, 'from pyrsistent import freeze, m\n'), ((1399, 1417), 'fugue.interceptors.before', 'before', (['_spy_inner'], {}), '(_spy_inner)\n', (1405, 1417), False, 'from fugue.interceptors import before, handler\n'), ((2185, 2196), 'testtools.matchers.Equals', 'Equals', (['(201)'], {}), '(201)\n', (2191, 2196), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((2331, 2356), 'testtools.matchers.Contains', 'Contains', (["b'Hello world!'"], {}), "(b'Hello world!')\n", (2339, 2356), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((2907, 2923), 'testtools.matchers.Equals', 'Equals', (["[b'foo']"], {}), "([b'foo'])\n", (2913, 2923), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((3019, 3035), 'testtools.matchers.Equals', 'Equals', (["[b'bar']"], {}), "([b'bar'])\n", (3025, 3035), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((3581, 3626), 'testtools.matchers.Contains', 'Contains', (["b'Internal server error: exception'"], {}), "(b'Internal server error: exception')\n", (3589, 3626), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((3687, 3698), 'testtools.matchers.Equals', 'Equals', (['(500)'], {}), '(500)\n', (3693, 3698), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((1621, 1632), 'testtools.matchers.Equals', 'Equals', (["b''"], {}), "(b'')\n", (1627, 1632), False, 'from testtools.matchers import Contains, 
Equals, Is, MatchesListwise\n'), ((2112, 2123), 'testtools.matchers.Equals', 'Equals', (["b''"], {}), "(b'')\n", (2118, 2123), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((2799, 2810), 'testtools.matchers.Equals', 'Equals', (["b''"], {}), "(b'')\n", (2805, 2810), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((3434, 3445), 'testtools.matchers.Equals', 'Equals', (["b''"], {}), "(b'')\n", (3440, 3445), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((1001, 1013), 'testtools.matchers.Is', 'Is', (['resource'], {}), '(resource)\n', (1003, 1013), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((1031, 1041), 'testtools.matchers.Equals', 'Equals', (['()'], {}), '(())\n', (1037, 1041), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((1711, 1734), 'testtools.matchers.Contains', 'Contains', (['NEVOW_REQUEST'], {}), '(NEVOW_REQUEST)\n', (1719, 1734), False, 'from testtools.matchers import Contains, Equals, Is, MatchesListwise\n'), ((3297, 3310), 'fugue.util.constantly', 'constantly', (['f'], {}), '(f)\n', (3307, 3310), False, 'from fugue.util import constantly\n')] |
import re
import ruamel.yaml
from typing import Optional
yaml = ruamel.yaml.YAML()
def lights_manipulations(room: Optional[str] = None, todo: Optional[str] = None):
if room is None and todo is None:
return
with open('home_devices/light_groups.yaml') as f:
data = yaml.load(f)
changes = False
for group in data:
if bool(re.compile(room, re.I).search(group['name'])) or room == 'all':
for light in group['entities']:
key = list(light.keys())[0]
# check if the light is dimmable
if not bool(re.compile(r'dimmable', re.I).search(key)):
light[key] = todo
changes = True
else:
# full brightness if turning on
light[key] = 100 if todo == 'on' else 0
changes = True
if changes:
with open('home_devices/light_groups.yaml', 'w') as f:
yaml.dump(data, f)
def dimmable_lights_manipulations(room: Optional[str] = None, todo: Optional[int] = None):
if room is None and todo is None:
return
with open('home_devices/light_groups.yaml') as f:
data = yaml.load(f)
changes = False
for group in data:
if bool(re.compile(room, re.I).search(group['name'])):
for light in group['entities']:
key = list(light.keys())[0]
# check if the light is dimmable
if bool(re.compile(r'dimmable', re.I).search(key)):
if todo is None:
return True
light[key] = todo
changes = True
if todo is None:
return False
if changes:
with open('home_devices/light_groups.yaml', 'w') as f:
yaml.dump(data, f)
def check_dimmable(room) -> bool:
return dimmable_lights_manipulations(room=room) if room else False
def set_the_temp(room: Optional[str] = None, todo: Optional[int] = None):
if room is None and todo is None:
return
with open('home_devices/climate_group.yaml') as f:
data = yaml.load(f)
changes = False
for group in data:
if bool(re.compile(room, re.I).search(group['name'])):
for temp in group['entities']:
key = list(temp.keys())[0]
temp[key] = todo
changes = True
if changes:
with open('home_devices/climate_group.yaml', 'w') as f:
yaml.dump(data, f)
def heat_cool_the_temp(room: Optional[str] = None, todo: Optional[str] = None):
if room is None:
return "Unavailable"
with open('home_devices/climate_group.yaml') as f1:
data = yaml.load(f1)
with open('home_devices/temperature_sensors.yaml') as f2:
sensors = yaml.load(f2)
changes = False
cur_temp = None
for area in sensors:
if bool(re.compile(room, re.I).search(area['name'])):
cur_temp = int(area['temperature'])
if cur_temp is None:
return "Unavailable"
for group in data:
if bool(re.compile(room, re.I).search(group['name'])):
for temp in group['entities']:
key = list(temp.keys())[0]
temp[key] = cur_temp + 5 if todo == "heat" else cur_temp - 5
changes = True
if changes:
with open('home_devices/climate_group.yaml', 'w') as f:
yaml.dump(data, f)
return cur_temp + 5 if todo == "heat" else cur_temp - 5
def heat_floor(room: Optional[str] = None):
if room is None:
return
with open('home_devices/climate_group.yaml') as f:
data = yaml.load(f)
for group in data:
if bool(re.compile(room, re.I).search(group['name'])):
for temp in group['entities']:
key = list(temp.keys())[0]
temp[key] = 35
with open('home_devices/climate_group.yaml', 'w') as f:
yaml.dump(data, f)
| [
"re.compile"
] | [((1274, 1296), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (1284, 1296), False, 'import re\n'), ((2202, 2224), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (2212, 2224), False, 'import re\n'), ((2904, 2926), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (2914, 2926), False, 'import re\n'), ((3093, 3115), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (3103, 3115), False, 'import re\n'), ((3712, 3734), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (3722, 3734), False, 'import re\n'), ((362, 384), 're.compile', 're.compile', (['room', 're.I'], {}), '(room, re.I)\n', (372, 384), False, 'import re\n'), ((1482, 1510), 're.compile', 're.compile', (['"""dimmable"""', 're.I'], {}), "('dimmable', re.I)\n", (1492, 1510), False, 'import re\n'), ((591, 619), 're.compile', 're.compile', (['"""dimmable"""', 're.I'], {}), "('dimmable', re.I)\n", (601, 619), False, 'import re\n')] |
# Description
"""
MSF dataclasses and marshmallow schemas
Author: <NAME>
License: MIT
Copyright 2021, Python Metasploit Library
"""
# Import
from typing import List, Dict, Optional, Union
from dataclasses import dataclass, field
from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE
from marshmallow import Schema as MarshmallowSchema
from datetime import datetime
from ipaddress import IPv4Address, IPv6Address, ip_address
from pathlib import Path
from os import path
from configparser import ConfigParser
# Authorship information
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, Python Metasploit Library"
__credits__ = [""]
__license__ = "MIT"
__version__ = "0.2.4"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class Msf:
@dataclass
class Config:
file: str = f"{str(Path.home())}/.msf4/config"
url: Optional[str] = None
cert: Optional[str] = None
skip_verify: bool = False
api_token: Optional[str] = None
class Schema(MarshmallowSchema):
url = fields.String(missing=None)
cert = fields.String(missing=None)
skip_verify = fields.Boolean(missing=False)
api_token = fields.String(missing=None)
@post_load
def make_config(self, data, **kwargs):
return Msf.Config(**data)
@dataclass
class Workspace:
id: int = 1
name: str = "default"
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
boundary: Optional[str] = None
description: Optional[str] = None
owner_id: Optional[str] = None
limit_to_network: bool = False
import_fingerprint: bool = False
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
name = fields.String(required=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
boundary = fields.String(missing=None)
description = fields.String(missing=None)
owner_id = fields.String(missing=None, load_only=True)
limit_to_network = fields.Boolean(missing=False)
import_fingerprint = fields.Boolean(missing=False)
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_workspace(self, data, **kwargs):
return Msf.Workspace(**data)
@dataclass
class Host:
id: int = -1
workspace: Optional[str] = None
workspace_id: int = 1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
host: Optional[str] = None
address: Union[None, IPv4Address, IPv6Address] = None
mac: Optional[str] = None
comm: Optional[str] = None
name: Optional[str] = None
state: Optional[str] = None
os_name: Optional[str] = None
os_flavor: Optional[str] = None
os_sp: Optional[str] = None
os_lang: Optional[str] = None
arch: Optional[str] = None
purpose: Optional[str] = None
info: Optional[str] = None
comments: Optional[str] = None
scope: Optional[str] = None
virtual_host: Optional[str] = None
note_count: int = 0
vuln_count: int = 0
service_count: int = 0
host_detail_count: int = 0
exploit_attempt_count: int = 0
cred_count: int = 0
detected_arch: Optional[str] = None
os_family: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace = fields.String(dump_only=True)
workspace_id = fields.Integer(missing=1, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
host = fields.String(missing=None)
address = fields.String(missing=None, load_only=True)
mac = fields.String(missing=None)
comm = fields.String(missing=None)
name = fields.String(missing=None)
state = fields.String(missing=None)
os_name = fields.String(missing=None)
os_flavor = fields.String(missing=None)
os_sp = fields.String(missing=None)
os_lang = fields.String(missing=None)
arch = fields.String(missing=None)
purpose = fields.String(missing=None)
info = fields.String(missing=None)
comments = fields.String(missing=None)
scope = fields.String(missing=None)
virtual_host = fields.String(missing=None)
note_count = fields.Integer(missing=0, load_only=True)
vuln_count = fields.Integer(missing=0, load_only=True)
service_count = fields.Integer(missing=0, load_only=True)
host_detail_count = fields.Integer(missing=0, load_only=True)
exploit_attempt_count = fields.Integer(missing=0, load_only=True)
cred_count = fields.Integer(missing=0, load_only=True)
detected_arch = fields.String(missing=None)
os_family = fields.String(missing=None)
@pre_dump(pass_many=False)
def pre_dump_host(self, data, **kwargs):
if data.host is None:
data.host = data.address
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_host(self, data, **kwargs):
host = Msf.Host(**data)
host.address = ip_address(host.address)
return host
@dataclass
class Service:
id: int = -1
workspace: Optional[str] = None
workspace_id: int = 1
host: Union[None, IPv4Address, IPv6Address, "Msf.Host"] = None
host_id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
port: int = -1
proto: Optional[str] = None
state: Optional[str] = None
name: Optional[str] = None
info: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace = fields.String(dump_only=True)
workspace_id = fields.Integer(missing=1, load_only=True)
host = fields.Raw(missing=None)
host_id = fields.Integer(missing=-1, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
port = fields.Integer(missing=-1)
proto = fields.String(missing=None)
state = fields.String(missing=None)
name = fields.String(missing=None)
info = fields.String(missing=None)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.host, IPv4Address) or isinstance(
data.host, IPv6Address
):
data.host = str(data.host)
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_service(self, data, **kwargs):
result = Msf.Service(**data)
if isinstance(result.host, Dict):
result.host = Msf.Host.Schema().load(result.host)
return result
@dataclass
class Vuln:
id: int = -1
workspace: Optional[str] = None
workspace_id: int = 1
host: Union[None, IPv4Address, IPv6Address, "Msf.Host"] = None
host_id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
port: int = -1
service_id: int = -1
name: Optional[str] = None
info: Optional[str] = None
exploited_at: Optional[datetime] = None
vuln_detail_count: int = 0
vuln_attempt_count: int = 0
origin_id: Optional[str] = None
origin_type: Optional[str] = None
refs: Optional[List] = field(default_factory=lambda: [])
module_refs: Optional[List] = field(default_factory=lambda: [])
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace = fields.String(dump_only=True)
workspace_id = fields.Integer(missing=1, load_only=True)
host = fields.Raw(missing=None)
host_id = fields.Integer(missing=-1, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
port = fields.Integer(missing=-1)
service_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
name = fields.String(missing=None)
info = fields.String(missing=None)
exploited_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
vuln_detail_count = fields.Integer(missing=0, load_only=True)
vuln_attempt_count = fields.Integer(missing=0, load_only=True)
origin_id = fields.String(missing=None)
origin_type = fields.String(missing=None)
refs = fields.List(fields.String, missing=[])
module_refs = fields.List(fields.String, missing=[], load_only=True)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.host, IPv4Address) or isinstance(
data.host, IPv6Address
):
data.host = str(data.host)
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@pre_load(pass_many=False)
def pre_load_vuln(self, data, **kwargs):
result = data.copy()
result["refs"] = list()
result["module_refs"] = list()
for reference in data["refs"]:
if "name" in reference:
result["refs"].append(reference["name"])
for reference in data["module_refs"]:
if "name" in reference:
result["refs"].append(reference["name"])
return result
@post_load
def make_vuln(self, data, **kwargs):
result = Msf.Vuln(**data)
if isinstance(result.host, Dict):
result.host = Msf.Host.Schema().load(result.host)
return result
@dataclass
class Loot:
id: int = -1
workspace: Optional[str] = None
workspace_id: int = 1
host: Union[None, IPv4Address, IPv6Address, "Msf.Host"] = None
host_id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
port: int = -1
service_id: int = -1
ltype: Optional[str] = None
path: Optional[str] = None
data: Optional[str] = None
content_type: Optional[str] = None
name: Optional[str] = None
info: Optional[str] = None
module_run_id: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True, allow_none=True)
workspace = fields.String(dump_only=True)
workspace_id = fields.Integer(missing=1, load_only=True)
host = fields.Raw(missing=None)
host_id = fields.Integer(missing=-1, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
port = fields.Integer(missing=-1)
service_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
ltype = fields.String(missing=None)
path = fields.String(missing=None)
data = fields.String(missing=None)
content_type = fields.String(missing=None)
name = fields.String(missing=None)
info = fields.String(missing=None)
module_run_id = fields.String(missing=None)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.host, IPv4Address) or isinstance(
data.host, IPv6Address
):
data.host = str(data.host)
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_loot(self, data, **kwargs):
result = Msf.Loot(**data)
if isinstance(result.host, Dict):
result.host = Msf.Host.Schema().load(result.host)
return result
@dataclass
class Note:
id: int = -1
workspace: Optional[str] = None
workspace_id: int = 1
host: Union[None, IPv4Address, IPv6Address, "Msf.Host"] = None
host_id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
port: int = -1
service_id: int = -1
vuln_id: int = -1
ntype: Optional[str] = None
data: Optional[str] = None
critical: bool = False
seen: bool = False
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace = fields.String(dump_only=True)
workspace_id = fields.Integer(missing=1, load_only=True)
host = fields.Raw(missing=None)
host_id = fields.Integer(missing=-1, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
port = fields.Integer(missing=-1, allow_none=True)
service_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
vuln_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
ntype = fields.String(missing=None)
data = fields.String(missing=None)
critical = fields.Boolean(missing=False, allow_none=True)
seen = fields.Boolean(missing=False, allow_none=True)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.host, IPv4Address) or isinstance(
data.host, IPv6Address
):
data.host = str(data.host)
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@pre_load(pass_many=False)
def pre_make_note(self, data, many, **kwargs):
if "data" in data:
if isinstance(data["data"], Dict):
if "output" in data["data"]:
data["data"] = str(data["data"]["output"])
else:
data["data"] = str(data["data"])
else:
data["data"] = str(data["data"])
return data
@post_load
def make_note(self, data, **kwargs):
result = Msf.Note(**data)
if isinstance(result.host, Dict):
result.host = Msf.Host.Schema().load(result.host)
return result
@dataclass
class Cred:
id: int = -1
workspace_id: int = 1
username: Optional[str] = None
private_data: Optional[str] = None
private_type: Optional[str] = None
jtr_format: Optional[str] = None
address: Union[None, IPv4Address, IPv6Address] = None
port: int = -1
service_name: Optional[str] = None
protocol: Optional[str] = None
origin_type: str = "service"
module_fullname: Optional[str] = None
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
origin_id: int = -1
private_id: int = -1
public_id: int = -1
realm_id: int = -1
logins_count: int = -1
logins: Optional[List["Msf.Login"]] = field(default_factory=lambda: [])
public: Optional["Msf.Public"] = None
private: Optional["Msf.Private"] = None
origin: Optional["Msf.Origin"] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace_id = fields.Integer(required=True)
username = fields.String(missing=None)
private_data = fields.String(missing=None)
private_type = fields.String(missing=None)
jtr_format = fields.String(missing=None)
address = fields.String(missing=None)
port = fields.Integer(missing=-1)
service_name = fields.String(missing=None)
protocol = fields.String(missing=None)
origin_type = fields.String(missing="service")
module_fullname = fields.String(missing=None)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
origin_id = fields.Integer(missing=-1, load_only=True)
private_id = fields.Integer(missing=-1, load_only=True)
public_id = fields.Integer(missing=-1, load_only=True)
realm_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
logins_count = fields.Integer(missing=-1, load_only=True)
logins = fields.Nested(
lambda: Msf.Login.Schema, many=True, missing=[], load_only=True
)
public = fields.Nested(
lambda: Msf.Public.Schema, many=False, missing=None, load_only=True
)
private = fields.Nested(
lambda: Msf.Private.Schema, many=False, missing=None, load_only=True
)
origin = fields.Nested(
lambda: Msf.Origin.Schema, many=False, missing=None, load_only=True
)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.address, IPv4Address) or isinstance(
data.address, IPv6Address
):
data.address = str(data.address)
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_cred(self, data, **kwargs):
return Msf.Cred(**data)
@dataclass
class Login:
id: int = -1
workspace_id: int = 1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
last_attempted_at: Optional[datetime] = None
core_id: int = -1
service_id: int = -1
address: Optional[str] = None
service_name: str = "ssh"
port: int = -1
protocol: str = "tcp"
status: str = "Successful"
access_level: Optional[str] = None
public: Optional[str] = None
private: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
workspace_id = fields.Integer(required=True, dump_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
last_attempted_at = fields.DateTime("%Y-%m-%dT%H:%M:%S.%fZ", missing=None)
core_id = fields.Integer(missing=-1)
service_id = fields.Integer(missing=-1, load_only=True, allow_none=True)
address = fields.String(missing=None)
service_name = fields.String(missing=None)
port = fields.Integer(missing=-1)
protocol = fields.String(missing=None)
status = fields.String(missing=None)
access_level = fields.String(missing=None)
public = fields.String(missing=None, load_only=True)
private = fields.String(missing=None, load_only=True)
@pre_dump(pass_many=False)
def convert_host_address_to_string(self, data, many, **kwargs):
if isinstance(data.address, IPv4Address) or isinstance(
data.address, IPv6Address
):
data.address = str(data.address)
return data
@post_dump(pass_many=False)
def post_dump_login(self, data, many, **kwargs):
if "core_id" in data:
data["core"] = {"id": data["core_id"]}
del data["core_id"]
return data
@post_dump(pass_many=False)
def clean_missing_fields(self, data, many, **kwargs):
clean_data = data.copy()
for key in filter(lambda key: data[key] is None, data):
del clean_data[key]
for key in filter(lambda key: data[key] == -1, data):
del clean_data[key]
return clean_data
@post_load
def make_login(self, data, **kwargs):
return Msf.Login(**data)
@dataclass
class Public:
id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
username: Optional[str] = None
type: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
username = fields.String(missing=None, load_only=True)
type = fields.String(missing=None, load_only=True)
@post_load
def make_public(self, data, **kwargs):
return Msf.Public(**data)
@dataclass
class Private:
id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
data: Optional[str] = None
jtr_format: Optional[str] = None
type: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
data = fields.String(missing=None, load_only=True)
jtr_format = fields.String(missing=None, load_only=True)
type = fields.String(missing=None, load_only=True)
@post_load
def make_private(self, data, **kwargs):
return Msf.Private(**data)
@dataclass
class Origin:
id: int = -1
created_at: Optional[datetime] = None
updated_at: Optional[datetime] = None
service_id: int = -1
module_full_name: Optional[str] = None
type: Optional[str] = None
class Schema(MarshmallowSchema):
id = fields.Integer(required=True, load_only=True)
created_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
updated_at = fields.DateTime(
"%Y-%m-%dT%H:%M:%S.%fZ", missing=None, load_only=True
)
service_id = fields.Integer(missing=-1, load_only=True)
module_full_name = fields.String(missing=None, load_only=True)
type = fields.String(missing=None, load_only=True)
@post_load
def make_origin(self, data, **kwargs):
return Msf.Origin(**data)
@staticmethod
def load_config() -> Config:
config: Msf.Config = Msf.Config()
config_parser: ConfigParser = ConfigParser()
if path.isfile(config.file) and path.getsize(config.file) > 0:
config_parser.read(config.file)
default_db: Optional[str] = None
if "framework/database" in config_parser.sections():
if "default_db" in config_parser["framework/database"]:
default_db = config_parser["framework/database"]["default_db"]
if default_db is not None:
if f"framework/database/{default_db}":
config = Msf.Config.Schema(unknown=EXCLUDE).load(
config_parser[f"framework/database/{default_db}"]
)
return config
@dataclass
class MsfData:
workspace: Optional[str] = None
workspaces: Optional[List[Msf.Workspace]] = field(default_factory=lambda: [])
hosts: Optional[List[Msf.Host]] = field(default_factory=lambda: [])
services: Optional[List[Msf.Service]] = field(default_factory=lambda: [])
vulns: Optional[List[Msf.Vuln]] = field(default_factory=lambda: [])
loots: Optional[List[Msf.Loot]] = field(default_factory=lambda: [])
notes: Optional[List[Msf.Note]] = field(default_factory=lambda: [])
creds: Optional[List[Msf.Cred]] = field(default_factory=lambda: [])
logins: Optional[List[Msf.Login]] = field(default_factory=lambda: [])
| [
"marshmallow.post_dump",
"os.path.getsize",
"configparser.ConfigParser",
"marshmallow.fields.Boolean",
"pathlib.Path.home",
"marshmallow.fields.Nested",
"marshmallow.fields.Raw",
"marshmallow.fields.DateTime",
"os.path.isfile",
"marshmallow.fields.List",
"marshmallow.pre_load",
"marshmallow.fields.String",
"ipaddress.ip_address",
"marshmallow.fields.Integer",
"marshmallow.pre_dump",
"dataclasses.field"
] | [((28479, 28513), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28484, 28513), False, 'from dataclasses import dataclass, field\n'), ((28551, 28585), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28556, 28585), False, 'from dataclasses import dataclass, field\n'), ((28629, 28663), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28634, 28663), False, 'from dataclasses import dataclass, field\n'), ((28701, 28735), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28706, 28735), False, 'from dataclasses import dataclass, field\n'), ((28773, 28807), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28778, 28807), False, 'from dataclasses import dataclass, field\n'), ((28845, 28879), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28850, 28879), False, 'from dataclasses import dataclass, field\n'), ((28917, 28951), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28922, 28951), False, 'from dataclasses import dataclass, field\n'), ((28991, 29025), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (28996, 29025), False, 'from dataclasses import dataclass, field\n'), ((9678, 9712), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (9683, 9712), False, 'from dataclasses import dataclass, field\n'), ((9750, 9784), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (9755, 9784), False, 'from dataclasses import dataclass, field\n'), ((19234, 19268), 'dataclasses.field', 'field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (19239, 19268), False, 'from dataclasses import dataclass, field\n'), ((27690, 27704), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (27702, 27704), False, 'from configparser import ConfigParser\n'), ((1085, 1112), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (1098, 1112), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1132, 1159), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (1145, 1159), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1186, 1215), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'missing': '(False)'}), '(missing=False)\n', (1200, 1215), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1240, 1267), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (1253, 1267), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1823, 1868), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (1837, 1868), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1888, 1916), 'marshmallow.fields.String', 'fields.String', ([], {'required': '(True)'}), '(required=True)\n', (1901, 
1916), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((1942, 2012), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (1957, 2012), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2068, 2138), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (2083, 2138), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2192, 2219), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (2205, 2219), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2246, 2273), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (2259, 2273), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2297, 2340), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (2310, 2340), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2372, 2401), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'missing': '(False)'}), '(missing=False)\n', (2386, 2401), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2435, 2464), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'missing': '(False)'}), '(missing=False)\n', (2449, 2464), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((2479, 2505), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (2488, 2505), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4154, 4199), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (4168, 4199), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4224, 4253), 'marshmallow.fields.String', 'fields.String', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (4237, 4253), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4281, 4322), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(1)', 'load_only': '(True)'}), '(missing=1, load_only=True)\n', (4295, 4322), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4348, 4418), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (4363, 4418), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4474, 4544), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (4489, 4544), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4594, 4621), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), 
'(missing=None)\n', (4607, 4621), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4644, 4687), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (4657, 4687), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4706, 4733), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4719, 4733), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4753, 4780), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4766, 4780), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4800, 4827), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4813, 4827), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4848, 4875), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4861, 4875), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4898, 4925), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4911, 4925), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4950, 4977), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (4963, 4977), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((4998, 5025), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5011, 5025), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5048, 5075), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5061, 5075), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5095, 5122), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5108, 5122), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5145, 5172), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5158, 5172), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5192, 5219), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5205, 5219), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5243, 5270), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5256, 5270), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5291, 5318), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5304, 5318), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5346, 5373), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5359, 5373), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5399, 5440), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, 
load_only=True)\n', (5413, 5440), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5466, 5507), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (5480, 5507), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5536, 5577), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (5550, 5577), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5610, 5651), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (5624, 5651), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5688, 5729), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (5702, 5729), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5755, 5796), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (5769, 5796), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5825, 5852), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5838, 5852), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5877, 5904), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (5890, 5904), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((5919, 5944), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (5927, 5944), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((6123, 6149), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (6132, 6149), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7249, 7294), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (7263, 7294), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7319, 7348), 'marshmallow.fields.String', 'fields.String', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (7332, 7348), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7376, 7417), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(1)', 'load_only': '(True)'}), '(missing=1, load_only=True)\n', (7390, 7417), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7437, 7461), 'marshmallow.fields.Raw', 'fields.Raw', ([], {'missing': 'None'}), '(missing=None)\n', (7447, 7461), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7484, 7526), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (7498, 7526), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7552, 7622), 'marshmallow.fields.DateTime', 'fields.DateTime', 
(['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (7567, 7622), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7678, 7748), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (7693, 7748), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7798, 7824), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (7812, 7824), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7845, 7872), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (7858, 7872), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7893, 7920), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (7906, 7920), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7940, 7967), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (7953, 7967), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((7987, 8014), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (8000, 8014), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((8029, 8054), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (8037, 8054), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((8351, 8377), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (8360, 8377), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((9843, 9888), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (9857, 9888), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((9913, 9942), 'marshmallow.fields.String', 'fields.String', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (9926, 9942), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((9970, 10011), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(1)', 'load_only': '(True)'}), '(missing=1, load_only=True)\n', (9984, 10011), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10031, 10055), 'marshmallow.fields.Raw', 'fields.Raw', ([], {'missing': 'None'}), '(missing=None)\n', (10041, 10055), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10078, 10120), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (10092, 10120), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10146, 10216), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (10161, 10216), False, 'from marshmallow import 
fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10272, 10342), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (10287, 10342), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10392, 10418), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (10406, 10418), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10444, 10503), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (10458, 10503), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10523, 10550), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (10536, 10550), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10570, 10597), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (10583, 10597), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10625, 10695), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (10640, 10695), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10758, 10799), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (10772, 10799), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10833, 10874), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(0)', 'load_only': '(True)'}), '(missing=0, load_only=True)\n', (10847, 10874), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10899, 10926), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (10912, 10926), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((10953, 10980), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (10966, 10980), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((11000, 11038), 'marshmallow.fields.List', 'fields.List', (['fields.String'], {'missing': '[]'}), '(fields.String, missing=[])\n', (11011, 11038), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((11065, 11119), 'marshmallow.fields.List', 'fields.List', (['fields.String'], {'missing': '[]', 'load_only': '(True)'}), '(fields.String, missing=[], load_only=True)\n', (11076, 11119), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((11134, 11159), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (11142, 11159), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((11456, 11482), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (11465, 11482), False, 'from marshmallow import fields, pre_load, post_load, 
pre_dump, post_dump, EXCLUDE\n'), ((11860, 11885), 'marshmallow.pre_load', 'pre_load', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (11868, 11885), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13363, 13425), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)', 'allow_none': '(True)'}), '(required=True, load_only=True, allow_none=True)\n', (13377, 13425), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13450, 13479), 'marshmallow.fields.String', 'fields.String', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (13463, 13479), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13507, 13548), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(1)', 'load_only': '(True)'}), '(missing=1, load_only=True)\n', (13521, 13548), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13568, 13592), 'marshmallow.fields.Raw', 'fields.Raw', ([], {'missing': 'None'}), '(missing=None)\n', (13578, 13592), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13615, 13657), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (13629, 13657), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13683, 13753), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (13698, 13753), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13809, 13879), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (13824, 13879), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13929, 13955), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (13943, 13955), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((13981, 14040), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (13995, 14040), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14061, 14088), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14074, 14088), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14108, 14135), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14121, 14135), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14155, 14182), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14168, 14182), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14210, 14237), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14223, 14237), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, 
EXCLUDE\n'), ((14257, 14284), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14270, 14284), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14304, 14331), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14317, 14331), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14360, 14387), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (14373, 14387), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14402, 14427), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (14410, 14427), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((14724, 14750), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (14733, 14750), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((15957, 16002), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (15971, 16002), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16027, 16056), 'marshmallow.fields.String', 'fields.String', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (16040, 16056), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16084, 16125), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(1)', 'load_only': '(True)'}), '(missing=1, load_only=True)\n', (16098, 16125), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16145, 16169), 'marshmallow.fields.Raw', 'fields.Raw', ([], {'missing': 'None'}), '(missing=None)\n', (16155, 16169), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16192, 16234), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (16206, 16234), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16260, 16330), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (16275, 16330), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16386, 16456), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (16401, 16456), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16506, 16549), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'allow_none': '(True)'}), '(missing=-1, allow_none=True)\n', (16520, 16549), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16575, 16634), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (16589, 16634), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16657, 
16716), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (16671, 16716), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16737, 16764), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (16750, 16764), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16784, 16811), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (16797, 16811), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16835, 16881), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'missing': '(False)', 'allow_none': '(True)'}), '(missing=False, allow_none=True)\n', (16849, 16881), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16901, 16947), 'marshmallow.fields.Boolean', 'fields.Boolean', ([], {'missing': '(False)', 'allow_none': '(True)'}), '(missing=False, allow_none=True)\n', (16915, 16947), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((16962, 16987), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (16970, 16987), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((17284, 17310), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (17293, 17310), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((17688, 17713), 'marshmallow.pre_load', 'pre_load', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (17696, 17713), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19467, 19512), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (19481, 19512), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19540, 19569), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (19554, 19569), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19593, 19620), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19606, 19620), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19648, 19675), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19661, 19675), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19703, 19730), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19716, 19730), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19756, 19783), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19769, 19783), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19806, 19833), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19819, 19833), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19853, 19879), 
'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (19867, 19879), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19907, 19934), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19920, 19934), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((19958, 19985), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (19971, 19985), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20012, 20044), 'marshmallow.fields.String', 'fields.String', ([], {'missing': '"""service"""'}), "(missing='service')\n", (20025, 20044), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20075, 20102), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (20088, 20102), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20128, 20198), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (20143, 20198), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20254, 20324), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (20269, 20324), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20379, 20421), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (20393, 20421), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20447, 20489), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (20461, 20489), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20514, 20556), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (20528, 20556), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20580, 20639), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (20594, 20639), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20667, 20709), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (20681, 20709), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20731, 20810), 'marshmallow.fields.Nested', 'fields.Nested', (['(lambda : Msf.Login.Schema)'], {'many': '(True)', 'missing': '[]', 'load_only': '(True)'}), '(lambda : Msf.Login.Schema, many=True, missing=[], load_only=True)\n', (20744, 20810), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20861, 20948), 'marshmallow.fields.Nested', 'fields.Nested', (['(lambda : Msf.Public.Schema)'], {'many': '(False)', 'missing': 
'None', 'load_only': '(True)'}), '(lambda : Msf.Public.Schema, many=False, missing=None,\n load_only=True)\n', (20874, 20948), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((20996, 21084), 'marshmallow.fields.Nested', 'fields.Nested', (['(lambda : Msf.Private.Schema)'], {'many': '(False)', 'missing': 'None', 'load_only': '(True)'}), '(lambda : Msf.Private.Schema, many=False, missing=None,\n load_only=True)\n', (21009, 21084), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((21131, 21218), 'marshmallow.fields.Nested', 'fields.Nested', (['(lambda : Msf.Origin.Schema)'], {'many': '(False)', 'missing': 'None', 'load_only': '(True)'}), '(lambda : Msf.Origin.Schema, many=False, missing=None,\n load_only=True)\n', (21144, 21218), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((21258, 21283), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (21266, 21283), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((21592, 21618), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (21601, 21618), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((22716, 22761), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (22730, 22761), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((22789, 22834), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'dump_only': '(True)'}), '(required=True, dump_only=True)\n', (22803, 22834), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((22860, 22930), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (22875, 22930), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((22986, 23056), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (23001, 23056), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23119, 23173), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None)\n", (23134, 23173), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23196, 23222), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (23210, 23222), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23248, 23307), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)', 'allow_none': '(True)'}), '(missing=-1, load_only=True, allow_none=True)\n', (23262, 23307), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23330, 23357), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (23343, 23357), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, 
EXCLUDE\n'), ((23385, 23412), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (23398, 23412), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23432, 23458), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)'}), '(missing=-1)\n', (23446, 23458), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23482, 23509), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (23495, 23509), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23531, 23558), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (23544, 23558), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23586, 23613), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None'}), '(missing=None)\n', (23599, 23613), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23635, 23678), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (23648, 23678), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23701, 23744), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (23714, 23744), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((23759, 23784), 'marshmallow.pre_dump', 'pre_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (23767, 23784), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((24093, 24119), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (24102, 24119), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((24360, 24386), 'marshmallow.post_dump', 'post_dump', ([], {'pass_many': '(False)'}), '(pass_many=False)\n', (24369, 24386), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((25145, 25190), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (25159, 25190), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((25216, 25286), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (25231, 25286), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((25342, 25412), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (25357, 25412), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((25466, 25509), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (25479, 25509), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((25529, 25572), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 
'load_only': '(True)'}), '(missing=None, load_only=True)\n', (25542, 25572), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26008, 26053), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (26022, 26053), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26079, 26149), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (26094, 26149), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26205, 26275), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (26220, 26275), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26325, 26368), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (26338, 26368), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26394, 26437), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (26407, 26437), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26457, 26500), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (26470, 26500), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((26937, 26982), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'required': '(True)', 'load_only': '(True)'}), '(required=True, load_only=True)\n', (26951, 26982), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27008, 27078), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (27023, 27078), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27134, 27204), 'marshmallow.fields.DateTime', 'fields.DateTime', (['"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {'missing': 'None', 'load_only': '(True)'}), "('%Y-%m-%dT%H:%M:%S.%fZ', missing=None, load_only=True)\n", (27149, 27204), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27260, 27302), 'marshmallow.fields.Integer', 'fields.Integer', ([], {'missing': '(-1)', 'load_only': '(True)'}), '(missing=-1, load_only=True)\n', (27274, 27302), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27334, 27377), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (27347, 27377), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27397, 27440), 'marshmallow.fields.String', 'fields.String', ([], {'missing': 'None', 'load_only': '(True)'}), '(missing=None, load_only=True)\n', (27410, 27440), False, 'from marshmallow import fields, pre_load, post_load, pre_dump, post_dump, EXCLUDE\n'), ((27716, 27740), 
'os.path.isfile', 'path.isfile', (['config.file'], {}), '(config.file)\n', (27727, 27740), False, 'from os import path\n'), ((6657, 6681), 'ipaddress.ip_address', 'ip_address', (['host.address'], {}), '(host.address)\n', (6667, 6681), False, 'from ipaddress import IPv4Address, IPv6Address, ip_address\n'), ((27745, 27770), 'os.path.getsize', 'path.getsize', (['config.file'], {}), '(config.file)\n', (27757, 27770), False, 'from os import path\n'), ((854, 865), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (863, 865), False, 'from pathlib import Path\n')] |
# coding:utf-8
"""
"""
import os
import sys
import cv2
import csv
import fire
import time
import json
import tarfile
import torch
import torch.nn as nn
from torchvision import models
from torchvision.models.vgg import VGG
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import ImageFolder
import pretrainedmodels
from glob import glob
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
data_dir = 'D:/workspace/udacity/MLND/capstone/distracted_driver_detection/data/'
ON_CLIENT = True
NUM_CLASSES = 10
DROPOUT = 0.5
DEVICE_ID = 0
CROP_SIZE = (320, 480)
IMG_SIZE = (224, 224)
BATCH_SIZE = 32
IMG_MEAN = (0.3184719383716583, 0.3813590109348297, 0.37875279784202576)
IMG_VAR = (0.3184719383716583, 0.3813589811325073, 0.37875282764434814)
IMG_STD = (0.5643, 0.6175, 0.6154)
KEEP_LAYER = None
CUDA = torch.cuda.is_available()
# DEVICE = torch.device("cuda" if CUDA else "cpu")
DEVICE_MASKER = torch.device("cpu")
def device_setting(on_client):
    # without the global declaration the assignments below would only bind a
    # local name and never update the module-level DEVICE_MASKER
    global DEVICE_MASKER
    if on_client is True:
        DEVICE_MASKER = torch.device("cuda:0")
    else:
        DEVICE_MASKER = torch.device("cuda:3")
ranges = {
'vgg11': ((0, 3), (3, 6), (6, 11), (11, 16), (16, 21)),
'vgg13': ((0, 5), (5, 10), (10, 15), (15, 20), (20, 25)),
'vgg16': ((0, 5), (5, 10), (10, 17), (17, 24), (24, 31)),
'vgg19': ((0, 5), (5, 10), (10, 19), (19, 28), (28, 37))
}
cfg = {
'vgg11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'vgg13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'vgg16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
'vgg19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}
def make_layers(cfg, batch_norm=False):
layers = []
in_channels = 3
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
return nn.Sequential(*layers)
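# Note: with batch_norm=False each conv entry in cfg adds a Conv2d + ReLU pair and
# each 'M' adds a MaxPool2d, so cfg['vgg16'] (13 convs, 5 pools) builds 31 modules,
# which is why ranges['vgg16'] ends with the index pair (24, 31).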
class VGGNet(VGG):
def __init__(self, pretrained=True, model='vgg16'):
super().__init__(make_layers(cfg[model]))
self.ranges = ranges[model]
if pretrained:
exec("self.load_state_dict(models.%s(pretrained=True).state_dict())" % model)
def forward(self, x):
output = {}
# get the output of each maxpooling layer (5 maxpool in VGG net)
for idx, (begin, end) in enumerate(self.ranges):
# self.ranges = ((0, 5), (5, 10), (10, 17), (17, 24), (24, 31)) (vgg16 examples)
for layer in range(begin, end):
x = self.features[layer](x)
output["x%d" % (idx+1)] = x
return output
class MaskNet(nn.Module):
def __init__(self, model=None, dropout=DROPOUT, num_classes=NUM_CLASSES,
keep_layer=KEEP_LAYER):
super(MaskNet, self).__init__()
if model is None:
self.featurer = VGGNet()
else:
self.featurer = model
self.dropout = dropout
self.num_classes = num_classes
self.keep_layer = keep_layer
for name, param in self.featurer.named_parameters():
param.requires_grad = self.check_keep_layer(name)
self.relu = nn.ReLU(inplace=True)
self.deconv1 = nn.ConvTranspose2d(
512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn1 = nn.BatchNorm2d(512)
self.deconv2 = nn.ConvTranspose2d(
512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn2 = nn.BatchNorm2d(256)
self.deconv3 = nn.ConvTranspose2d(
256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn3 = nn.BatchNorm2d(128)
self.deconv4 = nn.ConvTranspose2d(
128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn4 = nn.BatchNorm2d(64)
self.deconv5 = nn.ConvTranspose2d(
64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn5 = nn.BatchNorm2d(32)
self.masker = nn.Sequential(
nn.Conv2d(32, 1, kernel_size=3, stride=1, padding=1)
)
self.make_classifier('xception', 'imagenet')
def extract_mask_image(self, x, show=False, show_index=0):
feature_image = self.extract_feature_image(
x, show=show, show_index=show_index)
mask = self.make_mask(feature_image, show=show, show_index=show_index)
mask_feature_image = self.make_image(
x, mask, show=show, show_index=show_index)
return mask_feature_image
def extract_feature_image(self, x, show=False, show_index=0):
output = self.featurer(x)
x5 = output['x5']
x4 = output['x4']
x3 = output['x3']
x2 = output['x2']
x1 = output['x1']
feature = self.bn1(self.relu(self.deconv1(x5)))
feature = feature + x4
feature = self.bn2(self.relu(self.deconv2(feature)))
feature = feature + x3
feature = self.bn3(self.relu(self.deconv3(feature)))
feature = feature + x2
feature = self.bn4(self.relu(self.deconv4(feature)))
feature = feature + x1
feature = self.bn5(self.relu(self.deconv5(feature)))
if show is True:
image = transforms.functional.to_pil_image(
feature[show_index].cpu().detach())
image.show()
return feature
def make_mask(self, feature, show=False, show_index=0):
mask = self.masker(feature)
self.tensor_filter(mask)
mask[mask > mask.mean()] = 0
mask[mask < mask.mean()] = 1
if show is True:
image = transforms.functional.to_pil_image(
mask[show_index].cpu().detach())
image.show()
return mask
def make_image(self, x, mask, show=False, show_index=0):
# show image after masked
image = x * mask
#show = transforms.functional.to_pil_image(image[0].cpu().detach())
# show.show()
if show is True:
show_image = transforms.functional.to_pil_image(
image[show_index].cpu().detach())
show_image.show()
return image
def forward(self, x):
#x = self.extract_feature_image(x)
pred = self.classifier(x)
return pred
def make_classifier(self, model_name='xception', pretrained='imagenet'):
self.classifier = pretrainedmodels.__dict__[
model_name](pretrained=pretrained)
for param in self.classifier.parameters():
param.requires_grad = False
in_dim = self.classifier.last_linear.in_features
self.classifier_last_linear = nn.Sequential(
nn.Dropout(self.dropout),
nn.Linear(in_dim, 4096),
nn.ReLU(True),
nn.Dropout(self.dropout),
nn.Linear(4096, 4096),
nn.ReLU(True),
nn.Dropout(self.dropout),
nn.Linear(4096, self.num_classes)
)
self.classifier.last_linear = self.classifier_last_linear
def ModePool2d(self, input_tensor, kernel_size=3):
row_size = input_tensor.size(0)
col_size = input_tensor.size(1)
row_count = row_size // kernel_size
col_count = col_size // kernel_size
for i in range(0, row_count):
for j in range(0, col_count):
m = input_tensor[i*kernel_size: i*kernel_size + kernel_size,
j*kernel_size: j*kernel_size+kernel_size]
feature_value = m.mode()[0].mode()[0]
input_tensor[i*kernel_size: i*kernel_size + kernel_size,
j*kernel_size: j*kernel_size+kernel_size] = feature_value
return input_tensor
def tensor_filter(self, tensors_items):
for item in tensors_items:
for tensor in item:
self.ModePool2d(tensor)
def check_keep_layer(self, name):
if self.keep_layer is None:
return False
if len(self.keep_layer) == 0:
return False
for kl in self.keep_layer:
if kl in name:
return True
return False
class DriverDataset(Dataset):
def __init__(self, data_dir, data_frame, subject):
super(DriverDataset, self).__init__()
self.data_dir = os.path.abspath(data_dir)
self.df = data_frame
self.count = 0
self.transorms_gen = transforms.Compose([
transforms.CenterCrop(CROP_SIZE),
transforms.Resize(IMG_SIZE),
transforms.ToTensor(),
transforms.Normalize(IMG_MEAN, IMG_STD)
])
self.image_paths = {}
self.image_paths_ = []
for s in range(0, 10):
state = 'c{}'.format(s)
self.image_paths[state] = []
items = self.df[(self.df["subject"] == subject)]
        for _, row in items.iterrows():
            subpath = row["classname"] + "/" + row["img"]
            path = os.path.join(self.data_dir, subpath)
            # take the class from the row itself; reusing the leftover loop
            # variables (state, s) would label every image as class 9
            state = row["classname"]
            label = int(state[1:])
            self.image_paths[state].append(path)
            path_state = (path, label)
            self.image_paths_.append(path_state)
            self.count += 1
print("{} number of {}'s image loaded.".format(self.count, subject))
def __len__(self):
return self.count
def __getitem__(self, index):
# assert index >= len(self.image_paths), 'index, out of range'
path = self.image_paths_[index][0]
image = Image.open(path)
filename = os.path.basename(path)
# image_tensor = transforms.functional.to_tensor(image)
label = self.image_paths_[index][1]
transorms_image = self.transorms_gen(image)
return (transorms_image, (label, filename))
class ClassDataset(Dataset):
def __init__(self, data_dir, sub_dir='train'):
super(ClassDataset, self).__init__()
read_dir = os.path.join(data_dir, sub_dir)
self.data_dir = os.path.abspath(read_dir)
self.test = True if sub_dir == 'test' else False
self.transorms_gen = transforms.Compose([
transforms.CenterCrop(CROP_SIZE),
transforms.Resize(IMG_SIZE),
transforms.ToTensor(),
transforms.Normalize(IMG_MEAN, IMG_STD)
])
self.image_paths = {}
self.image_paths_ = []
if sub_dir == 'test':
self.image_paths_ = glob(os.path.join(self.data_dir, '*.jpg'))
self.image_paths = self.image_paths_
else:
for s in range(0, 10):
state = 'c{}'.format(s)
read_path = os.path.join(self.data_dir, state)
self.image_paths[state] = glob(
os.path.join(read_path, '*.jpg'))
path_states = [(path, s) for path in self.image_paths[state]]
self.image_paths_.extend(path_states)
print("{} number of image loaded.".format(len(self.image_paths_)))
def __len__(self):
return len(self.image_paths_)
def __getitem__(self, index):
# assert index >= len(self.image_paths), 'index, out of range'
if self.test is False:
path = self.image_paths_[index][0]
image = Image.open(path)
filename = os.path.basename(path)
# image_tensor = transforms.functional.to_tensor(image)
label = self.image_paths_[index][1]
transorms_image = self.transorms_gen(image)
else:
path = self.image_paths_[index]
image = Image.open(path)
filename = os.path.basename(path)
label = 0
transorms_image = self.transorms_gen(image)
return (transorms_image, (label, filename))
def generate_mask_image(data_dir='../data/', save_dir='../data/mask/',
sub_dir='train', batch_size=64, model_path=None,
on_client=ON_CLIENT):
device_setting(on_client)
data_dir = os.path.abspath(data_dir)
if model_path is not None:
model_path = os.path.abspath(model_path)
save_dir = os.path.abspath(save_dir)
masker_model = MaskNet()
masker_model.to(DEVICE_MASKER)
if model_path is not None:
masker_model.load_state_dict(torch.load(
model_path, map_location=DEVICE_MASKER))
print('[INFO]load model ok')
# mask_gen = ImageFolder(os.path.join(
# data_dir, sub_dir), transform=transorms_gen)
image_dataset = ClassDataset(data_dir=data_dir, sub_dir=sub_dir)
mask_dataiter = DataLoader(
image_dataset, batch_size=batch_size, shuffle=False)
print('[INFO]generate datasets ok')
output_dir = os.path.join(save_dir, sub_dir)
masker_model.eval()
for i, data in enumerate(mask_dataiter):
print('Batch {}'.format(i))
images = data[0]
labels = data[1][0]
filenames = data[1][1]
images = images.to(DEVICE_MASKER)
masked_images = masker_model.extract_mask_image(images, show=False)
        for j, masked_image in enumerate(masked_images):
            filename = filenames[j]
            output_name = 'masked_{}'.format(filename)
            if sub_dir == 'test':
                output_path = os.path.join(output_dir, output_name)
            else:
                label = labels[j]
                # join the class sub-directory with the output filename; a
                # directory-only path would make PIL fail when saving the image
                output_path = os.path.join(
                    output_dir, 'c{}'.format(label), output_name)
output_image = transforms.functional.to_pil_image(
masked_image.cpu())
output_image.save(output_path)
print('save {} to {}'.format(output_name, output_path))
# ========================================================================================================
#generate_mask_image(data_dir=data_dir+'../data/', batch_size=1, on_client=True)
if __name__ == '__main__':
fire.Fire()
| [
"torch.nn.ReLU",
"torch.nn.Dropout",
"fire.Fire",
"torch.nn.Sequential",
"torch.cuda.is_available",
"torch.nn.BatchNorm2d",
"torchvision.transforms.ToTensor",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"torch.device",
"torchvision.transforms.CenterCrop",
"PIL.Image.open",
"torch.load",
"os.path.join",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"os.path.basename",
"torch.nn.Linear",
"torch.utils.data.DataLoader",
"os.path.abspath",
"torch.nn.ConvTranspose2d"
] | [((933, 958), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (956, 958), False, 'import torch\n'), ((1027, 1046), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1039, 1046), False, 'import torch\n'), ((2356, 2378), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2369, 2378), True, 'import torch.nn as nn\n'), ((12483, 12508), 'os.path.abspath', 'os.path.abspath', (['data_dir'], {}), '(data_dir)\n', (12498, 12508), False, 'import os\n'), ((12604, 12629), 'os.path.abspath', 'os.path.abspath', (['save_dir'], {}), '(save_dir)\n', (12619, 12629), False, 'import os\n'), ((13049, 13112), 'torch.utils.data.DataLoader', 'DataLoader', (['image_dataset'], {'batch_size': 'batch_size', 'shuffle': '(False)'}), '(image_dataset, batch_size=batch_size, shuffle=False)\n', (13059, 13112), False, 'from torch.utils.data import DataLoader\n'), ((13180, 13211), 'os.path.join', 'os.path.join', (['save_dir', 'sub_dir'], {}), '(save_dir, sub_dir)\n', (13192, 13211), False, 'import os\n'), ((14329, 14340), 'fire.Fire', 'fire.Fire', ([], {}), '()\n', (14338, 14340), False, 'import fire\n'), ((1130, 1152), 'torch.device', 'torch.device', (['"""cuda:0"""'], {}), "('cuda:0')\n", (1142, 1152), False, 'import torch\n'), ((1187, 1209), 'torch.device', 'torch.device', (['"""cuda:3"""'], {}), "('cuda:3')\n", (1199, 1209), False, 'import torch\n'), ((3627, 3648), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3634, 3648), True, 'import torch.nn as nn\n'), ((3672, 3770), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(512)', '(512)'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'dilation': '(1)', 'output_padding': '(1)'}), '(512, 512, kernel_size=3, stride=2, padding=1, dilation=1,\n output_padding=1)\n', (3690, 3770), True, 'import torch.nn as nn\n'), ((3799, 3818), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(512)'], {}), '(512)\n', (3813, 3818), True, 'import torch.nn as nn\n'), ((3842, 3940), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(512)', '(256)'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'dilation': '(1)', 'output_padding': '(1)'}), '(512, 256, kernel_size=3, stride=2, padding=1, dilation=1,\n output_padding=1)\n', (3860, 3940), True, 'import torch.nn as nn\n'), ((3969, 3988), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (3983, 3988), True, 'import torch.nn as nn\n'), ((4012, 4110), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(256)', '(128)'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'dilation': '(1)', 'output_padding': '(1)'}), '(256, 128, kernel_size=3, stride=2, padding=1, dilation=1,\n output_padding=1)\n', (4030, 4110), True, 'import torch.nn as nn\n'), ((4139, 4158), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (4153, 4158), True, 'import torch.nn as nn\n'), ((4182, 4279), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(128)', '(64)'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'dilation': '(1)', 'output_padding': '(1)'}), '(128, 64, kernel_size=3, stride=2, padding=1, dilation=1,\n output_padding=1)\n', (4200, 4279), True, 'import torch.nn as nn\n'), ((4308, 4326), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (4322, 4326), True, 'import torch.nn as nn\n'), ((4350, 4446), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(64)', '(32)'], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)', 'dilation': '(1)', 'output_padding': '(1)'}), 
'(64, 32, kernel_size=3, stride=2, padding=1, dilation=1,\n output_padding=1)\n', (4368, 4446), True, 'import torch.nn as nn\n'), ((4475, 4493), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (4489, 4493), True, 'import torch.nn as nn\n'), ((8822, 8847), 'os.path.abspath', 'os.path.abspath', (['data_dir'], {}), '(data_dir)\n', (8837, 8847), False, 'import os\n'), ((10003, 10019), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (10013, 10019), False, 'from PIL import Image\n'), ((10039, 10061), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (10055, 10061), False, 'import os\n'), ((10420, 10451), 'os.path.join', 'os.path.join', (['data_dir', 'sub_dir'], {}), '(data_dir, sub_dir)\n', (10432, 10451), False, 'import os\n'), ((10476, 10501), 'os.path.abspath', 'os.path.abspath', (['read_dir'], {}), '(read_dir)\n', (10491, 10501), False, 'import os\n'), ((12561, 12588), 'os.path.abspath', 'os.path.abspath', (['model_path'], {}), '(model_path)\n', (12576, 12588), False, 'import os\n'), ((2085, 2136), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'v'], {'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels, v, kernel_size=3, padding=1)\n', (2094, 2136), True, 'import torch.nn as nn\n'), ((4544, 4596), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(1)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(32, 1, kernel_size=3, stride=1, padding=1)\n', (4553, 4596), True, 'import torch.nn as nn\n'), ((7186, 7210), 'torch.nn.Dropout', 'nn.Dropout', (['self.dropout'], {}), '(self.dropout)\n', (7196, 7210), True, 'import torch.nn as nn\n'), ((7224, 7247), 'torch.nn.Linear', 'nn.Linear', (['in_dim', '(4096)'], {}), '(in_dim, 4096)\n', (7233, 7247), True, 'import torch.nn as nn\n'), ((7261, 7274), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (7268, 7274), True, 'import torch.nn as nn\n'), ((7288, 7312), 'torch.nn.Dropout', 'nn.Dropout', (['self.dropout'], {}), '(self.dropout)\n', (7298, 7312), True, 'import torch.nn as nn\n'), ((7326, 7347), 'torch.nn.Linear', 'nn.Linear', (['(4096)', '(4096)'], {}), '(4096, 4096)\n', (7335, 7347), True, 'import torch.nn as nn\n'), ((7361, 7374), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (7368, 7374), True, 'import torch.nn as nn\n'), ((7388, 7412), 'torch.nn.Dropout', 'nn.Dropout', (['self.dropout'], {}), '(self.dropout)\n', (7398, 7412), True, 'import torch.nn as nn\n'), ((7426, 7459), 'torch.nn.Linear', 'nn.Linear', (['(4096)', 'self.num_classes'], {}), '(4096, self.num_classes)\n', (7435, 7459), True, 'import torch.nn as nn\n'), ((11740, 11756), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (11750, 11756), False, 'from PIL import Image\n'), ((11780, 11802), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (11796, 11802), False, 'import os\n'), ((12053, 12069), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (12063, 12069), False, 'from PIL import Image\n'), ((12093, 12115), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (12109, 12115), False, 'import os\n'), ((12764, 12814), 'torch.load', 'torch.load', (['model_path'], {'map_location': 'DEVICE_MASKER'}), '(model_path, map_location=DEVICE_MASKER)\n', (12774, 12814), False, 'import torch\n'), ((2011, 2048), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (2023, 2048), True, 'import torch.nn as nn\n'), ((8963, 8995), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['CROP_SIZE'], 
{}), '(CROP_SIZE)\n', (8984, 8995), False, 'from torchvision import transforms\n'), ((9009, 9036), 'torchvision.transforms.Resize', 'transforms.Resize', (['IMG_SIZE'], {}), '(IMG_SIZE)\n', (9026, 9036), False, 'from torchvision import transforms\n'), ((9050, 9071), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (9069, 9071), False, 'from torchvision import transforms\n'), ((9085, 9124), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['IMG_MEAN', 'IMG_STD'], {}), '(IMG_MEAN, IMG_STD)\n', (9105, 9124), False, 'from torchvision import transforms\n'), ((9496, 9532), 'os.path.join', 'os.path.join', (['self.data_dir', 'subpath'], {}), '(self.data_dir, subpath)\n', (9508, 9532), False, 'import os\n'), ((10622, 10654), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['CROP_SIZE'], {}), '(CROP_SIZE)\n', (10643, 10654), False, 'from torchvision import transforms\n'), ((10668, 10695), 'torchvision.transforms.Resize', 'transforms.Resize', (['IMG_SIZE'], {}), '(IMG_SIZE)\n', (10685, 10695), False, 'from torchvision import transforms\n'), ((10709, 10730), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (10728, 10730), False, 'from torchvision import transforms\n'), ((10744, 10783), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['IMG_MEAN', 'IMG_STD'], {}), '(IMG_MEAN, IMG_STD)\n', (10764, 10783), False, 'from torchvision import transforms\n'), ((10925, 10961), 'os.path.join', 'os.path.join', (['self.data_dir', '"""*.jpg"""'], {}), "(self.data_dir, '*.jpg')\n", (10937, 10961), False, 'import os\n'), ((11129, 11163), 'os.path.join', 'os.path.join', (['self.data_dir', 'state'], {}), '(self.data_dir, state)\n', (11141, 11163), False, 'import os\n'), ((13732, 13769), 'os.path.join', 'os.path.join', (['output_dir', 'output_name'], {}), '(output_dir, output_name)\n', (13744, 13769), False, 'import os\n'), ((2199, 2216), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['v'], {}), '(v)\n', (2213, 2216), True, 'import torch.nn as nn\n'), ((2218, 2239), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2225, 2239), True, 'import torch.nn as nn\n'), ((2294, 2315), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2301, 2315), True, 'import torch.nn as nn\n'), ((11232, 11264), 'os.path.join', 'os.path.join', (['read_path', '"""*.jpg"""'], {}), "(read_path, '*.jpg')\n", (11244, 11264), False, 'import os\n')] |
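# ---------------------------------------------------------------------------
# Hedged usage sketch for the mask-network script above (illustrative only, not
# part of the original row): it assumes the MaskNet class defined above is in
# scope and that the pretrained VGG16/Xception weights can be fetched; the
# dummy batch stands in for normalized (224, 224) crops.
import torch

mask_model = MaskNet()
mask_model.eval()
with torch.no_grad():
    dummy_batch = torch.randn(2, 3, 224, 224)           # stand-in for normalized images
    masked = mask_model.extract_mask_image(dummy_batch)  # background suppressed by the mask
    scores = mask_model(masked)                          # (2, NUM_CLASSES) classifier output
print(masked.shape, scores.shape)
# ---------------------------------------------------------------------------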
import socket
import struct
import threading
import numpy as np
from Online.AmpInterface import AmpDataClient
class Neuracle(AmpDataClient):
UPDATE_INTERVAL = 0.04
BYTES_PER_NUM = 4
    BUFFER_LEN = 4  # in seconds
def __init__(self, n_channel=9, samplerate=1000, host='localhost', port=8712):
self.n_channel = n_channel
self.chunk_size = int(self.UPDATE_INTERVAL * samplerate * self.BYTES_PER_NUM * n_channel)
self.__sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
self.buffer = []
self.max_buffer_length = int(self.BUFFER_LEN / self.UPDATE_INTERVAL)
self._host = host
self._port = port
# thread lock
self.lock = threading.Lock()
self.__datathread = threading.Thread(target=self.__recv_loop)
# start client
self.config()
def config(self):
self.__sock.connect((self._host, self._port))
self.__run_forever()
def is_active(self):
return self.__sock.fileno() != -1
def close(self):
self.__sock.close()
self.__datathread.join()
def __recv_loop(self):
while self.__sock.fileno() != -1:
try:
data = self.__sock.recv(self.chunk_size)
except OSError:
break
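            # each sample is a 4-byte little-endian float; chunks that are not
            # 4-byte aligned cannot be unpacked and are silently dropped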
if len(data) % 4 != 0:
continue
self.lock.acquire()
self.buffer.append(data)
# remove old data
if len(self.buffer) == self.max_buffer_length:
del self.buffer[0]
self.lock.release()
def __run_forever(self):
self.__datathread.start()
def get_trial_data(self):
"""
called to copy trial data from buffer
:return:
timestamps: list of timestamp
data: ndarray with shape of (channels, timesteps)
"""
self.lock.acquire()
raw_data = self.buffer.copy()
self.buffer.clear()
self.lock.release()
total_data = b''.join(raw_data)
byte_data = bytearray(total_data)
if len(byte_data) % 4 != 0:
raise ValueError
data = np.frombuffer(byte_data, dtype='<f')
data = np.reshape(data, (-1, self.n_channel))
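        # the last column is treated as a trigger/event channel: non-zero samples
        # mark event timestamps and the channel is stripped from the returned data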
timestamps = np.nonzero(data[:, -1])[0].tolist()
return timestamps, data[:, :-1].T
| [
"numpy.reshape",
"socket.socket",
"threading.Lock",
"numpy.nonzero",
"numpy.frombuffer",
"threading.Thread"
] | [((467, 519), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM', '(0)'], {}), '(socket.AF_INET, socket.SOCK_STREAM, 0)\n', (480, 519), False, 'import socket\n'), ((716, 732), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (730, 732), False, 'import threading\n'), ((761, 802), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.__recv_loop'}), '(target=self.__recv_loop)\n', (777, 802), False, 'import threading\n'), ((2147, 2183), 'numpy.frombuffer', 'np.frombuffer', (['byte_data'], {'dtype': '"""<f"""'}), "(byte_data, dtype='<f')\n", (2160, 2183), True, 'import numpy as np\n'), ((2199, 2237), 'numpy.reshape', 'np.reshape', (['data', '(-1, self.n_channel)'], {}), '(data, (-1, self.n_channel))\n', (2209, 2237), True, 'import numpy as np\n'), ((2259, 2282), 'numpy.nonzero', 'np.nonzero', (['data[:, -1]'], {}), '(data[:, -1])\n', (2269, 2282), True, 'import numpy as np\n')] |
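# ---------------------------------------------------------------------------
# Hedged usage sketch for the Neuracle client above (illustrative only): it
# assumes an amplifier (or a TCP mock) is already streaming float32 samples on
# localhost:8712 and that the class definition above is in scope.
import time

amp = Neuracle(n_channel=9, samplerate=1000, host='localhost', port=8712)
try:
    time.sleep(1.0)                             # let the background thread fill the buffer
    timestamps, data = amp.get_trial_data()    # data shape: (n_channel - 1, timesteps)
    print(len(timestamps), data.shape)
finally:
    amp.close()
# ---------------------------------------------------------------------------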
import jwt
private_key = open('keys/privatekey').read()
claims = {
'sub': 'Too5BdPayTQACdw1AJK1rD4nKUD0Ag7J',
'data_payload': 'payload data',
'iss': 'ambulance',
'aud': 'dps'
}
token = jwt.encode(claims, private_key, algorithm='RS256').decode('UTF-8')
print(token) | [
"jwt.encode"
] | [((204, 254), 'jwt.encode', 'jwt.encode', (['claims', 'private_key'], {'algorithm': '"""RS256"""'}), "(claims, private_key, algorithm='RS256')\n", (214, 254), False, 'import jwt\n')] |
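# ---------------------------------------------------------------------------
# Hedged companion sketch (illustrative only): verifying the RS256 token built
# above. 'keys/publickey' is an assumed path for the PEM public key matching
# 'keys/privatekey'; audience and issuer must equal the claims used at encode time.
public_key = open('keys/publickey').read()
decoded = jwt.decode(token, public_key, algorithms=['RS256'],
                    audience='dps', issuer='ambulance')
print(decoded['data_payload'])
# ---------------------------------------------------------------------------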
# Copyright 2018 GoDaddy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
import pecan
from wsme import types as wtypes
from wsmeext import pecan as wsme_pecan
from octavia.api.v2.controllers import base
from octavia.api.v2.types import usage as usage_types
from octavia.common import constants
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class UsageController(base.BaseController):
RBAC_TYPE = constants.RBAC_USAGE
def __init__(self):
super(UsageController, self).__init__()
def _get_usage(self, project_id=None):
context = pecan.request.context.get('octavia_context')
rbac = constants.RBAC_GET_ALL_GLOBAL
if project_id:
rbac = constants.RBAC_GET_ONE
self._auth_validate_action(context, context.project_id, rbac)
db_usage = self._get_db_usage(context.session, project_id)
usage_model = usage_types.UsageResponse(db_usage)
return usage_types.UsageRootResponse(usage=usage_model)
@wsme_pecan.wsexpose(usage_types.UsageRootResponse, wtypes.text)
def get_one(self, project_id):
"""Gets a single project's usage details."""
return self._get_usage(project_id)
@wsme_pecan.wsexpose(usage_types.UsageRootResponse)
def get_all(self):
"""Gets usage details for the entire system."""
return self._get_usage()
| [
"octavia.api.v2.types.usage.UsageResponse",
"pecan.request.context.get",
"wsmeext.pecan.wsexpose",
"octavia.api.v2.types.usage.UsageRootResponse",
"oslo_log.log.getLogger"
] | [((913, 940), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (930, 940), True, 'from oslo_log import log as logging\n'), ((1581, 1644), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['usage_types.UsageRootResponse', 'wtypes.text'], {}), '(usage_types.UsageRootResponse, wtypes.text)\n', (1600, 1644), True, 'from wsmeext import pecan as wsme_pecan\n'), ((1782, 1832), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['usage_types.UsageRootResponse'], {}), '(usage_types.UsageRootResponse)\n', (1801, 1832), True, 'from wsmeext import pecan as wsme_pecan\n'), ((1159, 1203), 'pecan.request.context.get', 'pecan.request.context.get', (['"""octavia_context"""'], {}), "('octavia_context')\n", (1184, 1203), False, 'import pecan\n'), ((1475, 1510), 'octavia.api.v2.types.usage.UsageResponse', 'usage_types.UsageResponse', (['db_usage'], {}), '(db_usage)\n', (1500, 1510), True, 'from octavia.api.v2.types import usage as usage_types\n'), ((1526, 1574), 'octavia.api.v2.types.usage.UsageRootResponse', 'usage_types.UsageRootResponse', ([], {'usage': 'usage_model'}), '(usage=usage_model)\n', (1555, 1574), True, 'from octavia.api.v2.types import usage as usage_types\n')] |
"""
Aries Cloud Agent
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v0.7.2
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from acapy_client.api_client import ApiClient, Endpoint as _Endpoint
from acapy_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types,
)
from acapy_client.model.create_wallet_request import CreateWalletRequest
from acapy_client.model.create_wallet_response import CreateWalletResponse
from acapy_client.model.create_wallet_token_request import CreateWalletTokenRequest
from acapy_client.model.create_wallet_token_response import CreateWalletTokenResponse
from acapy_client.model.remove_wallet_request import RemoveWalletRequest
from acapy_client.model.update_wallet_request import UpdateWalletRequest
from acapy_client.model.wallet_list import WalletList
from acapy_client.model.wallet_record import WalletRecord
class MultitenancyApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.multitenancy_wallet_post_endpoint = _Endpoint(
settings={
"response_type": (CreateWalletResponse,),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallet",
"operation_id": "multitenancy_wallet_post",
"http_method": "POST",
"servers": None,
},
params_map={
"all": [
"body",
],
"required": [],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"body": (CreateWalletRequest,),
},
"attribute_map": {},
"location_map": {
"body": "body",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self.multitenancy_wallet_wallet_id_get_endpoint = _Endpoint(
settings={
"response_type": (WalletRecord,),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallet/{wallet_id}",
"operation_id": "multitenancy_wallet_wallet_id_get",
"http_method": "GET",
"servers": None,
},
params_map={
"all": [
"wallet_id",
],
"required": [
"wallet_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"wallet_id": (str,),
},
"attribute_map": {
"wallet_id": "wallet_id",
},
"location_map": {
"wallet_id": "path",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self.multitenancy_wallet_wallet_id_put_endpoint = _Endpoint(
settings={
"response_type": (WalletRecord,),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallet/{wallet_id}",
"operation_id": "multitenancy_wallet_wallet_id_put",
"http_method": "PUT",
"servers": None,
},
params_map={
"all": [
"wallet_id",
"body",
],
"required": [
"wallet_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"wallet_id": (str,),
"body": (UpdateWalletRequest,),
},
"attribute_map": {
"wallet_id": "wallet_id",
},
"location_map": {
"wallet_id": "path",
"body": "body",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self.multitenancy_wallet_wallet_id_remove_post_endpoint = _Endpoint(
settings={
"response_type": (
bool,
date,
datetime,
dict,
float,
int,
list,
str,
none_type,
),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallet/{wallet_id}/remove",
"operation_id": "multitenancy_wallet_wallet_id_remove_post",
"http_method": "POST",
"servers": None,
},
params_map={
"all": [
"wallet_id",
"body",
],
"required": [
"wallet_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"wallet_id": (str,),
"body": (RemoveWalletRequest,),
},
"attribute_map": {
"wallet_id": "wallet_id",
},
"location_map": {
"wallet_id": "path",
"body": "body",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self.multitenancy_wallet_wallet_id_token_post_endpoint = _Endpoint(
settings={
"response_type": (CreateWalletTokenResponse,),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallet/{wallet_id}/token",
"operation_id": "multitenancy_wallet_wallet_id_token_post",
"http_method": "POST",
"servers": None,
},
params_map={
"all": [
"wallet_id",
"body",
],
"required": [
"wallet_id",
],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"wallet_id": (str,),
"body": (CreateWalletTokenRequest,),
},
"attribute_map": {
"wallet_id": "wallet_id",
},
"location_map": {
"wallet_id": "path",
"body": "body",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
self.multitenancy_wallets_get_endpoint = _Endpoint(
settings={
"response_type": (WalletList,),
"auth": ["AuthorizationHeader"],
"endpoint_path": "/multitenancy/wallets",
"operation_id": "multitenancy_wallets_get",
"http_method": "GET",
"servers": None,
},
params_map={
"all": [
"wallet_name",
],
"required": [],
"nullable": [],
"enum": [],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {},
"openapi_types": {
"wallet_name": (str,),
},
"attribute_map": {
"wallet_name": "wallet_name",
},
"location_map": {
"wallet_name": "query",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/json"],
"content_type": [],
},
api_client=api_client,
)
def multitenancy_wallet_post(self, **kwargs):
"""Create a subwallet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallet_post(async_req=True)
>>> result = thread.get()
Keyword Args:
body (CreateWalletRequest): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CreateWalletResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
return self.multitenancy_wallet_post_endpoint.call_with_http_info(**kwargs)
def multitenancy_wallet_wallet_id_get(self, wallet_id, **kwargs):
"""Get a single subwallet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallet_wallet_id_get(wallet_id, async_req=True)
>>> result = thread.get()
Args:
wallet_id (str): Subwallet identifier
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
WalletRecord
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
kwargs["wallet_id"] = wallet_id
return self.multitenancy_wallet_wallet_id_get_endpoint.call_with_http_info(
**kwargs
)
def multitenancy_wallet_wallet_id_put(self, wallet_id, **kwargs):
"""Update a subwallet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallet_wallet_id_put(wallet_id, async_req=True)
>>> result = thread.get()
Args:
wallet_id (str): Subwallet identifier
Keyword Args:
body (UpdateWalletRequest): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
WalletRecord
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
kwargs["wallet_id"] = wallet_id
return self.multitenancy_wallet_wallet_id_put_endpoint.call_with_http_info(
**kwargs
)
def multitenancy_wallet_wallet_id_remove_post(self, wallet_id, **kwargs):
"""Remove a subwallet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallet_wallet_id_remove_post(wallet_id, async_req=True)
>>> result = thread.get()
Args:
wallet_id (str): Subwallet identifier
Keyword Args:
body (RemoveWalletRequest): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
kwargs["wallet_id"] = wallet_id
return (
self.multitenancy_wallet_wallet_id_remove_post_endpoint.call_with_http_info(
**kwargs
)
)
def multitenancy_wallet_wallet_id_token_post(self, wallet_id, **kwargs):
"""Get auth token for a subwallet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallet_wallet_id_token_post(wallet_id, async_req=True)
>>> result = thread.get()
Args:
wallet_id (str):
Keyword Args:
body (CreateWalletTokenRequest): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
CreateWalletTokenResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
kwargs["wallet_id"] = wallet_id
return (
self.multitenancy_wallet_wallet_id_token_post_endpoint.call_with_http_info(
**kwargs
)
)
def multitenancy_wallets_get(self, **kwargs):
"""Query subwallets # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.multitenancy_wallets_get(async_req=True)
>>> result = thread.get()
Keyword Args:
wallet_name (str): Wallet name. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
WalletList
If the method is called asynchronously, returns the request
thread.
"""
kwargs["async_req"] = kwargs.get("async_req", False)
kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
return self.multitenancy_wallets_get_endpoint.call_with_http_info(**kwargs)
| [
"acapy_client.api_client.ApiClient",
"acapy_client.api_client.Endpoint"
] | [((1518, 2141), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (CreateWalletResponse,), 'auth': ['AuthorizationHeader'],\n 'endpoint_path': '/multitenancy/wallet', 'operation_id':\n 'multitenancy_wallet_post', 'http_method': 'POST', 'servers': None}", 'params_map': "{'all': ['body'], 'required': [], 'nullable': [], 'enum': [], 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'body': (\n CreateWalletRequest,)}, 'attribute_map': {}, 'location_map': {'body':\n 'body'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (CreateWalletResponse,), 'auth': [\n 'AuthorizationHeader'], 'endpoint_path': '/multitenancy/wallet',\n 'operation_id': 'multitenancy_wallet_post', 'http_method': 'POST',\n 'servers': None}, params_map={'all': ['body'], 'required': [],\n 'nullable': [], 'enum': [], 'validation': []}, root_map={'validations':\n {}, 'allowed_values': {}, 'openapi_types': {'body': (\n CreateWalletRequest,)}, 'attribute_map': {}, 'location_map': {'body':\n 'body'}, 'collection_format_map': {}}, headers_map={'accept': [\n 'application/json'], 'content_type': []}, api_client=api_client)\n", (1527, 2141), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((2717, 3390), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (WalletRecord,), 'auth': ['AuthorizationHeader'],\n 'endpoint_path': '/multitenancy/wallet/{wallet_id}', 'operation_id':\n 'multitenancy_wallet_wallet_id_get', 'http_method': 'GET', 'servers': None}", 'params_map': "{'all': ['wallet_id'], 'required': ['wallet_id'], 'nullable': [], 'enum': [\n ], 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id': (\n str,)}, 'attribute_map': {'wallet_id': 'wallet_id'}, 'location_map': {\n 'wallet_id': 'path'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (WalletRecord,), 'auth': [\n 'AuthorizationHeader'], 'endpoint_path':\n '/multitenancy/wallet/{wallet_id}', 'operation_id':\n 'multitenancy_wallet_wallet_id_get', 'http_method': 'GET', 'servers':\n None}, params_map={'all': ['wallet_id'], 'required': ['wallet_id'],\n 'nullable': [], 'enum': [], 'validation': []}, root_map={'validations':\n {}, 'allowed_values': {}, 'openapi_types': {'wallet_id': (str,)},\n 'attribute_map': {'wallet_id': 'wallet_id'}, 'location_map': {\n 'wallet_id': 'path'}, 'collection_format_map': {}}, headers_map={\n 'accept': ['application/json'], 'content_type': []}, api_client=api_client)\n", (2726, 3390), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((4040, 4774), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (WalletRecord,), 'auth': ['AuthorizationHeader'],\n 'endpoint_path': '/multitenancy/wallet/{wallet_id}', 'operation_id':\n 'multitenancy_wallet_wallet_id_put', 'http_method': 'PUT', 'servers': None}", 'params_map': "{'all': ['wallet_id', 'body'], 'required': ['wallet_id'], 'nullable': [],\n 'enum': [], 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id': (\n str,), 'body': (UpdateWalletRequest,)}, 'attribute_map': {'wallet_id':\n 'wallet_id'}, 'location_map': {'wallet_id': 'path', 'body': 'body'},\n 'collection_format_map': 
{}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (WalletRecord,), 'auth': [\n 'AuthorizationHeader'], 'endpoint_path':\n '/multitenancy/wallet/{wallet_id}', 'operation_id':\n 'multitenancy_wallet_wallet_id_put', 'http_method': 'PUT', 'servers':\n None}, params_map={'all': ['wallet_id', 'body'], 'required': [\n 'wallet_id'], 'nullable': [], 'enum': [], 'validation': []}, root_map={\n 'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id':\n (str,), 'body': (UpdateWalletRequest,)}, 'attribute_map': {'wallet_id':\n 'wallet_id'}, 'location_map': {'wallet_id': 'path', 'body': 'body'},\n 'collection_format_map': {}}, headers_map={'accept': [\n 'application/json'], 'content_type': []}, api_client=api_client)\n", (4049, 4774), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((5487, 6288), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (bool, date, datetime, dict, float, int, list, str,\n none_type), 'auth': ['AuthorizationHeader'], 'endpoint_path':\n '/multitenancy/wallet/{wallet_id}/remove', 'operation_id':\n 'multitenancy_wallet_wallet_id_remove_post', 'http_method': 'POST',\n 'servers': None}", 'params_map': "{'all': ['wallet_id', 'body'], 'required': ['wallet_id'], 'nullable': [],\n 'enum': [], 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id': (\n str,), 'body': (RemoveWalletRequest,)}, 'attribute_map': {'wallet_id':\n 'wallet_id'}, 'location_map': {'wallet_id': 'path', 'body': 'body'},\n 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (bool, date, datetime, dict, float,\n int, list, str, none_type), 'auth': ['AuthorizationHeader'],\n 'endpoint_path': '/multitenancy/wallet/{wallet_id}/remove',\n 'operation_id': 'multitenancy_wallet_wallet_id_remove_post',\n 'http_method': 'POST', 'servers': None}, params_map={'all': [\n 'wallet_id', 'body'], 'required': ['wallet_id'], 'nullable': [], 'enum':\n [], 'validation': []}, root_map={'validations': {}, 'allowed_values': {\n }, 'openapi_types': {'wallet_id': (str,), 'body': (RemoveWalletRequest,\n )}, 'attribute_map': {'wallet_id': 'wallet_id'}, 'location_map': {\n 'wallet_id': 'path', 'body': 'body'}, 'collection_format_map': {}},\n headers_map={'accept': ['application/json'], 'content_type': []},\n api_client=api_client)\n", (5496, 6288), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((7195, 7960), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (CreateWalletTokenResponse,), 'auth': [\n 'AuthorizationHeader'], 'endpoint_path':\n '/multitenancy/wallet/{wallet_id}/token', 'operation_id':\n 'multitenancy_wallet_wallet_id_token_post', 'http_method': 'POST',\n 'servers': None}", 'params_map': "{'all': ['wallet_id', 'body'], 'required': ['wallet_id'], 'nullable': [],\n 'enum': [], 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id': (\n str,), 'body': (CreateWalletTokenRequest,)}, 'attribute_map': {\n 'wallet_id': 'wallet_id'}, 'location_map': {'wallet_id': 'path', 'body':\n 'body'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (CreateWalletTokenResponse,), 'auth':\n 
['AuthorizationHeader'], 'endpoint_path':\n '/multitenancy/wallet/{wallet_id}/token', 'operation_id':\n 'multitenancy_wallet_wallet_id_token_post', 'http_method': 'POST',\n 'servers': None}, params_map={'all': ['wallet_id', 'body'], 'required':\n ['wallet_id'], 'nullable': [], 'enum': [], 'validation': []}, root_map=\n {'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_id':\n (str,), 'body': (CreateWalletTokenRequest,)}, 'attribute_map': {\n 'wallet_id': 'wallet_id'}, 'location_map': {'wallet_id': 'path', 'body':\n 'body'}, 'collection_format_map': {}}, headers_map={'accept': [\n 'application/json'], 'content_type': []}, api_client=api_client)\n", (7204, 7960), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((8657, 9304), 'acapy_client.api_client.Endpoint', '_Endpoint', ([], {'settings': "{'response_type': (WalletList,), 'auth': ['AuthorizationHeader'],\n 'endpoint_path': '/multitenancy/wallets', 'operation_id':\n 'multitenancy_wallets_get', 'http_method': 'GET', 'servers': None}", 'params_map': "{'all': ['wallet_name'], 'required': [], 'nullable': [], 'enum': [],\n 'validation': []}", 'root_map': "{'validations': {}, 'allowed_values': {}, 'openapi_types': {'wallet_name':\n (str,)}, 'attribute_map': {'wallet_name': 'wallet_name'},\n 'location_map': {'wallet_name': 'query'}, 'collection_format_map': {}}", 'headers_map': "{'accept': ['application/json'], 'content_type': []}", 'api_client': 'api_client'}), "(settings={'response_type': (WalletList,), 'auth': [\n 'AuthorizationHeader'], 'endpoint_path': '/multitenancy/wallets',\n 'operation_id': 'multitenancy_wallets_get', 'http_method': 'GET',\n 'servers': None}, params_map={'all': ['wallet_name'], 'required': [],\n 'nullable': [], 'enum': [], 'validation': []}, root_map={'validations':\n {}, 'allowed_values': {}, 'openapi_types': {'wallet_name': (str,)},\n 'attribute_map': {'wallet_name': 'wallet_name'}, 'location_map': {\n 'wallet_name': 'query'}, 'collection_format_map': {}}, headers_map={\n 'accept': ['application/json'], 'content_type': []}, api_client=api_client)\n", (8666, 9304), True, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n'), ((1420, 1431), 'acapy_client.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (1429, 1431), False, 'from acapy_client.api_client import ApiClient, Endpoint as _Endpoint\n')] |
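For context, here is a minimal usage sketch of the generated client above. It is illustrative only: the import paths, the Configuration/ApiClient setup, the admin URL and the request fields follow the usual openapi-generator layout and are assumptions, not taken from this file.

from acapy_client.api_client import ApiClient
from acapy_client.configuration import Configuration           # assumed module path
from acapy_client.api.multitenancy_api import MultitenancyApi  # assumed module path
from acapy_client.model.create_wallet_request import CreateWalletRequest  # assumed module path

configuration = Configuration(host="http://localhost:8031")  # hypothetical admin URL
with ApiClient(configuration) as api_client:
    api = MultitenancyApi(api_client)
    wallet = api.multitenancy_wallet_post(body=CreateWalletRequest(label="demo"))
    token = api.multitenancy_wallet_wallet_id_token_post(wallet.wallet_id)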
import sys
import os
from cx_Freeze import setup, Executable
os.environ['TCL_LIBRARY'] = r'C:\Users\John\AppData\Local\Programs\Python\Python36\tcl\tcl8.6'
os.environ['TK_LIBRARY'] = r'C:\Users\John\AppData\Local\Programs\Python\Python36\tcl\tk8.6'
base = None
if sys.platform == 'win32':
    base = "WIN32GUI"
executables = [
Executable("SynthesiaToKK.py", base=base)
]
buildOptions = dict(
includes = ["mido.backends.rtmidi"],
include_files = [r'C:\Users\John\AppData\Local\Programs\Python\Python36\DLLs\tcl86t.dll',
r'C:\Users\John\AppData\Local\Programs\Python\Python36\DLLs\tk86t.dll']
)
setup(
name = "SynthesiaToKK",
version = "1.0",
description = "Let Synthesia control Komplete Kontrol keyboards Light Guide",
options = dict(build_exe = buildOptions),
executables = executables
) | [
"cx_Freeze.Executable"
] | [((338, 379), 'cx_Freeze.Executable', 'Executable', (['"""SynthesiaToKK.py"""'], {'base': 'base'}), "('SynthesiaToKK.py', base=base)\n", (348, 379), False, 'from cx_Freeze import setup, Executable\n')] |
import json
from enum import Enum
class BaseCommand(Enum):
LEARN = "LEARN" # TEACH IN
UPDATE = "UPDATE" # trigger updated notification
@classmethod
def extract_json(cls, text: str):
data = json.loads(text)
sections = ["command", "cmd", "COMMAND", "CMD"]
for section in sections:
text = data.get(section)
if text is not None:
break
if text:
text = text.strip()
return text
| [
"json.loads"
] | [((218, 234), 'json.loads', 'json.loads', (['text'], {}), '(text)\n', (228, 234), False, 'import json\n')] |
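A quick illustrative check of the helper above; the JSON payloads are made up and not part of the original source.

assert BaseCommand.extract_json('{"command": " LEARN "}') == "LEARN"  # first matching key wins
assert BaseCommand.extract_json('{"CMD": "update"}') == "update"      # falls through to "CMD"
assert BaseCommand.extract_json('{"unrelated": 1}') is None           # no command-like key present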
from cartopy.crs import Globe, Stereographic
from shapely import geometry
class Amersfoort(Stereographic):
"""
Amersfoort projection for the Netherlands.
Ellipsoid is Bessel 1841.
https://epsg.io/28992
"""
def __init__(self):
globe = Globe(ellipse="bessel")
super(Amersfoort, self).__init__(
52.15616055555555,
5.38763888888889,
false_easting=155000,
false_northing=463000,
true_scale_latitude=0.9999079,
globe=globe,
)
@property
def x_limits(self):
return (-100000, 330000)
@property
def y_limits(self):
return (150000, 650000)
@property
def boundary(self):
x0, x1 = self.x_limits
y0, y1 = self.y_limits
return geometry.LineString(
[(x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)]
)
| [
"shapely.geometry.LineString",
"cartopy.crs.Globe"
] | [((269, 292), 'cartopy.crs.Globe', 'Globe', ([], {'ellipse': '"""bessel"""'}), "(ellipse='bessel')\n", (274, 292), False, 'from cartopy.crs import Globe, Stereographic\n'), ((802, 873), 'shapely.geometry.LineString', 'geometry.LineString', (['[(x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)]'], {}), '([(x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)])\n', (821, 873), False, 'from shapely import geometry\n')] |
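A minimal plotting sketch for the projection above; matplotlib and cartopy's built-in coastline feature are assumptions here and are not part of the original module.

import matplotlib.pyplot as plt
import cartopy.feature as cfeature

ax = plt.axes(projection=Amersfoort())  # GeoAxes in the Dutch national grid
ax.add_feature(cfeature.COASTLINE)      # assumes Natural Earth data can be fetched
ax.gridlines()
plt.show()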
# Follow resnet implementation at:
# https://github.com/deepmind/dm-haiku/blob/main/haiku/_src/nets/resnet.py
# which is under Apache License, Version 2.0.
"""Resnet Modules."""
from typing import Sequence, Tuple
import jax
import jax.numpy as jnp
from ..core import Module
from ..nn import BatchNorm2D, Conv2D, Linear, max_pool
class ResnetBlock(Module):
"""ResnetBlock"""
layers: Sequence[Tuple[Conv2D, BatchNorm2D]]
def __init__(
self,
in_channels: int,
out_channels: int,
stride,
use_projection: bool,
bottleneck: bool,
):
super().__init__()
self.use_projection = use_projection
if self.use_projection:
self.proj_conv = Conv2D(
in_channels,
out_channels,
kernel_shape=1,
stride=stride,
with_bias=False,
padding=[(0, 0), (0, 0)],
data_format="NCHW",
name="proj_conv",
)
self.proj_batchnorm = BatchNorm2D(
out_channels, True, True, 0.9, data_format="NCHW", name="proj_bn"
)
channel_div = 4 if bottleneck else 1
conv_0 = Conv2D(
in_features=in_channels,
out_features=out_channels // channel_div,
kernel_shape=1 if bottleneck else 3,
stride=1 if bottleneck else stride,
with_bias=False,
padding=[(0, 0), (0, 0)] if bottleneck else [(1, 1), (1, 1)],
data_format="NCHW",
name="conv1",
)
bn_0 = BatchNorm2D(
out_channels // channel_div, True, True, 0.9, data_format="NCHW", name="bn1"
)
conv_1 = Conv2D(
in_features=out_channels // channel_div,
out_features=out_channels,
kernel_shape=3,
stride=stride if bottleneck else 1,
with_bias=False,
padding=[(1, 1), (1, 1)],
data_format="NCHW",
name="conv2",
)
bn_1 = BatchNorm2D(
out_channels, True, True, 0.9, data_format="NCHW", name="bn2"
)
layers = ((conv_0, bn_0), (conv_1, bn_1))
if bottleneck:
conv_2 = Conv2D(
in_features=out_channels,
out_features=out_channels,
kernel_shape=1,
stride=1,
with_bias=False,
padding=[(0, 0), (0, 0)],
data_format="NCHW",
name="conv3",
)
bn_2 = BatchNorm2D(
out_channels,
True,
True,
0.9,
data_format="NCHW",
name="bn3",
)
layers = layers + ((conv_2, bn_2),)
self.layers = layers
def __call__(self, inputs):
out = shortcut = inputs
if self.use_projection:
shortcut = self.proj_conv(shortcut)
shortcut = self.proj_batchnorm(shortcut)
for i, (conv_i, bn_i) in enumerate(self.layers):
out = conv_i(out)
out = bn_i(out)
if i < len(self.layers) - 1:
out = jax.nn.relu(out)
return jax.nn.relu(out + shortcut)
class BlockGroup(Module):
"""Group of Blocks."""
def __init__(
self,
in_channels: int,
out_channels: int,
num_blocks: int,
stride,
bottleneck: bool,
use_projection,
):
super().__init__()
blocks = []
for i in range(num_blocks):
blocks.append(
ResnetBlock(
in_channels=(in_channels if i == 0 else out_channels),
out_channels=out_channels,
stride=(1 if i else stride),
use_projection=(i == 0 and use_projection),
bottleneck=bottleneck,
)
)
self.blocks = blocks
def __call__(self, inputs):
out = inputs
for block in self.blocks:
out = block(out)
return out
def check_length(length, value, name):
if len(value) != length:
raise ValueError(f"`{name}` must be of length 4 not {len(value)}")
class ResNet(Module):
"""A generic ResNet module."""
CONFIGS = {
18: {
"blocks_per_group": (2, 2, 2, 2),
"bottleneck": False,
"channels_per_group": (64, 128, 256, 512),
"use_projection": (False, True, True, True),
},
34: {
"blocks_per_group": (3, 4, 6, 3),
"bottleneck": False,
"channels_per_group": (64, 128, 256, 512),
"use_projection": (False, True, True, True),
},
50: {
"blocks_per_group": (3, 4, 6, 3),
"bottleneck": True,
"channels_per_group": (256, 512, 1024, 2048),
"use_projection": (True, True, True, True),
},
101: {
"blocks_per_group": (3, 4, 23, 3),
"bottleneck": True,
"channels_per_group": (256, 512, 1024, 2048),
"use_projection": (True, True, True, True),
},
152: {
"blocks_per_group": (3, 8, 36, 3),
"bottleneck": True,
"channels_per_group": (256, 512, 1024, 2048),
"use_projection": (True, True, True, True),
},
200: {
"blocks_per_group": (3, 24, 36, 3),
"bottleneck": True,
"channels_per_group": (256, 512, 1024, 2048),
"use_projection": (True, True, True, True),
},
}
def __init__(
self,
input_channels: int,
blocks_per_group: Sequence[int],
num_classes: int,
bottleneck: bool = True,
channels_per_group: Sequence[int] = (256, 512, 1024, 2048),
use_projection: Sequence[bool] = (True, True, True, True),
logits_config=None,
initial_conv_config=None,
name=None,
):
super().__init__(name=name)
check_length(4, blocks_per_group, "blocks_per_group")
check_length(4, channels_per_group, "channels_per_group")
logits_config = dict(logits_config or {})
logits_config.setdefault("w_init", jax.nn.initializers.zeros)
initial_conv_config = dict(initial_conv_config or {})
initial_conv_config.setdefault("in_features", input_channels)
initial_conv_config.setdefault("out_features", 64)
initial_conv_config.setdefault("kernel_shape", 7)
initial_conv_config.setdefault("stride", 2)
initial_conv_config.setdefault("with_bias", False)
initial_conv_config.setdefault("padding", [(3, 3), (3, 3)])
initial_conv_config.setdefault("data_format", "NCHW")
self.initial_conv = Conv2D(**initial_conv_config, name="conv1")
self.initial_batchnorm = BatchNorm2D(
initial_conv_config["out_features"],
True,
True,
0.9,
data_format="NCHW",
name="bn1",
)
block_groups = []
strides = (1, 2, 2, 2)
for i in range(4):
block_groups.append(
BlockGroup(
in_channels=(
initial_conv_config["out_features"]
if i == 0
else channels_per_group[i - 1]
),
out_channels=channels_per_group[i],
num_blocks=blocks_per_group[i],
stride=strides[i],
bottleneck=bottleneck,
use_projection=use_projection[i],
)
)
self.block_groups = block_groups
self.logits = Linear(
channels_per_group[-1], num_classes, **logits_config, name="fc"
)
def __call__(self, inputs):
out = inputs
out = self.initial_conv(out)
out = self.initial_batchnorm(out)
out = jax.nn.relu(out)
out = jnp.pad(out, [(0, 0), (0, 0), (1, 1), (1, 1)])
out = max_pool(
out,
window_shape=(1, 1, 3, 3),
strides=(1, 1, 2, 2),
padding="VALID",
channel_axis=1,
)
for block_group in self.block_groups:
out = block_group(out)
out = jnp.mean(out, axis=(2, 3))
return self.logits(out)
class ResNet18(ResNet):
"""ResNet18."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[18],
name="ResNet18",
)
class ResNet34(ResNet):
"""ResNet34."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[34],
)
class ResNet50(ResNet):
"""ResNet50."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[50],
)
class ResNet101(ResNet):
"""ResNet101."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[101],
)
class ResNet152(ResNet):
"""ResNet152."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[152],
)
class ResNet200(ResNet):
"""ResNet200."""
def __init__(
self,
input_channels: int,
num_classes: int,
logits_config=None,
initial_conv_config=None,
):
super().__init__(
input_channels=input_channels,
num_classes=num_classes,
initial_conv_config=initial_conv_config,
logits_config=logits_config,
**ResNet.CONFIGS[200],
)
| [
"jax.nn.relu",
"jax.numpy.mean",
"jax.numpy.pad"
] | [((3262, 3289), 'jax.nn.relu', 'jax.nn.relu', (['(out + shortcut)'], {}), '(out + shortcut)\n', (3273, 3289), False, 'import jax\n'), ((8065, 8081), 'jax.nn.relu', 'jax.nn.relu', (['out'], {}), '(out)\n', (8076, 8081), False, 'import jax\n'), ((8096, 8142), 'jax.numpy.pad', 'jnp.pad', (['out', '[(0, 0), (0, 0), (1, 1), (1, 1)]'], {}), '(out, [(0, 0), (0, 0), (1, 1), (1, 1)])\n', (8103, 8142), True, 'import jax.numpy as jnp\n'), ((8420, 8446), 'jax.numpy.mean', 'jnp.mean', (['out'], {'axis': '(2, 3)'}), '(out, axis=(2, 3))\n', (8428, 8446), True, 'import jax.numpy as jnp\n'), ((3229, 3245), 'jax.nn.relu', 'jax.nn.relu', (['out'], {}), '(out)\n', (3240, 3245), False, 'import jax\n')] |
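A rough forward-pass sketch for the ResNet variants above, assuming the surrounding Module framework allows direct construction and calling on an NCHW array (some frameworks require wrapping the call in a purification transform); the batch size, image size and class count are made up.

import jax.numpy as jnp

net = ResNet18(input_channels=3, num_classes=10)  # hypothetical 10-class task
logits = net(jnp.ones((4, 3, 32, 32)))           # expected logits shape: (4, 10)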
# [1]
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
# [2]
import cv2
import matplotlib.pyplot as plt
import seaborn as sns
import os
from PIL import Image
from keras.preprocessing.image import img_to_array
from keras.preprocessing.image import load_img
from keras.utils import np_utils
# [3]
parasitized_data = os.listdir('../input/cell_images/cell_images/Parasitized/')
print(parasitized_data[:10]) #the output we get are the .png files
uninfected_data = os.listdir('../input/cell_images/cell_images/Uninfected/')
print('\n')
print(uninfected_data[:10])
# [4]
plt.figure(figsize = (12,12))
for i in range(4):
plt.subplot(1, 4, i+1)
img = cv2.imread('../input/cell_images/cell_images/Parasitized' + "/" + parasitized_data[i])
plt.imshow(img)
plt.title('PARASITIZED : 1')
plt.tight_layout()
plt.show()
# [5]
plt.figure(figsize = (12,12))
for i in range(4):
plt.subplot(1, 4, i+1)
img = cv2.imread('../input/cell_images/cell_images/Uninfected' + "/" + uninfected_data[i+1])
plt.imshow(img)
plt.title('UNINFECTED : 0')
plt.tight_layout()
plt.show()
# [6]
data = []
labels = []
for img in parasitized_data:
try:
img_read = plt.imread('../input/cell_images/cell_images/Parasitized/' + "/" + img)
img_resize = cv2.resize(img_read, (50, 50))
img_array = img_to_array(img_resize)
data.append(img_array)
labels.append(1)
except:
None
for img in uninfected_data:
try:
img_read = plt.imread('../input/cell_images/cell_images/Uninfected' + "/" + img)
img_resize = cv2.resize(img_read, (50, 50))
img_array = img_to_array(img_resize)
data.append(img_array)
labels.append(0)
except:
None
# [7]
plt.imshow(data[0])
plt.show()
# [8]
image_data = np.array(data)
labels = np.array(labels)
# [9]
idx = np.arange(image_data.shape[0])
np.random.shuffle(idx)
image_data = image_data[idx]
labels = labels[idx]
# [10]
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(image_data, labels, test_size = 0.2, random_state = 101)
# [11]
y_train = np_utils.to_categorical(y_train, num_classes = 2)
y_test = np_utils.to_categorical(y_test, num_classes = 2)
# [12]
print(f'SHAPE OF TRAINING IMAGE DATA : {x_train.shape}')
print(f'SHAPE OF TESTING IMAGE DATA : {x_test.shape}')
print(f'SHAPE OF TRAINING LABELS : {y_train.shape}')
print(f'SHAPE OF TESTING LABELS : {y_test.shape}')
# [13]
import keras
from keras.layers import Dense, Conv2D
from keras.layers import Flatten
from keras.layers import MaxPooling2D, GlobalAveragePooling2D
from keras.layers import Activation
from keras.layers import BatchNormalization
from keras.layers import Dropout
from keras.models import Sequential
from keras import backend as K
from keras import optimizers
# [14]
def CNNbuild(height, width, classes, channels):
model = Sequential()
inputShape = (height, width, channels)
chanDim = -1
if K.image_data_format() == 'channels_first':
inputShape = (channels, height, width)
model.add(Conv2D(32, (3,3), activation = 'relu', input_shape = inputShape))
model.add(MaxPooling2D(2,2))
model.add(BatchNormalization(axis = chanDim))
model.add(Dropout(0.2))
model.add(Conv2D(32, (3,3), activation = 'relu'))
model.add(MaxPooling2D(2,2))
model.add(BatchNormalization(axis = chanDim))
model.add(Dropout(0.2))
model.add(Conv2D(32, (3,3), activation = 'relu'))
model.add(MaxPooling2D(2,2))
model.add(BatchNormalization(axis = chanDim))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(512, activation = 'relu'))
model.add(BatchNormalization(axis = chanDim))
model.add(Dropout(0.5))
model.add(Dense(classes, activation = 'softmax'))
return model
# [15]
height = 50
width = 50
classes = 2
channels = 3
model = CNNbuild(height = height, width = width, classes = classes, channels = channels)
model.summary()
# [16]
#compile the model
model.compile(loss = 'categorical_crossentropy', optimizer = 'Adam', metrics = ['accuracy'])
# [17]
#fit the model onto the dataset
h = model.fit(x_train, y_train, epochs = 20, batch_size = 32)
# [18]
plt.figure(figsize = (18,8))
plt.plot(range(20), h.history['acc'], label = 'Training Accuracy')
plt.plot(range(20), h.history['loss'], label = 'Training Loss')
#ax1.set_xticks(np.arange(0, 31, 5))
plt.xlabel("Number of Epochs")
plt.ylabel('Accuracy/Loss Value')
plt.title('Training Accuracy and Training Loss')
plt.legend(loc = "best")
# ax2.plot(range(20), h.history['loss'], label = 'Training Loss')
# ax2.plot(range(20), h.history['val_loss'], label = 'Validation Loss')
# #ax2.set_xticks(np.arange(0, 31, 5))
# ax2.set_xlabel("Number of Epoch's")
# ax2.set_ylabel('Loss Value')
# ax2.set_title('Training Loss vs Validation Loss')
# ax2.legend(loc = "best")
# [19]
#evaluate the model on test data
predictions = model.evaluate(x_test, y_test)
# [20]
print(f'LOSS : {predictions[0]}')
print(f'ACCURACY : {predictions[1]}')
# [21]
from keras.preprocessing.image import ImageDataGenerator
# [22]
train_datagen = ImageDataGenerator(rescale = 1/255.,
horizontal_flip = True,
width_shift_range = 0.2,
height_shift_range = 0.2,
fill_mode = 'nearest',
zoom_range = 0.3,
rotation_range = 30)
val_datagen = ImageDataGenerator(rescale = 1/255.)
train_generator = train_datagen.flow(x_train, y_train, batch_size = 64, shuffle = False)
val_generator = val_datagen.flow(x_test, y_test, batch_size = 64, shuffle = False)
# [23]
#calling the same model as above
model_aug = CNNbuild(height = height, width = width, classes = classes, channels = channels)
# [24]
#compile the model
optim = optimizers.Adam(lr = 0.001, decay = 0.001 / 64)
model_aug.compile(loss = 'categorical_crossentropy', optimizer = optim, metrics = ['accuracy'])
# [25]
#fit the model on the augmented dataset
h_aug = model_aug.fit_generator(train_generator, steps_per_epoch = len(x_train) // 64, epochs = 50)
# [26]
#evaluate the model on augmented test data
predict = model_aug.evaluate_generator(val_generator, steps = 5)
# [27]
print(f'LOSS ON TEST DATA AFTER DATA AUGMENTATION : {predict[0]}')
print(f'ACCURACY ON TEST DATA AFTER DATA AUGMENTATION : {predict[1]}')
model.save('model_95.h5')
model_aug.save('model_97.h5')
| [
"keras.preprocessing.image.img_to_array",
"keras.layers.Conv2D",
"matplotlib.pyplot.ylabel",
"keras.preprocessing.image.ImageDataGenerator",
"numpy.array",
"keras.layers.Dense",
"numpy.arange",
"matplotlib.pyplot.imshow",
"os.listdir",
"keras.backend.image_data_format",
"matplotlib.pyplot.xlabel",
"keras.optimizers.Adam",
"keras.layers.Flatten",
"keras.layers.MaxPooling2D",
"sklearn.model_selection.train_test_split",
"keras.models.Sequential",
"matplotlib.pyplot.title",
"cv2.resize",
"keras.layers.BatchNormalization",
"cv2.imread",
"keras.layers.Dropout",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show",
"matplotlib.pyplot.imread",
"matplotlib.pyplot.figure",
"keras.utils.np_utils.to_categorical",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.subplot",
"numpy.random.shuffle"
] | [((375, 434), 'os.listdir', 'os.listdir', (['"""../input/cell_images/cell_images/Parasitized/"""'], {}), "('../input/cell_images/cell_images/Parasitized/')\n", (385, 434), False, 'import os\n'), ((521, 579), 'os.listdir', 'os.listdir', (['"""../input/cell_images/cell_images/Uninfected/"""'], {}), "('../input/cell_images/cell_images/Uninfected/')\n", (531, 579), False, 'import os\n'), ((627, 655), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 12)'}), '(figsize=(12, 12))\n', (637, 655), True, 'import matplotlib.pyplot as plt\n'), ((876, 886), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (884, 886), True, 'import matplotlib.pyplot as plt\n'), ((894, 922), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 12)'}), '(figsize=(12, 12))\n', (904, 922), True, 'import matplotlib.pyplot as plt\n'), ((1142, 1152), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1150, 1152), True, 'import matplotlib.pyplot as plt\n'), ((1809, 1828), 'matplotlib.pyplot.imshow', 'plt.imshow', (['data[0]'], {}), '(data[0])\n', (1819, 1828), True, 'import matplotlib.pyplot as plt\n'), ((1829, 1839), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1837, 1839), True, 'import matplotlib.pyplot as plt\n'), ((1860, 1874), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (1868, 1874), True, 'import numpy as np\n'), ((1884, 1900), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (1892, 1900), True, 'import numpy as np\n'), ((1914, 1944), 'numpy.arange', 'np.arange', (['image_data.shape[0]'], {}), '(image_data.shape[0])\n', (1923, 1944), True, 'import numpy as np\n'), ((1945, 1967), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (1962, 1967), True, 'import numpy as np\n'), ((2114, 2183), 'sklearn.model_selection.train_test_split', 'train_test_split', (['image_data', 'labels'], {'test_size': '(0.2)', 'random_state': '(101)'}), '(image_data, labels, test_size=0.2, random_state=101)\n', (2130, 2183), False, 'from sklearn.model_selection import train_test_split\n'), ((2206, 2253), 'keras.utils.np_utils.to_categorical', 'np_utils.to_categorical', (['y_train'], {'num_classes': '(2)'}), '(y_train, num_classes=2)\n', (2229, 2253), False, 'from keras.utils import np_utils\n'), ((2265, 2311), 'keras.utils.np_utils.to_categorical', 'np_utils.to_categorical', (['y_test'], {'num_classes': '(2)'}), '(y_test, num_classes=2)\n', (2288, 2311), False, 'from keras.utils import np_utils\n'), ((4297, 4324), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(18, 8)'}), '(figsize=(18, 8))\n', (4307, 4324), True, 'import matplotlib.pyplot as plt\n'), ((4493, 4524), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of Epoch\'s"""'], {}), '("Number of Epoch\'s")\n', (4503, 4524), True, 'import matplotlib.pyplot as plt\n'), ((4525, 4558), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy/Loss Value"""'], {}), "('Accuracy/Loss Value')\n", (4535, 4558), True, 'import matplotlib.pyplot as plt\n'), ((4559, 4607), 'matplotlib.pyplot.title', 'plt.title', (['"""Training Accuracy and Training Loss"""'], {}), "('Training Accuracy and Training Loss')\n", (4568, 4607), True, 'import matplotlib.pyplot as plt\n'), ((4608, 4630), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (4618, 4630), True, 'import matplotlib.pyplot as plt\n'), ((5214, 5384), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1 / 255.0)', 'horizontal_flip': '(True)', 'width_shift_range': 
'(0.2)', 'height_shift_range': '(0.2)', 'fill_mode': '"""nearest"""', 'zoom_range': '(0.3)', 'rotation_range': '(30)'}), "(rescale=1 / 255.0, horizontal_flip=True,\n width_shift_range=0.2, height_shift_range=0.2, fill_mode='nearest',\n zoom_range=0.3, rotation_range=30)\n", (5232, 5384), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((5606, 5643), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rescale': '(1 / 255.0)'}), '(rescale=1 / 255.0)\n', (5624, 5643), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((5985, 6028), 'keras.optimizers.Adam', 'optimizers.Adam', ([], {'lr': '(0.001)', 'decay': '(0.001 / 64)'}), '(lr=0.001, decay=0.001 / 64)\n', (6000, 6028), False, 'from keras import optimizers\n'), ((680, 704), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(4)', '(i + 1)'], {}), '(1, 4, i + 1)\n', (691, 704), True, 'import matplotlib.pyplot as plt\n'), ((713, 803), 'cv2.imread', 'cv2.imread', (["('../input/cell_images/cell_images/Parasitized' + '/' + parasitized_data[i])"], {}), "('../input/cell_images/cell_images/Parasitized' + '/' +\n parasitized_data[i])\n", (723, 803), False, 'import cv2\n'), ((804, 819), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (814, 819), True, 'import matplotlib.pyplot as plt\n'), ((824, 852), 'matplotlib.pyplot.title', 'plt.title', (['"""PARASITIZED : 1"""'], {}), "('PARASITIZED : 1')\n", (833, 852), True, 'import matplotlib.pyplot as plt\n'), ((857, 875), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (873, 875), True, 'import matplotlib.pyplot as plt\n'), ((947, 971), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(4)', '(i + 1)'], {}), '(1, 4, i + 1)\n', (958, 971), True, 'import matplotlib.pyplot as plt\n'), ((980, 1072), 'cv2.imread', 'cv2.imread', (["('../input/cell_images/cell_images/Uninfected' + '/' + uninfected_data[i + 1])"], {}), "('../input/cell_images/cell_images/Uninfected' + '/' +\n uninfected_data[i + 1])\n", (990, 1072), False, 'import cv2\n'), ((1071, 1086), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (1081, 1086), True, 'import matplotlib.pyplot as plt\n'), ((1091, 1118), 'matplotlib.pyplot.title', 'plt.title', (['"""UNINFECTED : 0"""'], {}), "('UNINFECTED : 0')\n", (1100, 1118), True, 'import matplotlib.pyplot as plt\n'), ((1123, 1141), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1139, 1141), True, 'import matplotlib.pyplot as plt\n'), ((2971, 2983), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (2981, 2983), False, 'from keras.models import Sequential\n'), ((1239, 1310), 'matplotlib.pyplot.imread', 'plt.imread', (["('../input/cell_images/cell_images/Parasitized/' + '/' + img)"], {}), "('../input/cell_images/cell_images/Parasitized/' + '/' + img)\n", (1249, 1310), True, 'import matplotlib.pyplot as plt\n'), ((1332, 1362), 'cv2.resize', 'cv2.resize', (['img_read', '(50, 50)'], {}), '(img_read, (50, 50))\n', (1342, 1362), False, 'import cv2\n'), ((1383, 1407), 'keras.preprocessing.image.img_to_array', 'img_to_array', (['img_resize'], {}), '(img_resize)\n', (1395, 1407), False, 'from keras.preprocessing.image import img_to_array\n'), ((1554, 1623), 'matplotlib.pyplot.imread', 'plt.imread', (["('../input/cell_images/cell_images/Uninfected' + '/' + img)"], {}), "('../input/cell_images/cell_images/Uninfected' + '/' + img)\n", (1564, 1623), True, 'import matplotlib.pyplot as plt\n'), ((1645, 1675), 'cv2.resize', 'cv2.resize', (['img_read', 
'(50, 50)'], {}), '(img_read, (50, 50))\n', (1655, 1675), False, 'import cv2\n'), ((1696, 1720), 'keras.preprocessing.image.img_to_array', 'img_to_array', (['img_resize'], {}), '(img_resize)\n', (1708, 1720), False, 'from keras.preprocessing.image import img_to_array\n'), ((3061, 3082), 'keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (3080, 3082), True, 'from keras import backend as K\n'), ((3165, 3226), 'keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""', 'input_shape': 'inputShape'}), "(32, (3, 3), activation='relu', input_shape=inputShape)\n", (3171, 3226), False, 'from keras.layers import Dense, Conv2D\n'), ((3245, 3263), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (3257, 3263), False, 'from keras.layers import MaxPooling2D, GlobalAveragePooling2D\n'), ((3278, 3310), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'chanDim'}), '(axis=chanDim)\n', (3296, 3310), False, 'from keras.layers import BatchNormalization\n'), ((3328, 3340), 'keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (3335, 3340), False, 'from keras.layers import Dropout\n'), ((3357, 3394), 'keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""'}), "(32, (3, 3), activation='relu')\n", (3363, 3394), False, 'from keras.layers import Dense, Conv2D\n'), ((3411, 3429), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (3423, 3429), False, 'from keras.layers import MaxPooling2D, GlobalAveragePooling2D\n'), ((3444, 3476), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'chanDim'}), '(axis=chanDim)\n', (3462, 3476), False, 'from keras.layers import BatchNormalization\n'), ((3494, 3506), 'keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (3501, 3506), False, 'from keras.layers import Dropout\n'), ((3523, 3560), 'keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""'}), "(32, (3, 3), activation='relu')\n", (3529, 3560), False, 'from keras.layers import Dense, Conv2D\n'), ((3577, 3595), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (3589, 3595), False, 'from keras.layers import MaxPooling2D, GlobalAveragePooling2D\n'), ((3610, 3642), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'chanDim'}), '(axis=chanDim)\n', (3628, 3642), False, 'from keras.layers import BatchNormalization\n'), ((3660, 3672), 'keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (3667, 3672), False, 'from keras.layers import Dropout\n'), ((3689, 3698), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (3696, 3698), False, 'from keras.layers import Flatten\n'), ((3719, 3748), 'keras.layers.Dense', 'Dense', (['(512)'], {'activation': '"""relu"""'}), "(512, activation='relu')\n", (3724, 3748), False, 'from keras.layers import Dense, Conv2D\n'), ((3766, 3798), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'chanDim'}), '(axis=chanDim)\n', (3784, 3798), False, 'from keras.layers import BatchNormalization\n'), ((3816, 3828), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3823, 3828), False, 'from keras.layers import Dropout\n'), ((3844, 3880), 'keras.layers.Dense', 'Dense', (['classes'], {'activation': '"""softmax"""'}), "(classes, activation='softmax')\n", (3849, 3880), False, 'from keras.layers import Dense, Conv2D\n')] |
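An illustrative inference sketch for the models saved above; the image path is a placeholder and the preprocessing simply mirrors the 50x50 resize used during training.

from keras.models import load_model
import matplotlib.pyplot as plt
import numpy as np
import cv2

clf = load_model('model_95.h5')
img = cv2.resize(plt.imread('cell.png'), (50, 50))       # 'cell.png' is a placeholder path
probs = clf.predict(np.expand_dims(img, axis=0))[0]
print('parasitized' if probs.argmax() == 1 else 'uninfected')  # label 1 = parasitized above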
import pandas as pd
import argparse
from estimators import ConstantImputedLR,\
EMLR,\
MICELR
from mlp import MLP_reg
from neumannS0_mlp import Neumann_mlp
from learning_curves import run
parser = argparse.ArgumentParser()
parser.add_argument('data_type', help='type of simulation',
choices=['gaussian_sm', 'probit_sm', 'MCAR',
'MAR_logistic'])
args = parser.parse_args()
n_iter = 20
n_jobs = 40
n_sizes = [1e4, 2e4, 1e5]
n_sizes = [int(i) for i in n_sizes]
n_test = int(1e4)
n_val = int(1e4)
# First fill in data_desc with all default values.
if args.data_type == 'gaussian_sm':
data_type = 'selfmasking'
filename = 'gaussian_sm'
compute_br = True
default_values = {'n_features': 10, 'missing_rate': 0.5,
'prop_latent': 0.5, 'sm_type': 'gaussian', 'sm_param': 2,
'snr': 10, 'perm': False}
elif args.data_type == 'probit_sm':
data_type = 'selfmasking'
filename = 'probit_sm'
compute_br = False
default_values = {'n_features': 10, 'missing_rate': 0.5,
'prop_latent': 0.5, 'sm_type': 'probit', 'sm_param': 0.5,
'snr': 10, 'perm': False}
elif args.data_type == 'MCAR':
data_type = 'MCAR'
filename = 'MCAR'
compute_br = True
default_values = {'n_features': 10, 'missing_rate': 0.5,
'prop_latent': 0.5, 'snr': 10,
'masking': 'MCAR'}
elif args.data_type == 'MAR_logistic':
data_type = 'MAR_logistic'
filename = 'MAR_logistic'
compute_br = True
default_values = {'n_features': 10, 'missing_rate': 0.5,
'prop_latent': 0.5, 'snr': 10,
'masking': 'MAR_logistic', 'prop_for_masking': 0.1}
# Define the list of parameters that should be tested and their range of values
other_values = {'n_features': [20, 50]}
# Then vary parameters one by one while the other parameters remain constant,
# and equal to their default values.
data_descs = [pd.DataFrame([default_values])]
for param, vals in other_values.items():
n = len(vals)
data = pd.DataFrame([default_values]*n)
data.loc[:, param] = vals
data_descs.append(data)
data_descs = pd.concat(data_descs, axis=0)
methods = []
methods.append({'name': 'ConstantImputedLR', 'est': ConstantImputedLR})
methods.append({'name': 'EMLR', 'est': EMLR})
methods.append({'name': 'MICELR', 'est': MICELR})
for q in [1, 2, 3, 4, 5, 10, 15, 20, 30, 40, 50, 100]:
methods.append({'name': 'torchMLP', 'est': MLP_reg, 'type_width': 'linear',
'width': q, 'depth': 1, 'n_epochs': 2000,
'batch_size': 200, 'early_stopping': True,
'verbose': False})
for d in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:
for residual_connection in [False, True]:
for early_stopping in [False, True]:
methods.append(
{'name': 'Neumann', 'est': Neumann_mlp, 'depth': d,
'n_epochs': 100, 'batch_size': 10,
'early_stopping': early_stopping,
'residual_connection': residual_connection,
'verbose': False})
run_params = {
'n_iter': n_iter,
'n_sizes': n_sizes,
'n_test': n_test,
'n_val': n_val,
'data_type': data_type,
'data_descs': data_descs,
'methods': methods,
'compute_br': compute_br,
'filename': filename,
'n_jobs': n_jobs}
run(**run_params)
| [
"pandas.DataFrame",
"pandas.concat",
"learning_curves.run",
"argparse.ArgumentParser"
] | [((243, 268), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (266, 268), False, 'import argparse\n'), ((2277, 2306), 'pandas.concat', 'pd.concat', (['data_descs'], {'axis': '(0)'}), '(data_descs, axis=0)\n', (2286, 2306), True, 'import pandas as pd\n'), ((3523, 3540), 'learning_curves.run', 'run', ([], {}), '(**run_params)\n', (3526, 3540), False, 'from learning_curves import run\n'), ((2070, 2100), 'pandas.DataFrame', 'pd.DataFrame', (['[default_values]'], {}), '([default_values])\n', (2082, 2100), True, 'import pandas as pd\n'), ((2172, 2206), 'pandas.DataFrame', 'pd.DataFrame', (['([default_values] * n)'], {}), '([default_values] * n)\n', (2184, 2206), True, 'import pandas as pd\n')] |
"""Module supporting unification of types and type variables instantiated as
:class:`discopy.biclosed.Ty` instances
"""
from discopy.biclosed import Box, Ty, Under
from discopy.cat import Arrow, Ob, AxiomError
import functools
from typing import Generic, TypeVar
import uuid
class TyVar(Ty):
"""Represents a type variable identified by a name, as a subclass of the
:class:`discopy.biclosed.Ty` class
:param str name: A name for the type variable
"""
def __init__(self, name):
super().__init__(name)
def pretty_type(ty, parenthesize=False):
"""Represents a type in nice LaTeX math formatting
:param ty: A type to represent
:type ty: :class:`discopy.biclosed.Ty`
:param bool parenthesize: Whether to enclose the result in parentheses
:return: LaTeX math formatted representation of `ty`
:rtype: str
"""
if isinstance(ty, Under):
result = '%s >> %s' % (pretty_type(ty.left, True),
pretty_type(ty.right))
if parenthesize:
result = '(%s)' % result
else:
        if not ty.objects:
            result = '\\top'
        else:
            result = ' \\times '.join([str(obj) for obj in ty.objects])
return result
def type_compound(ty):
"""Predicate describing whether a type is compound or not
:param ty: A type
:type ty: :class:`discopy.biclosed.Ty`
:return: Whether `ty` is compound (a :class:`discopy.biclosed.Under` or a
monoidal product type)
:rtype: bool
"""
return isinstance(ty, Under) or len(ty) > 1
def base_elements(ty):
"""Compute the set of primitive :class:`discopy.biclosed.Ty` elements within
a type
:param ty: A type
:type ty: :class:`discopy.biclosed.Ty`
:return: Set of `ty`'s primitive elements
:rtype: set
"""
if not isinstance(ty, Ty):
return Ty(ty)
if isinstance(ty, Under):
return base_elements(ty.left) | base_elements(ty.right)
bases = {ob for ob in ty.objects if not isinstance(ob, Under)}
recursives = set().union(*[base_elements(ob) for ob in ty.objects])
return bases | recursives
def unique_identifier():
"""Generate a universally unique identifier seven hex digits long
:return: A seven-digit universally unique identifier in hex
:rtype: str
"""
return uuid.uuid4().hex[:7]
def unique_ty():
"""Generate a type with a universally unique name
:return: A type with a universally unique name
:rtype: :class:`discopy.biclosed.Ty`
"""
return Ty(unique_identifier())
UNIFICATION_EXCEPTION_MSG = 'Could not unify %s with %s'
SUBSTITUTION_EXCEPTION_MSG = 'to substitute for %s'
class UnificationException(Exception):
"""Unification failure
:param x: Type on left side of unification equation
:type x: :class:`discopy.biclosed.Ty`
:param y: Type on right side of unification equation
:type y: :class:`discopy.biclosed.Ty`
:param k: Substitution key whose resolution was subject to the equation
:type k: str or None
"""
def __init__(self, x, y, k=None):
self.key = k
self.vals = (x, y)
if k:
msg = UNIFICATION_EXCEPTION_MSG + ' ' + SUBSTITUTION_EXCEPTION_MSG
self.message = msg % (x, y, k)
else:
self.message = UNIFICATION_EXCEPTION_MSG % (x, y)
def try_merge_substitution(lsub, rsub):
"""Try to merge two substitutions by unifying their shared variables
:param dict lsub: Left substitution
:param dict rsub: Right substitution
:raises UnificationException: Failure to unify a shared variable's
substituted values
:return: A substitution enriched by unifying shared variables
:rtype: dict
"""
subst = {}
for k in {**lsub, **rsub}.keys():
if k in lsub and k in rsub:
_, sub = try_unify(lsub[k], rsub[k])
subst.update(sub)
elif k in lsub:
subst[k] = lsub[k]
elif k in rsub:
subst[k] = rsub[k]
return subst
def try_unify(a, b, subst={}):
"""Try to unify two types, potentially raising an exception with the
incompatible components.
:param a: Left type
:type a: :class:`discopy.closed.Ty`
:param b: Right type
:type b: :class:`discopy.closed.Ty`
:param dict subst: An initial substitution from which to fill in variables
:raises UnificationException: Failure to unify elements of a type
:return: A unified type and the substitution under which it was unified
:rtype: tuple
"""
if isinstance(a, Under) and isinstance(b, Under):
l, lsub = try_unify(a.left, b.left)
r, rsub = try_unify(a.right, b.right)
subst = try_merge_substitution(lsub, rsub)
return l >> r, subst
if a == b:
return a, {}
if isinstance(a, TyVar):
return b, {a.name: b}
if isinstance(b, TyVar):
return a, {b.name: a}
if isinstance(a, Ty) and isinstance(b, Ty) and\
len(a.objects) == len(b.objects):
results = [try_unify(ak, bk) for ak, bk in zip(a.objects, b.objects)]
ty = Ty(*[ty for ty, _ in results])
subst = functools.reduce(try_merge_substitution,
[subst for _, subst in results], subst)
return ty, subst
raise UnificationException(a, b)
def unify(a, b, substitution={}):
"""Unify two types, returning their merger or None
:param a: Left type
:type a: :class:`discopy.closed.Ty`
:param b: Right type
:type b: :class:`discopy.closed.Ty`
:param dict substitution: An initial substitution from which to fill in
variables
:return: A unified type, or None
:rtype: :class:`discopy.closed.Ty` or None
"""
try:
result, substitution = try_unify(a, b, substitution)
return substitute(result, substitution)
except UnificationException:
return None
def unifier(a, b, substitution={}):
"""Unify two types, returning the substitution on success, or None
:param a: Left type
:type a: :class:`discopy.closed.Ty`
:param b: Right type
:type b: :class:`discopy.closed.Ty`
:param dict substitution: An initial substitution from which to fill in
variables
:return: A substitution, or None
:rtype: dict or None
"""
try:
_, substitution = try_unify(a, b, substitution)
return substitution
except UnificationException:
return None
def substitute(t, sub):
"""Substitute away the type variables in `t` under `sub`
:param t: A type
:type t: :class:`discopy.closed.Ty`
:param dict sub: A substitution
:return: A type with all variables found in `sub` substituted away
:rtype: :class:`discopy.closed.Ty`
"""
if isinstance(t, Under):
return substitute(t.left, sub) >> substitute(t.right, sub)
if isinstance(t, Ty):
return Ty(*[substitute(ty, sub) for ty in t.objects])
if isinstance(t, TyVar) and t.name in sub:
return sub[t.name]
return t
def fold_arrow(ts):
"""Combine a list of types into an arrow type
:param list ts: A list of :class:`discopy.closed.Ty`'s
:return: An accumulated arrow type, or the sole type in the list
:rtype: :class:`discopy.closed.Ty`
"""
if len(ts) == 1:
return ts[-1]
return fold_arrow(ts[:-2] + [ts[-2] >> ts[-1]])
def unfold_arrow(arrow):
"""Extract a list of types from an arrow type
:param arrow: A type, preferably an arrow type
:type arrow: :class:`discopy.closed.Ty`
:return: A list of the arrow's components, or of the original type
:rtype: list
"""
if isinstance(arrow, Under):
return [arrow.left] + unfold_arrow(arrow.right)
return [arrow]
def fold_product(ts):
"""Combine a list of types into a product type
:param list ts: A list of :class:`discopy.closed.Ty`'s
:return: An accumulated product type, or the sole type in the list
:rtype: :class:`discopy.closed.Ty`
"""
if len(ts) == 1:
return ts[0]
return Ty(*ts)
def unfold_product(ty):
"""Extract a list of types from a product type
:param ty: A type, preferably a product type
:type ty: :class:`discopy.closed.Ty`
:return: A list of the product's components, or of the original type
:rtype: list
"""
if isinstance(ty, Under):
return [ty]
return [Ty(ob) for ob in ty.objects]
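# A minimal usage sketch (not part of the original module), exercising only the helpers
# defined above:
if __name__ == '__main__':
    a = TyVar('a')
    x, y = Ty('x'), Ty('y')
    # a type variable unifies with any type, yielding a substitution for it
    print(unify(a, x >> y))
    # fold_arrow builds x >> (y >> x); unfold_arrow recovers the component list
    print(unfold_arrow(fold_arrow([x, y, x])))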
| [
"functools.reduce",
"discopy.biclosed.Ty",
"uuid.uuid4"
] | [((8119, 8126), 'discopy.biclosed.Ty', 'Ty', (['*ts'], {}), '(*ts)\n', (8121, 8126), False, 'from discopy.biclosed import Box, Ty, Under\n'), ((1861, 1867), 'discopy.biclosed.Ty', 'Ty', (['ty'], {}), '(ty)\n', (1863, 1867), False, 'from discopy.biclosed import Box, Ty, Under\n'), ((5125, 5155), 'discopy.biclosed.Ty', 'Ty', (['*[ty for ty, _ in results]'], {}), '(*[ty for ty, _ in results])\n', (5127, 5155), False, 'from discopy.biclosed import Box, Ty, Under\n'), ((5172, 5257), 'functools.reduce', 'functools.reduce', (['try_merge_substitution', '[subst for _, subst in results]', 'subst'], {}), '(try_merge_substitution, [subst for _, subst in results], subst\n )\n', (5188, 5257), False, 'import functools\n'), ((8455, 8461), 'discopy.biclosed.Ty', 'Ty', (['ob'], {}), '(ob)\n', (8457, 8461), False, 'from discopy.biclosed import Box, Ty, Under\n'), ((2327, 2339), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2337, 2339), False, 'import uuid\n')] |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
def update(idx):
    pt.set_data([x[idx]], [y[idx]])  # set_data expects sequences, not scalars
return pt
x = np.linspace(0,2*np.pi,200)
y = np.sin(x)
fig, ax = plt.subplots()
ln, = ax.plot(x,y)
pt, = ax.plot(x[0], y[0], marker='o', markersize=5, color='r')
ax.grid()
ani = animation.FuncAnimation(fig,update,frames=np.arange(0,len(x)), interval=10, repeat=False)
ani.save('test.mp4',dpi=600)
plt.show()
| [
"numpy.sin",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((162, 192), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(200)'], {}), '(0, 2 * np.pi, 200)\n', (173, 192), True, 'import numpy as np\n'), ((193, 202), 'numpy.sin', 'np.sin', (['x'], {}), '(x)\n', (199, 202), True, 'import numpy as np\n'), ((215, 229), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (227, 229), True, 'import matplotlib.pyplot as plt\n'), ((447, 457), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (455, 457), True, 'import matplotlib.pyplot as plt\n')] |
from django.contrib import auth
from rest_framework import serializers
from models import GameLog, Bet, TeamOpponent, TeamPersonnal
USER_MODEL = auth.get_user_model()
class UserSerializer:
@classmethod
def get_user_as_dict(cls, user):
if user is None:
return dict(id=None, first_name=None, last_name=None, username=None, email=None)
return {
'id': user.id,
'first_name': user.first_name,
'last_name': user.last_name,
'username': user.username,
'email': user.email,
}
@classmethod
def get_info_from_user(cls, user, fields):
user_as_dict = cls.get_user_as_dict(user)
return { attr: user_as_dict[attr] for attr in fields }
class GameLogSerializer(serializers.ModelSerializer):
class Meta:
model = GameLog
fields = '__all__'
class BetSerializer(serializers.ModelSerializer):
class Meta:
model = Bet
fields = '__all__'
| [
"django.contrib.auth.get_user_model"
] | [((147, 168), 'django.contrib.auth.get_user_model', 'auth.get_user_model', ([], {}), '()\n', (166, 168), False, 'from django.contrib import auth\n')] |
from django.contrib import admin
from django.urls import path, include
from .views import home_page, about_page, contact_page
from blog import urls
urlpatterns = [
path("draggle-admin-dash/", admin.site.urls),
path("", home_page),
path("about/", about_page),
path("contact/", contact_page),
path("blog/", include(urls)),
]
| [
"django.urls.path",
"django.urls.include"
] | [((170, 214), 'django.urls.path', 'path', (['"""draggle-admin-dash/"""', 'admin.site.urls'], {}), "('draggle-admin-dash/', admin.site.urls)\n", (174, 214), False, 'from django.urls import path, include\n'), ((220, 239), 'django.urls.path', 'path', (['""""""', 'home_page'], {}), "('', home_page)\n", (224, 239), False, 'from django.urls import path, include\n'), ((245, 271), 'django.urls.path', 'path', (['"""about/"""', 'about_page'], {}), "('about/', about_page)\n", (249, 271), False, 'from django.urls import path, include\n'), ((277, 307), 'django.urls.path', 'path', (['"""contact/"""', 'contact_page'], {}), "('contact/', contact_page)\n", (281, 307), False, 'from django.urls import path, include\n'), ((327, 340), 'django.urls.include', 'include', (['urls'], {}), '(urls)\n', (334, 340), False, 'from django.urls import path, include\n')] |
from tkinter import *
from main import Mysql_db
import db_info
if __name__ == '__main__':
row, column = 0, 0
with Mysql_db(**db_info.db_connect) as db:
sql = "SELECT * from book_item"
book_item = db.search(sql)
row = len(book_item)
column = len(book_item[0])
print(row, column)
for i in range(row):
for j in range(column):
e = Entry(relief=GROOVE)
e.grid(row=i, column=j, sticky=NSEW) # NSEW='nsew'
e.insert(END, '%s' % (book_item[i][j]))
mainloop() | [
"main.Mysql_db"
] | [((124, 154), 'main.Mysql_db', 'Mysql_db', ([], {}), '(**db_info.db_connect)\n', (132, 154), False, 'from main import Mysql_db\n')] |
#!/usr/bin/env python3
# You are probably well aware of the 'birthday paradox'
# https://en.wikipedia.org/wiki/Birthday_problem
# Let's try simulating it
# You will need a list for the 365 day calendar
# You will need a set number of people (e.g. 25)
# During each 'trial' you do the following
# Choose a person
# Give them a random birthday
# Store it in the calendar
# Once you have stored all birthdays, check to see if any have the same day
# Do this for many trials to see what the probability of sharing a birthday is
import random
people = 25
days = 365
trial = 10000
dup = 0
for i in range(trial):
#create empty cal of 0s
    calendar = []  # new calendar each time
for j in range(days):
calendar.append(0)
#print(calendar)
#fill with random birthdays
for j in range(people):
bday = random.randint(0, days-1)
calendar[bday] += 1
#print(calendar)
#check for duplications
for day in calendar:
if day > 1:
dup += 1
            break  # if the calendar had a collision, then stop
print(dup / trial)
"""
Repeat trial
create empty calendar
fill with random birthdays
check for duplicates
record
report duplicates/trial
"""
"""
python3 birthday.py
0.571
"""
| [
"random.randint"
] | [((828, 855), 'random.randint', 'random.randint', (['(0)', '(days - 1)'], {}), '(0, days - 1)\n', (842, 855), False, 'import random\n')] |
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//div[@class='details']/h3[@class='tieude']",
'price' : "//div[@class='info_detail']/p[@class='price']",
'category' : "//ul[@class='breacrum']/li/a",
'description' : "//div[@class='tech']/div[@class='technology']",
'images' : "//div[@class='details']/div[@class='images']/a/@href",
'canonical' : "",
'base_url' : "",
'brand' : ""
}
name = 'khabinh.vn'
allowed_domains = ['khabinh.vn']
start_urls = ['http://khabinh.vn']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
rules = [
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-_]+_pro+\d+\.html']), 'parse_item'),
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+-c+\d+\.html', 'page='], deny=['attr=','letter=','/phim/','\?']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
| [
"scrapy.linkextractors.LinkExtractor"
] | [((744, 800), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': "['/[a-zA-Z0-9-_]+_pro+\\\\d+\\\\.html']"}), "(allow=['/[a-zA-Z0-9-_]+_pro+\\\\d+\\\\.html'])\n", (757, 800), False, 'from scrapy.linkextractors import LinkExtractor\n'), ((824, 935), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': "['/[a-zA-Z0-9-]+-c+\\\\d+\\\\.html', 'page=']", 'deny': "['attr=', 'letter=', '/phim/', '\\\\?']"}), "(allow=['/[a-zA-Z0-9-]+-c+\\\\d+\\\\.html', 'page='], deny=[\n 'attr=', 'letter=', '/phim/', '\\\\?'])\n", (837, 935), False, 'from scrapy.linkextractors import LinkExtractor\n')] |
import networkx as nx
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import unittest
import random
# define a class that generates a 2D grid, size = m*m, dx = dy = 1; x, y start from 0
class init_grid(object):
def __init__(self, m):
self.nodes = m
#generate grid
def generate(self):
self.nxx, self.nyy = (self.nodes, self.nodes)
self.x = np.linspace(0, self.nodes-1, self.nxx)
self.y = np.linspace(0, self.nodes-1, self.nyy)
self.xv, self.yv = np.meshgrid(self.x, self.y)
#used to define node/vertex index in a grid
#pick up m points in the grid as nodes. e.g. #0 (0,0) #1 (1,2) #2 (1,3) #3 (3,2) #4 (4,4)
#xrange denotes nodes' x index in the grid
#yrange denotes nodes' y index in the grid
def coord(self, xrange, yrange):
self.xrange=xrange
self.yrange=yrange | [
"matplotlib.use",
"numpy.meshgrid",
"numpy.linspace"
] | [((40, 61), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (54, 61), False, 'import matplotlib\n'), ((412, 452), 'numpy.linspace', 'np.linspace', (['(0)', '(self.nodes - 1)', 'self.nxx'], {}), '(0, self.nodes - 1, self.nxx)\n', (423, 452), True, 'import numpy as np\n'), ((468, 508), 'numpy.linspace', 'np.linspace', (['(0)', '(self.nodes - 1)', 'self.nyy'], {}), '(0, self.nodes - 1, self.nyy)\n', (479, 508), True, 'import numpy as np\n'), ((534, 561), 'numpy.meshgrid', 'np.meshgrid', (['self.x', 'self.y'], {}), '(self.x, self.y)\n', (545, 561), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-18 09:32
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.auth.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_line', models.CharField(blank=True, default='default text', max_length=10, null=True)),
],
),
migrations.CreateModel(
name='Customer',
fields=[
('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('name', models.CharField(max_length=10)),
],
options={
'abstract': False,
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
bases=('auth.user',),
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.AddField(
model_name='address',
name='customer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapp.Customer'),
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.AutoField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((1533, 1621), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""myapp.Customer"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'myapp.Customer')\n", (1550, 1621), False, 'from django.db import migrations, models\n'), ((525, 618), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (541, 618), False, 'from django.db import migrations, models\n'), ((648, 726), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""default text"""', 'max_length': '(10)', 'null': '(True)'}), "(blank=True, default='default text', max_length=10, null=True)\n", (664, 726), False, 'from django.db import migrations, models\n'), ((866, 1042), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': 'settings.AUTH_USER_MODEL'}), '(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n settings.AUTH_USER_MODEL)\n', (886, 1042), False, 'from django.db import migrations, models\n'), ((1060, 1091), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (1076, 1091), False, 'from django.db import migrations, models\n')] |
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="dirtools",
version="0.2.0",
author="<NAME>",
author_email="<EMAIL>",
description="Exclude/ignore files in a directory (using .gitignore like syntax), compute hash, search projects for an entire directory tree and gzip compression.",
license="MIT",
keywords="exclude exclusion directory hash compression gzip",
url="https://github.com/tsileo/dirtools",
py_modules=["dirtools"],
long_description=read("README.rst"),
install_requires=["globster"],
tests_require=["pyfakefs"],
test_suite="test_dirtools",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
],
scripts=["dirtools.py"],
)
| [
"os.path.dirname"
] | [((87, 112), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (102, 112), False, 'import os\n')] |
# -*- coding: utf-8 -*-
#############################################################################
# __________ #
# __ __/ ____/ __ \__ __ This file is part of MicroGP4 v1.0 "Kiwi" #
# / / / / / __/ /_/ / // / (!) by <NAME> and <NAME> #
# / /_/ / /_/ / ____/ // /_ https://github.com/squillero/microgp4 #
# \__ /\____/_/ /__ __/ #
# /_/ --MicroGP4-- /_/ "You don't need a big goal, be μ-ambitious!!" #
# #
#############################################################################
# Copyright 2020 <NAME> and <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
from microgp import *
from ..utils import logging
from .abstract import Parameter
from microgp import random_generator
import microgp as ugp4
class Categorical(Parameter):
"""Categorical parameter. It can take values in 'alternatives'.
**Example:**
>>> registers = ugp4.make_parameter(ugp4.parameter.Categorical, alternatives=['ax', 'bx', 'cx', 'dx'])
Args:
alternatives (list): list of possible values
"""
def is_valid(self, value):
if value is None:
return True
return value in self.alternatives
def mutate(self, strength: float = 0.5):
assert 0 <= strength <= 1, "Invalid strength: " + str(strength) + " (should be 0 <= s <= 1)"
if strength == 0:
logging.debug("strength == 0")
else:
self.value = random_generator.choice(self.alternatives)
def run_paranoia_checks(self) -> bool:
assert getattr(self, 'alternatives', None), "Illegal or missing alternatives list (not using make_parameter?)"
return super().run_paranoia_checks()
class CategoricalSorted(Categorical):
"""CategoricalSorted parameter. It can take values in 'alternatives'. It
behaves differently during the mutation phase.
**Example:**
>>> cat_sor = ugp4.make_parameter(ugp4.parameter.CategoricalSorted, alternatives=['e', 'f', 'g', 'h', 'i', 'l'])
Args:
alternatives (list): sorted list of possible values
"""
def mutate(self, strength: float = 0.5):
assert getattr(self, 'alternatives', None), "Illegal or missing alternatives list (not using make_parameter?)"
assert 0 <= strength <= 1, "Invalid strength: " + str(strength) + " (should be 0 <= s <= 1)"
if strength == 0:
logging.debug("strength == 0")
else:
self.value = random_generator.choice(self.alternatives,
self.alternatives.index(self._value),
strength=strength)
def run_paranoia_checks(self) -> bool:
assert getattr(self, 'alternatives', None), "Illegal or missing alternatives list (not using make_parameter?)"
assert len(self.alternatives) == len(set(
self.alternatives)), f"Found duplicated values in alternatives: {self.alternatives}"
return super().run_paranoia_checks()
| [
"microgp.random_generator.choice"
] | [((2113, 2155), 'microgp.random_generator.choice', 'random_generator.choice', (['self.alternatives'], {}), '(self.alternatives)\n', (2136, 2155), False, 'from microgp import random_generator\n')] |
#!/usr/bin/python
from googleapiclient import discovery
from oauth2client import client
from oauth2client import file
from oauth2client import tools
from datetime import datetime, timedelta
import sys
import re
import json
import os
import httplib2
class Logger:
def debug(self, message):
self._log('DEBUG', message)
def info(self, message):
self._log('INFO', message)
def warn(self, message):
self._log('WARNING', message)
def error(self, message):
self._log('ERROR', message)
def _log(self, level, message):
print('[%s] %s: %s' % (datetime.now(), level, message))
class Expando(object):
pass
class Configuration:
hours = 10
blacklist = []
contentBlacklist = []
removalMethod = 'markAsSpam'
blogId = ''
lastRunOverlapMinutes = 1
def __init__(self, directory):
file = os.path.join(os.path.dirname(directory),'config.json')
with open(file) as h:
cfg = json.load(h)
self.blogId = self._getValue(cfg, 'blogId', self.blogId)
self.hours = self._getValue(cfg, 'hours', self.hours)
self.blacklist = self._getValue(cfg, 'blacklist', self.blacklist)
self.contentBlacklist = self._getValue(cfg, 'contentBlacklist', self.contentBlacklist)
self.removalMethod = self._getValue(cfg, 'removalMethod', self.removalMethod)
self.lastRunOverlapMinutes = int(self._getValue(cfg, 'lastRunOverlapMinutes', self.lastRunOverlapMinutes))
def _getValue(self, cfg, propertyName, default):
if propertyName in cfg and not (cfg[propertyName] is None):
return cfg[propertyName]
else:
return default
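# Illustrative config.json sketch (keys taken from the Configuration class above; the
# values are hypothetical placeholders):
# {
#     "blogId": "https://yourblog.blogspot.com",
#     "hours": 10,
#     "blacklist": ["commentAuthorId"],
#     "contentBlacklist": ["spam phrase"],
#     "removalMethod": "markAsSpam",
#     "lastRunOverlapMinutes": 1
# }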
class CommentBot:
scannedPosts = 0
scannedComments = 0
removedComments = 0
useThreading = True
_maxResults = 200
_hasErrors = False
def __init__(self, log, config, directory):
self._log = log
self._config = config
self._directory = directory
credentials, service = self.initCredentialsAndService('blogger','v3')
self._credentials = credentials
self._service = service
self._posts = service.posts()
self._comments = service.comments()
self._removalMethod = self.getRemovalMethod(self._comments)
self.loadState()
def scanBlog(self, blogUrl):
blogId = self.getBlogId(blogUrl)
posts = self.getPosts(blogId)
comments = self.getComments(posts)
pendingRemoval = self.getCommentsToRemove(comments)
self.removeComments(pendingRemoval)
self.scannedPosts = len(posts)
self.scannedComments = len(comments)
if self._hasErrors:
self._log.error('At least one error occured. utcLastRun will not be updated.')
else:
self.saveState()
def getBlogId(self, blogUrl):
if re.match('^[0-9]+$', blogUrl):
return blogUrl
blogId = self._service.blogs().getByUrl(url=blogUrl).execute()['id']
self._log.warn('Increase performance by replacing url `blogId` configuration with id %s'%blogId)
return blogId
def getPosts(self, blogId):
startDate = '%sZ'%(datetime.utcnow()-timedelta(hours=self._config.hours)).isoformat('T')
request = self._posts.list(blogId=blogId,startDate=startDate,maxResults=self._maxResults,fields='items(id,blog),nextPageToken')
posts = []
while request != None:
resp = request.execute()
if 'items' in resp and not (resp['items'] is None):
posts.extend(resp['items'])
request = self._posts.list_next(request, resp)
return posts
def getComments(self, posts):
fields = 'items(author/id,blog,content,id,post),nextPageToken'
comments = []
current_requests = []
next_requests = []
if self._utcLastRun is not None and self._config.lastRunOverlapMinutes > -1:
startDate = '%sZ' % (self._utcLastRun - timedelta(minutes=self._config.lastRunOverlapMinutes)).isoformat()
else:
startDate = None
for post in posts:
next_requests.append(self._comments.list(blogId=post['blog']['id'],postId=post['id'],status='live',startDate=startDate,maxResults=self._maxResults))
def on_comments(request_id, response, exception):
if exception is not None:
if exception.resp.status == 404:
return
self._log.error(exception)
self._hasErrors = True
return
request = current_requests[int(request_id)]
            next_request = self._comments.list_next(request, response)
if next_request != None:
next_requests.append(next_request)
if 'items' in response and not (response['items'] is None):
comments.extend(response['items'])
batch = self._service.new_batch_http_request(callback=on_comments)
while batch != None:
for i,request in enumerate(next_requests):
batch.add(request, request_id=str(i))
current_requests = next_requests
next_requests = []
batch.execute()
if len(next_requests) > 0:
batch = self._service.new_batch_http_request(callback=on_comments)
else:
batch = None
return comments
def getCommentsToRemove(self, comments):
toRemove = []
for comment in comments:
reason = self.hasReasonToRemove(comment)
if reason:
toRemove.append((comment,reason))
return toRemove
def removeComments(self, removals):
def on_removed(request_id, response, exception):
if exception is not None:
self._log.error(exception)
self._hasErrors = True
return
comment,reason = removals[int(request_id)]
self._log.info('Removed (%s) comment %s in post %s by author %s: %s' % (self._config.removalMethod,comment['id'],comment['post']['id'],comment['author']['id'],reason))
self.removedComments += 1
batch = self._service.new_batch_http_request(callback=on_removed)
for i,removal in enumerate(removals):
comment,reason=removal
batch.add(self._removalMethod(
blogId=comment['blog']['id'],
postId=comment['post']['id'],
commentId=comment['id']
), request_id=str(i))
batch.execute()
def getRemovalMethod(self,comments):
try:
return getattr(comments, self._config.removalMethod)
except AttributeError:
print('Check configuration: removalMethod not valid: %s' % self._config.removalMethod)
sys.exit(1)
def hasReasonToRemove(self,comment):
if comment['author']['id'] in self._config.blacklist:
return 'Author is blacklisted'
if comment['content']:
for term in self._config.contentBlacklist:
if term in comment['content']:
return 'Content contains blacklisted term: %s' % term
return None
def initCredentialsAndService(self, name, version, scope = None, discovery_filename = None):
if scope is None:
scope = 'https://www.googleapis.com/auth/' + name
client_secrets = os.path.join(self._directory, 'client_secrets.json')
flow = client.flow_from_clientsecrets(client_secrets,
scope=scope,
message=tools.message_if_missing(client_secrets))
storage = file.Storage(name + '.dat')
credentials = storage.get()
if credentials is None or credentials.invalid:
flags = Expando()
flags.nonoauth_local_webserver = True
credentials = tools.run_flow(flow, storage, flags)
http = credentials.authorize(http=httplib2.Http())
if discovery_filename is None:
service = discovery.build(name, version, http=http)
else:
with open(discovery_filename) as discovery_file:
service = discovery.build_from_document(
discovery_file.read(),
base='https://www.googleapis.com/',
http=http)
return (credentials, service)
def loadState(self):
file = self.getStateFile()
if os.path.isfile(file):
with open(file, 'r') as handle:
self._state = json.load(handle)
else:
self._state = {}
if 'utcLastRun' in self._state and not (self._state['utcLastRun'] is None):
self._utcLastRun = datetime.strptime(self._state['utcLastRun'], '%Y-%m-%dT%H:%M:%S.%fZ')
else:
self._utcLastRun = None
def saveState(self):
self._state['utcLastRun'] = '%sZ' % datetime.utcnow().isoformat('T')
with open(self.getStateFile(), 'w') as handle:
json.dump(self._state, handle, indent=4, sort_keys=True)
def getStateFile(self):
return os.path.join(self._directory, 'client.state.json')
def main(argv):
directory = os.path.dirname(__file__)
log = Logger()
config = Configuration(os.path.join(directory, 'config.json'))
try:
bot = CommentBot(log, config, directory)
bot.scanBlog(config.blogId)
log.info('%d scanned posts, %d scanned comments, %d removed comments' %(bot.scannedPosts, bot.scannedComments, bot.removedComments))
except client.AccessTokenRefreshError:
        print('[%s] ERROR: The credentials have been revoked or expired, please re-run the application to re-authorize.' % (datetime.now()))
if __name__ == '__main__':
main(sys.argv)
| [
"httplib2.Http",
"sys.exit",
"datetime.datetime.utcnow",
"datetime.datetime.strptime",
"os.path.join",
"re.match",
"oauth2client.tools.message_if_missing",
"os.path.isfile",
"os.path.dirname",
"googleapiclient.discovery.build",
"datetime.datetime.now",
"oauth2client.file.Storage",
"json.load",
"oauth2client.tools.run_flow",
"datetime.timedelta",
"json.dump"
] | [((9227, 9252), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (9242, 9252), False, 'import os\n'), ((2885, 2914), 're.match', 're.match', (['"""^[0-9]+$"""', 'blogUrl'], {}), "('^[0-9]+$', blogUrl)\n", (2893, 2914), False, 'import re\n'), ((7456, 7508), 'os.path.join', 'os.path.join', (['self._directory', '"""client_secrets.json"""'], {}), "(self._directory, 'client_secrets.json')\n", (7468, 7508), False, 'import os\n'), ((7678, 7705), 'oauth2client.file.Storage', 'file.Storage', (["(name + '.dat')"], {}), "(name + '.dat')\n", (7690, 7705), False, 'from oauth2client import file\n'), ((8477, 8497), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (8491, 8497), False, 'import os\n'), ((9142, 9192), 'os.path.join', 'os.path.join', (['self._directory', '"""client.state.json"""'], {}), "(self._directory, 'client.state.json')\n", (9154, 9192), False, 'import os\n'), ((9299, 9337), 'os.path.join', 'os.path.join', (['directory', '"""config.json"""'], {}), "(directory, 'config.json')\n", (9311, 9337), False, 'import os\n'), ((885, 911), 'os.path.dirname', 'os.path.dirname', (['directory'], {}), '(directory)\n', (900, 911), False, 'import os\n'), ((975, 987), 'json.load', 'json.load', (['h'], {}), '(h)\n', (984, 987), False, 'import json\n'), ((7903, 7939), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'storage', 'flags'], {}), '(flow, storage, flags)\n', (7917, 7939), False, 'from oauth2client import tools\n'), ((8062, 8103), 'googleapiclient.discovery.build', 'discovery.build', (['name', 'version'], {'http': 'http'}), '(name, version, http=http)\n', (8077, 8103), False, 'from googleapiclient import discovery\n'), ((8750, 8819), 'datetime.datetime.strptime', 'datetime.strptime', (["self._state['utcLastRun']", '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(self._state['utcLastRun'], '%Y-%m-%dT%H:%M:%S.%fZ')\n", (8767, 8819), False, 'from datetime import datetime, timedelta\n'), ((9041, 9097), 'json.dump', 'json.dump', (['self._state', 'handle'], {'indent': '(4)', 'sort_keys': '(True)'}), '(self._state, handle, indent=4, sort_keys=True)\n', (9050, 9097), False, 'import json\n'), ((6856, 6867), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6864, 6867), False, 'import sys\n'), ((7617, 7657), 'oauth2client.tools.message_if_missing', 'tools.message_if_missing', (['client_secrets'], {}), '(client_secrets)\n', (7641, 7657), False, 'from oauth2client import tools\n'), ((7983, 7998), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (7996, 7998), False, 'import httplib2\n'), ((8573, 8590), 'json.load', 'json.load', (['handle'], {}), '(handle)\n', (8582, 8590), False, 'import json\n'), ((594, 608), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (606, 608), False, 'from datetime import datetime, timedelta\n'), ((8940, 8957), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (8955, 8957), False, 'from datetime import datetime, timedelta\n'), ((3208, 3225), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3223, 3225), False, 'from datetime import datetime, timedelta\n'), ((3226, 3261), 'datetime.timedelta', 'timedelta', ([], {'hours': 'self._config.hours'}), '(hours=self._config.hours)\n', (3235, 3261), False, 'from datetime import datetime, timedelta\n'), ((4012, 4065), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'self._config.lastRunOverlapMinutes'}), '(minutes=self._config.lastRunOverlapMinutes)\n', (4021, 4065), False, 'from datetime import datetime, timedelta\n')] |
import spherical_kde.utils as utils
import pytest
import numpy
from numpy.testing import assert_allclose
test_polar = numpy.array([[numpy.pi/3, numpy.pi/3],
[numpy.pi/6, numpy.pi/4],
[numpy.pi/4, numpy.pi/4]])
test_decra = numpy.array([[60., 30.],
[30., 45.],
[45., 45.]])
test_cartesian = numpy.array([[3.**0.5/4, 3./4, 1./2],
[(3./8)**0.5, (8.)**-0.5, (2.)**-0.5],
[1./2, 1./2, (2.)**-0.5]])
def test_cartesian_from_polar_scalar():
for ang, cart0 in zip(test_polar, test_cartesian):
cart1 = utils.cartesian_from_polar(*ang)
assert_allclose(cart0, cart1)
def test_cartesian_from_polar_array():
cart1 = utils.cartesian_from_polar(*test_polar.T)
assert_allclose(test_cartesian.T, cart1)
def test_polar_from_cartesian_scalar():
for ang0, cart in zip(test_polar, test_cartesian):
# Test straightforward
ang1 = utils.polar_from_cartesian(cart)
assert_allclose(ang0, ang1)
# Test normalisation doesn't matter
cart = 3*numpy.array(cart)
ang2 = utils.polar_from_cartesian(cart)
assert_allclose(ang0, ang2)
def test_polar_from_cartesian_array():
ang1 = utils.polar_from_cartesian(test_cartesian.T)
assert_allclose(test_polar.T, ang1)
def test_decra_from_polar_scalar():
for ang, decra0 in zip(test_polar, test_decra):
decra1 = utils.decra_from_polar(*ang)
assert_allclose(decra0, decra1)
def test_decra_from_polar_array():
decra1 = utils.decra_from_polar(*test_polar.T)
assert_allclose(test_decra.T, decra1)
def test_polar_from_decra_scalar():
for ang0, decra in zip(test_polar, test_decra):
ang1 = utils.polar_from_decra(*decra)
assert_allclose(ang0, ang1)
def test_polar_from_decra_array():
polar1 = utils.polar_from_decra(*test_decra.T)
assert_allclose(test_polar.T, polar1)
def test_logsinh():
numpy.random.seed(seed=0)
for i in range(100):
if i > 90:
x = numpy.random.rand(10)
else:
x = numpy.random.rand()
a = utils.logsinh(x)
b = numpy.log(numpy.sinh(x))
assert_allclose(a, b)
def test_logsinh_positive_arg():
with pytest.raises(ValueError):
utils.logsinh(-1)
with pytest.raises(ValueError):
utils.logsinh(numpy.array([1, -1]))
def test_rotation_matrix():
numpy.random.seed(seed=0)
theta = numpy.random.rand(10, 2)*numpy.pi
phi = numpy.random.rand(10, 2)*2*numpy.pi
for (p1, p2), (t1, t2) in zip(phi, theta):
n1 = utils.cartesian_from_polar(p1, t1)
n2 = utils.cartesian_from_polar(p2, t2)
M = utils.rotation_matrix(n1, n2)
assert_allclose(M.dot(n1), n2)
assert_allclose(M.T.dot(n2), n1)
M = utils.rotation_matrix(n1, n1)
assert_allclose(M, numpy.identity(3))
def test_spherical_integrate():
ans = utils.spherical_integrate(lambda theta, phi: 1)
assert_allclose(ans, 4*numpy.pi)
ans = utils.spherical_integrate(lambda theta, phi: theta*phi)
assert_allclose(ans, 2*numpy.pi**3)
ans = utils.spherical_integrate(lambda theta, phi: theta*numpy.cos(phi))
assert_allclose(ans, 0, atol=1e-7)
def test_spherical_kullback_liebler():
def logp(phi, theta):
return numpy.log(numpy.sin(theta)/numpy.pi**2)
def logq(phi, theta):
return numpy.log(1/numpy.pi/4)
assert_allclose(utils.spherical_integrate(logp, log=True), 1)
assert_allclose(utils.spherical_integrate(logq, log=True), 1)
KL = utils.spherical_kullback_liebler(logp, logq)
assert_allclose(KL, 1./2 - numpy.log(numpy.pi/2))
| [
"spherical_kde.utils.spherical_kullback_liebler",
"numpy.identity",
"spherical_kde.utils.cartesian_from_polar",
"numpy.random.rand",
"numpy.testing.assert_allclose",
"numpy.log",
"spherical_kde.utils.polar_from_cartesian",
"spherical_kde.utils.rotation_matrix",
"numpy.sinh",
"numpy.array",
"spherical_kde.utils.polar_from_decra",
"pytest.raises",
"numpy.random.seed",
"numpy.cos",
"numpy.sin",
"spherical_kde.utils.spherical_integrate",
"spherical_kde.utils.logsinh",
"spherical_kde.utils.decra_from_polar"
] | [((119, 227), 'numpy.array', 'numpy.array', (['[[numpy.pi / 3, numpy.pi / 3], [numpy.pi / 6, numpy.pi / 4], [numpy.pi / 4,\n numpy.pi / 4]]'], {}), '([[numpy.pi / 3, numpy.pi / 3], [numpy.pi / 6, numpy.pi / 4], [\n numpy.pi / 4, numpy.pi / 4]])\n', (130, 227), False, 'import numpy\n'), ((277, 332), 'numpy.array', 'numpy.array', (['[[60.0, 30.0], [30.0, 45.0], [45.0, 45.0]]'], {}), '([[60.0, 30.0], [30.0, 45.0], [45.0, 45.0]])\n', (288, 332), False, 'import numpy\n'), ((397, 530), 'numpy.array', 'numpy.array', (['[[3.0 ** 0.5 / 4, 3.0 / 4, 1.0 / 2], [(3.0 / 8) ** 0.5, 8.0 ** -0.5, 2.0 **\n -0.5], [1.0 / 2, 1.0 / 2, 2.0 ** -0.5]]'], {}), '([[3.0 ** 0.5 / 4, 3.0 / 4, 1.0 / 2], [(3.0 / 8) ** 0.5, 8.0 ** \n -0.5, 2.0 ** -0.5], [1.0 / 2, 1.0 / 2, 2.0 ** -0.5]])\n', (408, 530), False, 'import numpy\n'), ((798, 839), 'spherical_kde.utils.cartesian_from_polar', 'utils.cartesian_from_polar', (['*test_polar.T'], {}), '(*test_polar.T)\n', (824, 839), True, 'import spherical_kde.utils as utils\n'), ((844, 884), 'numpy.testing.assert_allclose', 'assert_allclose', (['test_cartesian.T', 'cart1'], {}), '(test_cartesian.T, cart1)\n', (859, 884), False, 'from numpy.testing import assert_allclose\n'), ((1313, 1357), 'spherical_kde.utils.polar_from_cartesian', 'utils.polar_from_cartesian', (['test_cartesian.T'], {}), '(test_cartesian.T)\n', (1339, 1357), True, 'import spherical_kde.utils as utils\n'), ((1362, 1397), 'numpy.testing.assert_allclose', 'assert_allclose', (['test_polar.T', 'ang1'], {}), '(test_polar.T, ang1)\n', (1377, 1397), False, 'from numpy.testing import assert_allclose\n'), ((1624, 1661), 'spherical_kde.utils.decra_from_polar', 'utils.decra_from_polar', (['*test_polar.T'], {}), '(*test_polar.T)\n', (1646, 1661), True, 'import spherical_kde.utils as utils\n'), ((1666, 1703), 'numpy.testing.assert_allclose', 'assert_allclose', (['test_decra.T', 'decra1'], {}), '(test_decra.T, decra1)\n', (1681, 1703), False, 'from numpy.testing import assert_allclose\n'), ((1926, 1963), 'spherical_kde.utils.polar_from_decra', 'utils.polar_from_decra', (['*test_decra.T'], {}), '(*test_decra.T)\n', (1948, 1963), True, 'import spherical_kde.utils as utils\n'), ((1968, 2005), 'numpy.testing.assert_allclose', 'assert_allclose', (['test_polar.T', 'polar1'], {}), '(test_polar.T, polar1)\n', (1983, 2005), False, 'from numpy.testing import assert_allclose\n'), ((2032, 2057), 'numpy.random.seed', 'numpy.random.seed', ([], {'seed': '(0)'}), '(seed=0)\n', (2049, 2057), False, 'import numpy\n'), ((2497, 2522), 'numpy.random.seed', 'numpy.random.seed', ([], {'seed': '(0)'}), '(seed=0)\n', (2514, 2522), False, 'import numpy\n'), ((2890, 2919), 'spherical_kde.utils.rotation_matrix', 'utils.rotation_matrix', (['n1', 'n1'], {}), '(n1, n1)\n', (2911, 2919), True, 'import spherical_kde.utils as utils\n'), ((3006, 3053), 'spherical_kde.utils.spherical_integrate', 'utils.spherical_integrate', (['(lambda theta, phi: 1)'], {}), '(lambda theta, phi: 1)\n', (3031, 3053), True, 'import spherical_kde.utils as utils\n'), ((3058, 3092), 'numpy.testing.assert_allclose', 'assert_allclose', (['ans', '(4 * numpy.pi)'], {}), '(ans, 4 * numpy.pi)\n', (3073, 3092), False, 'from numpy.testing import assert_allclose\n'), ((3102, 3159), 'spherical_kde.utils.spherical_integrate', 'utils.spherical_integrate', (['(lambda theta, phi: theta * phi)'], {}), '(lambda theta, phi: theta * phi)\n', (3127, 3159), True, 'import spherical_kde.utils as utils\n'), ((3162, 3201), 'numpy.testing.assert_allclose', 'assert_allclose', (['ans', '(2 * numpy.pi ** 3)'], 
{}), '(ans, 2 * numpy.pi ** 3)\n', (3177, 3201), False, 'from numpy.testing import assert_allclose\n'), ((3280, 3315), 'numpy.testing.assert_allclose', 'assert_allclose', (['ans', '(0)'], {'atol': '(1e-07)'}), '(ans, 0, atol=1e-07)\n', (3295, 3315), False, 'from numpy.testing import assert_allclose\n'), ((3646, 3690), 'spherical_kde.utils.spherical_kullback_liebler', 'utils.spherical_kullback_liebler', (['logp', 'logq'], {}), '(logp, logq)\n', (3678, 3690), True, 'import spherical_kde.utils as utils\n'), ((674, 706), 'spherical_kde.utils.cartesian_from_polar', 'utils.cartesian_from_polar', (['*ang'], {}), '(*ang)\n', (700, 706), True, 'import spherical_kde.utils as utils\n'), ((715, 744), 'numpy.testing.assert_allclose', 'assert_allclose', (['cart0', 'cart1'], {}), '(cart0, cart1)\n', (730, 744), False, 'from numpy.testing import assert_allclose\n'), ((1028, 1060), 'spherical_kde.utils.polar_from_cartesian', 'utils.polar_from_cartesian', (['cart'], {}), '(cart)\n', (1054, 1060), True, 'import spherical_kde.utils as utils\n'), ((1069, 1096), 'numpy.testing.assert_allclose', 'assert_allclose', (['ang0', 'ang1'], {}), '(ang0, ang1)\n', (1084, 1096), False, 'from numpy.testing import assert_allclose\n'), ((1192, 1224), 'spherical_kde.utils.polar_from_cartesian', 'utils.polar_from_cartesian', (['cart'], {}), '(cart)\n', (1218, 1224), True, 'import spherical_kde.utils as utils\n'), ((1233, 1260), 'numpy.testing.assert_allclose', 'assert_allclose', (['ang0', 'ang2'], {}), '(ang0, ang2)\n', (1248, 1260), False, 'from numpy.testing import assert_allclose\n'), ((1505, 1533), 'spherical_kde.utils.decra_from_polar', 'utils.decra_from_polar', (['*ang'], {}), '(*ang)\n', (1527, 1533), True, 'import spherical_kde.utils as utils\n'), ((1542, 1573), 'numpy.testing.assert_allclose', 'assert_allclose', (['decra0', 'decra1'], {}), '(decra0, decra1)\n', (1557, 1573), False, 'from numpy.testing import assert_allclose\n'), ((1809, 1839), 'spherical_kde.utils.polar_from_decra', 'utils.polar_from_decra', (['*decra'], {}), '(*decra)\n', (1831, 1839), True, 'import spherical_kde.utils as utils\n'), ((1848, 1875), 'numpy.testing.assert_allclose', 'assert_allclose', (['ang0', 'ang1'], {}), '(ang0, ang1)\n', (1863, 1875), False, 'from numpy.testing import assert_allclose\n'), ((2202, 2218), 'spherical_kde.utils.logsinh', 'utils.logsinh', (['x'], {}), '(x)\n', (2215, 2218), True, 'import spherical_kde.utils as utils\n'), ((2264, 2285), 'numpy.testing.assert_allclose', 'assert_allclose', (['a', 'b'], {}), '(a, b)\n', (2279, 2285), False, 'from numpy.testing import assert_allclose\n'), ((2330, 2355), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2343, 2355), False, 'import pytest\n'), ((2365, 2382), 'spherical_kde.utils.logsinh', 'utils.logsinh', (['(-1)'], {}), '(-1)\n', (2378, 2382), True, 'import spherical_kde.utils as utils\n'), ((2392, 2417), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2405, 2417), False, 'import pytest\n'), ((2536, 2560), 'numpy.random.rand', 'numpy.random.rand', (['(10)', '(2)'], {}), '(10, 2)\n', (2553, 2560), False, 'import numpy\n'), ((2676, 2710), 'spherical_kde.utils.cartesian_from_polar', 'utils.cartesian_from_polar', (['p1', 't1'], {}), '(p1, t1)\n', (2702, 2710), True, 'import spherical_kde.utils as utils\n'), ((2724, 2758), 'spherical_kde.utils.cartesian_from_polar', 'utils.cartesian_from_polar', (['p2', 't2'], {}), '(p2, t2)\n', (2750, 2758), True, 'import spherical_kde.utils as utils\n'), ((2771, 2800), 
'spherical_kde.utils.rotation_matrix', 'utils.rotation_matrix', (['n1', 'n2'], {}), '(n1, n2)\n', (2792, 2800), True, 'import spherical_kde.utils as utils\n'), ((2943, 2960), 'numpy.identity', 'numpy.identity', (['(3)'], {}), '(3)\n', (2957, 2960), False, 'import numpy\n'), ((3479, 3506), 'numpy.log', 'numpy.log', (['(1 / numpy.pi / 4)'], {}), '(1 / numpy.pi / 4)\n', (3488, 3506), False, 'import numpy\n'), ((3524, 3565), 'spherical_kde.utils.spherical_integrate', 'utils.spherical_integrate', (['logp'], {'log': '(True)'}), '(logp, log=True)\n', (3549, 3565), True, 'import spherical_kde.utils as utils\n'), ((3590, 3631), 'spherical_kde.utils.spherical_integrate', 'utils.spherical_integrate', (['logq'], {'log': '(True)'}), '(logq, log=True)\n', (3615, 3631), True, 'import spherical_kde.utils as utils\n'), ((1159, 1176), 'numpy.array', 'numpy.array', (['cart'], {}), '(cart)\n', (1170, 1176), False, 'import numpy\n'), ((2118, 2139), 'numpy.random.rand', 'numpy.random.rand', (['(10)'], {}), '(10)\n', (2135, 2139), False, 'import numpy\n'), ((2170, 2189), 'numpy.random.rand', 'numpy.random.rand', ([], {}), '()\n', (2187, 2189), False, 'import numpy\n'), ((2241, 2254), 'numpy.sinh', 'numpy.sinh', (['x'], {}), '(x)\n', (2251, 2254), False, 'import numpy\n'), ((2441, 2461), 'numpy.array', 'numpy.array', (['[1, -1]'], {}), '([1, -1])\n', (2452, 2461), False, 'import numpy\n'), ((2580, 2604), 'numpy.random.rand', 'numpy.random.rand', (['(10)', '(2)'], {}), '(10, 2)\n', (2597, 2604), False, 'import numpy\n'), ((3722, 3745), 'numpy.log', 'numpy.log', (['(numpy.pi / 2)'], {}), '(numpy.pi / 2)\n', (3731, 3745), False, 'import numpy\n'), ((3260, 3274), 'numpy.cos', 'numpy.cos', (['phi'], {}), '(phi)\n', (3269, 3274), False, 'import numpy\n'), ((3407, 3423), 'numpy.sin', 'numpy.sin', (['theta'], {}), '(theta)\n', (3416, 3423), False, 'import numpy\n')] |
from copy import copy
class GameBoard:
EMPTY_PIECE = 0
RED_PIECE = 1
BLACK_PIECE = 2
def __init__(self, num_columns, num_rows):
self.num_columns = num_columns
self.num_rows = num_rows
self.pieces = None
self.total_pieces = 0
self.reset_board()
def make_copy(self):
pieces = self.pieces.copy()
total_pieces = copy(self.total_pieces)
game_board = GameBoard(self.num_columns, self.num_rows)
game_board.pieces = pieces
game_board.total_pieces = total_pieces
return game_board
def reset_board(self):
self.total_pieces = 0
self.pieces = [self.EMPTY_PIECE] * (self.num_columns * self.num_rows)
def get_num_columns(self):
return self.num_columns
def get_num_rows(self):
return self.num_rows
def get_board_size(self):
return self.get_num_columns(), self.get_num_rows()
def can_make_move(self, column):
if column is None:
return False
if column < 0 or column >= self.num_columns:
return False
top_row = self.num_rows - 1
return self.get_piece(column, top_row) == self.EMPTY_PIECE
def get_all_possible_moves(self):
moves = []
for column in range(self.num_columns):
if self.can_make_move(column):
moves.append(column)
return moves
def make_move(self, column, player_color):
row = self.get_first_empty_row(column)
if row is None:
raise Exception(f"Row is none given column {column}")
index = self.get_piece_index(column, row)
self.pieces[index] = player_color
self.total_pieces += 1
def get_first_empty_row(self, column):
if column is None:
raise Exception("Column cannot be None!")
for row in range(self.num_rows):
if self.get_piece(column, row) == GameBoard.EMPTY_PIECE:
return row
return None
def get_row(self, column):
row = self.get_first_empty_row(column)
row = self.num_rows - 1 if row is None else row - 1
return row
def get_piece(self, column, row):
index = self.get_piece_index(column, row)
return self.pieces[index]
def get_piece_index(self, column, row):
index = column * self.num_rows + row
return index
def print_board(self):
for y in range(self.num_rows - 1, -1, -1):
for x in range(self.num_columns):
print(str(self.get_piece(x, y)) + ' ', end='')
print()
def is_full_board(self):
board_size = self.num_columns * self.num_rows
return self.total_pieces >= board_size
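# A minimal usage sketch (not part of the original module): drop two pieces into a
# 7x6 board and inspect it.
if __name__ == '__main__':
    board = GameBoard(7, 6)
    board.make_move(3, GameBoard.RED_PIECE)
    board.make_move(3, GameBoard.BLACK_PIECE)
    board.print_board()
    print(board.get_all_possible_moves())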
| [
"copy.copy"
] | [((388, 411), 'copy.copy', 'copy', (['self.total_pieces'], {}), '(self.total_pieces)\n', (392, 411), False, 'from copy import copy\n')] |
import numpy as np
import ndhist
import sys
# Construct a histogram with 2 bins of type int64 having edges at 0, 1, and 2,
# which are also of type int64
class Value(object):
def __init__(self, v=0):
self._v = v
def __lt__(self, rhs):
print("%f < %f"%(self._v, rhs._v))
return self._v < rhs._v
def __add__(self, rhs):
print("%f + %f"%(self._v, rhs._v))
self._v += rhs._v
return self
h1 = ndhist.ndhist((np.array([-1, 0,1,2,3,4,5,6,7,8,9], dtype=np.dtype(np.float64)),),
dtype=np.dtype(np.float64))
vs = np.random.uniform(-3, 11, size=100)
vs = vs.astype(np.dtype(np.float64))
vs = np.reshape(vs, (vs.shape[0],1))
h1.fill(vs, 1)
print("h1.get_bin_edges(0) = ", h1.get_bin_edges(0))
print("h1.bincontent =", h1.bincontent)
h2 = ndhist.ndhist((np.array([0,1,2,3,4,5,6,7,8,9], dtype=np.dtype(np.float64)),),
dtype=np.dtype(np.float64))
values = np.array([1,2,3,4,5,6,7,8,9,9,8,7,6,5,4,3,2,1,0])
vs = values.astype(np.dtype(np.float64))
vs = np.reshape(vs, (vs.shape[0],1))
h2.fill(vs, 1)
print(h2.bincontent)
print("Create object histogram.")
h3 = ndhist.ndhist((np.array([0,1,2,3,4,5,6,7,8,9,10], dtype=np.dtype(np.float64)),),
dtype=np.dtype(Value),
bc_class=Value)
print("edges h3=", h3.get_bin_edges())
print("h3.ndim=", h3.ndim)
values = np.array([1,2,3,4,5,6,7,8,9,10,9,8,7,6,5,4,3,2,1,0])
vs = values.astype(np.dtype(np.float64))
vs = np.reshape(vs, (vs.shape[0],1))
h3.fill(vs, Value(1))
print(h3.bincontent)
print("[")
for i in range(0, 10):
print("%f,"%h3.bincontent[i]._v)
print("]")
h4 = ndhist.ndhist((np.array([Value(0),Value(1),Value(2)], dtype=np.dtype(object)),),
dtype=np.dtype(np.float64))
vs = np.array([Value(0.1),Value(1.2),Value(2.3)])
h4.fill((vs,), 1.0)
print("h4.bincontent = ", h4.bincontent)
| [
"numpy.array",
"numpy.dtype",
"numpy.reshape",
"numpy.random.uniform"
] | [((586, 621), 'numpy.random.uniform', 'np.random.uniform', (['(-3)', '(11)'], {'size': '(100)'}), '(-3, 11, size=100)\n', (603, 621), True, 'import numpy as np\n'), ((664, 696), 'numpy.reshape', 'np.reshape', (['vs', '(vs.shape[0], 1)'], {}), '(vs, (vs.shape[0], 1))\n', (674, 696), True, 'import numpy as np\n'), ((947, 1014), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0])\n', (955, 1014), True, 'import numpy as np\n'), ((1043, 1075), 'numpy.reshape', 'np.reshape', (['vs', '(vs.shape[0], 1)'], {}), '(vs, (vs.shape[0], 1))\n', (1053, 1075), True, 'import numpy as np\n'), ((1385, 1456), 'numpy.array', 'np.array', (['[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]'], {}), '([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0])\n', (1393, 1456), True, 'import numpy as np\n'), ((1484, 1516), 'numpy.reshape', 'np.reshape', (['vs', '(vs.shape[0], 1)'], {}), '(vs, (vs.shape[0], 1))\n', (1494, 1516), True, 'import numpy as np\n'), ((637, 657), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (645, 657), True, 'import numpy as np\n'), ((1016, 1036), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (1024, 1036), True, 'import numpy as np\n'), ((1457, 1477), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (1465, 1477), True, 'import numpy as np\n'), ((558, 578), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (566, 578), True, 'import numpy as np\n'), ((915, 935), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (923, 935), True, 'import numpy as np\n'), ((1257, 1272), 'numpy.dtype', 'np.dtype', (['Value'], {}), '(Value)\n', (1265, 1272), True, 'import numpy as np\n'), ((1753, 1773), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (1761, 1773), True, 'import numpy as np\n'), ((508, 528), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (516, 528), True, 'import numpy as np\n'), ((865, 885), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (873, 885), True, 'import numpy as np\n'), ((1207, 1227), 'numpy.dtype', 'np.dtype', (['np.float64'], {}), '(np.float64)\n', (1215, 1227), True, 'import numpy as np\n'), ((1707, 1723), 'numpy.dtype', 'np.dtype', (['object'], {}), '(object)\n', (1715, 1723), True, 'import numpy as np\n')] |
from bs4 import BeautifulSoup
import urllib.request
import matplotlib.pyplot as plt
import threading
import smtplib
import os
cached_posts = []
def find_new_posts(_cached_posts=cached_posts):
t = 10
page = urllib.request.urlopen('http://www.jlawrence.co/tb').read()
soup = BeautifulSoup(page, "html5lib")
posts = soup.find_all("div",class_="post")
cant_send = not(_cached_posts)
# initialization of email server
mail = smtplib.SMTP('smtp.gmail.com', 587)
# initialization of the mail driver
mail.ehlo()
mail.starttls()
senderEmail = ''
senderPass = ''
    receiverEmail = '<EMAIL>'
mail.login(senderEmail, senderPass)
for post in posts:
if(not(post in _cached_posts)):
_cached_posts.append(post)
if(not(cant_send)):
                mail.sendmail(senderEmail, receiverEmail, 'New post at jlawrence.co/tb')
threading.Timer(t, find_new_posts).start()
find_new_posts()
| [
"bs4.BeautifulSoup",
"threading.Timer",
"smtplib.SMTP",
"urllib.request.urlopen"
] | [((281, 312), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page', '"""html5lib"""'], {}), "(page, 'html5lib')\n", (294, 312), False, 'from bs4 import BeautifulSoup\n'), ((446, 481), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.gmail.com"""', '(587)'], {}), "('smtp.gmail.com', 587)\n", (458, 481), False, 'import smtplib\n'), ((209, 261), 'urllib.request.urlopen', 'urllib.request.urlopen', (['"""http://www.jlawrence.co/tb"""'], {}), "('http://www.jlawrence.co/tb')\n", (231, 261), False, 'import urllib\n'), ((898, 932), 'threading.Timer', 'threading.Timer', (['t', 'find_new_posts'], {}), '(t, find_new_posts)\n', (913, 932), False, 'import threading\n')] |
from collections import defaultdict, deque
class CPU(object):
def __init__(self, instructions, _id):
self.instructions = instructions
self.ip = 0
self.registers = defaultdict(lambda: 0)
self.registers['p'] = _id
self._id = _id
self.outqueue = deque()
        self.inqueue = deque()  # replaced with the other CPU's outqueue in do_things()
self.sent = 0
def step(self):
if 0 <= self.ip < len(self.instructions):
instruction = self.instructions[self.ip]
else:
print("DONE")
return False, False
name = instruction[0]
if name == 'snd':
self.snd(instruction)
elif name == 'set':
self.set(instruction)
elif name == 'mul':
self.mul(instruction)
elif name == 'add':
self.add(instruction)
elif name == 'mod':
self.mod(instruction)
elif name == 'rcv':
if not self.rcv(instruction):
# We need to switch cpu sides
return True, True
elif name == 'jgz':
self.jgz(instruction)
else:
print("unknown instruction")
self.ip += 1
return True, False
def get_value(self, name):
try:
value = int(name)
except:
value = self.registers[name]
return value
def snd(self, i):
self.outqueue.append(self.get_value(i[1]))
if self._id == 1:
self.sent += 1
print(f's = {self.sent}')
pass
def set(self, i):
self.registers[i[1]] = self.get_value(i[2])
def mul(self, i):
self.registers[i[1]] *= self.get_value(i[2])
def mod(self, i):
self.registers[i[1]] = self.get_value(i[1]) % self.get_value(i[2])
def add(self, i):
self.registers[i[1]] += self.get_value(i[2])
def rcv(self, i):
if len(self.inqueue) == 0:
return False
else:
self.registers[i[1]] = self.inqueue.popleft()
return True
def jgz(self, i):
if self.get_value(i[1]) > 0:
self.ip += self.get_value(i[2]) - 1
def continue_rcv(self):
i = self.instructions[self.ip]
if len(self.inqueue) == 0:
return False
else:
self.registers[i[1]] = self.inqueue.popleft()
self.ip += 1
return True
def do_things():
with open('input.txt') as ifile:
instructions = [ins.strip().split(' ') for ins in ifile.readlines()]
c0 = CPU(instructions, 0)
c1 = CPU(instructions, 1)
c1.inqueue = c0.outqueue
c0.inqueue = c1.outqueue
while True:
running0, waiting0 = c0.step()
if waiting0:
while not(c0.continue_rcv()):
running1, waiting1 = c1.step()
if waiting1 or not running1:
print('deadlock')
return True
running1, waiting1 = c1.step()
if waiting1:
while not(c1.continue_rcv()):
running0, waiting0 = c0.step()
if waiting0 or not running0:
print('deadlock')
return True
if not (running0 and running1):
print("stopped")
print(c0.ip)
print(c1.ip)
return True
do_things()
| [
"collections.deque",
"collections.defaultdict"
] | [((192, 215), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (203, 215), False, 'from collections import defaultdict, deque\n'), ((296, 303), 'collections.deque', 'deque', ([], {}), '()\n', (301, 303), False, 'from collections import defaultdict, deque\n')] |
from exceptions import InvalidUsage, NotCorrectDirection, ErrorOnBoardCoding
from gameserver.types.direction import Direction
from enum import Enum
class BoardNaming(Enum):
Empty = 0
Dino = 1
Robot_Up = 2
Robot_Right = 3
Robot_Down = 4
Robot_Left = 5
def __str__(self):
if self.name == 'Dino':
return 'X'
if self.name == 'Robot_Up':
return Direction.UP.value
if self.name == 'Robot_Down':
return Direction.DOWN.value
if self.name == 'Robot_Left':
return Direction.LEFT.value
if self.name == 'Robot_Right':
return Direction.RIGHT.value
if self.name == 'Empty':
return '-'
@classmethod
def has_value(cls, value):
return value in cls._value2member_map_
ROBOT_VALUES = [
BoardNaming.Robot_Right,
BoardNaming.Robot_Left,
BoardNaming.Robot_Up,
BoardNaming.Robot_Down
]
def direction_to_robot_naming(direction: Direction) -> BoardNaming:
if direction not in list(Direction):
raise NotCorrectDirection
if direction == Direction.UP:
return BoardNaming.Robot_Up.value
elif direction == Direction.DOWN:
return BoardNaming.Robot_Down.value
elif direction == Direction.LEFT:
return BoardNaming.Robot_Left.value
else:
return BoardNaming.Robot_Right.value
def robot_naming_to_direction(naming: BoardNaming) -> Direction:
if naming not in ROBOT_VALUES:
raise ErrorOnBoardCoding
if naming == BoardNaming.Robot_Up:
return Direction.UP
if naming == BoardNaming.Robot_Down:
return Direction.DOWN
if naming == BoardNaming.Robot_Left:
return Direction.LEFT
if naming == BoardNaming.Robot_Right:
return Direction.RIGHT
def is_robot(name: BoardNaming) -> bool:
    return name in ROBOT_VALUES
def string_board_to_array(string: str, size: int) -> list:
return [string[i:i + size] for i in range(0, len(string), size)]
def print_for_response(string: str, size: int) -> list:
keys = BoardNaming._value2member_map_.keys()
values = [str(BoardNaming(x)) for x in BoardNaming._value2member_map_.keys()]
for key, val in zip(keys, values):
string = string.replace(str(key), val)
return string_board_to_array(string, size)
def request_direction_conversion(request_direction: str) -> Direction:
valid_directions = [str.lower(e) for e in Direction.__members__.keys()]
if str.lower(request_direction) not in valid_directions:
raise NotCorrectDirection
return Direction.__members__[str.upper(request_direction)] | [
"gameserver.types.direction.Direction.__members__.keys"
] | [((2489, 2517), 'gameserver.types.direction.Direction.__members__.keys', 'Direction.__members__.keys', ([], {}), '()\n', (2515, 2517), False, 'from gameserver.types.direction import Direction\n')] |
"""
:mod:`zsl.db.model.app_model_json_decoder`
------------------------------------------
.. moduleauthor:: <NAME>
"""
from __future__ import unicode_literals
from json.decoder import WHITESPACE, JSONDecoder
from zsl.utils.import_helper import fetch_class
def get_json_decoder(full_class_name, hints=None):
class AppModelJSONDecoder(JSONDecoder):
def decode(self, s, _w=WHITESPACE.match):
values = JSONDecoder.decode(self, s, _w=_w)
model = fetch_class(full_class_name)(values, 'id', hints)
return model
return AppModelJSONDecoder
| [
"zsl.utils.import_helper.fetch_class",
"json.decoder.JSONDecoder.decode"
] | [((427, 461), 'json.decoder.JSONDecoder.decode', 'JSONDecoder.decode', (['self', 's'], {'_w': '_w'}), '(self, s, _w=_w)\n', (445, 461), False, 'from json.decoder import WHITESPACE, JSONDecoder\n'), ((482, 510), 'zsl.utils.import_helper.fetch_class', 'fetch_class', (['full_class_name'], {}), '(full_class_name)\n', (493, 510), False, 'from zsl.utils.import_helper import fetch_class\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Simple script for augmenting our training images with imgaug.
    - Takes all *.JPG images in the cwd
    - Manipulates them with imgaug augmenters
    - Saves the augmented images in the cwd with the augmentation name prefixed to the original file name
Requirements:
    * See requirements.txt in https://github.com/aleju/imgaug
    * opencv (conda) or python-opencv (pip)
    * imgaug
Suggested setup:
    * Create a new conda environment
    * Get requirements.txt from the source above -> cat requirements.txt | xargs -i conda [install|add] {}
    * Install opencv in the env
    * Install imgaug from git:
        - /env_path/bin/pip install git+https://github.com/aleju/imgaug
How to use:
    * Copy this script into the directory with the training images
    * (Source) Activate the environment
    * Run the script
'''
import cv2
import os
from imgaug import augmenters as iaa
def get_files(postfix):
'''
Returns all files in the current working directory with the specified postfix
It's sorted to ensure determinism
'''
filenames = os.listdir(os.getcwd())
return sorted([filename for filename in filenames if filename.endswith(postfix)])
def save_augmented_images(prefix, aug_images, paths):
'''
aug_images - list of numpy.ndarray's
'''
for i in range(len(paths)):
cv2.imwrite(prefix + "_" + paths[i], aug_images[i])
def get_images(paths):
'''
Images of shape := (height, width, channels = 3) in BGR Format
paths - [str]
'''
images = []
for path in paths:
images.append(cv2.imread(path))
return images
def main():
'''
Dirty image augmentation to get better recognition for systems presentation.
'''
augs = ["superpixel","colorspace","grayscale","gaussian_blur", "average_blur","median_blur","edge_detect","add","add_eltwise","invert","contrast_norm","dropout"]
superpixel = iaa.Superpixels(p_replace=(0.4, 0.6), n_segments=(16, 64))
#colorspace = iaa.Sequential([iaa.ChangeColorspace(from_colorspace="BGR", to_colorspace="HSV"), iaa.WithChannels(0, iaa.Add(-50, 50), iaa.ChangeColorspace(from_colorspace="BGR", to_colorspace="BGR")])
grayscale = iaa.Grayscale(alpha = (0.0, 1.0))
gaussian_blur = iaa.GaussianBlur(sigma = (0.0, 3.0))
average_blur = iaa.AverageBlur(k = (2, 10))
median_blur = iaa.MedianBlur(k = (5, 11))
edge_detect = iaa.EdgeDetect(alpha = (0.0 , 1.0))
add = iaa.Add((-50, 50), per_channel = 0.5)
add_eltwise = iaa.AddElementwise((-50, 50), per_channel = 0.5)
invert = iaa.Invert(0.25, per_channel = 0.5)
contrast_norm = iaa.ContrastNormalization((0.5, 1.5), per_channel = 0.5)
dropout = iaa.Dropout(p = (0, 0.3), per_channel = 0.5)
image_paths = get_files("JPG")
cv_images = get_images(image_paths)
for augmentation in augs:
if augmentation == "superpixel":
aug_images = superpixel.augment_images(cv_images)
save_augmented_images("superpixel", aug_images, image_paths)
#elif augmentation == "colorspace":
# aug_images = colorspace.augment_images(cv_images)
# save_augmented_images("colorspace", aug_images, image_paths)
elif augmentation == "grayscale":
aug_images = grayscale.augment_images(cv_images)
save_augmented_images("grayscale", aug_images, image_paths)
elif augmentation == "gaussian_blur":
aug_images = gaussian_blur.augment_images(cv_images)
save_augmented_images("gaussian_blur", aug_images, image_paths)
elif augmentation == "average_blur":
aug_images = average_blur.augment_images(cv_images)
save_augmented_images("average_blur", aug_images, image_paths)
elif augmentation == "edge_detect":
aug_images = edge_detect.augment_images(cv_images)
save_augmented_images("edge_detect", aug_images, image_paths)
elif augmentation == "add":
aug_images = add.augment_images(cv_images)
save_augmented_images("add", aug_images, image_paths)
elif augmentation == "add_eltwise":
aug_images = add_eltwise.augment_images(cv_images)
save_augmented_images("add_eltwise", aug_images, image_paths)
elif augmentation == "invert":
aug_images = invert.augment_images(cv_images)
save_augmented_images("invert", aug_images, image_paths)
elif augmentation == "contrast_norm":
aug_images = contrast_norm.augment_images(cv_images)
save_augmented_images("contrast_norm", aug_images, image_paths)
elif augmentation == "dropout":
aug_images = dropout.augment_images(cv_images)
save_augmented_images("dropout", aug_images, image_paths)
if __name__ == '__main__':
main()
| [
"imgaug.augmenters.Grayscale",
"imgaug.augmenters.AverageBlur",
"imgaug.augmenters.AddElementwise",
"cv2.imwrite",
"imgaug.augmenters.GaussianBlur",
"imgaug.augmenters.Invert",
"os.getcwd",
"imgaug.augmenters.Superpixels",
"imgaug.augmenters.EdgeDetect",
"imgaug.augmenters.ContrastNormalization",
"cv2.imread",
"imgaug.augmenters.Dropout",
"imgaug.augmenters.Add",
"imgaug.augmenters.MedianBlur"
] | [((2087, 2145), 'imgaug.augmenters.Superpixels', 'iaa.Superpixels', ([], {'p_replace': '(0.4, 0.6)', 'n_segments': '(16, 64)'}), '(p_replace=(0.4, 0.6), n_segments=(16, 64))\n', (2102, 2145), True, 'from imgaug import augmenters as iaa\n'), ((2375, 2406), 'imgaug.augmenters.Grayscale', 'iaa.Grayscale', ([], {'alpha': '(0.0, 1.0)'}), '(alpha=(0.0, 1.0))\n', (2388, 2406), True, 'from imgaug import augmenters as iaa\n'), ((2429, 2463), 'imgaug.augmenters.GaussianBlur', 'iaa.GaussianBlur', ([], {'sigma': '(0.0, 3.0)'}), '(sigma=(0.0, 3.0))\n', (2445, 2463), True, 'from imgaug import augmenters as iaa\n'), ((2486, 2512), 'imgaug.augmenters.AverageBlur', 'iaa.AverageBlur', ([], {'k': '(2, 10)'}), '(k=(2, 10))\n', (2501, 2512), True, 'from imgaug import augmenters as iaa\n'), ((2535, 2560), 'imgaug.augmenters.MedianBlur', 'iaa.MedianBlur', ([], {'k': '(5, 11)'}), '(k=(5, 11))\n', (2549, 2560), True, 'from imgaug import augmenters as iaa\n'), ((2583, 2615), 'imgaug.augmenters.EdgeDetect', 'iaa.EdgeDetect', ([], {'alpha': '(0.0, 1.0)'}), '(alpha=(0.0, 1.0))\n', (2597, 2615), True, 'from imgaug import augmenters as iaa\n'), ((2639, 2674), 'imgaug.augmenters.Add', 'iaa.Add', (['(-50, 50)'], {'per_channel': '(0.5)'}), '((-50, 50), per_channel=0.5)\n', (2646, 2674), True, 'from imgaug import augmenters as iaa\n'), ((2697, 2743), 'imgaug.augmenters.AddElementwise', 'iaa.AddElementwise', (['(-50, 50)'], {'per_channel': '(0.5)'}), '((-50, 50), per_channel=0.5)\n', (2715, 2743), True, 'from imgaug import augmenters as iaa\n'), ((2766, 2799), 'imgaug.augmenters.Invert', 'iaa.Invert', (['(0.25)'], {'per_channel': '(0.5)'}), '(0.25, per_channel=0.5)\n', (2776, 2799), True, 'from imgaug import augmenters as iaa\n'), ((2822, 2876), 'imgaug.augmenters.ContrastNormalization', 'iaa.ContrastNormalization', (['(0.5, 1.5)'], {'per_channel': '(0.5)'}), '((0.5, 1.5), per_channel=0.5)\n', (2847, 2876), True, 'from imgaug import augmenters as iaa\n'), ((2899, 2939), 'imgaug.augmenters.Dropout', 'iaa.Dropout', ([], {'p': '(0, 0.3)', 'per_channel': '(0.5)'}), '(p=(0, 0.3), per_channel=0.5)\n', (2910, 2939), True, 'from imgaug import augmenters as iaa\n'), ((1228, 1239), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1237, 1239), False, 'import os\n'), ((1489, 1540), 'cv2.imwrite', 'cv2.imwrite', (["(prefix + '_' + paths[i])", 'aug_images[i]'], {}), "(prefix + '_' + paths[i], aug_images[i])\n", (1500, 1540), False, 'import cv2\n'), ((1739, 1755), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (1749, 1755), False, 'import cv2\n')] |
"""
SOLID-принципы
Принцип единственной ответственности (The Single Responsibility Principle): у каждого объекта должна быть только одна
ответственность. Все поведение этого объекта должно быть направлено на обеспечение этой ответственности и никаких
других.
"""
# Wrong
class EventHandler:  # Event handler
    def handle_event_1(self, event):
        # Handler for the first event
        pass
    def handle_event_2(self, event):
        # Handler for the second event
        pass
    def handle_event_3(self, event):
        # Handler for the third event
        pass
    def database_logger(self, event):
        # Method for writing logs to the database
        pass
# Correct
class EventHandler:  # Event handler
    def handle_event_1(self, event):
        # Handler for the first event
        pass
    def handle_event_2(self, event):
        # Handler for the second event
        pass
    def handle_event_3(self, event):
        # Handler for the third event
        pass
class DatabaseLogger:
    def database_logger(self, event):
        # Method for writing logs to the database
        pass
"""
Принцип открытости/закрытости (The Open Closed Principle): классы должны быть открыты для расширения, но закрыты
для изменения. Этот принцип является важным, потому что внесение изменений в существующие компоненты системы может
также привести к непредвиденным изменения в работе самой этой системы. Однако поведение существующих объектов при
необходимости можно расширить при помощи создания новых сущностей.
Рассмотрим на примере. Пусть существует класс Robot. У этого класса есть метод brake. Мы хотим создать робота,
который при поломке кроме всего прочего включает аварийную сигнализацию alarm. При этом мы не должны переписывать сам
класс Robot, а должны создать потомка AlarmRobot, который при вызове break после вызова соответствующего метода
родительского класса будет так же вызывать метод alarm.
"""
"""
Принцип подстановки Барбары Лисков (The Liskov Substitution Principle): функции, которые используют базовый тип
должны иметь возможность использовать его подтипы не зная об этом.
"""
# Wrong code
class Parent:
def __init__(self, value):
self.value = value
def do_something(self):
print("Function was called")
class Child(Parent):
def do_something(self):
super().do_something()
self.value = 0
def function(obj: Parent):
obj.do_something()
if obj.value > 0:
print("All correct!")
else:
print("SOMETHING IS GOING WRONG!")
# Let's look at the behaviour
parent = Parent(5)
function(parent)
print()
# This code must keep working correctly when a child instance is substituted for the parent
child = Child(5)
function(child)
print()
"""
Принцип разделения интерфейса (The Interface Segregation Principle): клиенты не должны зависеть от методов,
которые они не используют.
"""
import math
# Wrong
class AllScoresCalculator:
def calculate_accuracy(self, y_true, y_pred):
return sum(int(x == y) for x, y in zip(y_true, y_pred)) / len(y_true)
def log_loss(self, y_true, y_pred):
return sum((x * math.log(y) + (1 - x) * math.log(1 - y))
for x, y in zip(y_true, y_pred)) / len(y_true)
# Correct
class CalculateLosses:
def log_loss(self, y_true, y_pred):
return sum((x * math.log(y) + (1 - x) * math.log(1 - y))
for x, y in zip(y_true, y_pred)) / len(y_true)
class CalculateMetrics:
def calculate_accuracy(self, y_true, y_pred):
return sum(int(x == y) for x, y in zip(y_true, y_pred)) / len(y_true)
"""
Принцип инверсии зависимостей (The Dependency Inversion Principle):
* Модули верхних уровней не должны зависеть от модулей нижних уровней. Оба типа модулей должны зависеть от абстракций.
* Абстракции не должны зависеть от деталей. Детали должны зависеть от абстракций.
Приведем пример.
Пусть у вас есть базовый класс Distributer, который может отправлять сообщения в различные социальные сети.
У этого класса есть несколько реализаций, например VKDistributer и OKDistributer. Согласно принципу инверсии зависимостей, эти реализации
не должны зависеть от методов класса Distributer (например VK_send_message и OK_send_message). Вместо этого у класса
Destributor должен быть объявлен общий абстрактный метод send_message, который и будет реализован отдельно в каждом
из потомков.
"""
| [
"math.log"
] | [((3176, 3187), 'math.log', 'math.log', (['y'], {}), '(y)\n', (3184, 3187), False, 'import math\n'), ((3200, 3215), 'math.log', 'math.log', (['(1 - y)'], {}), '(1 - y)\n', (3208, 3215), False, 'import math\n'), ((3394, 3405), 'math.log', 'math.log', (['y'], {}), '(y)\n', (3402, 3405), False, 'import math\n'), ((3418, 3433), 'math.log', 'math.log', (['(1 - y)'], {}), '(1 - y)\n', (3426, 3433), False, 'import math\n')] |
import numpy as np
import plotly.graph_objects as go
import plotly.express as px
from plotly.subplots import make_subplots
from ipdb import set_trace as breakpoint
def parallel_coordinates_and_hist(*args, **kwargs):
"""
Augment parallel_coordinates with histograms
Args:
df: dataframe
dimensions: column names
color: the column name for coloring the parallel coordinate plot
color_hist: the column name for coloring the hist
TODO:
1. Now we color hist by labels. We should generalize to any feature. See "Augmenting parallel coordinates plots with color-coded stacked histograms" (Bok 2020). One function to consider is go.Bar. See the following sample code from ["Continuous Color Scales and Color Bars in Python
"](https://plotly.com/python/colorscales/):
```python
import plotly.express as px
df = px.data.gapminder().query("year == 2007").sort_values(by="lifeExp")
fig = px.bar(df, y="continent", x="pop", color="lifeExp", orientation="h",
color_continuous_scale='Bluered_r', hover_name="country")
fig.show()
```
2. Responsive histogram. Hover on one axis show its histogram. Show the histogram of only highlighted lines. Visualize the images of the highlighted images. One potential tool is FigureWidget.
https://community.plotly.com/t/linking-and-brushing-between-parallel-coordinate-and-scatterplot/38047
"""
df = args[0]
#df['label'] = df['label'].astype(int) # should be done outside
dimensions = kwargs['dimensions']
color_hist = kwargs.pop('color_hist', None) #'label'
if color_hist is None:
labels = np.zeros(len(df))
elif isinstance(color_hist, str):
labels = df[color_hist]
else: # e.g., series or ndarray
labels = color_hist
labels_unique = np.unique(labels)
# after this labels would be series or ndarray
# Make subplots
cols = len(dimensions)
fig = go.FigureWidget(make_subplots(rows=2, cols=cols, specs=[
[{'type': 'domain', 'colspan': cols}] + [None] * (cols - 1),
[{} for _ in range(cols)],
]))
# Parallel coordinates
pc = px.parallel_coordinates(*args, **kwargs)
fig.add_trace(pc.data[0], row=1, col=1)
fig.update_layout(pc.layout)
# Histograms
for j in range(cols):
fig_hist = px.histogram(
df,
y=dimensions[j],
color=labels,
color_discrete_map={0: 'steelblue', 1: 'firebrick'}
)
fig_hist.data = sorted(fig_hist.data, key=lambda hist: hist.legendgroup)
fig_hist.update_traces(bingroup=None)
fig.add_traces(fig_hist.data, rows=2, cols=j+1)
fig.update_xaxes(title_text=dimensions[j], row=2, col=j+1)
fig.update_yaxes(title_text=None, row=2, col=j+1)
fig.update_layout(barmode='stack', showlegend=False) # 'group', 'relative', 'overlay'
def update_highlight(dimension, constraintrange):
masks = []
for d in fig.data[0].dimensions:
if d.constraintrange is not None:
crs = np.array(d.constraintrange)
if crs.ndim == 1:
crs = np.expand_dims(crs, axis=0)
masks_dim = []
for cr in crs:
#labels_rev = {v: k for k, v in labels.items()}
#key = labels_rev[d.label]
key = d.label
masks_dim.append(df[key].between(*cr))
masks.append(np.logical_or.reduce(masks_dim))
mask = np.logical_and.reduce(masks)
        # Pool doesn't work here, probably because fig is copied to each worker
with fig.batch_update():
for i, d in enumerate(fig.data[0].dimensions):
for j, label in enumerate(labels_unique):
fig.data[i*(len(labels_unique))+j+1].y = df.loc[mask & (labels == label), d.label]
for d in fig.data[0].dimensions:
d.on_change(update_highlight, 'constraintrange')
return fig#, update_highlight (return the handle to debug)
def test_parallel_coordinates_and_hist(data_frame, columns):
"""
assigngroup=True, use the same bin size
```python
columns = ['USFLUXL', 'Ensemble prob']
test_parallel_coordinates_and_hist(df_test, columns=columns)
```
"""
fig = make_subplots(rows=1, cols=len(columns))
for j in range(len(columns)):
#breakpoint()
hist = px.histogram(
data_frame,
x=columns[j],
#color='label',
nbins=10,
)
fig.add_trace(hist.data[0], row=1, col=j+1)
return fig
| [
"plotly.express.histogram",
"numpy.unique",
"numpy.array",
"numpy.logical_and.reduce",
"numpy.expand_dims",
"plotly.express.parallel_coordinates",
"numpy.logical_or.reduce"
] | [((1834, 1851), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (1843, 1851), True, 'import numpy as np\n'), ((2167, 2207), 'plotly.express.parallel_coordinates', 'px.parallel_coordinates', (['*args'], {}), '(*args, **kwargs)\n', (2190, 2207), True, 'import plotly.express as px\n'), ((2348, 2456), 'plotly.express.histogram', 'px.histogram', (['df'], {'y': 'dimensions[j]', 'color': 'labels', 'color_discrete_map': "{(0): 'steelblue', (1): 'firebrick'}"}), "(df, y=dimensions[j], color=labels, color_discrete_map={(0):\n 'steelblue', (1): 'firebrick'})\n", (2360, 2456), True, 'import plotly.express as px\n'), ((3551, 3579), 'numpy.logical_and.reduce', 'np.logical_and.reduce', (['masks'], {}), '(masks)\n', (3572, 3579), True, 'import numpy as np\n'), ((4448, 4496), 'plotly.express.histogram', 'px.histogram', (['data_frame'], {'x': 'columns[j]', 'nbins': '(10)'}), '(data_frame, x=columns[j], nbins=10)\n', (4460, 4496), True, 'import plotly.express as px\n'), ((3088, 3115), 'numpy.array', 'np.array', (['d.constraintrange'], {}), '(d.constraintrange)\n', (3096, 3115), True, 'import numpy as np\n'), ((3176, 3203), 'numpy.expand_dims', 'np.expand_dims', (['crs'], {'axis': '(0)'}), '(crs, axis=0)\n', (3190, 3203), True, 'import numpy as np\n'), ((3503, 3534), 'numpy.logical_or.reduce', 'np.logical_or.reduce', (['masks_dim'], {}), '(masks_dim)\n', (3523, 3534), True, 'import numpy as np\n')] |
x = [int(input('please input an integer: ')), int(input('please input an integer: '))]
x2 = max(x)
x1 = min(x)
print(x2)
import math
a = math.sqrt(x2)
for i in range(x1, x2+1):
    print(i)  # print every integer from the smaller input up to the larger one
| [
"math.sqrt"
] | [((144, 157), 'math.sqrt', 'math.sqrt', (['x2'], {}), '(x2)\n', (153, 157), False, 'import math\n')] |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests import tagged, Form
from odoo.addons.mail.tests.common import mail_new_test_user
from odoo.addons.test_mail.tests.common import TestMailCommon
from odoo.exceptions import AccessError, ValidationError, UserError
from odoo.tools import mute_logger, formataddr
@tagged('mail_channel')
class TestChannelAccessRights(TestMailCommon):
@classmethod
def setUpClass(cls):
super(TestChannelAccessRights, cls).setUpClass()
Channel = cls.env['mail.channel'].with_context(cls._test_context)
cls.user_public = mail_new_test_user(cls.env, login='bert', groups='base.group_public', name='<NAME>')
cls.user_portal = mail_new_test_user(cls.env, login='chell', groups='base.group_portal', name='<NAME>')
# Pigs: base group for tests
cls.group_pigs = Channel.create({
'name': 'Pigs',
'public': 'groups',
'group_public_id': cls.env.ref('base.group_user').id})
# Jobs: public group
cls.group_public = Channel.create({
'name': 'Jobs',
'description': 'NotFalse',
'public': 'public'})
# Private: private group
cls.group_private = Channel.create({
'name': 'Private',
'public': 'private'})
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_access_rights_public(self):
# Read public group -> ok
self.group_public.with_user(self.user_public).read()
# Read Pigs -> ko, restricted to employees
with self.assertRaises(AccessError):
self.group_pigs.with_user(self.user_public).read()
# Read a private group when being a member: ok
self.group_private.write({'channel_partner_ids': [(4, self.user_public.partner_id.id)]})
self.group_private.with_user(self.user_public).read()
# Create group: ko, no access rights
with self.assertRaises(AccessError):
self.env['mail.channel'].with_user(self.user_public).create({'name': 'Test'})
# Update group: ko, no access rights
with self.assertRaises(AccessError):
self.group_public.with_user(self.user_public).write({'name': 'Broutouschnouk'})
# Unlink group: ko, no access rights
with self.assertRaises(AccessError):
self.group_public.with_user(self.user_public).unlink()
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models', 'odoo.models.unlink')
def test_access_rights_groups(self):
# Employee read employee-based group: ok
self.group_pigs.with_user(self.user_employee).read()
# Employee can create a group
self.env['mail.channel'].with_user(self.user_employee).create({'name': 'Test'})
# Employee update employee-based group: ok
self.group_pigs.with_user(self.user_employee).write({'name': 'modified'})
# Employee unlink employee-based group: ok
self.group_pigs.with_user(self.user_employee).unlink()
# Employee cannot read a private group
with self.assertRaises(AccessError):
self.group_private.with_user(self.user_employee).read()
# Employee cannot write on private
with self.assertRaises(AccessError):
self.group_private.with_user(self.user_employee).write({'name': 're-modified'})
@mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
def test_access_rights_followers_ko(self):
# self.group_private.name has been put in the cache during the setup as sudo
        # It must therefore be removed from the cache in order to verify that user_portal can't read it.
self.group_private.invalidate_cache(['name'])
with self.assertRaises(AccessError):
self.group_private.with_user(self.user_portal).name
def test_access_rights_followers_portal(self):
# Do: Chell is added into Pigs members and browse it -> ok for messages, ko for partners (no read permission)
self.group_private.write({'channel_partner_ids': [(4, self.user_portal.partner_id.id)]})
chell_pigs = self.group_private.with_user(self.user_portal)
trigger_read = chell_pigs.name
for message in chell_pigs.message_ids:
trigger_read = message.subject
with self.assertRaises(AccessError):
chell_pigs.message_partner_ids
for partner in self.group_private.message_partner_ids:
if partner.id == self.user_portal.partner_id.id:
# Chell can read her own partner record
continue
with self.assertRaises(AccessError):
trigger_read = partner.with_user(self.user_portal).name
@tagged('mail_channel')
class TestChannelFeatures(TestMailCommon):
@classmethod
def setUpClass(cls):
super(TestChannelFeatures, cls).setUpClass()
cls.test_channel = cls.env['mail.channel'].with_context(cls._test_context).create({
'name': 'Test',
'description': 'Description',
'alias_name': 'test',
'public': 'public',
})
cls.test_partner = cls.env['res.partner'].with_context(cls._test_context).create({
'name': '<NAME>',
'email': '<EMAIL>',
})
def _join_channel(self, channel, partners):
for partner in partners:
channel.write({'channel_last_seen_partner_ids': [(0, 0, {'partner_id': partner.id})]})
channel.invalidate_cache()
def _leave_channel(self, channel, partners):
for partner in partners:
channel._action_unfollow(partner)
def test_channel_listeners(self):
self.assertEqual(self.test_channel.message_channel_ids, self.test_channel)
self.assertEqual(self.test_channel.message_partner_ids, self.env['res.partner'])
self.assertEqual(self.test_channel.channel_partner_ids, self.env['res.partner'])
self._join_channel(self.test_channel, self.test_partner)
self.assertEqual(self.test_channel.message_channel_ids, self.test_channel)
self.assertEqual(self.test_channel.message_partner_ids, self.env['res.partner'])
self.assertEqual(self.test_channel.channel_partner_ids, self.test_partner)
self._leave_channel(self.test_channel, self.test_partner)
self.assertEqual(self.test_channel.message_channel_ids, self.test_channel)
self.assertEqual(self.test_channel.message_partner_ids, self.env['res.partner'])
self.assertEqual(self.test_channel.channel_partner_ids, self.env['res.partner'])
def test_channel_post_nofollow(self):
self.test_channel.message_post(body='Test', message_type='comment', subtype_xmlid='mail.mt_comment')
self.assertEqual(self.test_channel.message_channel_ids, self.test_channel)
self.assertEqual(self.test_channel.message_partner_ids, self.env['res.partner'])
@mute_logger('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')
def test_channel_mailing_list_recipients(self):
""" Posting a message on a mailing list should send one email to all recipients """
self.env['ir.config_parameter'].set_param('mail.catchall.domain', 'schlouby.fr')
self.test_channel.write({'email_send': True})
self.user_employee.write({'notification_type': 'email'})
# Subscribe an user without email. We shouldn't try to send email to them.
nomail = self.env['res.users'].create({
"login": "nomail",
"name": "<NAME>",
"email": False,
"notification_type": "email",
})
self._join_channel(self.test_channel, self.user_employee.partner_id | self.test_partner | nomail.partner_id)
with self.mock_mail_gateway():
self.test_channel.message_post(body="Test", message_type='comment', subtype_xmlid='mail.mt_comment')
self.assertSentEmail(self.test_channel.env.user.partner_id, [self.partner_employee, self.test_partner])
@mute_logger('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')
def test_channel_chat_recipients(self):
""" Posting a message on a chat should not send emails """
self.env['ir.config_parameter'].set_param('mail.catchall.domain', 'schlouby.fr')
self.test_channel.write({'email_send': False})
self._join_channel(self.test_channel, self.user_employee.partner_id | self.test_partner)
with self.mock_mail_gateway():
self.test_channel.message_post(body="Test", message_type='comment', subtype_xmlid='mail.mt_comment')
self.assertNotSentEmail()
self.assertEqual(len(self._mails), 0)
@mute_logger('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')
def test_channel_classic_recipients(self):
""" Posting a message on a classic channel should work like classic post """
self.test_channel.write({'alias_name': False})
self.test_channel.message_subscribe([self.user_employee.partner_id.id, self.test_partner.id])
with self.mock_mail_gateway():
self.test_channel.message_post(body="Test", message_type='comment', subtype_xmlid='mail.mt_comment')
self.assertSentEmail(self.test_channel.env.user.partner_id, [self.test_partner])
def test_channel_creation(self):
"""A user that create a private channel should be able to read it."""
channel_form = Form(self.env['mail.channel'].with_user(self.user_employee))
channel_form.name = 'Test private channel'
channel_form.public = 'private'
channel = channel_form.save()
self.assertEqual(channel.name, 'Test private channel', 'Must be able to read the created channel')
def test_channel_get(self):
current_user = self.env['res.users'].create({
"login": "adam",
"name": "Jonas",
})
current_user = current_user.with_user(current_user)
current_partner = current_user.partner_id
other_partner = self.test_partner
# `channel_get` should return a new channel the first time a partner is given
initial_channel_info = current_user.env['mail.channel'].channel_get(partners_to=other_partner.ids)
self.assertEqual(set(p['id'] for p in initial_channel_info['members']), {current_partner.id, other_partner.id})
# `channel_get` should return the existing channel every time the same partner is given
same_channel_info = current_user.env['mail.channel'].channel_get(partners_to=other_partner.ids)
self.assertEqual(same_channel_info['id'], initial_channel_info['id'])
# `channel_get` should return the existing channel when the current partner is given together with the other partner
together_channel_info = current_user.env['mail.channel'].channel_get(partners_to=(current_partner + other_partner).ids)
self.assertEqual(together_channel_info['id'], initial_channel_info['id'])
# `channel_get` should return a new channel the first time just the current partner is given,
# even if a channel containing the current partner together with other partners already exists
solo_channel_info = current_user.env['mail.channel'].channel_get(partners_to=current_partner.ids)
self.assertNotEqual(solo_channel_info['id'], initial_channel_info['id'])
self.assertEqual(set(p['id'] for p in solo_channel_info['members']), {current_partner.id})
# `channel_get` should return the existing channel every time the current partner is given
same_solo_channel_info = current_user.env['mail.channel'].channel_get(partners_to=current_partner.ids)
self.assertEqual(same_solo_channel_info['id'], solo_channel_info['id'])
def test_channel_seen(self):
"""
In case of concurrent channel_seen RPC, ensure the oldest call has no effect.
"""
self.test_channel.write({'channel_type': 'chat'})
self.test_channel.action_follow()
msg_1 = self._add_messages(self.test_channel, 'Body1', author=self.user_employee.partner_id,
channel_ids=[self.test_channel.id])
msg_2 = self._add_messages(self.test_channel, 'Body2', author=self.user_employee.partner_id,
channel_ids=[self.test_channel.id])
ChannelAsUser = self.test_channel.with_user(self.user_employee).browse(self.test_channel.id)
self.test_channel.channel_seen(msg_2.id)
self.assertEqual(
ChannelAsUser.channel_info()[0]['seen_partners_info'][0]['seen_message_id'],
msg_2.id,
"Last message id should have been updated"
)
self.test_channel.channel_seen(msg_1.id)
self.assertEqual(
ChannelAsUser.channel_info()[0]['seen_partners_info'][0]['seen_message_id'],
msg_2.id,
"Last message id should stay the same after mark channel as seen with an older message"
)
@mute_logger('odoo.models.unlink')
def test_channel_auto_unsubscribe_archived_or_deleted_users(self):
"""Archiving / deleting a user should automatically unsubscribe related partner from private channels"""
test_channel_private = self.env['mail.channel'].with_context(self._test_context).create({
'name': 'Winden caves',
'description': 'Channel to travel through time',
'public': 'private',
})
test_channel_group = self.env['mail.channel'].with_context(self._test_context).create({
'name': '<NAME>',
'public': 'groups',
'group_public_id': self.env.ref('base.group_user').id})
test_user = self.env['res.users'].create({
"login": "adam",
"name": "Jonas",
})
test_partner = test_user.partner_id
test_chat = self.env['mail.channel'].with_context(self._test_context).create({
'name': 'test',
'channel_type': 'chat',
'public': 'private',
'channel_partner_ids': [(4, self.user_employee.partner_id.id), (4, test_partner.id)],
})
self._join_channel(self.test_channel, self.user_employee.partner_id | test_partner)
self._join_channel(test_channel_private, self.user_employee.partner_id | test_partner)
self._join_channel(test_channel_group, self.user_employee.partner_id | test_partner)
# Unsubscribe archived user from the private channels, but not from public channels and not from chat
self.user_employee.active = False
self.assertEqual(test_channel_private.channel_partner_ids, test_partner)
self.assertEqual(test_channel_group.channel_partner_ids, test_partner)
self.assertEqual(self.test_channel.channel_partner_ids, self.user_employee.partner_id | test_partner)
self.assertEqual(test_chat.channel_partner_ids, self.user_employee.partner_id | test_partner)
# Unsubscribe deleted user from the private channels, but not from public channels and not from chat
test_user.unlink()
self.assertEqual(test_channel_private.channel_partner_ids, self.env['res.partner'])
self.assertEqual(test_channel_group.channel_partner_ids, self.env['res.partner'])
self.assertEqual(self.test_channel.channel_partner_ids, self.user_employee.partner_id | test_partner)
self.assertEqual(test_chat.channel_partner_ids, self.user_employee.partner_id | test_partner)
def test_channel_unfollow_should_also_unsubscribe_the_partner(self):
self.test_channel.message_subscribe(self.test_partner.ids)
self.test_channel._action_unfollow(self.test_partner)
self.assertFalse(self.test_channel.message_partner_ids)
def test_channel_unfollow_should_not_post_message_if_the_partner_has_been_removed(self):
'''
When a partner leaves a channel, the system will help post a message under
that partner's name in the channel to notify others if `email_sent` is set `False`.
The message should only be posted when the partner is still a member of the channel
before method `_action_unfollow()` is called.
If the partner has been removed earlier, no more messages will be posted
even if `_action_unfollow()` is called again.
'''
self.test_channel.write({'email_send': False})
self._join_channel(self.test_channel, self.test_partner)
self.test_channel.message_subscribe(self.partner_employee.ids)
# a message should be posted to notify others when a partner is about to leave
with self.assertSinglePostNotifications([{'partner': self.partner_employee, 'type': 'inbox'}], {
'message_type': 'notification',
'subtype': 'mail.mt_comment',
}):
self.test_channel._action_unfollow(self.test_partner)
# no more messages should be posted if the partner has been removed before.
with self.assertNoNotifications():
self.test_channel._action_unfollow(self.test_partner)
def test_multi_company_chat(self):
company_A = self.env['res.company'].create({'name': 'Company A'})
company_B = self.env['res.company'].create({'name': 'Company B'})
test_user_1 = self.env['res.users'].create({
'login': 'user1',
'name': 'My First New User',
'company_ids': [(6, 0, company_A.ids)],
'company_id': company_A.id
})
test_user_2 = self.env['res.users'].create({
'login': 'user2',
'name': '<NAME> New User',
'company_ids': [(6, 0, company_B.ids)],
'company_id': company_B.id
})
initial_channel_info = self.env['mail.channel'].with_user(test_user_1).with_context(allowed_company_ids=company_A.ids).channel_get(test_user_2.partner_id.ids)
self.assertTrue(initial_channel_info, 'should be able to chat with multi company user')
def test_multi_company_message_post_notifications(self):
company_1 = self.company_admin
company_2 = self.env['res.company'].create({'name': 'Company 2'})
# Company 1 and notification_type == "inbox"
user_1 = self.user_employee
# Company 1 and notification_type == "email"
user_2 = self.user_admin
user_2.notification_type = 'email'
user_3 = mail_new_test_user(
self.env, login='user3', email='<EMAIL>', groups='base.group_user',
company_id=company_2.id, company_ids=[(6, 0, company_2.ids)],
name='user3', notification_type='inbox')
user_4 = mail_new_test_user(
self.env, login='user4', email='<EMAIL>', groups='base.group_user',
company_id=company_2.id, company_ids=[(6, 0, company_2.ids)],
name='user4', notification_type='email')
partner_without_user = self.env['res.partner'].create({
'name': 'Partner',
'email': '<EMAIL>',
})
mail_channel = self.env['mail.channel'].with_user(user_1).create({
'name': 'Channel',
'channel_partner_ids': [
(4, user_1.partner_id.id),
(4, user_2.partner_id.id),
(4, user_3.partner_id.id),
(4, user_4.partner_id.id),
(4, partner_without_user.id),
],
'email_send': True,
})
mail_channel.invalidate_cache()
(user_1 | user_2 | user_3 | user_4).invalidate_cache()
with self.mock_mail_gateway():
mail_channel.with_user(user_1).with_company(company_1).message_post(
body='Test body message 1337',
channel_ids=mail_channel.ids,
)
self.assertSentEmail(user_1.partner_id, [user_2.partner_id])
self.assertSentEmail(user_1.partner_id, [user_4.partner_id])
self.assertEqual(len(self._mails), 3, 'Should have send only 3 emails to user 2, user 4 and the partner')
self.assertBusNotifications([(self.cr.dbname, 'mail.channel', mail_channel.id)])
# Should not create mail notifications for user 1 & 3
self.assertFalse(self.env['mail.notification'].search([('res_partner_id', '=', user_1.partner_id.id)]))
self.assertFalse(self.env['mail.notification'].search([('res_partner_id', '=', user_3.partner_id.id)]))
# Should create mail notifications for user 2 & 4
self.assertTrue(self.env['mail.notification'].search([('res_partner_id', '=', user_2.partner_id.id)]))
self.assertTrue(self.env['mail.notification'].search([('res_partner_id', '=', user_4.partner_id.id)]))
# Check that we did not send a "channel_seen" notifications
# for the users which receive the notifications by email
notification_seen_user_2 = self.env['bus.bus'].search([('create_uid', '=', user_2.id)])
self.assertFalse(notification_seen_user_2, 'Should not have sent a notification as user 2')
notification_seen_user_4 = self.env['bus.bus'].search([('create_uid', '=', user_4.id)])
self.assertFalse(notification_seen_user_4, 'Should not have sent a notification as user 4')
@tagged('moderation', 'mail_channel')
class TestChannelModeration(TestMailCommon):
@classmethod
def setUpClass(cls):
super(TestChannelModeration, cls).setUpClass()
cls.channel_1 = cls.env['mail.channel'].create({
'name': 'Moderation_1',
'email_send': True,
'moderation': True,
'channel_partner_ids': [(4, cls.partner_employee.id)],
'moderator_ids': [(4, cls.user_employee.id)],
})
# ensure initial data
cls.user_employee_2 = mail_new_test_user(
cls.env, login='employee2', groups='base.group_user', company_id=cls.company_admin.id,
name='<NAME>', notification_type='inbox', signature='--\nEnguerrand'
)
cls.partner_employee_2 = cls.user_employee_2.partner_id
cls.user_portal = cls._create_portal_user()
def test_moderator_consistency(self):
# moderators should be channel members
with self.assertRaises(ValidationError):
self.channel_1.write({'moderator_ids': [(4, self.user_admin.id)]})
        # member -> moderator ok
self.channel_1.write({'channel_partner_ids': [(4, self.partner_admin.id)]})
self.channel_1.write({'moderator_ids': [(4, self.user_admin.id)]})
# member -> moderator ko if no email
self.channel_1.write({'moderator_ids': [(3, self.partner_admin.id)]})
self.user_admin.write({'email': False})
with self.assertRaises(ValidationError):
self.channel_1.write({'moderator_ids': [(4, self.user_admin.id)]})
def test_moderation_consistency(self):
# moderation enabled channels are restricted to mailing lists
with self.assertRaises(ValidationError):
self.channel_1.write({'email_send': False})
# moderation enabled channels should always have moderators
with self.assertRaises(ValidationError):
self.channel_1.write({'moderator_ids': [(5, 0)]})
def test_moderation_count(self):
self.assertEqual(self.channel_1.moderation_count, 0)
self.channel_1.write({'moderation_ids': [
(0, 0, {'email': '<EMAIL>', 'status': 'allow'}),
(0, 0, {'email': '<EMAIL>', 'status': 'ban'})
]})
self.assertEqual(self.channel_1.moderation_count, 2)
@mute_logger('odoo.addons.mail.models.mail_channel', 'odoo.models.unlink')
def test_send_guidelines(self):
self.channel_1.write({'channel_partner_ids': [(4, self.partner_portal.id), (4, self.partner_admin.id)]})
self.channel_1._update_moderation_email([self.partner_admin.email], 'ban')
with self.mock_mail_gateway():
self.channel_1.with_user(self.user_employee).send_guidelines()
for mail in self._new_mails:
self.assertEqual(mail.author_id, self.partner_employee)
self.assertEqual(mail.subject, 'Guidelines of channel %s' % self.channel_1.name)
self.assertEqual(mail.state, 'outgoing')
self.assertEqual(mail.email_from, self.user_employee.company_id.catchall_formatted)
self.assertEqual(self._new_mails.mapped('recipient_ids'), self.partner_employee | self.partner_portal)
def test_send_guidelines_crash(self):
self.channel_1.write({
'channel_partner_ids': [(4, self.partner_admin.id)],
'moderator_ids': [(4, self.user_admin.id), (3, self.user_employee.id)]
})
with self.assertRaises(UserError):
self.channel_1.with_user(self.user_employee).send_guidelines()
def test_update_moderation_email(self):
self.channel_1.write({'moderation_ids': [
(0, 0, {'email': '<EMAIL>', 'status': 'allow'}),
(0, 0, {'email': '<EMAIL>', 'status': 'ban'})
]})
self.channel_1._update_moderation_email(['<EMAIL>', '<EMAIL>'], 'ban')
self.assertEqual(len(self.channel_1.moderation_ids), 3)
self.assertTrue(all(status == 'ban' for status in self.channel_1.moderation_ids.mapped('status')))
def test_moderation_reset(self):
self.channel_2 = self.env['mail.channel'].create({
'name': 'Moderation_1',
'email_send': True,
'moderation': True,
'channel_partner_ids': [(4, self.partner_employee.id)],
'moderator_ids': [(4, self.user_employee.id)],
})
self.msg_c1_1 = self._add_messages(self.channel_1, 'Body11', author=self.partner_admin, moderation_status='accepted')
self.msg_c1_2 = self._add_messages(self.channel_1, 'Body12', author=self.partner_admin, moderation_status='pending_moderation')
self.msg_c2_1 = self._add_messages(self.channel_2, 'Body21', author=self.partner_admin, moderation_status='pending_moderation')
self.assertEqual(self.env['mail.message'].search_count([
('moderation_status', '=', 'pending_moderation'),
('model', '=', 'mail.channel'), ('res_id', '=', self.channel_1.id)
]), 1)
self.channel_1.write({'moderation': False})
self.assertEqual(self.env['mail.message'].search_count([
('moderation_status', '=', 'pending_moderation'),
('model', '=', 'mail.channel'), ('res_id', '=', self.channel_1.id)
]), 0)
self.assertEqual(self.env['mail.message'].search_count([
('moderation_status', '=', 'pending_moderation'),
('model', '=', 'mail.channel'), ('res_id', '=', self.channel_2.id)
]), 1)
self.channel_2.write({'moderation': False})
self.assertEqual(self.env['mail.message'].search_count([
('moderation_status', '=', 'pending_moderation'),
('model', '=', 'mail.channel'), ('res_id', '=', self.channel_2.id)
]), 0)
@mute_logger('odoo.models.unlink')
def test_message_post(self):
email1 = '<EMAIL>'
email2 = '<EMAIL>'
self.channel_1._update_moderation_email([email1], 'ban')
self.channel_1._update_moderation_email([email2], 'allow')
msg_admin = self.channel_1.message_post(message_type='email', subtype_xmlid='mail.mt_comment', author_id=self.partner_admin.id)
msg_moderator = self.channel_1.message_post(message_type='comment', subtype_xmlid='mail.mt_comment', author_id=self.partner_employee.id)
msg_email1 = self.channel_1.message_post(message_type='comment', subtype_xmlid='mail.mt_comment', email_from=formataddr(("MyName", email1)))
msg_email2 = self.channel_1.message_post(message_type='email', subtype_xmlid='mail.mt_comment', email_from=email2)
msg_notif = self.channel_1.message_post()
messages = self.env['mail.message'].search([('model', '=', 'mail.channel'), ('res_id', '=', self.channel_1.id)])
pending_messages = messages.filtered(lambda m: m.moderation_status == 'pending_moderation')
accepted_messages = messages.filtered(lambda m: m.moderation_status == 'accepted')
self.assertFalse(msg_email1)
self.assertEqual(msg_admin, pending_messages)
self.assertEqual(accepted_messages, msg_moderator | msg_email2 | msg_notif)
self.assertFalse(msg_admin.channel_ids)
self.assertEqual(msg_email2.channel_ids, self.channel_1)
def test_user_is_moderator(self):
self.assertTrue(self.user_employee.is_moderator)
self.assertFalse(self.user_employee_2.is_moderator)
self.channel_1.write({
'channel_partner_ids': [(4, self.partner_employee_2.id)],
'moderator_ids': [(4, self.user_employee_2.id)],
})
self.assertTrue(self.user_employee_2.is_moderator)
def test_user_moderation_counter(self):
self._add_messages(self.channel_1, 'B', moderation_status='pending_moderation', author=self.partner_employee_2)
self._add_messages(self.channel_1, 'B', moderation_status='accepted', author=self.partner_employee_2)
self._add_messages(self.channel_1, 'B', moderation_status='accepted', author=self.partner_employee)
self._add_messages(self.channel_1, 'B', moderation_status='pending_moderation', author=self.partner_employee)
self._add_messages(self.channel_1, 'B', moderation_status='accepted', author=self.partner_employee)
self.assertEqual(self.user_employee.moderation_counter, 2)
self.assertEqual(self.user_employee_2.moderation_counter, 0)
self.channel_1.write({
'channel_partner_ids': [(4, self.partner_employee_2.id)],
'moderator_ids': [(4, self.user_employee_2.id)]
})
self.assertEqual(self.user_employee.moderation_counter, 2)
self.assertEqual(self.user_employee_2.moderation_counter, 0)
| [
"odoo.tests.tagged",
"odoo.tools.mute_logger",
"odoo.tools.formataddr",
"odoo.addons.mail.tests.common.mail_new_test_user"
] | [((377, 399), 'odoo.tests.tagged', 'tagged', (['"""mail_channel"""'], {}), "('mail_channel')\n", (383, 399), False, 'from odoo.tests import tagged, Form\n'), ((4788, 4810), 'odoo.tests.tagged', 'tagged', (['"""mail_channel"""'], {}), "('mail_channel')\n", (4794, 4810), False, 'from odoo.tests import tagged, Form\n'), ((21155, 21191), 'odoo.tests.tagged', 'tagged', (['"""moderation"""', '"""mail_channel"""'], {}), "('moderation', 'mail_channel')\n", (21161, 21191), False, 'from odoo.tests import tagged, Form\n'), ((1374, 1436), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.base.models.ir_model"""', '"""odoo.models"""'], {}), "('odoo.addons.base.models.ir_model', 'odoo.models')\n", (1385, 1436), False, 'from odoo.tools import mute_logger, formataddr\n'), ((2476, 2564), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.base.models.ir_model"""', '"""odoo.models"""', '"""odoo.models.unlink"""'], {}), "('odoo.addons.base.models.ir_model', 'odoo.models',\n 'odoo.models.unlink')\n", (2487, 2564), False, 'from odoo.tools import mute_logger, formataddr\n'), ((3436, 3498), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.base.models.ir_model"""', '"""odoo.models"""'], {}), "('odoo.addons.base.models.ir_model', 'odoo.models')\n", (3447, 3498), False, 'from odoo.tools import mute_logger, formataddr\n'), ((6977, 7047), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.mail.models.mail_mail"""', '"""odoo.models.unlink"""'], {}), "('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')\n", (6988, 7047), False, 'from odoo.tools import mute_logger, formataddr\n'), ((8061, 8131), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.mail.models.mail_mail"""', '"""odoo.models.unlink"""'], {}), "('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')\n", (8072, 8131), False, 'from odoo.tools import mute_logger, formataddr\n'), ((8722, 8792), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.mail.models.mail_mail"""', '"""odoo.models.unlink"""'], {}), "('odoo.addons.mail.models.mail_mail', 'odoo.models.unlink')\n", (8733, 8792), False, 'from odoo.tools import mute_logger, formataddr\n'), ((12977, 13010), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.models.unlink"""'], {}), "('odoo.models.unlink')\n", (12988, 13010), False, 'from odoo.tools import mute_logger, formataddr\n'), ((23474, 23547), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.addons.mail.models.mail_channel"""', '"""odoo.models.unlink"""'], {}), "('odoo.addons.mail.models.mail_channel', 'odoo.models.unlink')\n", (23485, 23547), False, 'from odoo.tools import mute_logger, formataddr\n'), ((26908, 26941), 'odoo.tools.mute_logger', 'mute_logger', (['"""odoo.models.unlink"""'], {}), "('odoo.models.unlink')\n", (26919, 26941), False, 'from odoo.tools import mute_logger, formataddr\n'), ((648, 737), 'odoo.addons.mail.tests.common.mail_new_test_user', 'mail_new_test_user', (['cls.env'], {'login': '"""bert"""', 'groups': '"""base.group_public"""', 'name': '"""<NAME>"""'}), "(cls.env, login='bert', groups='base.group_public', name=\n '<NAME>')\n", (666, 737), False, 'from odoo.addons.mail.tests.common import mail_new_test_user\n'), ((759, 849), 'odoo.addons.mail.tests.common.mail_new_test_user', 'mail_new_test_user', (['cls.env'], {'login': '"""chell"""', 'groups': '"""base.group_portal"""', 'name': '"""<NAME>"""'}), "(cls.env, login='chell', groups='base.group_portal', name\n ='<NAME>')\n", (777, 849), False, 'from odoo.addons.mail.tests.common import mail_new_test_user\n'), 
((18353, 18551), 'odoo.addons.mail.tests.common.mail_new_test_user', 'mail_new_test_user', (['self.env'], {'login': '"""user3"""', 'email': '"""<EMAIL>"""', 'groups': '"""base.group_user"""', 'company_id': 'company_2.id', 'company_ids': '[(6, 0, company_2.ids)]', 'name': '"""user3"""', 'notification_type': '"""inbox"""'}), "(self.env, login='user3', email='<EMAIL>', groups=\n 'base.group_user', company_id=company_2.id, company_ids=[(6, 0,\n company_2.ids)], name='user3', notification_type='inbox')\n", (18371, 18551), False, 'from odoo.addons.mail.tests.common import mail_new_test_user\n'), ((18598, 18796), 'odoo.addons.mail.tests.common.mail_new_test_user', 'mail_new_test_user', (['self.env'], {'login': '"""user4"""', 'email': '"""<EMAIL>"""', 'groups': '"""base.group_user"""', 'company_id': 'company_2.id', 'company_ids': '[(6, 0, company_2.ids)]', 'name': '"""user4"""', 'notification_type': '"""email"""'}), "(self.env, login='user4', email='<EMAIL>', groups=\n 'base.group_user', company_id=company_2.id, company_ids=[(6, 0,\n company_2.ids)], name='user4', notification_type='email')\n", (18616, 18796), False, 'from odoo.addons.mail.tests.common import mail_new_test_user\n'), ((21690, 21874), 'odoo.addons.mail.tests.common.mail_new_test_user', 'mail_new_test_user', (['cls.env'], {'login': '"""employee2"""', 'groups': '"""base.group_user"""', 'company_id': 'cls.company_admin.id', 'name': '"""<NAME>"""', 'notification_type': '"""inbox"""', 'signature': '"""--\nEnguerrand"""'}), "(cls.env, login='employee2', groups='base.group_user',\n company_id=cls.company_admin.id, name='<NAME>', notification_type=\n 'inbox', signature='--\\nEnguerrand')\n", (21708, 21874), False, 'from odoo.addons.mail.tests.common import mail_new_test_user\n'), ((27561, 27591), 'odoo.tools.formataddr', 'formataddr', (["('MyName', email1)"], {}), "(('MyName', email1))\n", (27571, 27591), False, 'from odoo.tools import mute_logger, formataddr\n')] |
"""
awschecker.py
Inspects an AWS account for ACM certificates.
Reports on invalid certificates, pending certificates, and certificates with
certificate transparency enabled.
"""
import logging
import logging.config
from os import path
import constants
def log_path():
"""Gets the OS and environment independent path to the
logger configuration file."""
log_file_path = path.join(path.dirname(
path.abspath(__file__)), constants.LOGCONFIG)
return log_file_path
logging.config.fileConfig(log_path(),
disable_existing_loggers=False)
LOGGER = logging.getLogger(__name__)
from awschecker import ec2instances
from awschecker import certs
from awschecker.decorator_logging import logged
@logged(logging.DEBUG)
def main():
"""Applicationentry point."""
ec2instances.check_items()
certs.check_items()
logging.shutdown()
if __name__ == "__main__":
main()
| [
"logging.getLogger",
"logging.shutdown",
"awschecker.decorator_logging.logged",
"awschecker.certs.check_items",
"os.path.abspath",
"awschecker.ec2instances.check_items"
] | [((575, 602), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (592, 602), False, 'import logging\n'), ((721, 742), 'awschecker.decorator_logging.logged', 'logged', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (727, 742), False, 'from awschecker.decorator_logging import logged\n'), ((794, 820), 'awschecker.ec2instances.check_items', 'ec2instances.check_items', ([], {}), '()\n', (818, 820), False, 'from awschecker import ec2instances\n'), ((825, 844), 'awschecker.certs.check_items', 'certs.check_items', ([], {}), '()\n', (842, 844), False, 'from awschecker import certs\n'), ((850, 868), 'logging.shutdown', 'logging.shutdown', ([], {}), '()\n', (866, 868), False, 'import logging\n'), ((418, 440), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (430, 440), False, 'from os import path\n')] |
#! /usr/bin/env python
# The line above tells some systems (e.g. Linux/Apple shells) what program to
# use to execute this script.
##############################################################################
# You don't need to understand most of this yet- you can just skip to the #
# large comment section below if this is all a bit daunting! #
##############################################################################
# Import the libraries we need
import sys
from music21 import (
environment,
metadata,
note,
stream,
)
# Tell music21 what to use to play midi and display score
environment.set('midiPath', '/usr/bin/timidity')
environment.set('musicxmlPath', '/usr/bin/musescore')
##############################################################################
# LESSON 2 STARTS HERE #
#############################################################################
# This is our first lesson, so play a scale with our right hand
# Let's play the first bar, in the default 4/4 time
bar1_right = stream.Measure()
# Now we need to add the notes to it
for next_note in ('c4', 'd4', 'e4', 'f4', 'g4', 'a4', 'b4', 'c5'):
bar1_right.append(note.Note(
next_note,
quarterLength=0.5,
))
# You can try putting in the bars to play back down to middle C here.
# Try to make them be faster, or slower.
# Don't forget to add them to the right hand's bars below!
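# A possible solution sketch (an assumption, not part of the original lesson):
# a second bar that walks back down to middle C. Change quarterLength to make
# it faster or slower, and remember to add the bar to the right hand's part
# below with right_hand.append(bar2_right).
bar2_right = stream.Measure()
for next_note in ('c5', 'b4', 'a4', 'g4', 'f4', 'e4', 'd4', 'c4'):
    bar2_right.append(note.Note(
        next_note,
        quarterLength=0.5,
    ))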
# Add both of these bars to the right hand's part
right_hand = stream.Part()
right_hand.append(bar1_right)
# Add the right hand to the score
tune = stream.Score(right_hand)
# Add a title
tune.metadata = metadata.Metadata(title='Python TTTGLS: Lesson 2')
##########################################################################
# LESSON 2 ENDS HERE #
##########################################################################
# Only run this if the script is executed directly, not imported
if __name__ == '__main__':
# Complain if there were no arguments passed by the user
if len(sys.argv) == 1:
# First, print a helpful message
print('add a "score" argument to see the score.')
print('add a "text" argument to see the python objects.')
print('add a "midi" argument to hear it.')
print('e.g. To hear the tune: {command} midi'.format(
command=sys.argv[0],
))
# Now exit without doing anything
sys.exit()
# See if the user put the word 'midi' in the arguments
if 'midi' in sys.argv:
# The stream.Score (tune) object knows how to play itself using the
# environment set midi player and will do so when its show method is
# called with a 'midi' argument.
tune.show('midi')
# See if the user put the word 'text' in the arguments
if 'text' in sys.argv:
# The stream.Score (tune) object knows how to display itself as python
# objects in text, and will do so when its show method is called with
# a 'text' argument.
tune.show('text')
# See if the user put the word 'score' in the arguments
if 'score' in sys.argv:
# The stream.Score (tune) object knows how to display itself as
# musical score, and will do so by default when its show method is
# called with no arguments.
tune.show()
| [
"music21.stream.Measure",
"music21.metadata.Metadata",
"music21.note.Note",
"music21.stream.Score",
"music21.environment.set",
"music21.stream.Part",
"sys.exit"
] | [((631, 679), 'music21.environment.set', 'environment.set', (['"""midiPath"""', '"""/usr/bin/timidity"""'], {}), "('midiPath', '/usr/bin/timidity')\n", (646, 679), False, 'from music21 import environment, metadata, note, stream\n'), ((680, 733), 'music21.environment.set', 'environment.set', (['"""musicxmlPath"""', '"""/usr/bin/musescore"""'], {}), "('musicxmlPath', '/usr/bin/musescore')\n", (695, 733), False, 'from music21 import environment, metadata, note, stream\n'), ((1102, 1118), 'music21.stream.Measure', 'stream.Measure', ([], {}), '()\n', (1116, 1118), False, 'from music21 import environment, metadata, note, stream\n'), ((1548, 1561), 'music21.stream.Part', 'stream.Part', ([], {}), '()\n', (1559, 1561), False, 'from music21 import environment, metadata, note, stream\n'), ((1634, 1658), 'music21.stream.Score', 'stream.Score', (['right_hand'], {}), '(right_hand)\n', (1646, 1658), False, 'from music21 import environment, metadata, note, stream\n'), ((1690, 1740), 'music21.metadata.Metadata', 'metadata.Metadata', ([], {'title': '"""Python TTTGLS: Lesson 2"""'}), "(title='Python TTTGLS: Lesson 2')\n", (1707, 1740), False, 'from music21 import environment, metadata, note, stream\n'), ((1245, 1284), 'music21.note.Note', 'note.Note', (['next_note'], {'quarterLength': '(0.5)'}), '(next_note, quarterLength=0.5)\n', (1254, 1284), False, 'from music21 import environment, metadata, note, stream\n'), ((2524, 2534), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2532, 2534), False, 'import sys\n')] |
import numpy as np
from matplotlib import pyplot as plt
import csv
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
#from PyQt5.QtWebEngineWidgets import QWebEngineView
from PyQt5.QtPrintSupport import *
import time
def graph(a):
none=[]
name=[]
rollno=[]
mob=[]
email=[]
gender=[]
hostelite=[]
ut1dbms=[]
ut1sepm=[]
ut1cn=[]
ut1isee=[]
ut1toc=[]
ut2dbms=[]
ut2sepm=[]
ut2cn=[]
ut2isee=[]
ut2toc=[]
with open('result.csv','r') as csvfile:
plots=csv.reader(csvfile,delimiter=',')
for row in plots:
none.append(row[0])
name.append(row[1])
rollno.append(row[2])
mob.append(row[3])
email.append(row[4])
gender.append(row[5])
hostelite.append(row[6])
ut1dbms.append(row[7])
ut1sepm.append(row[8])
ut1cn.append(row[9])
ut1isee.append(row[10])
ut1toc.append(row[11])
ut2dbms.append(row[12])
ut2sepm.append(row[13])
ut2cn.append(row[14])
ut2isee.append(row[15])
ut2toc.append(row[16])
if a=="DBMS":
x=ut1dbms
y=ut2dbms
elif a=="SEPM":
x=ut1sepm
y=ut2sepm
elif a=="CN":
x=ut1cn
y=ut2cn
elif a=="TOC":
x=ut1toc
y=ut2toc
elif a=="ISEE":
x=ut1isee
y=ut2isee
w=0.3
plt.ion()
index=np.arange(len(name))
plt.scatter(rollno,x,label="UNIT TEST MARKS 1",color="blue",marker="*",s=150)
plt.scatter(rollno,y,label="UNIT TEST MARKS 2",color="red",marker="*",s=150)
minut1dbms=min(ut1dbms)
minut1toc=min(ut1toc)
minut1isee=min(ut1isee)
minut1cn=min(ut1cn)
minut1sepm=min(ut1sepm)
minut2dbms=min(ut2dbms)
plt.show()
'''
time.sleep(5)
plt.close("all")
'''
#graph("TOC")
def pie_chart (self) :
none=[]
name=[]
rollno=[]
mob=[]
email=[]
gender=[]
hostelite=[]
ut1dbms=[]
ut1sepm=[]
ut1cn=[]
ut1isee=[]
ut1toc=[]
ut2dbms=[]
ut2sepm=[]
ut2cn=[]
ut2isee=[]
ut2toc=[]
with open('result.csv','r') as csvfile:
plots=csv.reader(csvfile,delimiter=',')
for row in plots:
none.append(row[0])
name.append(row[1])
rollno.append(row[2])
mob.append(row[3])
email.append(row[4])
gender.append(row[5])
hostelite.append(row[6])
ut1dbms.append(row[7])
ut1sepm.append(row[8])
ut1cn.append(row[9])
ut1isee.append(row[10])
ut1toc.append(row[11])
ut2dbms.append(row[12])
ut2sepm.append(row[13])
ut2cn.append(row[14])
ut2isee.append(row[15])
ut2toc.append(row[16])
count_male=0
count_female=0
for i in gender :
if i=="male": count_male+=1
else : count_female+=1
slices=[count_male,count_female]
color = ['cyan', 'pink']#, 'g', 'b']
l=['male','female']
plt.pie(slices,labels=l,colors=color,startangle=90,autopct='%.1f%%', shadow = True)
plt.title("GENDER DISTRIBUTION")
plt.legend()
plt.show()
def host (self) :
none=[]
name=[]
rollno=[]
mob=[]
email=[]
gender=[]
hostelite=[]
ut1dbms=[]
ut1sepm=[]
ut1cn=[]
ut1isee=[]
ut1toc=[]
ut2dbms=[]
ut2sepm=[]
ut2cn=[]
ut2isee=[]
ut2toc=[]
with open('result.csv','r') as csvfile:
plots=csv.reader(csvfile,delimiter=',')
for row in plots:
none.append(row[0])
name.append(row[1])
rollno.append(row[2])
mob.append(row[3])
email.append(row[4])
gender.append(row[5])
hostelite.append(row[6])
ut1dbms.append(row[7])
ut1sepm.append(row[8])
ut1cn.append(row[9])
ut1isee.append(row[10])
ut1toc.append(row[11])
ut2dbms.append(row[12])
ut2sepm.append(row[13])
ut2cn.append(row[14])
ut2isee.append(row[15])
ut2toc.append(row[16])
count_yes=0
count_no=0
for i in hostelite :
if i=="yes": count_yes+=1
else : count_no+=1
slices=[count_yes,count_no]
color = ['blue', 'green']#, 'g', 'b']
l=['hostelite','not hostelite']
plt.pie(slices,labels=l,colors=color,startangle=90,autopct='%.1f%%', shadow = True)
plt.title("HOSTELITE STATUS ")
plt.legend()
plt.show()
| [
"matplotlib.pyplot.pie",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.title",
"csv.reader",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((1221, 1230), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (1228, 1230), True, 'from matplotlib import pyplot as plt\n'), ((1260, 1346), 'matplotlib.pyplot.scatter', 'plt.scatter', (['rollno', 'x'], {'label': '"""UNIT TEST MARKS 1"""', 'color': '"""blue"""', 'marker': '"""*"""', 's': '(150)'}), "(rollno, x, label='UNIT TEST MARKS 1', color='blue', marker='*',\n s=150)\n", (1271, 1346), True, 'from matplotlib import pyplot as plt\n'), ((1339, 1424), 'matplotlib.pyplot.scatter', 'plt.scatter', (['rollno', 'y'], {'label': '"""UNIT TEST MARKS 2"""', 'color': '"""red"""', 'marker': '"""*"""', 's': '(150)'}), "(rollno, y, label='UNIT TEST MARKS 2', color='red', marker='*',\n s=150)\n", (1350, 1424), True, 'from matplotlib import pyplot as plt\n'), ((1561, 1571), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1569, 1571), True, 'from matplotlib import pyplot as plt\n'), ((2596, 2685), 'matplotlib.pyplot.pie', 'plt.pie', (['slices'], {'labels': 'l', 'colors': 'color', 'startangle': '(90)', 'autopct': '"""%.1f%%"""', 'shadow': '(True)'}), "(slices, labels=l, colors=color, startangle=90, autopct='%.1f%%',\n shadow=True)\n", (2603, 2685), True, 'from matplotlib import pyplot as plt\n'), ((2681, 2713), 'matplotlib.pyplot.title', 'plt.title', (['"""GENDER DISTRIBUTION"""'], {}), "('GENDER DISTRIBUTION')\n", (2690, 2713), True, 'from matplotlib import pyplot as plt\n'), ((2715, 2727), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2725, 2727), True, 'from matplotlib import pyplot as plt\n'), ((2730, 2740), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2738, 2740), True, 'from matplotlib import pyplot as plt\n'), ((3699, 3788), 'matplotlib.pyplot.pie', 'plt.pie', (['slices'], {'labels': 'l', 'colors': 'color', 'startangle': '(90)', 'autopct': '"""%.1f%%"""', 'shadow': '(True)'}), "(slices, labels=l, colors=color, startangle=90, autopct='%.1f%%',\n shadow=True)\n", (3706, 3788), True, 'from matplotlib import pyplot as plt\n'), ((3784, 3814), 'matplotlib.pyplot.title', 'plt.title', (['"""HOSTELITE STATUS """'], {}), "('HOSTELITE STATUS ')\n", (3793, 3814), True, 'from matplotlib import pyplot as plt\n'), ((3816, 3828), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3826, 3828), True, 'from matplotlib import pyplot as plt\n'), ((3831, 3841), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3839, 3841), True, 'from matplotlib import pyplot as plt\n'), ((511, 545), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (521, 545), False, 'import csv\n'), ((1899, 1933), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (1909, 1933), False, 'import csv\n'), ((3002, 3036), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (3012, 3036), False, 'import csv\n')] |
from pathlib import Path
import numpy as np
import matplotlib.pyplot as plt
from ._colors import *
from ._Profiles import Profiles
from ._Fitting import HSW_Fitting
class CVA(HSW_Fitting):
def __init__(self, InputFile, **kwargs):
"""
Class for carrying out a first Cosmic Voids Analysis (CVA).
        The analysis can be carried out for several cosmologies at the same time.
Parameters:
-----------
InputFile : str
Path to the catalog directory
Other parameters:
-----------------
**kwargs
Omega_M_array : 1-D float array
            bins : 1-D array
            ranges : 1-D array
Catalog_Omega_M : float
fast : boolean
new : boolean
"""
print(f"{col.HEADER2}**************************************************{col.END}")
print(f"{col.HEADER2}Initializing Comic Voids Analysis{col.END}")
self.InputFile = InputFile
self.CreateDirectories(self.InputFile)
self.Omega_M_array = kwargs.get('Omega_M_array', None)
if np.any(self.Omega_M_array == None):
raise InputError('No Omega_M_array inserted')
self.ranges = kwargs.get('ranges', None)
if np.any(self.ranges == None):
self.ranges = np.arange(40,161,20)
print(f'{col.NOTICE}Standard voids radii ranges used!{col.END}')
self.bins = kwargs.get('bins', None)
if np.any(self.bins == None):
self.bins = np.append(0,np.cumsum(np.flip(np.diff(np.logspace(0,np.log10(4),41)-1))))
print(f'{col.NOTICE}Standard bins values used!{col.END}')
if kwargs.get('Omega_M',None) != None:
raise InputError("Don't use Omega_M as kwargs but Omega_M_array")
print(f"{col.HEADER2}**************************************************{col.END}")
def profiles_analysis(self, **kwargs):
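        # For each Omega_M value, obtain the stacked void density profiles (from the HDF5 cache or a fresh computation) and collect them into the *_tot arrays.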
print(f"{col.HEADER2}**************************************************{col.END}")
print(f"{col.HEADER2}Analysis:{col.END}")
self.compare_same = kwargs.get('compare_same',False)
estimator=kwargs.get('estimator', '')
n_Om = np.size(self.Omega_M_array)
n_r = np.size(self.ranges[:-1])
n_b = np.size(self.bins)
# Initialize profiles_tot
self.profiles_tot = np.zeros((n_Om*n_r,n_b-1))
self.profiles_errors_tot = np.zeros((n_Om*n_r,n_b-1))
self.profiles_bins_tot = np.zeros((n_Om*n_r,n_b))
self.studied_ranges = '_R'+str(int(n_r))+'_'+str(int(self.ranges[0]))+'_'+str(int(self.ranges[1]-self.ranges[0]))
for i, Om in enumerate(self.Omega_M_array):
self.add_M = '_M' + str(Om).split('.')[-1]
print(f"{col.NOTICE}--------------------------------------------------{col.END}")
print(f"{col.NOTICE}Omega_M: {Om}{col.END}")
if self.compare_same:
text = r"\Omega_M"
print(f"{col.NOTICE}Binning voids' radii for {text} = 0.270{col.END}")
nameFile_Profiles = self.folder_profiles+'/Profile_sameR'+estimator+self.studied_ranges+self.add_M+'.h5'
else:
nameFile_Profiles = self.folder_profiles+'/Profile'+estimator+self.studied_ranges+self.add_M+'.h5'
# Retrieve profiles or calculate it
new_computation = True
if Path(nameFile_Profiles).is_file():
data = self.Upload(nameFile_Profiles)
if np.all(data['profiles']['bins'] == self.bins):
print(f'{col.NOTICE}Retrieving Profiles: {col.END}', nameFile_Profiles)
catalog, voids, tracers = self.LoadCatalog(self.InputFile, Omega_M = Om, **kwargs)
self.profiles = data['profiles']['profiles']
self.profiles_errors = data['profiles']['profiles_errors']
self.profiles_bins = data['profiles']['profiles_bins']
new_computation = False
new_computation = True
if new_computation:
catalog, voids, tracers = self.LoadCatalog(self.InputFile, Omega_M = Om, **kwargs)
# fsky key in MeanDensity to consider masked part
self.MeanDensity()
self.CreateRandoms()
#self.DistanceVoidsTracers()
#self.StackVoids(self.ranges, bins=self.bins, compare_same= self.compare_same)
#self.randoms = self.tracers
self.CorrelationVoidsTracersRandoms()
data_temp = {
'bins':self.bins,
'profiles':self.profiles,
'profiles_errors':self.profiles_errors,
'profiles_bins':self.profiles_bins
}
self.Save(nameFile_Profiles, profiles=data_temp)
ii=0
for j in np.arange(i*n_r, i*n_r+n_r, 1):
self.profiles_tot[j] = self.profiles[ii]
self.profiles_errors_tot[j] = self.profiles_errors[ii]
self.profiles_bins_tot[j] = self.profiles_bins[ii]
ii += 1
print(f"{col.NOTICE}--------------------------------------------------{col.END}")
print(f"{col.HEADER2}**************************************************{col.END}")
def profiles_plot(self, xmax=3.0, estimator=''):
print(f"{col.HEADER2}**************************************************{col.END}")
print(f"{col.HEADER2}Plotting:{col.END}")
if self.compare_same:
nameFile_ProfilesPlot = self.folder_results+'/PlotProfiles_sameR'+estimator+self.studied_ranges+self.add_M+'.pdf'
else:
nameFile_ProfilesPlot = self.folder_results+'/PlotProfiles'+estimator+self.studied_ranges+self.add_M+'.pdf'
n_r = np.size(self.ranges[:-1])
n_Om = np.size(self.Omega_M_array)
styles = ['.','x','v', '^', '+', 'p']
lin_styles = ['-', '-.',':','--']
# Cycling over Omegas and Ranges
for i, Om in enumerate(self.Omega_M_array):
for j in np.arange(n_r):
k = i*n_r+j
profile = self.profiles_tot[k]
profile_errors = self.profiles_errors_tot[k]
x = (self.profiles_bins_tot[k][1:]+self.profiles_bins_tot[k][:-1])/2.
label = 'Radii range: ['+str(self.ranges[j])+','+str(self.ranges[j+1])+') Mpc/h'
if n_Om >1:
label2 = r'$\Omega_M$: '+str(Om)
label = label +'\n'+label2
style = styles[j]
lin_style = lin_styles[j]
shades = col.colors(n_Om)
colo = shades[i]
else:
style = '.'
lin_style = '-'
shades2 = col.colors(n_r)
colo = shades2[j]
plt.plot(x, profile, linewidth=0.2, linestyle=lin_style, color=colo)
plt.errorbar(x, profile, yerr=profile_errors, fmt=style, markersize=2,
color= colo, ecolor=colo, elinewidth=0.2, label=label)
plt.grid()
plt.legend(bbox_to_anchor=(1.05,1), loc='upper left', prop={'size':8})
plt.xlabel(r'$R/R_v$')
plt.ylabel(r'$d(x)/d_{mean}-1$')
plt.xlim(0.0, xmax)
print('Saving on '+self.print_folder(nameFile_ProfilesPlot, last=1, start='')+' ...')
plt.savefig(nameFile_ProfilesPlot, format='pdf', bbox_inches='tight')
print('Done!')
print(f"{col.HEADER2}**************************************************{col.END}")
def profiles_fitting(self,**kwargs):
## Kwargs:
# General:
# method : fit, MCMC, both
# new : default False
# fitting_limits : Radii range in which to fit
# MCMC:
# n_walkers : int (default 64)
# n_iteration : int (default 5000)
method = kwargs.get('method', '')
print(f"{col.HEADER2}**************************************************{col.END}")
print(f"{col.HEADER2}Fitting {method}:{col.END}")
if method == 'fit':
HSW_Fitting.fitting(self, **kwargs)
HSW_Fitting.plot(self, **kwargs)
elif method == 'MCMC':
HSW_Fitting.fitting_MCMC(self, **kwargs)
HSW_Fitting.plot(self, **kwargs)
elif method == 'both':
HSW_Fitting.fitting_MCMC(self, **kwargs)
print(self.parameters)
print(self.parameters_errors)
self.parameters_MCMC = self.parameters
HSW_Fitting.fitting(self, **kwargs)
print(self.parameters)
print(self.parameters_errors)
else:
print(f'{col.FAIL}No method =\"{method}\" found{col.END}')
raise InputError("Wrong fitting method inputted. Chose between: fit, MCMC")
print(f"{col.HEADER2}**************************************************{col.END}")
| [
"numpy.all",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"numpy.log10",
"matplotlib.pyplot.ylabel",
"numpy.arange",
"pathlib.Path",
"numpy.size",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.any",
"numpy.zeros",
"matplotlib.pyplot.errorbar",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.legend"
] | [((1140, 1174), 'numpy.any', 'np.any', (['(self.Omega_M_array == None)'], {}), '(self.Omega_M_array == None)\n', (1146, 1174), True, 'import numpy as np\n'), ((1303, 1330), 'numpy.any', 'np.any', (['(self.ranges == None)'], {}), '(self.ranges == None)\n', (1309, 1330), True, 'import numpy as np\n'), ((1521, 1546), 'numpy.any', 'np.any', (['(self.bins == None)'], {}), '(self.bins == None)\n', (1527, 1546), True, 'import numpy as np\n'), ((2251, 2278), 'numpy.size', 'np.size', (['self.Omega_M_array'], {}), '(self.Omega_M_array)\n', (2258, 2278), True, 'import numpy as np\n'), ((2293, 2318), 'numpy.size', 'np.size', (['self.ranges[:-1]'], {}), '(self.ranges[:-1])\n', (2300, 2318), True, 'import numpy as np\n'), ((2333, 2351), 'numpy.size', 'np.size', (['self.bins'], {}), '(self.bins)\n', (2340, 2351), True, 'import numpy as np\n'), ((2415, 2446), 'numpy.zeros', 'np.zeros', (['(n_Om * n_r, n_b - 1)'], {}), '((n_Om * n_r, n_b - 1))\n', (2423, 2446), True, 'import numpy as np\n'), ((2477, 2508), 'numpy.zeros', 'np.zeros', (['(n_Om * n_r, n_b - 1)'], {}), '((n_Om * n_r, n_b - 1))\n', (2485, 2508), True, 'import numpy as np\n'), ((2537, 2564), 'numpy.zeros', 'np.zeros', (['(n_Om * n_r, n_b)'], {}), '((n_Om * n_r, n_b))\n', (2545, 2564), True, 'import numpy as np\n'), ((5982, 6007), 'numpy.size', 'np.size', (['self.ranges[:-1]'], {}), '(self.ranges[:-1])\n', (5989, 6007), True, 'import numpy as np\n'), ((6023, 6050), 'numpy.size', 'np.size', (['self.Omega_M_array'], {}), '(self.Omega_M_array)\n', (6030, 6050), True, 'import numpy as np\n'), ((7335, 7345), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (7343, 7345), True, 'import matplotlib.pyplot as plt\n'), ((7354, 7426), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'bbox_to_anchor': '(1.05, 1)', 'loc': '"""upper left"""', 'prop': "{'size': 8}"}), "(bbox_to_anchor=(1.05, 1), loc='upper left', prop={'size': 8})\n", (7364, 7426), True, 'import matplotlib.pyplot as plt\n'), ((7433, 7454), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$R/R_v$"""'], {}), "('$R/R_v$')\n", (7443, 7454), True, 'import matplotlib.pyplot as plt\n'), ((7464, 7495), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$d(x)/d_{mean}-1$"""'], {}), "('$d(x)/d_{mean}-1$')\n", (7474, 7495), True, 'import matplotlib.pyplot as plt\n'), ((7510, 7529), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0.0)', 'xmax'], {}), '(0.0, xmax)\n', (7518, 7529), True, 'import matplotlib.pyplot as plt\n'), ((7632, 7701), 'matplotlib.pyplot.savefig', 'plt.savefig', (['nameFile_ProfilesPlot'], {'format': '"""pdf"""', 'bbox_inches': '"""tight"""'}), "(nameFile_ProfilesPlot, format='pdf', bbox_inches='tight')\n", (7643, 7701), True, 'import matplotlib.pyplot as plt\n'), ((1358, 1380), 'numpy.arange', 'np.arange', (['(40)', '(161)', '(20)'], {}), '(40, 161, 20)\n', (1367, 1380), True, 'import numpy as np\n'), ((5039, 5075), 'numpy.arange', 'np.arange', (['(i * n_r)', '(i * n_r + n_r)', '(1)'], {}), '(i * n_r, i * n_r + n_r, 1)\n', (5048, 5075), True, 'import numpy as np\n'), ((6254, 6268), 'numpy.arange', 'np.arange', (['n_r'], {}), '(n_r)\n', (6263, 6268), True, 'import numpy as np\n'), ((3575, 3620), 'numpy.all', 'np.all', (["(data['profiles']['bins'] == self.bins)"], {}), "(data['profiles']['bins'] == self.bins)\n", (3581, 3620), True, 'import numpy as np\n'), ((7075, 7143), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'profile'], {'linewidth': '(0.2)', 'linestyle': 'lin_style', 'color': 'colo'}), '(x, profile, linewidth=0.2, linestyle=lin_style, color=colo)\n', (7083, 7143), True, 'import 
matplotlib.pyplot as plt\n'), ((7160, 7288), 'matplotlib.pyplot.errorbar', 'plt.errorbar', (['x', 'profile'], {'yerr': 'profile_errors', 'fmt': 'style', 'markersize': '(2)', 'color': 'colo', 'ecolor': 'colo', 'elinewidth': '(0.2)', 'label': 'label'}), '(x, profile, yerr=profile_errors, fmt=style, markersize=2,\n color=colo, ecolor=colo, elinewidth=0.2, label=label)\n', (7172, 7288), True, 'import matplotlib.pyplot as plt\n'), ((3467, 3490), 'pathlib.Path', 'Path', (['nameFile_Profiles'], {}), '(nameFile_Profiles)\n', (3471, 3490), False, 'from pathlib import Path\n'), ((1624, 1635), 'numpy.log10', 'np.log10', (['(4)'], {}), '(4)\n', (1632, 1635), True, 'import numpy as np\n')] |
#!/usr/bin/env python
"""
EOSS State Tracker
===================
Maintains the current state of the TUI
including in terms of EOSS configuration
"""
from datui.tui_state_tracker import TuiStateTracker
from dabot.srv import TuiState, TuiStateResponse
from dautils import get_ros_param
import rospy
from std_msgs.msg import String
class EossStateTracker(TuiStateTracker):
"""
"""
NUM_ORBITS = 5
orbits = []
def __init__(self):
self.set_workspace_bounds()
# initialize the parent
TuiStateTracker.__init__(self, node_name="tui_state")
self.get_config_server = rospy.Service("get_config_state", TuiState, self.get_config_state)
self.config_publisher = rospy.Publisher("/tui_state_configs", String, queue_size=1)
self.block_update_subscriber = rospy.Subscriber("/blocks", String, self.publish_config)
self.param_update_subscriber = rospy.Subscriber("/param_update", String, self.handle_param_update)
def set_workspace_bounds(self, bounds=None):
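        # Orbit band edges come either from the caller or from ROS params; the usable vertical span is split into NUM_ORBITS equal bands.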
if bounds is not None:
self.orbits_min = bounds["ORBITS_MIN"]
self.orbits_max = bounds["ORBITS_MAX"]
self.orbits_left = bounds["ORBITS_LEFT"]
self.orbits_right = bounds["ORBITS_RIGHT"]
else:
self.orbits_min = get_ros_param("ORBITS_MIN", "Orbit boundaries must be configured.")
self.orbits_max = get_ros_param("ORBITS_MAX", "Orbit boundaries must be configured.")
self.orbits_left = get_ros_param("ORBITS_LEFT", "Orbit boundaries must be configured.")
self.orbits_right = get_ros_param("ORBITS_RIGHT", "Orbit boundaries must be configured.")
self.orbit_height = (self.orbits_max-self.orbits_min)/self.NUM_ORBITS
self.orbits = []
for i in range(self.NUM_ORBITS):
#lower_bound = self.orbits_max-(i+1)*self.orbit_height
lower_bound = self.orbits_min+i*self.orbit_height
#upper_bound = self.orbits_max-i*self.orbit_height
upper_bound = self.orbits_min+(i+1)*self.orbit_height
self.orbits.append([lower_bound, upper_bound])
def handle_param_update(self, message):
self.set_workspace_bounds()
def get_config_state(self, request):
if request.frame_of_reference == "eoss_config":
return TuiStateResponse(self.blocks2bitstring(self.block_state))
# note that orbits are currently specified in tuio space. needs to be in arm space.
def get_orbit(self, y):
for i, orbit in enumerate(self.orbits):
if y > orbit[0] and y < orbit[1]:
return i
return None
def blocks2bitstring(self, block_arr):
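        # Build a 60-character bit string (5 orbits x 12 instrument ids); a '1' marks an instrument block placed in that orbit.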
raw_btstr = '0'*60
for block in block_arr:
if block["x"] > self.orbits_left or block["x"] < self.orbits_right:
continue
if block["y"] < self.orbits_min or block["y"] > self.orbits_max:
continue
id = block["id"]
orbit = self.get_orbit(block["y"])
if(orbit is not None):
index = 12*orbit + id
raw_btstr = raw_btstr[:index] + '1' + raw_btstr[index+1:] # b/c can't modify string
return raw_btstr
def publish_config(self, message):
# the point of this is to allow us to compare with /configs (which may have local search points as well)
rospy.sleep(0.005)
self.config_publisher.publish(String(self.blocks2bitstring(self.block_state)))
def shutdown_tracker(self):
self.get_config_server.shutdown()
TuiStateTracker.shutdown_tracker(self)
if __name__ == '__main__':
e = EossStateTracker()
rospy.spin()
| [
"datui.tui_state_tracker.TuiStateTracker.__init__",
"datui.tui_state_tracker.TuiStateTracker.shutdown_tracker",
"rospy.Subscriber",
"rospy.Service",
"dautils.get_ros_param",
"rospy.spin",
"rospy.sleep",
"rospy.Publisher"
] | [((3692, 3704), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3702, 3704), False, 'import rospy\n'), ((524, 577), 'datui.tui_state_tracker.TuiStateTracker.__init__', 'TuiStateTracker.__init__', (['self'], {'node_name': '"""tui_state"""'}), "(self, node_name='tui_state')\n", (548, 577), False, 'from datui.tui_state_tracker import TuiStateTracker\n'), ((611, 677), 'rospy.Service', 'rospy.Service', (['"""get_config_state"""', 'TuiState', 'self.get_config_state'], {}), "('get_config_state', TuiState, self.get_config_state)\n", (624, 677), False, 'import rospy\n'), ((710, 769), 'rospy.Publisher', 'rospy.Publisher', (['"""/tui_state_configs"""', 'String'], {'queue_size': '(1)'}), "('/tui_state_configs', String, queue_size=1)\n", (725, 769), False, 'import rospy\n'), ((809, 865), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/blocks"""', 'String', 'self.publish_config'], {}), "('/blocks', String, self.publish_config)\n", (825, 865), False, 'import rospy\n'), ((905, 972), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/param_update"""', 'String', 'self.handle_param_update'], {}), "('/param_update', String, self.handle_param_update)\n", (921, 972), False, 'import rospy\n'), ((3404, 3422), 'rospy.sleep', 'rospy.sleep', (['(0.005)'], {}), '(0.005)\n', (3415, 3422), False, 'import rospy\n'), ((3593, 3631), 'datui.tui_state_tracker.TuiStateTracker.shutdown_tracker', 'TuiStateTracker.shutdown_tracker', (['self'], {}), '(self)\n', (3625, 3631), False, 'from datui.tui_state_tracker import TuiStateTracker\n'), ((1308, 1375), 'dautils.get_ros_param', 'get_ros_param', (['"""ORBITS_MIN"""', '"""Orbit boundaries must be configured."""'], {}), "('ORBITS_MIN', 'Orbit boundaries must be configured.')\n", (1321, 1375), False, 'from dautils import get_ros_param\n'), ((1406, 1473), 'dautils.get_ros_param', 'get_ros_param', (['"""ORBITS_MAX"""', '"""Orbit boundaries must be configured."""'], {}), "('ORBITS_MAX', 'Orbit boundaries must be configured.')\n", (1419, 1473), False, 'from dautils import get_ros_param\n'), ((1505, 1573), 'dautils.get_ros_param', 'get_ros_param', (['"""ORBITS_LEFT"""', '"""Orbit boundaries must be configured."""'], {}), "('ORBITS_LEFT', 'Orbit boundaries must be configured.')\n", (1518, 1573), False, 'from dautils import get_ros_param\n'), ((1606, 1675), 'dautils.get_ros_param', 'get_ros_param', (['"""ORBITS_RIGHT"""', '"""Orbit boundaries must be configured."""'], {}), "('ORBITS_RIGHT', 'Orbit boundaries must be configured.')\n", (1619, 1675), False, 'from dautils import get_ros_param\n')] |
# This file is part of the P3IV Simulator (https://github.com/fzi-forschungszentrum-informatik/P3IV),
# copyright by FZI Forschungszentrum Informatik, licensed under the BSD-3 license (see LICENSE file in main directory)
import numpy as np
from p3iv_modules.interfaces.planning import PlannerInterface
from p3iv_types.motion import MotionPlan, MotionPlans
from p3iv_utils.coordinate_transformation import CoordinateTransform
from p3iv_utils.vehicle_models import get_control_inputs
class Planner(PlannerInterface):
def __init__(self, ego_id, ego_width, ego_length, configurations, *args, **kwargs):
super(Planner, self).__init__(ego_id, ego_width, ego_length, configurations, *args, **kwargs)
self._id = ego_id
self._width = ego_width
self._length = ego_length
self.dt = configurations["temporal"]["dt"] / 1000.0
self.n = configurations["temporal"]["N"]
self.timestamp = 0
# store intermediate stuff for convenience
self._coordinate_transform = None
self._state = None
self._progress = None
def __call__(self, timestamp, state, scene_model, situation_model, decision_base, *args, **kwargs):
PlannerInterface.type_check(timestamp, state, scene_model, situation_model, decision_base, *args, **kwargs)
self.setCurrentTimestamp(timestamp)
self.setDrivingCorridor(decision_base.corridor)
foo = 0.0
self.setMotionState(state, foo)
mp = self.solve()
mps = MotionPlans()
mps.append(mp)
return mps
def setCurrentTimestamp(self, timestamp):
assert isinstance(timestamp, int)
self.timestamp = timestamp
def setDrivingCorridor(self, corridor):
self._corridor_centerline = corridor.center
self._coordinate_transform = CoordinateTransform(self._corridor_centerline)
def setMotionState(self, state, progress):
self._state = state
self._progress = progress
def solve(self, *args, **kwargs):
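        # Constant-speed rollout: advance the longitudinal position along the corridor centerline for n steps of dt, then expand back to Cartesian states.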
current_pos = self._progress
profile = np.array([])
for i in range(self.n):
new_pos = current_pos + self._state.speed * self.dt
profile = np.append(profile, new_pos)
current_pos = new_pos
frenet_l = np.append(self._progress, profile)
xy = self._coordinate_transform.expand(self._state.position.mean, frenet_l, ignore_lateral_offset=True)
mp = MotionPlan()
mp.states(xy, dt=self.dt)
mp.controls.acceleration = np.zeros(self.n + 1)
wheelbase = 0.7 * self._length
mp.controls.steering = get_control_inputs(mp.states.yaw.mean, mp.states.speed, wheelbase, self.dt)[:, 0]
PlannerInterface.overwrite_with_current_state(mp, self._state)
assert len(mp.states) == self.n + 1 # 1-> current state
return mp
| [
"p3iv_modules.interfaces.planning.PlannerInterface.type_check",
"numpy.append",
"numpy.array",
"numpy.zeros",
"p3iv_types.motion.MotionPlans",
"p3iv_types.motion.MotionPlan",
"p3iv_utils.vehicle_models.get_control_inputs",
"p3iv_modules.interfaces.planning.PlannerInterface.overwrite_with_current_state",
"p3iv_utils.coordinate_transformation.CoordinateTransform"
] | [((1200, 1311), 'p3iv_modules.interfaces.planning.PlannerInterface.type_check', 'PlannerInterface.type_check', (['timestamp', 'state', 'scene_model', 'situation_model', 'decision_base', '*args'], {}), '(timestamp, state, scene_model, situation_model,\n decision_base, *args, **kwargs)\n', (1227, 1311), False, 'from p3iv_modules.interfaces.planning import PlannerInterface\n'), ((1508, 1521), 'p3iv_types.motion.MotionPlans', 'MotionPlans', ([], {}), '()\n', (1519, 1521), False, 'from p3iv_types.motion import MotionPlan, MotionPlans\n'), ((1822, 1868), 'p3iv_utils.coordinate_transformation.CoordinateTransform', 'CoordinateTransform', (['self._corridor_centerline'], {}), '(self._corridor_centerline)\n', (1841, 1868), False, 'from p3iv_utils.coordinate_transformation import CoordinateTransform\n'), ((2073, 2085), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (2081, 2085), True, 'import numpy as np\n'), ((2285, 2319), 'numpy.append', 'np.append', (['self._progress', 'profile'], {}), '(self._progress, profile)\n', (2294, 2319), True, 'import numpy as np\n'), ((2446, 2458), 'p3iv_types.motion.MotionPlan', 'MotionPlan', ([], {}), '()\n', (2456, 2458), False, 'from p3iv_types.motion import MotionPlan, MotionPlans\n'), ((2529, 2549), 'numpy.zeros', 'np.zeros', (['(self.n + 1)'], {}), '(self.n + 1)\n', (2537, 2549), True, 'import numpy as np\n'), ((2711, 2773), 'p3iv_modules.interfaces.planning.PlannerInterface.overwrite_with_current_state', 'PlannerInterface.overwrite_with_current_state', (['mp', 'self._state'], {}), '(mp, self._state)\n', (2756, 2773), False, 'from p3iv_modules.interfaces.planning import PlannerInterface\n'), ((2204, 2231), 'numpy.append', 'np.append', (['profile', 'new_pos'], {}), '(profile, new_pos)\n', (2213, 2231), True, 'import numpy as np\n'), ((2620, 2695), 'p3iv_utils.vehicle_models.get_control_inputs', 'get_control_inputs', (['mp.states.yaw.mean', 'mp.states.speed', 'wheelbase', 'self.dt'], {}), '(mp.states.yaw.mean, mp.states.speed, wheelbase, self.dt)\n', (2638, 2695), False, 'from p3iv_utils.vehicle_models import get_control_inputs\n')] |
"""Metadata utilities."""
from attr import attr, NOTHING
TYPE_METADATA_KEY = "cattr_type_metadata"
def typed(type, default=NOTHING, validator=None,
repr=True, cmp=True, hash=True, init=True,
convert=None, metadata={}):
"""Just like `attr.ib`, but with type metadata."""
if not metadata:
metadata = {TYPE_METADATA_KEY: type}
else:
metadata[TYPE_METADATA_KEY] = type
return attr(default, validator, repr, cmp, hash, init, convert, metadata)
| [
"attr.attr"
] | [((428, 494), 'attr.attr', 'attr', (['default', 'validator', 'repr', 'cmp', 'hash', 'init', 'convert', 'metadata'], {}), '(default, validator, repr, cmp, hash, init, convert, metadata)\n', (432, 494), False, 'from attr import attr, NOTHING\n')] |
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# @author jsbxyyx
# @since 1.0
import datetime
import time
from seata.rm.datasource.sql.struct.KeyType import KeyType
from seata.rm.datasource.Types import Types
class Field(object):
def __init__(self):
self.name = None
self.key_type = KeyType.NULL
self.type = 0
# any
self.value = None
def set_value(self, value):
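        # Normalize the incoming value to its string representation according to the column's SQL type before storing it in self.value.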
result = None
if self.type == Types.BIT or \
self.type == Types.BLOB:
            result = str(value)
elif self.type == Types.TINYINT or \
self.type == Types.SMALLINT or \
self.type == Types.INTEGER or \
self.type == Types.INT or \
self.type == Types.BIGINT or \
self.type == Types.FLOAT or \
self.type == Types.REAL or \
self.type == Types.DOUBLE or \
self.type == Types.NUMERIC or \
self.type == Types.DECIMAL or \
self.type == Types.CHAR or \
self.type == Types.VARCHAR or \
self.type == Types.LONGVARCHAR or \
self.type == Types.TINYTEXT or \
self.type == Types.TEXT or \
self.type == Types.MEDIUMTEXT or \
self.type == Types.LONGTEXT or \
self.type == Types.JSON:
result = str(value)
elif self.type == Types.DATE:
if isinstance(value, str):
result = value
else:
# datetime.date
result = value.strftime('%Y-%m-%d')
elif self.type == Types.TIME:
if isinstance(value, str):
result = value
else:
# time.timedelta
result = value.strftime('%H:%M:%S.%f')
elif self.type == Types.DATETIME:
if isinstance(value, str):
result = value
else:
# datetime.datetime
result = value.strftime('%Y-%m-%d %H:%M:%S.%f')
else:
            result = str(value)
self.value = result
def get_value(self):
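        # Parse self.value back into a typed Python object (int, float, str, date, time, datetime, ...) based on the SQL type.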
val_str = self.value
if self.type == Types.TINYINT or \
self.type == Types.SMALLINT or \
self.type == Types.INTEGER or \
self.type == Types.INT or \
self.type == Types.BIGINT or \
self.type == Types.NUMERIC:
return int(val_str)
elif self.type == Types.FLOAT or \
self.type == Types.REAL or \
self.type == Types.DOUBLE or \
self.type == Types.DECIMAL:
return float(val_str)
elif self.type == Types.CHAR or \
self.type == Types.VARCHAR or \
self.type == Types.LONGVARCHAR or \
self.type == Types.TINYTEXT or \
self.type == Types.TEXT or \
self.type == Types.MEDIUMTEXT or \
self.type == Types.LONGTEXT or \
self.type == Types.JSON:
return val_str
elif self.type == Types.DATE:
string = val_str
return datetime.datetime.strptime(string, "%Y-%m-%d").date()
elif self.type == Types.TIME:
string = val_str
return datetime.datetime.strptime(string, "%H:%M:%S.%f").time()
elif self.type == Types.DATETIME:
string = val_str
return datetime.datetime.strptime(string, "%Y-%m-%d %H:%M:%S.%f")
elif self.type == Types.BIT or \
self.type == Types.BLOB:
return val_str.decode('utf-8')
else:
return val_str
def __eq__(self, other):
return self.__dict__ == other.__dict__
| [
"datetime.datetime.strptime"
] | [((3230, 3276), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['string', '"""%Y-%m-%d"""'], {}), "(string, '%Y-%m-%d')\n", (3256, 3276), False, 'import datetime\n'), ((3517, 3575), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['string', '"""%Y-%m-%d %H:%M:%S.%f"""'], {}), "(string, '%Y-%m-%d %H:%M:%S.%f')\n", (3543, 3575), False, 'import datetime\n'), ((3370, 3419), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['string', '"""%H:%M:%S.%f"""'], {}), "(string, '%H:%M:%S.%f')\n", (3396, 3419), False, 'import datetime\n')] |
#ml2015
from floodsystem.stationdata import build_station_list
from floodsystem.geo import stations_within_radius
def run():
stations = build_station_list()
cam_station_list = stations_within_radius(stations, (52.2053, 0.1218), 10)
cam_name_list=[]
for station in cam_station_list:
cam_name_list.append(station.name)
sorted_list=sorted(cam_name_list)
print(sorted_list)
if __name__ == "__main__":
run() | [
"floodsystem.stationdata.build_station_list",
"floodsystem.geo.stations_within_radius"
] | [((144, 164), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (162, 164), False, 'from floodsystem.stationdata import build_station_list\n'), ((189, 244), 'floodsystem.geo.stations_within_radius', 'stations_within_radius', (['stations', '(52.2053, 0.1218)', '(10)'], {}), '(stations, (52.2053, 0.1218), 10)\n', (211, 244), False, 'from floodsystem.geo import stations_within_radius\n')] |
# -*- coding: utf-8 -*-
import os
micro_controler = (os.uname().machine == 'ESP module with ESP8266')
if micro_controler:
from machine import Pin, I2C
import ssd1306
class Dispplay_conf:
size_x = 128
size_y = 64
class Display_stub:
def __init__(self, size_x, size_y):
self.size_x = size_x
self.size_y = size_y
def text(self, str, x, y, c = 1):
print(str)
def pixel(self, x, y, c):
pass
def show(self):
pass
def invert(self, state):
pass
def bar_chart_function(data, x, y, width, height):
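    # Draw `data` as a bar chart inside the (x, y, width, height) box: fill rectangles on the SSD1306 when running on the board, otherwise just print the bar geometry.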
    elements = len(data)
    bar_width = int(width / elements)  # bar width comes from the chart width, not the x offset
    y_scale = height / max(data)
    for i in range(0, elements):  # draw every element, including the last one
        x_start = x + i * bar_width
        bar_height = int(y_scale * data[i])
        if micro_controler:
            display.fill_rect(x_start, y - bar_height, bar_width, bar_height, 1)
        else:
            print(x_start, y - bar_height, x_start + bar_width, bar_height)
#ssd1306_gfx.SSD1306_I2C_SETUP.bar_chart = bar_chart_function
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
if micro_controler:
# Set LED on
led = Pin(2, Pin.OUT)
led.value(0)
# Setup ssd1306 OLED display
display = ssd1306.SSD1306_I2C(Dispplay_conf.size_x, Dispplay_conf.size_y, I2C(sda=Pin(4), scl=Pin(5)))
#display = ssd1306_gfx.SSD1306_I2C_SETUP(Pin(4), Pin(5), Dispplay_conf.size_x, Dispplay_conf.size_y)
else:
display = Display_stub(Dispplay_conf.size_y, Dispplay_conf.size_x)
display.bar_chart = bar_chart_function
#display.fill(1)
display.invert(True)
print(os.uname().machine)
#display.text(os.uname().machine, 0, 0, 1)
display.text('Nu pris: 5,45 Kr.', 0, 0, 1)
display.text('3t slot: kl 14', 0, 10, 1)
display.text('6t slot: kl 12', 0, 20, 1)
display.pixel(63, 47, 1)
display.show()
print("Done")
if micro_controler:
led.value(1)
display.invert(False)
spot_price = [1.23,2.3,3.8,5.9,6.2,3.7,2.1,0.3,0.3,0.3,0.3,0.3,0.3,0.3,1.23,2.3,3.8,5.9]
display.bar_chart(spot_price, 0, Dispplay_conf.size_y-1, Dispplay_conf.size_x, Dispplay_conf.size_y-24)
display.show() | [
"machine.Pin",
"os.uname"
] | [((54, 64), 'os.uname', 'os.uname', ([], {}), '()\n', (62, 64), False, 'import os\n'), ((1155, 1170), 'machine.Pin', 'Pin', (['(2)', 'Pin.OUT'], {}), '(2, Pin.OUT)\n', (1158, 1170), False, 'from machine import Pin, I2C\n'), ((1648, 1658), 'os.uname', 'os.uname', ([], {}), '()\n', (1656, 1658), False, 'import os\n'), ((1320, 1326), 'machine.Pin', 'Pin', (['(4)'], {}), '(4)\n', (1323, 1326), False, 'from machine import Pin, I2C\n'), ((1332, 1338), 'machine.Pin', 'Pin', (['(5)'], {}), '(5)\n', (1335, 1338), False, 'from machine import Pin, I2C\n')] |
from __future__ import print_function, absolute_import, division
import logging
import time
import boto3
logger = logging.getLogger(__name__)
_DEBUG_TOTAL_WRITTEN_BYTES = 0
def describe_logfiles_of_instance(name): # pragma: no cover (covered by it)
client = boto3.client('rds')
response = client.describe_db_log_files(
DBInstanceIdentifier=name)
if 'DescribeDBLogFiles' in response:
return response['DescribeDBLogFiles']
return []
def metric(name, value=1):
# DogStatsD format (https://www.datadoghq.com/blog/statsd/)
# MONITORING|unix_epoch_timestamp|metric_value|metric_type|my.metric.name|#tag1:value,tag2
print('MONITORING|{ts}|{value}|{type}|rds_log_dog.{name}|'.format(
ts=int(time.time()), type='count', name=name, value=value))
def download(instance_name, logfile_name, filename): # pragma: no cover (covered by it)
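    # Page through the RDS log via download_db_log_file_portion, following the returned Marker; truncated or empty chunks are retried up to max_retries times.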
client = boto3.client('rds')
next_position_marker = '0'
data = ''
retries = 0
max_retries = 3
size = 0
fetch_this_number_of_lines = 0
with open(filename, 'w') as datafile:
while True:
if retries > max_retries:
metric('retries_exceeded')
logger.error(
'Retry count (%d) exceeding max retries (%d). Gave up.',
retries, max_retries)
break
response = client.download_db_log_file_portion(
DBInstanceIdentifier=instance_name,
LogFileName=logfile_name, Marker=next_position_marker,
NumberOfLines=fetch_this_number_of_lines)
if 'LogFileData' not in response:
logger.warn('no LogFileData in response. Retrying.')
retries += 1
continue # means retry
data = response['LogFileData']
if data[-35:].strip().endswith('[Your log message was truncated]'):
metric('truncated')
fetch_this_number_of_lines = data.count('\n')-2
retries += 1
continue # means retry
            logger.debug('going to write %d bytes to disc. \
Already written: %d bytes', len(data), size)
datafile.write(data)
size += len(data)
global _DEBUG_TOTAL_WRITTEN_BYTES
_DEBUG_TOTAL_WRITTEN_BYTES += len(data)
if not response['AdditionalDataPending']:
break
next_position_marker = response['Marker']
fetch_this_number_of_lines = 0
retries = 0
metric('rds_log_size', size)
def get_size(instance_name, logfile_name):
for logfile in describe_logfiles_of_instance(instance_name):
if logfile['LogFileName'] == logfile_name:
return logfile['Size']
| [
"logging.getLogger",
"boto3.client",
"time.time"
] | [((116, 143), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (133, 143), False, 'import logging\n'), ((269, 288), 'boto3.client', 'boto3.client', (['"""rds"""'], {}), "('rds')\n", (281, 288), False, 'import boto3\n'), ((901, 920), 'boto3.client', 'boto3.client', (['"""rds"""'], {}), "('rds')\n", (913, 920), False, 'import boto3\n'), ((744, 755), 'time.time', 'time.time', ([], {}), '()\n', (753, 755), False, 'import time\n')] |
"""Tools for interacting with ConceptNet in English and interpreting
the results. Translate between plain words and the ConceptNet format"""
import random
import requests
import pattern.en as en
def query(concept, rel, reverse=False, all_edges=False):
"""Return the node(s) that are related to concept by rel"""
direction = ('start', 'end') if not reverse else ('end', 'start')
edges = requests.get("http://api.conceptnet.io/query?{}={}&rel=/r/{}" \
"&limit=1000".format(direction[0], concept, rel)).json()['edges']
if not len(edges): return []
if not all_edges: return random.choice(edges)[direction[1]]
return [x[direction[1]] for x in edges]
def isRich(concept, threshold=20):
obj = requests.get('http://api.conceptnet.io{}'.format(concept)).json()
return concept != "/c/en/" and len(obj.get('edges', [])) >= threshold
def attemptSingularization(term):
"""Return the singular form of this term, unless the plural form
has more edges connecting it to other nodes."""
term_node = requests.get('http://api.conceptnet.io{}'.format(term)).json()
richness = len(term_node.get('edges', []))
singular = readableToTerm(en.singularize(termToReadable(term)))
return singular if isRich(singular, threshold=richness) else term
def richness(concept):
#print("Determining richness of: {}".format(concept))
if concept == "/c/en/": return 0
obj = requests.get('http://api.conceptnet.io{}'.format(concept)).json()
return len(obj.get('edges', []))
def termToReadable(t):
return t[6:].replace('_', ' ')
def readableToTerm(s):
return "/c/en/" + s.replace(' ', '_')
def conjugateVerbs(sentence, tense):
"""Use parse trees to identify the verbs in a phrase. Assume the
first word in the phrase is guaranteed to be a verb. Return the
phrase with each verb converted to the desired tense."""
if not sentence: return None
"""pattern-en's conjugation() often does not work,
but lexeme() generates conjugations in a predictable order"""
lexeme_indicies = {"infinitive" : 0, "continuous" : 2}
t = lexeme_indicies[tense.lower()]
words = en.parsetree(sentence)[0]
words[0].string = en.lexeme(words[0].string)[t]
for word in words:
if word.type[0] == "V":
word.string = en.lexeme(word.string)[t]
return words.string
def findDirectObject(sentence):
"""Return the direct object of the input verb phrase."""
try:
remainder = sentence[sentence.index(' ')+1:]
except:
return None
"""Doesn't work for certain verbs, typically those that have another,
non-verb sense (e.g. 'ready' the weapon, 'storm' the hill). So we simply
replace this difficult verb with a more easily understood one: 'see' """
tree = en.parsetree('You see {}'.format(remainder),
tags=True, chunks=True, relations=True)
rel = tree.sentences[0].relations
obj_dict = rel["OBJ"]
if len(obj_dict):
obj = next(iter(obj_dict.values())).head.string
return readableToTerm(obj)
for word in tree.sentences[0].words:
if word.type[0] == 'N': return readableToTerm(word.string)
return None
def correctSpelling(s):
words = [en.suggest(w) for w in s.split()]
return ' '.join(words) | [
"pattern.en.lexeme",
"pattern.en.parsetree",
"pattern.en.suggest",
"random.choice"
] | [((2055, 2077), 'pattern.en.parsetree', 'en.parsetree', (['sentence'], {}), '(sentence)\n', (2067, 2077), True, 'import pattern.en as en\n'), ((2100, 2126), 'pattern.en.lexeme', 'en.lexeme', (['words[0].string'], {}), '(words[0].string)\n', (2109, 2126), True, 'import pattern.en as en\n'), ((3032, 3045), 'pattern.en.suggest', 'en.suggest', (['w'], {}), '(w)\n', (3042, 3045), True, 'import pattern.en as en\n'), ((581, 601), 'random.choice', 'random.choice', (['edges'], {}), '(edges)\n', (594, 601), False, 'import random\n'), ((2195, 2217), 'pattern.en.lexeme', 'en.lexeme', (['word.string'], {}), '(word.string)\n', (2204, 2217), True, 'import pattern.en as en\n')] |
# coding: utf-8
import gevent.monkey
gevent.monkey.patch_all()
import multiprocessing
bind = 'unix:/var/run/june.sock'
max_requests = 10000
keepalive = 5
workers = multiprocessing.cpu_count() * 2 + 1
worker_class = 'gunicorn.workers.ggevent.GeventWorker'
loglevel = 'info'
errorlog = '-'
x_forwarded_for_header = 'X-FORWARDED-FOR'
secure_scheme_headers = {
'X-SCHEME': 'https',
}
| [
"multiprocessing.cpu_count"
] | [((167, 194), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (192, 194), False, 'import multiprocessing\n')] |
import numpy as np
import batch_norm as bn
import relu as relu
import loss_functions.functions as fn
import sigmoid as sigmoid
import categorical_converter2 as cc
import mnist
def convert_y(s):
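    # Map the Iris species byte strings (as passed by np.loadtxt converters) to integer class labels 1-3.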
if s==b'Iris-setosa':
return int(1)
elif s==b'Iris-versicolor':
return int(2)
else:
return int(3)
def y_to_classification_form(y,n_classes):
"""
    THIS WILL WORK, but the best solution is implemented
y_count=len(y)
return_y=np.zeros(y_count*n_classes).reshape(y_count,n_classes)
for ix,item in enumerate(y):
return_y[ix,int(item)-1]=1.
return return_y
"""
return np.eye(n_classes)[y]
def fit(x_train,x_test,y_train,y_test):
"""temp for testing, load data from locafolder
"""
#data=np.loadtxt("/home/manjunath/iris/iris.csv", comments=None, delimiter=',', usecols=(0,1,2,3,4), converters={4: convert_y })
h=(10,10,10)
step_size=0.001
tolerence=0.001
iteration_max=1000
iteration=0
#Regularisation param, added to gradients
reg=0.01
K=np.unique(y_train).shape[0]
#x=np.loadtxt("/home/manjunath/iris/iris.csv", comments=None, delimiter=',', converters=None, usecols=(0,1,2,3))
"""
train_mean=np.mean(x_train,axis=0)
x_train=x_train-train_mean
#std_x = np.sqrt(np.sum(np.square(x_train - train_mean),axis=0)/x_train.shape[1])
std_x=np.std(x_train,axis=0)
x_train=x_train/std_x
x_test=x_test - train_mean
x_test=x_test/std_x
"""
y_train=y_to_classification_form(y_train,K)
y_test=y_to_classification_form(y_test,K)
n_samples,n_features=x_train.shape
gamma2=np.random.randn(h[0]).reshape(1,h[0])
beta2=np.random.randn(h[0]).reshape(1,h[0])
gamma3=np.random.randn(h[1]).reshape(1,h[1])
beta3=np.random.randn(h[1]).reshape(1,h[1])
eps=0.001
w1=(np.random.randn(n_features*h[0]).reshape(n_features,h[0]))/np.sqrt(2/(n_features+h[0]))
w2=(np.random.randn(h[0]*h[1]).reshape(h[0],h[1]))/np.sqrt(2/(h[0]+h[1]))
w3=(np.random.randn(h[1]*h[2]).reshape(h[1],h[2]))/np.sqrt(2/(h[1]+h[2]))
dw1_priv=np.zeros(w1.shape)
dw2_priv=np.zeros(w2.shape)
dw3_priv=np.zeros(w3.shape)
#w3=(np.random.randn(h[1]*K).reshape(h[1],K)*0.5)/np.sqrt(2/h[1]+K)
#Basically no significance, added bias for completion
b1 = np.zeros((1,h[0]))
b2 = np.zeros((1,h[1]))
b3 = np.zeros((1,K))
while iteration<iteration_max :
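        # One training iteration: forward pass (affine -> batch norm -> ReLU twice, then softmax), backward pass, and a plain gradient-descent update.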
#Calculate scores
scores_layer1=np.dot(x_train,w1)+b1 # 125x4,4x10 = 125x10
#print("iteration",iteration, "first layer",np.any(np.isnan(scores_layer1)))
#Do not use sigmoid, you will be stuck in long mess of nans and inf and overflows and div by zeros
#x2=1/1+np.exp(-scores_layer1) # 150 x 4
#Use reLU
#x2=np.maximum(0,scores_layer1)
bn_x2,bn_cache2=bn.batch_norm_forword(scores_layer1,gamma2,beta2) #125x10
#print("iteration",iteration, "first layer BN",np.any(np.isnan(bn_x2)))
#x2=relu.relu_forword(bn_x2.T)
x2=relu.relu_forword(bn_x2) #125x10
#print("iteration",iteration, "first layer relu",np.any(np.isnan(x2)))
score_layer2=np.dot(x2,w2)+b2 #125x10,10x10=125x10
#print("iteration",iteration, "second layer",np.any(np.isnan(score_layer2)))
bn_x3,bn_cache3=bn.batch_norm_forword(score_layer2,gamma3,beta3) #125x10
x3=relu.relu_forword(bn_x3) #125x10
final_scores=np.dot(x3,w3)+b3 # 125x10,10x3=125x3
#Again, use softmax or sigmoid loss for classification, MSE or distance is for regression only
probs=fn.softmax(final_scores) #125x3
dscores=fn.cross_enropy_grad_singleclass(probs,y_train) # 125x3
#There is possibility of only 1 class for data, so use below, else the implementation will be bit complex
#print(x3.shape)
dw3=np.dot(x3.T,dscores) # 10x125,125x3=10x3
dx3=np.dot(w3,dscores.T) # 10x3,3x125=10x125
#dhid2=dx3.T
#dhid2[x3<=0]=0
dhid2=relu.relu_backword(dx3.T,x3) #125x10
#print("dhid2",dhid2.shape)
bn_dhid2,dgamma3,dbeta3=bn.batch_norm_backword(dhid2,bn_cache3) #125x10
#dprod = (x2 * (1- x2)) * dx2.T # this is wrong, find out why, we mostly need to multiply with upstream gradient
dw2=np.dot(x2.T,bn_dhid2) # 10x125,125x10=10x10
dx2=np.dot(w2,dhid2.T) #10x10,10x125=10x125
#dhid1=dx2.T
#dhid1[x2<=0]=0
dhid1=relu.relu_backword(dx2.T,x2) #125x10
bn_dx2,dgamma2,dbeta2=bn.batch_norm_backword(dhid1,bn_cache2) #125x10
#print(dprod.shape)
dw1 = np.dot( x_train.T,bn_dx2) # 125x4,12510=4x10
db1=np.sum(b1,axis=0,keepdims=True)
db2=np.sum(b2,axis=0,keepdims=True)
db3=np.sum(b3,axis=0,keepdims=True)
#Regularisation of gradients
#Optimisation
#dw1 = (dw1+dw1_priv)/2
#dw2 = (dw2+dw2_priv)/2
#dw3 = (dw3+dw3_priv)/2
dw3 += reg*w3
dw2 += reg*w2
dw1 += reg*w1
w1 = w1 - (step_size * dw1)
w2 = w2 - (step_size * dw2)
w3 = w3 - (step_size * dw3)
#print(dw1)
#print(dw2)
#print(dw3)
#dw1_priv=dw1
#dw2_priv=dw2
#dw3_priv=dw3
"""
redundant parameters after batch normalization
"""
b1 = b1 - (step_size * db1)
b2 = b2 - (step_size * db2)
b3 = b3 - (step_size * db3)
gamma2= gamma2 - (step_size * dgamma2)
beta2 = beta2 - (step_size * dbeta2)
gamma3= gamma3 - (step_size * dgamma3)
beta3 = beta3 - (step_size * dbeta3)
if iteration%10 == 0 :
#print("****iteration:",iteration)
#x_test /= 10
s1=np.dot(x_test,w1)
#px2=1/1+np.exp(-s1)
bn_x2t,bn_cache2t=bn.batch_norm_forword(s1,gamma2,beta2)
px2=relu.relu_forword(bn_x2t)
s2=np.dot(px2,w2)
bn_x3t,bn_cache3t=bn.batch_norm_forword(s2,gamma3,beta3)
px3=relu.relu_forword(bn_x3t)
out=np.dot(px3,w3)
counter=0
for y_p,y_a in zip(np.argmax(out,axis=1),y_test):
if np.argmax(y_a)==y_p:
counter +=1
print("accuracy: ", (counter/10000) *100,"%")
loss=fn.cross_entropy_loss_singleclass(probs,y_train) # scalar
print('Loss',loss/n_samples)
dw1_p=np.zeros_like(dw1)
dw2_p=np.zeros_like(dw2)
dw3_p=np.zeros_like(dw3)
print("dw1",dw1==dw1_p)
print("dw1",dw2==dw2_p)
print("dw1",dw3==dw3_p)
dw1_p=dw1
dw2_p=dw2
dw3_p=dw3
#print("gamma2",gamma2)
#print("beta2",beta2)
iteration=iteration+1
    #print('Final weights are: ', w1,w2)
if __name__ == '__main__':
"""
data=np.genfromtxt("/home/manjunath/Downloads/abalone.data",dtype="str",delimiter=",")
data=cc.convert(data,(0,))
data=np.array(data,dtype=np.float16)
np.random.seed(14)
np.random.shuffle(data)
x_train=data[0:3500,0:-1]
x_test=data[3500:-1,0:-1]
y_train=data[0:3500,-1]
y_test=data[3500:-1,-1]
del data
"""
"""
data=np.loadtxt("C:\\MLDatabases\\iris\\iris.csv", comments=None, delimiter=',', usecols=(0,1,2,3,4), converters={4: convert_y })
np.random.seed(14)
np.random.shuffle(data)
x_train=data[0:125,0:-1]
x_test=data[125:-1,0:-1]
y_train=data[0:125,-1]
y_test=data[125:-1,-1]
del data
"""
"""
data=np.genfromtxt("C:\\MLDatabases\\iris\\bank-full.csv",dtype="str",delimiter=";")
data=np.delete(data,0,0)
data=cc.convert(data,(1,2,3,4,6,7,8,10,15,16))
data=np.array(data,dtype=np.float16)
data=data[~np.isnan(data).any(axis=1)]
data=data[~np.isinf(data).any(axis=1)]
print(data.shape)
if np.sum(np.isinf(data))>0 or np.sum(np.isnan(data))>0 :
print("has invalid values")
np.random.seed(14)
np.random.shuffle(data)
x_train=data[0:40000,0:-1]
x_test=data[40000:-1,0:-1]
y_train=data[0:40000,-1]
y_test=data[40000:-1,-1]
del data
"""
train_images = mnist.train_images()
train_labels = mnist.train_labels()
test_images = mnist.test_images()
test_labels = mnist.test_labels()
print("train_images",train_images.shape)
print("test_images",test_images.shape)
print("test_labels",test_labels.shape)
print("train_labels",test_labels.shape)
#fit(x_train,x_test,y_train,y_test)
fit(train_images.reshape(60000,28*28),test_images.reshape(10000,28*28),train_labels,test_labels)
| [
"numpy.sqrt",
"relu.relu_backword",
"mnist.train_images",
"batch_norm.batch_norm_forword",
"loss_functions.functions.cross_entropy_loss_singleclass",
"loss_functions.functions.cross_enropy_grad_singleclass",
"mnist.test_images",
"numpy.dot",
"numpy.eye",
"mnist.test_labels",
"numpy.argmax",
"numpy.random.randn",
"relu.relu_forword",
"numpy.unique",
"batch_norm.batch_norm_backword",
"numpy.sum",
"numpy.zeros",
"loss_functions.functions.softmax",
"mnist.train_labels",
"numpy.zeros_like"
] | [((2213, 2231), 'numpy.zeros', 'np.zeros', (['w1.shape'], {}), '(w1.shape)\n', (2221, 2231), True, 'import numpy as np\n'), ((2245, 2263), 'numpy.zeros', 'np.zeros', (['w2.shape'], {}), '(w2.shape)\n', (2253, 2263), True, 'import numpy as np\n'), ((2277, 2295), 'numpy.zeros', 'np.zeros', (['w3.shape'], {}), '(w3.shape)\n', (2285, 2295), True, 'import numpy as np\n'), ((2440, 2459), 'numpy.zeros', 'np.zeros', (['(1, h[0])'], {}), '((1, h[0]))\n', (2448, 2459), True, 'import numpy as np\n'), ((2468, 2487), 'numpy.zeros', 'np.zeros', (['(1, h[1])'], {}), '((1, h[1]))\n', (2476, 2487), True, 'import numpy as np\n'), ((2496, 2512), 'numpy.zeros', 'np.zeros', (['(1, K)'], {}), '((1, K))\n', (2504, 2512), True, 'import numpy as np\n'), ((8838, 8858), 'mnist.train_images', 'mnist.train_images', ([], {}), '()\n', (8856, 8858), False, 'import mnist\n'), ((8878, 8898), 'mnist.train_labels', 'mnist.train_labels', ([], {}), '()\n', (8896, 8898), False, 'import mnist\n'), ((8918, 8937), 'mnist.test_images', 'mnist.test_images', ([], {}), '()\n', (8935, 8937), False, 'import mnist\n'), ((8956, 8975), 'mnist.test_labels', 'mnist.test_labels', ([], {}), '()\n', (8973, 8975), False, 'import mnist\n'), ((696, 713), 'numpy.eye', 'np.eye', (['n_classes'], {}), '(n_classes)\n', (702, 713), True, 'import numpy as np\n'), ((2010, 2042), 'numpy.sqrt', 'np.sqrt', (['(2 / (n_features + h[0]))'], {}), '(2 / (n_features + h[0]))\n', (2017, 2042), True, 'import numpy as np\n'), ((2094, 2120), 'numpy.sqrt', 'np.sqrt', (['(2 / (h[0] + h[1]))'], {}), '(2 / (h[0] + h[1]))\n', (2101, 2120), True, 'import numpy as np\n'), ((2172, 2198), 'numpy.sqrt', 'np.sqrt', (['(2 / (h[1] + h[2]))'], {}), '(2 / (h[1] + h[2]))\n', (2179, 2198), True, 'import numpy as np\n'), ((3008, 3059), 'batch_norm.batch_norm_forword', 'bn.batch_norm_forword', (['scores_layer1', 'gamma2', 'beta2'], {}), '(scores_layer1, gamma2, beta2)\n', (3029, 3059), True, 'import batch_norm as bn\n'), ((3196, 3220), 'relu.relu_forword', 'relu.relu_forword', (['bn_x2'], {}), '(bn_x2)\n', (3213, 3220), True, 'import relu as relu\n'), ((3486, 3536), 'batch_norm.batch_norm_forword', 'bn.batch_norm_forword', (['score_layer2', 'gamma3', 'beta3'], {}), '(score_layer2, gamma3, beta3)\n', (3507, 3536), True, 'import batch_norm as bn\n'), ((3554, 3578), 'relu.relu_forword', 'relu.relu_forword', (['bn_x3'], {}), '(bn_x3)\n', (3571, 3578), True, 'import relu as relu\n'), ((3799, 3823), 'loss_functions.functions.softmax', 'fn.softmax', (['final_scores'], {}), '(final_scores)\n', (3809, 3823), True, 'import loss_functions.functions as fn\n'), ((3875, 3923), 'loss_functions.functions.cross_enropy_grad_singleclass', 'fn.cross_enropy_grad_singleclass', (['probs', 'y_train'], {}), '(probs, y_train)\n', (3907, 3923), True, 'import loss_functions.functions as fn\n'), ((4091, 4112), 'numpy.dot', 'np.dot', (['x3.T', 'dscores'], {}), '(x3.T, dscores)\n', (4097, 4112), True, 'import numpy as np\n'), ((4145, 4166), 'numpy.dot', 'np.dot', (['w3', 'dscores.T'], {}), '(w3, dscores.T)\n', (4151, 4166), True, 'import numpy as np\n'), ((4264, 4293), 'relu.relu_backword', 'relu.relu_backword', (['dx3.T', 'x3'], {}), '(dx3.T, x3)\n', (4282, 4293), True, 'import relu as relu\n'), ((4369, 4409), 'batch_norm.batch_norm_backword', 'bn.batch_norm_backword', (['dhid2', 'bn_cache3'], {}), '(dhid2, bn_cache3)\n', (4391, 4409), True, 'import batch_norm as bn\n'), ((4566, 4588), 'numpy.dot', 'np.dot', (['x2.T', 'bn_dhid2'], {}), '(x2.T, bn_dhid2)\n', (4572, 4588), True, 'import numpy as np\n'), ((4623, 
4642), 'numpy.dot', 'np.dot', (['w2', 'dhid2.T'], {}), '(w2, dhid2.T)\n', (4629, 4642), True, 'import numpy as np\n'), ((4741, 4770), 'relu.relu_backword', 'relu.relu_backword', (['dx2.T', 'x2'], {}), '(dx2.T, x2)\n', (4759, 4770), True, 'import relu as relu\n'), ((4817, 4857), 'batch_norm.batch_norm_backword', 'bn.batch_norm_backword', (['dhid1', 'bn_cache2'], {}), '(dhid1, bn_cache2)\n', (4839, 4857), True, 'import batch_norm as bn\n'), ((4917, 4942), 'numpy.dot', 'np.dot', (['x_train.T', 'bn_dx2'], {}), '(x_train.T, bn_dx2)\n', (4923, 4942), True, 'import numpy as np\n'), ((4976, 5009), 'numpy.sum', 'np.sum', (['b1'], {'axis': '(0)', 'keepdims': '(True)'}), '(b1, axis=0, keepdims=True)\n', (4982, 5009), True, 'import numpy as np\n'), ((5028, 5061), 'numpy.sum', 'np.sum', (['b2'], {'axis': '(0)', 'keepdims': '(True)'}), '(b2, axis=0, keepdims=True)\n', (5034, 5061), True, 'import numpy as np\n'), ((5080, 5113), 'numpy.sum', 'np.sum', (['b3'], {'axis': '(0)', 'keepdims': '(True)'}), '(b3, axis=0, keepdims=True)\n', (5086, 5113), True, 'import numpy as np\n'), ((1137, 1155), 'numpy.unique', 'np.unique', (['y_train'], {}), '(y_train)\n', (1146, 1155), True, 'import numpy as np\n'), ((1741, 1762), 'numpy.random.randn', 'np.random.randn', (['h[0]'], {}), '(h[0])\n', (1756, 1762), True, 'import numpy as np\n'), ((1789, 1810), 'numpy.random.randn', 'np.random.randn', (['h[0]'], {}), '(h[0])\n', (1804, 1810), True, 'import numpy as np\n'), ((1838, 1859), 'numpy.random.randn', 'np.random.randn', (['h[1]'], {}), '(h[1])\n', (1853, 1859), True, 'import numpy as np\n'), ((1886, 1907), 'numpy.random.randn', 'np.random.randn', (['h[1]'], {}), '(h[1])\n', (1901, 1907), True, 'import numpy as np\n'), ((2622, 2641), 'numpy.dot', 'np.dot', (['x_train', 'w1'], {}), '(x_train, w1)\n', (2628, 2641), True, 'import numpy as np\n'), ((3338, 3352), 'numpy.dot', 'np.dot', (['x2', 'w2'], {}), '(x2, w2)\n', (3344, 3352), True, 'import numpy as np\n'), ((3618, 3632), 'numpy.dot', 'np.dot', (['x3', 'w3'], {}), '(x3, w3)\n', (3624, 3632), True, 'import numpy as np\n'), ((6206, 6224), 'numpy.dot', 'np.dot', (['x_test', 'w1'], {}), '(x_test, w1)\n', (6212, 6224), True, 'import numpy as np\n'), ((6287, 6327), 'batch_norm.batch_norm_forword', 'bn.batch_norm_forword', (['s1', 'gamma2', 'beta2'], {}), '(s1, gamma2, beta2)\n', (6308, 6327), True, 'import batch_norm as bn\n'), ((6342, 6367), 'relu.relu_forword', 'relu.relu_forword', (['bn_x2t'], {}), '(bn_x2t)\n', (6359, 6367), True, 'import relu as relu\n'), ((6400, 6415), 'numpy.dot', 'np.dot', (['px2', 'w2'], {}), '(px2, w2)\n', (6406, 6415), True, 'import numpy as np\n'), ((6449, 6489), 'batch_norm.batch_norm_forword', 'bn.batch_norm_forword', (['s2', 'gamma3', 'beta3'], {}), '(s2, gamma3, beta3)\n', (6470, 6489), True, 'import batch_norm as bn\n'), ((6504, 6529), 'relu.relu_forword', 'relu.relu_forword', (['bn_x3t'], {}), '(bn_x3t)\n', (6521, 6529), True, 'import relu as relu\n'), ((6559, 6574), 'numpy.dot', 'np.dot', (['px3', 'w3'], {}), '(px3, w3)\n', (6565, 6574), True, 'import numpy as np\n'), ((6818, 6867), 'loss_functions.functions.cross_entropy_loss_singleclass', 'fn.cross_entropy_loss_singleclass', (['probs', 'y_train'], {}), '(probs, y_train)\n', (6851, 6867), True, 'import loss_functions.functions as fn\n'), ((6948, 6966), 'numpy.zeros_like', 'np.zeros_like', (['dw1'], {}), '(dw1)\n', (6961, 6966), True, 'import numpy as np\n'), ((6985, 7003), 'numpy.zeros_like', 'np.zeros_like', (['dw2'], {}), '(dw2)\n', (6998, 7003), True, 'import numpy as np\n'), ((7022, 
7040), 'numpy.zeros_like', 'np.zeros_like', (['dw3'], {}), '(dw3)\n', (7035, 7040), True, 'import numpy as np\n'), ((1951, 1985), 'numpy.random.randn', 'np.random.randn', (['(n_features * h[0])'], {}), '(n_features * h[0])\n', (1966, 1985), True, 'import numpy as np\n'), ((2047, 2075), 'numpy.random.randn', 'np.random.randn', (['(h[0] * h[1])'], {}), '(h[0] * h[1])\n', (2062, 2075), True, 'import numpy as np\n'), ((2125, 2153), 'numpy.random.randn', 'np.random.randn', (['(h[1] * h[2])'], {}), '(h[1] * h[2])\n', (2140, 2153), True, 'import numpy as np\n'), ((6640, 6662), 'numpy.argmax', 'np.argmax', (['out'], {'axis': '(1)'}), '(out, axis=1)\n', (6649, 6662), True, 'import numpy as np\n'), ((6690, 6704), 'numpy.argmax', 'np.argmax', (['y_a'], {}), '(y_a)\n', (6699, 6704), True, 'import numpy as np\n')] |
"""
SharedOne
---------
SharedTwo
---------
"""
from multiprocessing.shared_memory import ShareableList
from multiprocessing.managers import SharedMemoryManager
from multiprocessing.connection import Client
from abc import ABC, abstractmethod
import logging
from pickletools import optimize
import pickle
import time
import os
import psutil
from pandas.core.frame import DataFrame
from shareable.managers_decorators import Resources
logging.basicConfig(format='%(message)s', level=logging.INFO)
class AbstractShared(ABC):
"""Abstraction of shared objects."""
@classmethod
def __init_subclass__(cls):
required_class_attrs = [
"shm",
"shared_obj",
"pid",
"sent_queue",
"rec_queue",
"ADDR",
"SECRET",
]
for attr in required_class_attrs:
if not hasattr(cls, attr):
raise NotImplementedError(f"{cls} missing required {attr} attr")
@abstractmethod
def start(self):
"""
Starts a shared memory instance
:return:
None
"""
raise NotImplementedError
@abstractmethod
def listen(self):
"""
Starts a listener for a second shared memory instance
:return:
None
"""
raise NotImplementedError
@abstractmethod
def send(self, value):
"""
Sends a message holding the shared memory process name
:param value:
shared_memory name
:return:
None
"""
raise NotImplementedError
@abstractmethod
def clean_up(self):
"""
Cleans up threads and shared memory process on exit
:return:
None
"""
raise NotImplementedError
class Shared(AbstractShared):
"""parent shared object"""
ADDR = ("localhost", 6000)
SECRET = bytes("secret".encode("utf-8"))
shm = None
shared_obj = None
sent_queue = []
rec_queue = []
pid = os.getpid()
def start(self):
pass
def listen(self):
"""
Starts a listener for a second shared memory instance
:return:
            None
"""
socket = self.ADDR[1]
while True:
try:
with Resources(self.ADDR, authkey=self.SECRET) as message:
counter = 0
while counter == 0:
self.rec_queue.append(message)
counter = 1
break
except OSError:
socket += 1
logging.info(f"Socket {(socket - 1)} is in use, trying {socket}")
def send(self, value):
"""
Sends a message holding the shared memory process name.
:param value:
shared_memory name
:return:
            None
"""
with Client(self.ADDR, authkey=self.SECRET) as conn:
conn.send(value)
self.sent_queue.append(value)
def clean_up(self):
"""
Cleans up threads and shared memory process on exit.
:return:
None
"""
self.shm.shutdown()
logging.info("Destroyed shared resources")
process = psutil.Process(self.pid)
for i in process.children(recursive=True):
p_temp = psutil.Process(i.pid)
p_temp.kill()
logging.info("Killed all child processes")
class SharedOne(Shared):
"""
Shared object child, starts shared mem process.
"""
def __init__(self, obj):
self.obj = obj
self.shareable = self.pickled()
self.pid = os.getpid()
self.shm = SharedMemoryManager()
self.shm.start()
self.shared_obj = self.shm.ShareableList([self.pickled()])
def start(self):
"""
Starts a shared memory instance.
:return:
None
"""
if not isinstance(self.obj, DataFrame):
self.pop("temp_space")
iteration = 0
while iteration == 0:
try:
self.send(self.shared_obj.shm.name)
iteration = 1
except ConnectionRefusedError:
time.sleep(5)
def pop(self, key):
"""
Custom pop method to set shared memory obj attrs.
:param key:
...
:return:
None
"""
temp = pickle.loads(self.shared_obj[-1])
temp.__delattr__(key)
self.shared_obj[-1] = optimize(pickle.dumps(temp))
def pickled(self):
"""
Manually allocate memory, I haven't looked into
whether there is support for 'size=num' for shared_memory.
:return:
object
"""
temp_space = os.urandom(1000)
self.obj.temp_space = temp_space
# if not isinstance(dict, self.obj):
# self.obj.temp_space = temp_space
# else:
# self.obj['temp_space'] = temp_space
return optimize(pickle.dumps(self.obj))
class SharedTwo(Shared):
"""
shared object child, listens for shared mem process
"""
def __init__(self):
self.shared_obj = None
self.shm = SharedMemoryManager()
def start(self):
"""
Starts a shared memory instance.
:return:
None
"""
self.shm.start()
self.listen()
name = self.rec_queue[0]
self.shared_obj = ShareableList(name=name)
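# A minimal usage sketch (not part of the original module; the picklable object
# and the two-process setup are assumptions based on the classes above):
#   # process B - blocks in listen() until it receives the shared-memory name
#   receiver = SharedTwo()
#   receiver.start()
#   obj = pickle.loads(receiver.shared_obj[-1])
#   # process A - pickles the object, publishes it and send()s its name
#   # (start() retries every 5 s until the listener in process B is reachable)
#   sender = SharedOne(some_picklable_object)
#   sender.start()
#   # both sides release the shared memory when done
#   sender.clean_up()
#   receiver.clean_up()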
| [
"logging.basicConfig",
"shareable.managers_decorators.Resources",
"pickle.dumps",
"os.urandom",
"psutil.Process",
"time.sleep",
"multiprocessing.connection.Client",
"multiprocessing.managers.SharedMemoryManager",
"multiprocessing.shared_memory.ShareableList",
"os.getpid",
"pickle.loads",
"logging.info"
] | [((435, 496), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(message)s"""', 'level': 'logging.INFO'}), "(format='%(message)s', level=logging.INFO)\n", (454, 496), False, 'import logging\n'), ((2029, 2040), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2038, 2040), False, 'import os\n'), ((3201, 3243), 'logging.info', 'logging.info', (['"""Destroyed shared resources"""'], {}), "('Destroyed shared resources')\n", (3213, 3243), False, 'import logging\n'), ((3262, 3286), 'psutil.Process', 'psutil.Process', (['self.pid'], {}), '(self.pid)\n', (3276, 3286), False, 'import psutil\n'), ((3415, 3457), 'logging.info', 'logging.info', (['"""Killed all child processes"""'], {}), "('Killed all child processes')\n", (3427, 3457), False, 'import logging\n'), ((3665, 3676), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3674, 3676), False, 'import os\n'), ((3696, 3717), 'multiprocessing.managers.SharedMemoryManager', 'SharedMemoryManager', ([], {}), '()\n', (3715, 3717), False, 'from multiprocessing.managers import SharedMemoryManager\n'), ((4432, 4465), 'pickle.loads', 'pickle.loads', (['self.shared_obj[-1]'], {}), '(self.shared_obj[-1])\n', (4444, 4465), False, 'import pickle\n'), ((4784, 4800), 'os.urandom', 'os.urandom', (['(1000)'], {}), '(1000)\n', (4794, 4800), False, 'import os\n'), ((5222, 5243), 'multiprocessing.managers.SharedMemoryManager', 'SharedMemoryManager', ([], {}), '()\n', (5241, 5243), False, 'from multiprocessing.managers import SharedMemoryManager\n'), ((5472, 5496), 'multiprocessing.shared_memory.ShareableList', 'ShareableList', ([], {'name': 'name'}), '(name=name)\n', (5485, 5496), False, 'from multiprocessing.shared_memory import ShareableList\n'), ((2905, 2943), 'multiprocessing.connection.Client', 'Client', (['self.ADDR'], {'authkey': 'self.SECRET'}), '(self.ADDR, authkey=self.SECRET)\n', (2911, 2943), False, 'from multiprocessing.connection import Client\n'), ((3359, 3380), 'psutil.Process', 'psutil.Process', (['i.pid'], {}), '(i.pid)\n', (3373, 3380), False, 'import psutil\n'), ((4535, 4553), 'pickle.dumps', 'pickle.dumps', (['temp'], {}), '(temp)\n', (4547, 4553), False, 'import pickle\n'), ((5024, 5046), 'pickle.dumps', 'pickle.dumps', (['self.obj'], {}), '(self.obj)\n', (5036, 5046), False, 'import pickle\n'), ((2307, 2348), 'shareable.managers_decorators.Resources', 'Resources', (['self.ADDR'], {'authkey': 'self.SECRET'}), '(self.ADDR, authkey=self.SECRET)\n', (2316, 2348), False, 'from shareable.managers_decorators import Resources\n'), ((2622, 2685), 'logging.info', 'logging.info', (['f"""Socket {socket - 1} is in use, trying {socket}"""'], {}), "(f'Socket {socket - 1} is in use, trying {socket}')\n", (2634, 2685), False, 'import logging\n'), ((4225, 4238), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4235, 4238), False, 'import time\n')] |
'''
Author: Leon-Francis
Date: 2021-07-08 21:35:35
Contact: <EMAIL>
LastEditTime: 2021-07-09 00:39:40
LastEditors: Leon-Francis
Description: encrypt and decrypt
FilePath: /Network_Security_Experiment/crypto.py
(C)Copyright 2020-2021, Leon-Francis
'''
import random
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.PublicKey import RSA
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
import sys
MSG_HEADER_LEN = 10
def get_RSA_keys():
key = RSA.generate(1024)
return key.export_key(), key.publickey().export_key()
def get_msg_len(msg):
msg_len = str(len(msg))
return msg_len.zfill(MSG_HEADER_LEN)
def crypto_encode(msg, my_private_key, other_pub_key):
with open('AES_key', 'rb') as f:
AES_key = f.read()
cipher = AES.new(AES_key, AES.MODE_EAX)
msg = msg.encode('utf-8')
digest = SHA256.new()
digest.update(msg)
privateKey = RSA.import_key(my_private_key)
signer = PKCS1_v1_5.new(privateKey)
signature = signer.sign(digest)
encrypt_msg = cipher.encrypt(msg + signature)
return encrypt_msg, cipher.nonce
def crypto_decode(encrypt_msg, private_key, other_pub_key, nonce):
with open('AES_key', 'rb') as f:
AES_key = f.read()
cipher = AES.new(AES_key, AES.MODE_EAX, nonce=nonce)
msg_sign = cipher.decrypt(encrypt_msg)
msg = msg_sign[:-128]
signature = msg_sign[-128:]
digest = SHA256.new()
digest.update(msg)
verifyKey = RSA.import_key(other_pub_key)
verifier = PKCS1_v1_5.new(verifyKey)
if not verifier.verify(digest, signature):
print('message has been modified!')
sys.exit()
return msg.decode('utf-8')
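# Intended call sequence (a sketch inferred from the functions in this module;
# the shared 'AES_key' file and the sockets used by send_nonce()/receive_nonce()
# are assumptions):
#   a_priv, a_pub = get_RSA_keys()   # peer A
#   b_priv, b_pub = get_RSA_keys()   # peer B
#   ct, nonce = crypto_encode("hello", a_priv, b_pub)  # A signs, then AES-EAX encrypts
#   msg = crypto_decode(ct, b_priv, a_pub, nonce)      # B decrypts and verifies A's signature
# send_nonce()/receive_nonce() below carry the AES nonce over a socket,
# RSA-OAEP encrypted for the receiver and PKCS#1 v1.5 signed by the sender.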
def send_nonce(conn, private_key, client_pub_key, nonce):
clientPublicKey = RSA.import_key(client_pub_key)
cipher = PKCS1_OAEP.new(clientPublicKey)
privateKey = RSA.import_key(private_key)
signer = PKCS1_v1_5.new(privateKey)
encrypted_key = cipher.encrypt(nonce)
conn.send(encrypted_key)
digest = SHA256.new()
digest.update(nonce)
signature = signer.sign(digest)
conn.send(signature)
def receive_nonce(s, private_key, server_pub_key):
AES_crypto_nonce = s.recv(128)
privateKey = RSA.import_key(private_key)
cipher = PKCS1_OAEP.new(privateKey)
AES_crypto_nonce = cipher.decrypt(AES_crypto_nonce)
digest = SHA256.new()
digest.update(AES_crypto_nonce)
signature = s.recv(128)
publicKey = RSA.import_key(server_pub_key)
signer = PKCS1_v1_5.new(publicKey)
if not signer.verify(digest, signature):
print('server public key has been modified!')
sys.exit()
return AES_crypto_nonce | [
"Crypto.Cipher.PKCS1_OAEP.new",
"Crypto.PublicKey.RSA.generate",
"Crypto.Signature.PKCS1_v1_5.new",
"Crypto.Cipher.AES.new",
"sys.exit",
"Crypto.Hash.SHA256.new",
"Crypto.PublicKey.RSA.import_key"
] | [((554, 572), 'Crypto.PublicKey.RSA.generate', 'RSA.generate', (['(1024)'], {}), '(1024)\n', (566, 572), False, 'from Crypto.PublicKey import RSA\n'), ((858, 888), 'Crypto.Cipher.AES.new', 'AES.new', (['AES_key', 'AES.MODE_EAX'], {}), '(AES_key, AES.MODE_EAX)\n', (865, 888), False, 'from Crypto.Cipher import AES, PKCS1_OAEP\n'), ((932, 944), 'Crypto.Hash.SHA256.new', 'SHA256.new', ([], {}), '()\n', (942, 944), False, 'from Crypto.Hash import SHA256\n'), ((985, 1015), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['my_private_key'], {}), '(my_private_key)\n', (999, 1015), False, 'from Crypto.PublicKey import RSA\n'), ((1029, 1055), 'Crypto.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['privateKey'], {}), '(privateKey)\n', (1043, 1055), False, 'from Crypto.Signature import PKCS1_v1_5\n'), ((1325, 1368), 'Crypto.Cipher.AES.new', 'AES.new', (['AES_key', 'AES.MODE_EAX'], {'nonce': 'nonce'}), '(AES_key, AES.MODE_EAX, nonce=nonce)\n', (1332, 1368), False, 'from Crypto.Cipher import AES, PKCS1_OAEP\n'), ((1483, 1495), 'Crypto.Hash.SHA256.new', 'SHA256.new', ([], {}), '()\n', (1493, 1495), False, 'from Crypto.Hash import SHA256\n'), ((1535, 1564), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['other_pub_key'], {}), '(other_pub_key)\n', (1549, 1564), False, 'from Crypto.PublicKey import RSA\n'), ((1580, 1605), 'Crypto.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['verifyKey'], {}), '(verifyKey)\n', (1594, 1605), False, 'from Crypto.Signature import PKCS1_v1_5\n'), ((1830, 1860), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['client_pub_key'], {}), '(client_pub_key)\n', (1844, 1860), False, 'from Crypto.PublicKey import RSA\n'), ((1874, 1905), 'Crypto.Cipher.PKCS1_OAEP.new', 'PKCS1_OAEP.new', (['clientPublicKey'], {}), '(clientPublicKey)\n', (1888, 1905), False, 'from Crypto.Cipher import AES, PKCS1_OAEP\n'), ((1924, 1951), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['private_key'], {}), '(private_key)\n', (1938, 1951), False, 'from Crypto.PublicKey import RSA\n'), ((1965, 1991), 'Crypto.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['privateKey'], {}), '(privateKey)\n', (1979, 1991), False, 'from Crypto.Signature import PKCS1_v1_5\n'), ((2078, 2090), 'Crypto.Hash.SHA256.new', 'SHA256.new', ([], {}), '()\n', (2088, 2090), False, 'from Crypto.Hash import SHA256\n'), ((2283, 2310), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['private_key'], {}), '(private_key)\n', (2297, 2310), False, 'from Crypto.PublicKey import RSA\n'), ((2324, 2350), 'Crypto.Cipher.PKCS1_OAEP.new', 'PKCS1_OAEP.new', (['privateKey'], {}), '(privateKey)\n', (2338, 2350), False, 'from Crypto.Cipher import AES, PKCS1_OAEP\n'), ((2421, 2433), 'Crypto.Hash.SHA256.new', 'SHA256.new', ([], {}), '()\n', (2431, 2433), False, 'from Crypto.Hash import SHA256\n'), ((2516, 2546), 'Crypto.PublicKey.RSA.import_key', 'RSA.import_key', (['server_pub_key'], {}), '(server_pub_key)\n', (2530, 2546), False, 'from Crypto.PublicKey import RSA\n'), ((2560, 2585), 'Crypto.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['publicKey'], {}), '(publicKey)\n', (2574, 2585), False, 'from Crypto.Signature import PKCS1_v1_5\n'), ((1705, 1715), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1713, 1715), False, 'import sys\n'), ((2693, 2703), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2701, 2703), False, 'import sys\n')] |
import io
import requests
import os
import zipfile
import json
from application import app, db
from application.users.model import User
from application.datastore.model import Datastore
import click
from flask_migrate import MigrateCommand
from flask.cli import AppGroup
extensions = AppGroup('app')
@extensions.command()
def init_db():
db.drop_all()
db.create_all()
click.echo('Database has been rebuilt.')
def export_storage():
datastore = Datastore.query.all()
archive = io.BytesIO()
with zipfile.ZipFile(archive, mode='w') as zf:
for data in datastore:
user = User.query.filter(data.user.id == User.id).first()
entry = {
'id': data.key,
'creator': {
'local_id': user.id,
'remote_id': user.remote_id,
'domain': user.domain
},
'revision': data.revision,
'transcription': json.loads(data.get_data())
}
zf.writestr('%s.json' % data.key, json.dumps(entry, indent=4))
archive.seek(0)
return archive
@extensions.command()
@click.argument('path')
def dump_storage(path):
archive = export_storage()
with open(path, 'wb') as f_out:
f_out.write(archive.getvalue())
@extensions.command()
@click.argument('url')
def push_storage(url):
archive = export_storage()
headers = os.environ.get("ATS_PUSH_HEADERS", None)
if headers is not None:
headers = json.loads(headers)
request = requests.post(url, files={'file': archive}, headers=headers)
print(request)
app.cli.add_command(extensions)
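# Example invocations (a sketch - assumes the standard "flask" entry point with
# FLASK_APP pointing at this application and Click >= 7, which turns the
# underscores in the command names into dashes; path, URL and token are made up):
#   flask app init-db
#   flask app dump-storage /tmp/datastore-export.zip
#   ATS_PUSH_HEADERS='{"Authorization": "Bearer <token>"}' \
#       flask app push-storage https://example.org/import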
| [
"click.argument",
"requests.post",
"json.loads",
"application.app.cli.add_command",
"zipfile.ZipFile",
"flask.cli.AppGroup",
"application.datastore.model.Datastore.query.all",
"json.dumps",
"io.BytesIO",
"os.environ.get",
"click.echo",
"application.db.drop_all",
"application.db.create_all",
"application.users.model.User.query.filter"
] | [((288, 303), 'flask.cli.AppGroup', 'AppGroup', (['"""app"""'], {}), "('app')\n", (296, 303), False, 'from flask.cli import AppGroup\n'), ((1213, 1235), 'click.argument', 'click.argument', (['"""path"""'], {}), "('path')\n", (1227, 1235), False, 'import click\n'), ((1392, 1413), 'click.argument', 'click.argument', (['"""url"""'], {}), "('url')\n", (1406, 1413), False, 'import click\n'), ((1687, 1718), 'application.app.cli.add_command', 'app.cli.add_command', (['extensions'], {}), '(extensions)\n', (1706, 1718), False, 'from application import app, db\n'), ((346, 359), 'application.db.drop_all', 'db.drop_all', ([], {}), '()\n', (357, 359), False, 'from application import app, db\n'), ((364, 379), 'application.db.create_all', 'db.create_all', ([], {}), '()\n', (377, 379), False, 'from application import app, db\n'), ((384, 424), 'click.echo', 'click.echo', (['"""Database has been rebuilt."""'], {}), "('Database has been rebuilt.')\n", (394, 424), False, 'import click\n'), ((464, 485), 'application.datastore.model.Datastore.query.all', 'Datastore.query.all', ([], {}), '()\n', (483, 485), False, 'from application.datastore.model import Datastore\n'), ((500, 512), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (510, 512), False, 'import io\n'), ((1482, 1522), 'os.environ.get', 'os.environ.get', (['"""ATS_PUSH_HEADERS"""', 'None'], {}), "('ATS_PUSH_HEADERS', None)\n", (1496, 1522), False, 'import os\n'), ((1604, 1664), 'requests.post', 'requests.post', (['url'], {'files': "{'file': archive}", 'headers': 'headers'}), "(url, files={'file': archive}, headers=headers)\n", (1617, 1664), False, 'import requests\n'), ((523, 557), 'zipfile.ZipFile', 'zipfile.ZipFile', (['archive'], {'mode': '"""w"""'}), "(archive, mode='w')\n", (538, 557), False, 'import zipfile\n'), ((1569, 1588), 'json.loads', 'json.loads', (['headers'], {}), '(headers)\n', (1579, 1588), False, 'import json\n'), ((1120, 1147), 'json.dumps', 'json.dumps', (['entry'], {'indent': '(4)'}), '(entry, indent=4)\n', (1130, 1147), False, 'import json\n'), ((615, 657), 'application.users.model.User.query.filter', 'User.query.filter', (['(data.user.id == User.id)'], {}), '(data.user.id == User.id)\n', (632, 657), False, 'from application.users.model import User\n')] |
import urllib2, bs4
import unicodecsv as csv
url = 'https://www.nytimes.com/'
page = urllib2.urlopen(url)
soup = bs4.BeautifulSoup(page, features="html.parser")
out = open('nytimes.csv', 'wb')
writer = csv.writer(out)
h2s = soup.find_all('h2')
for h2 in h2s:
title = h2.text
links = h2.find_parents('a')
if len(links) == 0:
continue
href = "https://www.nytimes.com%s" % links[0]['href']
print(title)
print(href)
writer.writerow([href, title])
out.close()
| [
"bs4.BeautifulSoup",
"urllib2.urlopen",
"unicodecsv.writer"
] | [((86, 106), 'urllib2.urlopen', 'urllib2.urlopen', (['url'], {}), '(url)\n', (101, 106), False, 'import urllib2, bs4\n'), ((114, 161), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['page'], {'features': '"""html.parser"""'}), "(page, features='html.parser')\n", (131, 161), False, 'import urllib2, bs4\n'), ((204, 219), 'unicodecsv.writer', 'csv.writer', (['out'], {}), '(out)\n', (214, 219), True, 'import unicodecsv as csv\n')] |
from torch import nn
import torch
from torch.optim.adam import Adam
from torch.distributions.categorical import Categorical
import numpy as np
from .rl_algo import Rl_algo
__all__ = ['SimplestPolicyGradient']
class SimplestPolicyGradient(Rl_algo):
def __init__(self, env, hidden_layers, discret_actions=True, discret_spaces=True,
activation=nn.Tanh,
out_activation=nn.Identity,
lr = 0.01,
dis = 0.98,
batchsize = 5000):
super(SimplestPolicyGradient, self).__init__(env, discret_actions, discret_spaces, dis, lr)
self.hidden_layers = hidden_layers
self.model = self._mlp(self.obs_dim, self.hidden_layers, self.act_dim, activation, out_activation)
self.optimizer = Adam(self.model.parameters(), lr=lr)
self.batchsize = batchsize
def _mlp(self, inputs, hidden_layers, outputs, activation=nn.Tanh, out_activation=nn.Identity):
sizes = [inputs]+hidden_layers+[outputs]
layers = []
for j in range(len(sizes)-1):
layers += [nn.Linear(sizes[j], sizes[j+1]), activation() if j < len(sizes)-2 else out_activation()]
return nn.Sequential(*layers)
def _policy(self, obs):
output = self.model(obs)
return Categorical(logits=output)
def action(self, obs):
return self._policy(obs).sample().item()
def loss(self, obs, act, weights):
logp = self._policy(obs).log_prob(act)
return (-(logp * weights)).mean()
def render(self, aff=False):
if aff:
self.env.render()
def train_one_epoch(self):
obs = self.env.reset()
batch_obs = []
batch_act = []
batch_weights = []
ep_rewards = []
batch_returns = []
batch_lengths = []
# Forward
while True:
            # Render
self.render()
            # Choose the action
action = self.action(torch.as_tensor(obs, dtype=torch.float32))
            # Receive the new observation and the step result
new_obs, reward, done, _ = self.env.step(action)
            # Fill the batches
batch_obs.append(obs.copy())
batch_act.append(action)
ep_rewards.append(reward)
obs = new_obs
# end episode
if done:
ret = sum(ep_rewards)
length = len(ep_rewards)
batch_returns.append(ret)
batch_lengths.append(length)
batch_weights += [ret] * length
# reset
obs = self.env.reset()
done = False
ep_rewards = []
if len(batch_obs) > self.batchsize:
break
# Backward
self.optimizer.zero_grad()
loss = self.loss( torch.as_tensor(batch_obs, dtype=torch.float32),
torch.as_tensor(batch_act, dtype=torch.float32),
torch.as_tensor(batch_weights, dtype=torch.float32))
loss.backward()
self.optimizer.step()
return loss, batch_returns, batch_lengths
def _train(self, n_epochs):
ok = 0
for i in range(n_epochs):
loss, ret, lens = self.train_one_epoch()
print(f"{i}/{n_epochs} | Loss:{loss}, returns:{np.mean(ret)}, lengths:{np.mean(lens)}")
if np.mean(ret) == 500.0:
ok += 1
else:
ok = 0
if ok == 5:
break
def test_one(self):
done = False
obs = self.env.reset()
while not done:
self.env.render()
action = self.action(torch.as_tensor(obs, dtype=torch.float32))
            # Receive the new observation and the step result
obs, reward, done, _ = self.env.step(action)
def load_model(self, file_path):
self.model.load_state_dict(torch.load(file_path))
def save_model(self, file_path):
torch.save(self.model.state_dict(), file_path)
        print("Model saved:", file_path) | [
"numpy.mean",
"torch.as_tensor",
"torch.nn.Sequential",
"torch.load",
"torch.nn.Linear",
"torch.distributions.categorical.Categorical"
] | [((1215, 1237), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (1228, 1237), False, 'from torch import nn\n'), ((1319, 1345), 'torch.distributions.categorical.Categorical', 'Categorical', ([], {'logits': 'output'}), '(logits=output)\n', (1330, 1345), False, 'from torch.distributions.categorical import Categorical\n'), ((3041, 3088), 'torch.as_tensor', 'torch.as_tensor', (['batch_obs'], {'dtype': 'torch.float32'}), '(batch_obs, dtype=torch.float32)\n', (3056, 3088), False, 'import torch\n'), ((3118, 3165), 'torch.as_tensor', 'torch.as_tensor', (['batch_act'], {'dtype': 'torch.float32'}), '(batch_act, dtype=torch.float32)\n', (3133, 3165), False, 'import torch\n'), ((3195, 3246), 'torch.as_tensor', 'torch.as_tensor', (['batch_weights'], {'dtype': 'torch.float32'}), '(batch_weights, dtype=torch.float32)\n', (3210, 3246), False, 'import torch\n'), ((4172, 4193), 'torch.load', 'torch.load', (['file_path'], {}), '(file_path)\n', (4182, 4193), False, 'import torch\n'), ((1111, 1144), 'torch.nn.Linear', 'nn.Linear', (['sizes[j]', 'sizes[j + 1]'], {}), '(sizes[j], sizes[j + 1])\n', (1120, 1144), False, 'from torch import nn\n'), ((2055, 2096), 'torch.as_tensor', 'torch.as_tensor', (['obs'], {'dtype': 'torch.float32'}), '(obs, dtype=torch.float32)\n', (2070, 2096), False, 'import torch\n'), ((3615, 3627), 'numpy.mean', 'np.mean', (['ret'], {}), '(ret)\n', (3622, 3627), True, 'import numpy as np\n'), ((3925, 3966), 'torch.as_tensor', 'torch.as_tensor', (['obs'], {'dtype': 'torch.float32'}), '(obs, dtype=torch.float32)\n', (3940, 3966), False, 'import torch\n'), ((3559, 3571), 'numpy.mean', 'np.mean', (['ret'], {}), '(ret)\n', (3566, 3571), True, 'import numpy as np\n'), ((3583, 3596), 'numpy.mean', 'np.mean', (['lens'], {}), '(lens)\n', (3590, 3596), True, 'import numpy as np\n')] |
import pytest
pytestmark = [
pytest.mark.django_db,
pytest.mark.usefixtures('purchase'),
]
@pytest.fixture
def another_answer(another_answer, answer, even_another_user):
another_answer.parent = answer
another_answer.author = even_another_user
another_answer.save()
return another_answer
@pytest.fixture
def child_of_another_answer(mixer, question, another_answer, another_user, api):
return mixer.blend(
'homework.Answer',
question=question,
author=another_user,
parent=another_answer,
)
@pytest.fixture
def even_another_user(mixer):
return mixer.blend('users.User')
def test_no_descendants_by_default(api, answer):
got = api.get(f'/api/v2/homework/answers/{answer.slug}/')
assert got['descendants'] == []
def test_child_answers(api, answer, another_answer):
got = api.get(f'/api/v2/homework/answers/{answer.slug}/')
assert got['descendants'][0]['slug'] == str(another_answer.slug)
assert got['descendants'][0]['author']['first_name'] == another_answer.author.first_name
assert got['descendants'][0]['author']['last_name'] == another_answer.author.last_name
def test_multilevel_child_answers(api, answer, another_answer, child_of_another_answer):
child_of_another_answer.author = api.user # make child_of_another_answer accessible
child_of_another_answer.save()
got = api.get(f'/api/v2/homework/answers/{answer.slug}/')
assert got['descendants'][0]['slug'] == str(another_answer.slug)
assert got['descendants'][0]['descendants'][0]['slug'] == str(child_of_another_answer.slug)
assert got['descendants'][0]['descendants'][0]['descendants'] == []
@pytest.mark.usefixtures('child_of_another_answer')
def test_only_immediate_siblings_are_included(api, answer):
got = api.get(f'/api/v2/homework/answers/{answer.slug}/')
assert len(got['descendants']) == 1
| [
"pytest.mark.usefixtures"
] | [((1683, 1733), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""child_of_another_answer"""'], {}), "('child_of_another_answer')\n", (1706, 1733), False, 'import pytest\n'), ((61, 96), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""purchase"""'], {}), "('purchase')\n", (84, 96), False, 'import pytest\n')] |
# _*_ coding:UTF-8 _*_
'''
Use requests to POST the form, then parse the response with BeautifulSoup to obtain the decrypted result.
It is best to also add request headers / a Cookie(?).
'''
import re
import requests
# Note: the data POSTed with requests must be the original raw data,
# not urlencoded (advice from an author who got burned by this).
from bs4 import BeautifulSoup
class md5_online_crack(object):
def __init__(self,md5):
self.md5 = md5
def dmd5_online(self):
data = {'_VIEWRESOURSE':'c4c92e61011684fc23405bfd5ebc2b31','result':'c1fa08ee052e00e5b8e7527f9211d9c0453bc6f335a6181f2f58c7816f79278e75b69a1b7b15134cf0c8e85babf20959a886cf755794b796d9313ae57cbe48d6ac8eb7d2b168fff553584bff499fd06bd9dd0dbef033481c9a0609c9208ac7fe5449d50bd300580c2f85ed40a13f7b7cb52544a8a54f53c0cd6d65abf92be35e087578f55212be438fd6c238acaafbce81ef24d38e688395'}
data['md5'] = self.md5
r = requests.post('http://www.dmd5.com/md5-decrypter.jsp', data=data)
bsOBJ = BeautifulSoup(r.text,'html.parser')
nameList = bsOBJ.findAll('p')
for i in nameList:
m = re.search('解密结果:[0-9]*',str(i))
if m:
print(m.group(0))
def pmd5_online(self):
        # Data to POST in addition to the key
data = {'__VIEWSTATE':'/wEPDwUKMTM4NTE3OTkzOWRkP4hmXYtPPhcBjbupZdLOLfmeTK4=','__VIEWSTATEGENERATOR':'CA0B0334','__EVENTVALIDATION':'/wEWAwK75ZuyDwLigPTXCQKU9f3vAheUenitfEuJ6eGUVe2GyFzb7HKC','jiemi':'MD5解密'}
data['key'] = self.md5
r = requests.post('http://pmd5.com/#',data=data)
bsOBJ = BeautifulSoup(r.text,'html.parser')
nameList = bsOBJ.findAll('em')
i = str(nameList[1])
print(i[4:-5])
def test():
a = md5_online_crack('887BF855FE35AFA4598232AC82880463')
a.dmd5_online()
a.pmd5_online()
if __name__ == '__main__':
test()
| [
"bs4.BeautifulSoup",
"requests.post"
] | [((757, 822), 'requests.post', 'requests.post', (['"""http://www.dmd5.com/md5-decrypter.jsp"""'], {'data': 'data'}), "('http://www.dmd5.com/md5-decrypter.jsp', data=data)\n", (770, 822), False, 'import requests\n'), ((839, 875), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (852, 875), False, 'from bs4 import BeautifulSoup\n'), ((1352, 1397), 'requests.post', 'requests.post', (['"""http://pmd5.com/#"""'], {'data': 'data'}), "('http://pmd5.com/#', data=data)\n", (1365, 1397), False, 'import requests\n'), ((1413, 1449), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (1426, 1449), False, 'from bs4 import BeautifulSoup\n')] |
# -*- coding: utf-8 -*-
# Import all lib
from numpy import *
# Imports just a specific part of the lib and names it
from numpy import random as rr
# Imports only the lib name; its functions are accessed through the alias (np)
import numpy as np
np.random.rand(1) | [
"numpy.random.rand"
] | [((207, 224), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (221, 224), True, 'import numpy as np\n')] |
import uuid
from pyvultr.base_api import SupportHttpMethod
from pyvultr.v2 import UserInfo
from tests.v2 import BaseTestV2
class TestUser(BaseTestV2):
def test_list(self):
"""Test list users."""
with self._get("response/users") as mock:
_excepted_result = mock.python_body["users"][0]
excepted_result = UserInfo.from_dict(_excepted_result)
_real_result = self.api_v2.user.list(capacity=1)
real_result: UserInfo = _real_result.first()
self.assertEqual(mock.url, "https://api.vultr.com/v2/users")
self.assertEqual(mock.method, SupportHttpMethod.GET.value)
self.assertEqual(real_result, excepted_result)
def test_create(self):
"""Test create user."""
with self._post("response/user", expected_returned=UserInfo, status_code=201) as mock:
excepted_result = mock.python_body
name = "test_name"
email = "<EMAIL>"
password = "<PASSWORD>" # nosec: <PASSWORD>(hardcoded_password_string) by bandit
real_result: UserInfo = self.api_v2.user.create(name=name, email=email, password=password)
self.assertEqual(mock.url, "https://api.vultr.com/v2/users")
self.assertEqual(mock.method, SupportHttpMethod.POST.value)
self.assertEqual(mock.req_json["name"], name)
self.assertEqual(mock.req_json["email"], email)
self.assertEqual(mock.req_json["password"], password)
self.assertEqual(mock.status_code, 201)
self.assertEqual(real_result, excepted_result)
def test_get(self):
"""Test get user."""
with self._get("response/user", expected_returned=UserInfo) as mock:
excepted_result = mock.python_body
user_id = str(uuid.uuid4())
real_result: UserInfo = self.api_v2.user.get(user_id=user_id)
self.assertEqual(mock.url, f"https://api.vultr.com/v2/users/{user_id}")
self.assertEqual(mock.method, SupportHttpMethod.GET.value)
self.assertEqual(real_result, excepted_result)
def test_update(self):
"""Test update user."""
with self._patch(status_code=204) as mock:
user_id = str(uuid.uuid4())
password = "<PASSWORD>" # nosec: false B105(hardcoded_password_string) by bandit
real_result: UserInfo = self.api_v2.user.update(user_id, password=password)
self.assertEqual(mock.url, f"https://api.vultr.com/v2/users/{user_id}")
self.assertEqual(mock.method, SupportHttpMethod.PATCH.value)
self.assertEqual(mock.req_json["password"], password)
self.assertEqual(mock.status_code, 204)
self.assertIsNone(real_result)
def test_delete(self):
"""Test delete user."""
with self._delete(status_code=204) as mock:
user_id = str(uuid.uuid4())
self.api_v2.user.delete(user_id=user_id)
self.assertEqual(mock.url, f"https://api.vultr.com/v2/users/{user_id}")
self.assertEqual(mock.method, SupportHttpMethod.DELETE.value)
self.assertEqual(mock.status_code, 204)
| [
"pyvultr.v2.UserInfo.from_dict",
"uuid.uuid4"
] | [((350, 386), 'pyvultr.v2.UserInfo.from_dict', 'UserInfo.from_dict', (['_excepted_result'], {}), '(_excepted_result)\n', (368, 386), False, 'from pyvultr.v2 import UserInfo\n'), ((1817, 1829), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1827, 1829), False, 'import uuid\n'), ((2257, 2269), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2267, 2269), False, 'import uuid\n'), ((2910, 2922), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2920, 2922), False, 'import uuid\n')] |
import pyautogui
import time
import os
import sys
from Content.Back_End.Objects.WorkerSignals import WorkerSignals
from PyQt5.QtCore import QThread,QEventLoop,QTimer
class JobProcessor(QThread):
def __init__(self, joblist,language, parent=None):
super(JobProcessor, self).__init__()
self.joblist = joblist
self.parent = parent
self.language = language
print("Parent !", self.parent.parent())
self.signals = WorkerSignals()
self.searchBar = None
def resource_path(self,relative_path):
""" Get the absolute path to the resource, works for dev and for PyInstaller """
try:
# PyInstaller creates a temp folder and stores path in _MEIPASS
base_path = sys._MEIPASS
except Exception:
base_path = os.path.abspath(".\\Visual_Ressources\\"+self.language+"\\")
# "."
# 'Content\\Back_End\\'
return os.path.join(base_path, relative_path)
def pyqtsleep(self,time):
loop = QEventLoop()
QTimer.singleShot(time*1000, loop.exit)
loop.exec_()
def slowClick(self, lapse):
pyautogui.mouseDown()
self.pyqtsleep(lapse)
pyautogui.mouseUp()
def doubleClick(self):
pyautogui.mouseDown()
pyautogui.mouseUp()
pyautogui.mouseDown()
pyautogui.mouseUp()
def changeCurrentJobSpecs(self, job):
self.currentOufit = job.outfit
self.currentItem = job.item
self.currentQuantity = int(job.quantity)
self.currentMacro = job.macro
self.currentTimeStop = int(job.timeStop)
def testVisualCues(self):
self.fabricationButton = pyautogui.locateOnScreen(self.resource_path(
'fabricate.PNG'), confidence=0.9)
self.outfitButton = pyautogui.locateOnScreen(self.resource_path(
'outfit.PNG'), confidence=0.9)
self.searchBar = pyautogui.locateOnScreen(self.resource_path(
'search.PNG'), confidence=0.9)
print(self.fabricationButton)
print(self.outfitButton)
print(self.searchBar)
def notifityCraft(self,i):
self.signals.changeText.emit(str(self.currentItem) + " Crafted :" +str(i)+"/"+str(self.currentQuantity))
def AddLabel(self):
self.signals.addLabel.emit("New job")
def equipOutfit(self):
pyautogui.press("p")
self.pyqtsleep(1)
self.outfitButton = pyautogui.locateOnScreen(self.resource_path(
'outfit.PNG'), confidence=0.9)
pyautogui.click(pyautogui.center(self.outfitButton))
self.slowClick(0.4)
self.pyqtsleep(1)
self.currentOutfitPlace = pyautogui.locateOnScreen(self.resource_path(
'outfit_' + str(self.currentOufit) + '.PNG'), confidence=0.8)
pyautogui.click(self.currentOutfitPlace)
self.doubleClick()
self.pyqtsleep(1)
pyautogui.press("p")
def repairOutfit(self):
pyautogui.press("p")
self.pyqtsleep(1)
self.outfitButton = pyautogui.locateOnScreen(self.resource_path(
'outfit.PNG'), confidence=0.9)
self.pyqtsleep(0.5)
pyautogui.rightClick(x=self.outfitButton[0]-10,y=self.outfitButton[1]+50)
self.pyqtsleep(1)
self.repairButton = pyautogui.locateOnScreen(self.resource_path(
'repair.PNG'), confidence=0.9)
pyautogui.moveTo(pyautogui.center(self.repairButton))
self.slowClick(0.4)
self.pyqtsleep(1)
pyautogui.press("p")
self.pyqtsleep(1)
self.repairAllButton = pyautogui.locateOnScreen(self.resource_path(
'repair_all.PNG'), confidence=0.8)
pyautogui.moveTo(pyautogui.center(self.repairAllButton))
self.slowClick(0.4)
self.pyqtsleep(1)
def cleanSearchbar(self):
pyautogui.press("n")
pyautogui.click(10, 10)
def searchItem(self):
pyautogui.press("n")
self.pyqtsleep(1)
if self.searchBar == None:
self.searchBar = pyautogui.center(
pyautogui.locateOnScreen(self.resource_path('search.PNG'), confidence=0.9))
pyautogui.click(self.searchBar)
pyautogui.write(self.currentItem)
pyautogui.press("enter")
self.pyqtsleep(2)
pyautogui.click(pyautogui.center(
pyautogui.locateOnScreen(self.resource_path('item_found_color.PNG'), confidence=0.9)))
def adjustMaterialsQuality(self, job):
self.pyqtsleep(0.5)
highQualityLabelPlace = pyautogui.locateOnScreen(self.resource_path(
'HQ.PNG'), confidence=0.9)
self.pyqtsleep(0.5)
highQualityItemButtonsList = list(pyautogui.locateAllOnScreen(self.resource_path(
'quality_button.PNG'), confidence=0.9, region=(highQualityLabelPlace[0], highQualityLabelPlace[1], 30, 2000)))
for key, value in job.highQuality.items():
for i in range(0, value):
pyautogui.moveTo(pyautogui.center(
highQualityItemButtonsList[key]))
self.slowClick(0.2)
def fabricate(self):
self.fabricationButton = pyautogui.center(
pyautogui.locateOnScreen(self.resource_path('fabricate.PNG'), confidence=0.9))
pyautogui.click(self.fabricationButton)
pyautogui.click(self.fabricationButton)
self.pyqtsleep(3)
pyautogui.keyDown(self.currentMacro[0])
self.pyqtsleep(0.1)
pyautogui.keyDown(self.currentMacro[1])
self.pyqtsleep(0.1)
pyautogui.keyUp(self.currentMacro[0])
pyautogui.keyUp(self.currentMacro[1])
self.pyqtsleep(self.currentTimeStop)
def testActivityFeed(self):
self.AddLabel()
self.currentItem = "madrier"
self.currentQuantity = 10
for i in range(0,11):
self.notifityCraft(i)
self.pyqtsleep(0.2)
self.AddLabel()
self.currentItem = "lingot"
self.currentQuantity = 20
for i in range(0,21):
self.notifityCraft(i)
self.pyqtsleep(0.2)
def doJobs(self):
for job in self.joblist:
if job.item == "repair":
print("repair")
self.repairOutfit()
else:
self.AddLabel()
self.changeCurrentJobSpecs(job)
self.equipOutfit()
self.searchItem()
self.adjustMaterialsQuality(job)
for i in range(0, self.currentQuantity):
self.fabricate()
self.notifityCraft(i)
self.cleanSearchbar()
print("All jobs done !!!")
def run(self):
print("FFXIV Craft Manager : Online")
self.signals.addLabel.emit("---- Craft Begin ----")
self.pyqtsleep(5)
self.doJobs()
self.signals.addLabel.emit("---- Craft Ended ----")
| [
"pyautogui.write",
"pyautogui.press",
"pyautogui.rightClick",
"PyQt5.QtCore.QTimer.singleShot",
"Content.Back_End.Objects.WorkerSignals.WorkerSignals",
"pyautogui.mouseUp",
"os.path.join",
"pyautogui.keyDown",
"pyautogui.mouseDown",
"pyautogui.click",
"pyautogui.center",
"pyautogui.keyUp",
"os.path.abspath",
"PyQt5.QtCore.QEventLoop"
] | [((461, 476), 'Content.Back_End.Objects.WorkerSignals.WorkerSignals', 'WorkerSignals', ([], {}), '()\n', (474, 476), False, 'from Content.Back_End.Objects.WorkerSignals import WorkerSignals\n'), ((947, 985), 'os.path.join', 'os.path.join', (['base_path', 'relative_path'], {}), '(base_path, relative_path)\n', (959, 985), False, 'import os\n'), ((1032, 1044), 'PyQt5.QtCore.QEventLoop', 'QEventLoop', ([], {}), '()\n', (1042, 1044), False, 'from PyQt5.QtCore import QThread, QEventLoop, QTimer\n'), ((1053, 1094), 'PyQt5.QtCore.QTimer.singleShot', 'QTimer.singleShot', (['(time * 1000)', 'loop.exit'], {}), '(time * 1000, loop.exit)\n', (1070, 1094), False, 'from PyQt5.QtCore import QThread, QEventLoop, QTimer\n'), ((1159, 1180), 'pyautogui.mouseDown', 'pyautogui.mouseDown', ([], {}), '()\n', (1178, 1180), False, 'import pyautogui\n'), ((1219, 1238), 'pyautogui.mouseUp', 'pyautogui.mouseUp', ([], {}), '()\n', (1236, 1238), False, 'import pyautogui\n'), ((1275, 1296), 'pyautogui.mouseDown', 'pyautogui.mouseDown', ([], {}), '()\n', (1294, 1296), False, 'import pyautogui\n'), ((1305, 1324), 'pyautogui.mouseUp', 'pyautogui.mouseUp', ([], {}), '()\n', (1322, 1324), False, 'import pyautogui\n'), ((1333, 1354), 'pyautogui.mouseDown', 'pyautogui.mouseDown', ([], {}), '()\n', (1352, 1354), False, 'import pyautogui\n'), ((1363, 1382), 'pyautogui.mouseUp', 'pyautogui.mouseUp', ([], {}), '()\n', (1380, 1382), False, 'import pyautogui\n'), ((2381, 2401), 'pyautogui.press', 'pyautogui.press', (['"""p"""'], {}), "('p')\n", (2396, 2401), False, 'import pyautogui\n'), ((2820, 2860), 'pyautogui.click', 'pyautogui.click', (['self.currentOutfitPlace'], {}), '(self.currentOutfitPlace)\n', (2835, 2860), False, 'import pyautogui\n'), ((2922, 2942), 'pyautogui.press', 'pyautogui.press', (['"""p"""'], {}), "('p')\n", (2937, 2942), False, 'import pyautogui\n'), ((2980, 3000), 'pyautogui.press', 'pyautogui.press', (['"""p"""'], {}), "('p')\n", (2995, 3000), False, 'import pyautogui\n'), ((3188, 3266), 'pyautogui.rightClick', 'pyautogui.rightClick', ([], {'x': '(self.outfitButton[0] - 10)', 'y': '(self.outfitButton[1] + 50)'}), '(x=self.outfitButton[0] - 10, y=self.outfitButton[1] + 50)\n', (3208, 3266), False, 'import pyautogui\n'), ((3528, 3548), 'pyautogui.press', 'pyautogui.press', (['"""p"""'], {}), "('p')\n", (3543, 3548), False, 'import pyautogui\n'), ((3856, 3876), 'pyautogui.press', 'pyautogui.press', (['"""n"""'], {}), "('n')\n", (3871, 3876), False, 'import pyautogui\n'), ((3886, 3909), 'pyautogui.click', 'pyautogui.click', (['(10)', '(10)'], {}), '(10, 10)\n', (3901, 3909), False, 'import pyautogui\n'), ((3945, 3965), 'pyautogui.press', 'pyautogui.press', (['"""n"""'], {}), "('n')\n", (3960, 3965), False, 'import pyautogui\n'), ((4174, 4205), 'pyautogui.click', 'pyautogui.click', (['self.searchBar'], {}), '(self.searchBar)\n', (4189, 4205), False, 'import pyautogui\n'), ((4214, 4247), 'pyautogui.write', 'pyautogui.write', (['self.currentItem'], {}), '(self.currentItem)\n', (4229, 4247), False, 'import pyautogui\n'), ((4256, 4280), 'pyautogui.press', 'pyautogui.press', (['"""enter"""'], {}), "('enter')\n", (4271, 4280), False, 'import pyautogui\n'), ((5284, 5323), 'pyautogui.click', 'pyautogui.click', (['self.fabricationButton'], {}), '(self.fabricationButton)\n', (5299, 5323), False, 'import pyautogui\n'), ((5332, 5371), 'pyautogui.click', 'pyautogui.click', (['self.fabricationButton'], {}), '(self.fabricationButton)\n', (5347, 5371), False, 'import pyautogui\n'), ((5406, 5445), 'pyautogui.keyDown', 
'pyautogui.keyDown', (['self.currentMacro[0]'], {}), '(self.currentMacro[0])\n', (5423, 5445), False, 'import pyautogui\n'), ((5482, 5521), 'pyautogui.keyDown', 'pyautogui.keyDown', (['self.currentMacro[1]'], {}), '(self.currentMacro[1])\n', (5499, 5521), False, 'import pyautogui\n'), ((5558, 5595), 'pyautogui.keyUp', 'pyautogui.keyUp', (['self.currentMacro[0]'], {}), '(self.currentMacro[0])\n', (5573, 5595), False, 'import pyautogui\n'), ((5604, 5641), 'pyautogui.keyUp', 'pyautogui.keyUp', (['self.currentMacro[1]'], {}), '(self.currentMacro[1])\n', (5619, 5641), False, 'import pyautogui\n'), ((2568, 2603), 'pyautogui.center', 'pyautogui.center', (['self.outfitButton'], {}), '(self.outfitButton)\n', (2584, 2603), False, 'import pyautogui\n'), ((3429, 3464), 'pyautogui.center', 'pyautogui.center', (['self.repairButton'], {}), '(self.repairButton)\n', (3445, 3464), False, 'import pyautogui\n'), ((3723, 3761), 'pyautogui.center', 'pyautogui.center', (['self.repairAllButton'], {}), '(self.repairAllButton)\n', (3739, 3761), False, 'import pyautogui\n'), ((816, 880), 'os.path.abspath', 'os.path.abspath', (["('.\\\\Visual_Ressources\\\\' + self.language + '\\\\')"], {}), "('.\\\\Visual_Ressources\\\\' + self.language + '\\\\')\n", (831, 880), False, 'import os\n'), ((5000, 5049), 'pyautogui.center', 'pyautogui.center', (['highQualityItemButtonsList[key]'], {}), '(highQualityItemButtonsList[key])\n', (5016, 5049), False, 'import pyautogui\n')] |
# On this example we are going to increment a counter on each
# page load. On normal conditions the session wouldn't expire
# until the user closed the browser, so the counter will never
# get reset to 0, but we are going to set a timeout for the
# session, so after that time passes, the counter will be 0
import os
# We'll use timedelta for the time related operations
from datetime import timedelta
# We need the session object to be able to store session variables
# and we'll render an html template, so we also need render_template
from flask import Flask, session, render_template
app = Flask(__name__)
# Generate a secret random key for the session
app.secret_key = os.urandom(24)
# Set the timeout for our session to 10 seconds
# The session will be lost after 10 seconds with no interaction
# from the user.
# +INFO: http://flask.pocoo.org/docs/api/#flask.Flask.permanent_session_lifetime
app.permanent_session_lifetime = timedelta(seconds=10)
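# The same lifetime can also be set through the configuration key that backs this
# attribute (an equivalent sketch): app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(seconds=10)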
# Define a route for the webserver
@app.route('/')
def index():
# For the timeout/session lifetime config to work we need
# to make the sessions permanent. It's false by default
# +INFO: http://flask.pocoo.org/docs/api/#flask.session.permanent
session.permanent = True
# On each page load we are going to increment a counter
# stored on the session data.
try:
session['counter'] += 1
except KeyError:
session['counter'] = 1
return "Number of reloads on the current session: %d" % session['counter']
if __name__ == '__main__':
app.run(
host="0.0.0.0",
port=int("80")
)
| [
"os.urandom",
"datetime.timedelta",
"flask.Flask"
] | [((595, 610), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (600, 610), False, 'from flask import Flask, session, render_template\n'), ((676, 690), 'os.urandom', 'os.urandom', (['(24)'], {}), '(24)\n', (686, 690), False, 'import os\n'), ((934, 955), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (943, 955), False, 'from datetime import timedelta\n')] |
from setuptools import setup
setup(
name='cdios',
version='1.0.0',
description='A minimal C frontend to Diospyros',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/cucapra/diospyros',
license='MIT',
platforms='ALL',
install_requires=['click'],
py_modules=['cdios'],
entry_points={
'console_scripts': [
'cdios = cdios:cdios',
],
},
classifiers=[
'Environment :: Console',
'Programming Language :: Python :: 3',
],
) | [
"setuptools.setup"
] | [((30, 443), 'setuptools.setup', 'setup', ([], {'name': '"""cdios"""', 'version': '"""1.0.0"""', 'description': '"""A minimal C frontend to Diospyros"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/cucapra/diospyros"""', 'license': '"""MIT"""', 'platforms': '"""ALL"""', 'install_requires': "['click']", 'py_modules': "['cdios']", 'entry_points': "{'console_scripts': ['cdios = cdios:cdios']}", 'classifiers': "['Environment :: Console', 'Programming Language :: Python :: 3']"}), "(name='cdios', version='1.0.0', description=\n 'A minimal C frontend to Diospyros', author='<NAME>', author_email=\n '<EMAIL>', url='https://github.com/cucapra/diospyros', license='MIT',\n platforms='ALL', install_requires=['click'], py_modules=['cdios'],\n entry_points={'console_scripts': ['cdios = cdios:cdios']}, classifiers=\n ['Environment :: Console', 'Programming Language :: Python :: 3'])\n", (35, 443), False, 'from setuptools import setup\n')] |
import hashlib
import html
import json
from bson.objectid import ObjectId
from flask import (
render_template, redirect, url_for, jsonify, request, Response
)
from ..utils.helpers import now_time, paranoid_clean
from flask import current_app as app
from flask_login import login_required, current_user
from google_alerts import GoogleAlerts
from . import core
from .. import mongo, celery, logger
from .forms import MonitorForm
@core.route('/monitors/add-monitor', methods=['POST'])
@login_required
def add_monitor():
    """Create a new monitor and its Google Alert from the submitted form."""
form = MonitorForm(request.form)
if not form.validate():
errors = ','.join([value[0] for value in form.errors.values()])
return jsonify({'errors': errors})
g = mongo.db[app.config['GLOBAL_COLLECTION']]
gdata = g.find_one(dict(), {'_id': 0})
ga = GoogleAlerts(gdata['email'], gdata['password'])
ga.authenticate()
options = {'delivery': 'RSS', 'exact': form.type.data == 'Exact'}
response = ga.create(form.term.data, options)
if len(response) > 0:
response = response[0]
monitors = mongo.db[app.config['MONITORS_COLLECTION']]
to_hash = form.type.data.encode('utf-8') + form.term.data.encode('utf-8')
item = {'term': form.term.data,
'exact': form.type.data == 'Exact',
'tags': form.tags.data.split(','),
'category': form.category.data.lower(),
'username': current_user.get_id(),
'created': now_time(),
'active': True,
'metadata': response,
'hits': 0,
'hashed': hashlib.sha256(to_hash).hexdigest(),
'checked': now_time()}
_id = monitors.insert(item)
return redirect(url_for('core.root'))
@core.route('/monitors', methods=['GET'])
@login_required
def get_monitor_details():
    """Return details and matched articles for a single monitor."""
monitor_id = paranoid_clean(request.args.get('id'))
monitors = mongo.db[app.config['MONITORS_COLLECTION']]
monitor = monitors.find_one({'hashed': monitor_id}, {'_id': 0})
if not monitor:
return jsonify({'success': False, 'error': 'Monitor was not found.'})
articles = mongo.db[app.config['ARTICLES_COLLECTION']]
link = monitor['metadata']['rss_link']
articles = list(articles.find({'feed_source': link}, {'_id': 0}))
for idx, item in enumerate(articles):
articles[idx]['title'] = html.unescape(item['title'])
articles[idx]['date'] = item['collected'][:10]
articles.sort(key=lambda x: x['collected'], reverse=True)
return jsonify({'success': True, 'monitor': monitor, 'articles': articles})
@core.route('/monitors/list', methods=['GET'])
@login_required
def get_monitors():
    """Render the monitors page."""
monitors = mongo.db[app.config['MONITORS_COLLECTION']]
results = monitors.find({'active': True}, {'_id': 0})
results = [x for x in results]
results.sort(key=lambda x: x['hits'], reverse=True)
return render_template('monitors.html', monitors=results)
@core.route('/export/<term_id>', methods=['GET'])
@login_required
def export_monitor(term_id):
"""Export monitor article matches."""
term_id = paranoid_clean(term_id)
monitors = mongo.db[app.config['MONITORS_COLLECTION']]
result = monitors.find_one({'hashed': term_id}, {'_id': 0})
articles = mongo.db[app.config['ARTICLES_COLLECTION']]
results = articles.find({'feed_source': result['metadata']['rss_link']}, {'_id': 0})
to_write = list()
for item in results:
del item['tokens']
to_write.append(item)
content = json.dumps(to_write, indent=4, sort_keys=True)
file_name = "chirp_%s_%s.json" % (result['term'], result['checked'][:10])
headers = {'Content-Disposition': 'attachment;filename=%s' % file_name}
return Response(content, mimetype='application/json', headers=headers)
@core.route('/monitor/<term_id>/', methods=['GET'])
@login_required
def adjust_monitor(term_id):
    """Archive or delete a monitor."""
term_id = paranoid_clean(term_id)
monitors = mongo.db[app.config['MONITORS_COLLECTION']]
articles = mongo.db[app.config['ARTICLES_COLLECTION']]
result = monitors.find_one({'hashed': term_id}, {'_id': 0})
action = request.args.get('action')
if action == 'archive':
monitors.update({'hashed': term_id}, {'$set': {'active': False}})
response = {'success': True}
mid = result['metadata']['monitor_id']
celery.send_task('remove_monitor', kwargs={'monitor_id': mid})
elif action == 'delete':
monitors.remove({'hashed': term_id})
articles.remove({'feed_source': result['metadata']['rss_link']})
mid = result['metadata']['monitor_id']
celery.send_task('remove_monitor', kwargs={'monitor_id': mid})
response = {'success': True}
else:
response = {'success': False, 'error': 'Action was invalid'}
return jsonify(response)
@core.route('/async-rss')
@login_required
def trigger_rss():
"""Run an async job in the background."""
logger.debug("Executing the heartbeat task and returning")
celery.send_task('process_all_rss', kwargs={'reprocess': False})
return render_template('index.html', name="HEARTBEAT")
@core.route('/async-reprocess')
@login_required
def reprocess_all_feeds():
"""Run an async job in the background."""
logger.debug("Executing the heartbeat task and returning")
celery.send_task('process_all_rss', kwargs={'reprocess': True})
return render_template('index.html', name="HEARTBEAT")
| [
"flask.render_template",
"flask.request.args.get",
"hashlib.sha256",
"flask_login.current_user.get_id",
"json.dumps",
"html.unescape",
"flask.url_for",
"google_alerts.GoogleAlerts",
"flask.Response",
"flask.jsonify"
] | [((840, 887), 'google_alerts.GoogleAlerts', 'GoogleAlerts', (["gdata['email']", "gdata['password']"], {}), "(gdata['email'], gdata['password'])\n", (852, 887), False, 'from google_alerts import GoogleAlerts\n'), ((2547, 2615), 'flask.jsonify', 'jsonify', (["{'success': True, 'monitor': monitor, 'articles': articles}"], {}), "({'success': True, 'monitor': monitor, 'articles': articles})\n", (2554, 2615), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((2953, 3003), 'flask.render_template', 'render_template', (['"""monitors.html"""'], {'monitors': 'results'}), "('monitors.html', monitors=results)\n", (2968, 3003), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((3572, 3618), 'json.dumps', 'json.dumps', (['to_write'], {'indent': '(4)', 'sort_keys': '(True)'}), '(to_write, indent=4, sort_keys=True)\n', (3582, 3618), False, 'import json\n'), ((3784, 3847), 'flask.Response', 'Response', (['content'], {'mimetype': '"""application/json"""', 'headers': 'headers'}), "(content, mimetype='application/json', headers=headers)\n", (3792, 3847), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((4214, 4240), 'flask.request.args.get', 'request.args.get', (['"""action"""'], {}), "('action')\n", (4230, 4240), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((4892, 4909), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (4899, 4909), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((5162, 5209), 'flask.render_template', 'render_template', (['"""index.html"""'], {'name': '"""HEARTBEAT"""'}), "('index.html', name='HEARTBEAT')\n", (5177, 5209), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((5475, 5522), 'flask.render_template', 'render_template', (['"""index.html"""'], {'name': '"""HEARTBEAT"""'}), "('index.html', name='HEARTBEAT')\n", (5490, 5522), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((709, 736), 'flask.jsonify', 'jsonify', (["{'errors': errors}"], {}), "({'errors': errors})\n", (716, 736), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((1431, 1452), 'flask_login.current_user.get_id', 'current_user.get_id', ([], {}), '()\n', (1450, 1452), False, 'from flask_login import login_required, current_user\n'), ((1720, 1740), 'flask.url_for', 'url_for', (['"""core.root"""'], {}), "('core.root')\n", (1727, 1740), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((1894, 1916), 'flask.request.args.get', 'request.args.get', (['"""id"""'], {}), "('id')\n", (1910, 1916), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((2080, 2142), 'flask.jsonify', 'jsonify', (["{'success': False, 'error': 'Monitor was not found.'}"], {}), "({'success': False, 'error': 'Monitor was not found.'})\n", (2087, 2142), False, 'from flask import render_template, redirect, url_for, jsonify, request, Response\n'), ((2390, 2418), 'html.unescape', 'html.unescape', (["item['title']"], {}), "(item['title'])\n", (2403, 2418), False, 'import html\n'), ((1596, 1619), 'hashlib.sha256', 'hashlib.sha256', (['to_hash'], {}), '(to_hash)\n', (1610, 1619), False, 'import hashlib\n')] |
from random import sample
from enum import IntEnum
class Item(IntEnum):
BLANK = -1
BOMB = 0
RUPOOR = -20
GREEN = 1
BLUE = 5
RED = 20
SILVER = 100
GOLDEN = 300
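# item_danger[n] is the rupee revealed on a safe cell with n adjacent bombs/rupoors (n = 0..8)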
item_danger = [ Item.GREEN, Item.BLUE, Item.BLUE, Item.RED, Item.RED,
Item.SILVER, Item.SILVER, Item.GOLDEN, Item.GOLDEN ]
item_code = {
Item.BLANK : " ",
Item.GREEN : "G",
Item.BLUE : "B",
Item.RED : "R",
Item.SILVER : "S",
Item.GOLDEN : "*",
Item.BOMB : "X",
Item.RUPOOR : "-"
}
class CellState(IntEnum):
COVERED = 0
UNCOVERED = 1
class Board():
__board = []
__playboard = []
__width = 0
__height = 0
__bombs = 0
__rupoors = 0
def __init__(self, width, height, bombs, rupoors):
self.__width = width
self.__height = height
self.__bombs = bombs
self.__rupoors = rupoors
board_coordinates = [(x, y) for x in range(0,self.__height) for y in range(0, self.__width)]
bombs_coordinates = sample(board_coordinates, self.__bombs)
remaining_coordinates = [a for a in board_coordinates if a not in bombs_coordinates]
rupoors_coordinates = sample(remaining_coordinates, self.__rupoors)
self.__playboard = [[CellState.COVERED for i in range(0,self.__width)] for j in range(0,self.__height)]
self.__board = [[Item.BLANK for i in range(0,self.__width)] for j in range(0,self.__height)]
for bomb in bombs_coordinates:
x,y = bomb
self.__board[x][y] = Item.BOMB
for rupoor in rupoors_coordinates:
x,y = rupoor
self.__board[x][y] = Item.RUPOOR
for x,row in enumerate(self.__board):
for y,cell in enumerate(row):
neighbors = [(x-1,y),(x-1,y+1),(x,y-1),(x+1,y-1),(x+1,y),(x+1,y+1),(x,y+1),(x-1,y-1)]
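                # count bombs/rupoors among the 8 surrounding cells, ignoring out-of-bounds neighbors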
hazard = 0
for n in neighbors:
nx, ny = n
if 0 <= nx <= self.__height-1 and 0 <= ny <= self.__width-1:
item = self.__board[nx][ny]
if item == Item.BOMB or item == Item.RUPOOR:
hazard +=1
if cell != Item.BOMB and cell != Item.RUPOOR:
self.__board[x][y] = item_danger[hazard]
def dig(self,x,y):
if x >= self.__height or y >= self.__width or x < 0 or y < 0:
raise IndexError("Cell is out of bounds. Can't dig there")
item = self.__board[x][y]
self.__playboard[x][y] = CellState.UNCOVERED
return item
def cell_state(self,x,y):
return self.__playboard[x][y]
def get_board(self):
board = [[(CellState.COVERED, Item.BLANK) for i in range(0,self.__width)] for j in range(0,self.__height)]
for x,row in enumerate(self.__playboard):
for y,cell in enumerate(row):
if cell == CellState.UNCOVERED:
board[x][y] = (CellState.UNCOVERED, self.__board[x][y])
return board
def get_shape(self):
return (self.__width, self.__height)
def get_hazards(self):
return (self.__bombs, self.__rupoors)
def __str__(self):
ret = ""
board = self.get_board()
for x,row in enumerate(board):
for y,cell in enumerate(row):
state, item = cell
if state == CellState.UNCOVERED:
ret += f"[{item_code[item]}]"
else:
ret += "[ ]"
ret += "\n"
return ret | [
"random.sample"
] | [((1014, 1053), 'random.sample', 'sample', (['board_coordinates', 'self.__bombs'], {}), '(board_coordinates, self.__bombs)\n', (1020, 1053), False, 'from random import sample\n'), ((1177, 1222), 'random.sample', 'sample', (['remaining_coordinates', 'self.__rupoors'], {}), '(remaining_coordinates, self.__rupoors)\n', (1183, 1222), False, 'from random import sample\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# 10. Brief tour of the Python standard library (http://www.pythondoc.com/pythontutorial3/stdlib.html)
# Operating system interface: os
# File and directory management: shutil
# File wildcards: glob
# String processing: re
# Mathematics: math
# Random numbers: random
# Internet access: urllib.request
# Dates and times: datetime
# Data compression: zlib, gzip, bz2, lzma, zipfile, tarfile
# Performance measurement: timeit
# Profiling tools: profile and pstats
# Quality control: doctest (scans a module and runs the tests embedded in its docstrings)
# Remote procedure calls: xmlrpc.client and xmlrpc.server
# Managing email messages: email
# Data interchange formats: xml.dom and xml.sax
# Comma-separated values: csv
# Internationalization: gettext, locale and codecs
import os
import shutil
import glob
import re
import math
import random
import datetime
import zlib
import doctest
import unittest
from urllib.request import urlopen
from datetime import date
from timeit import Timer
class TestStatisticalFunctions(unittest.TestCase):
def test_average(self):
self.assertEqual(average([20, 30, 70]), 40.0)
# self.assertEqual(round(average([1, 5, 7])), 4.3)
with self.assertRaises(ZeroDivisionError):
average([])
with self.assertRaises(TypeError):
average(20, 30, 70)
def average(values):
"""Computes the arithmetic mean of a list of numbers.
>>> print(average([20, 30, 70]))
40.0
"""
return sum(values) / len(values)
if __name__ == '__main__':
print(os.getcwd()) # Return the current working directory
os.chdir('/Users/Pan/Language/Python')
    # re module
print(re.findall(r'\bf[a-z]*', 'which foot or hand fell fastest'))
print('tea for too'.replace('too', 'two'))
    # math module
print(math.cos(math.pi / 4.0))
print(math.log(1024, 2))
    # random module
print(random.choice(['apple', 'pear', 'banana']))
print(random.sample(range(100), 10)) # sampling without replacement
print(random.random()) # random float
print(random.randrange(6)) # random integer chosen from range(6)
    # urllib.request module
# for line in urlopen('http://tycho.usno.navy.mil/cgi-bin/timer.pl'):
# line = line.decode('utf-8') # Decoding the binary data to text
# if 'EST' in line or 'EDT' in line: # look for Eastern Time
# print(line)
    # datetime: dates and times
# dates are easily constructed and formatted
now = date.today()
print(now)
print(datetime.date(2013, 12, 3))
print(now.strftime("%m-%d-%y. %d %b %Y is a %A on the %d day of %B."))
# dates support calendar arithmetic
birthday = date(1964, 7, 31)
age = now - birthday
print(age.days)
    # Data compression: zlib, gzip, bz2, lzma, zipfile, tarfile
s = b'witch which has which witches wrist watch'
print(len(s))
t = zlib.compress(s)
print(len(t))
print(zlib.decompress(t))
print(zlib.crc32(s))
    # Performance measurement: timeit
print(Timer('t=a; a=b; b=t', 'a=1; b=2').timeit())
print(Timer('a,b = b,a', 'a=1; b=2').timeit())
    # Quality control: doctest
doctest.testmod() # automatically validate the embedded tests
unittest.main()
# os.system('mkdir test')
# dir(os)
# help(os)
# dir(glob)
# help(glob)
# print(glob.glob('/Users/Pan/Language/Python/Demo/*.py', recursive=True))
| [
"random.choice",
"timeit.Timer",
"random.randrange",
"zlib.crc32",
"zlib.compress",
"os.chdir",
"os.getcwd",
"re.findall",
"math.cos",
"doctest.testmod",
"datetime.date",
"math.log",
"random.random",
"unittest.main",
"datetime.date.today",
"zlib.decompress"
] | [((1317, 1355), 'os.chdir', 'os.chdir', (['"""/Users/Pan/Language/Python"""'], {}), "('/Users/Pan/Language/Python')\n", (1325, 1355), False, 'import os\n'), ((2175, 2187), 'datetime.date.today', 'date.today', ([], {}), '()\n', (2185, 2187), False, 'from datetime import date\n'), ((2371, 2388), 'datetime.date', 'date', (['(1964)', '(7)', '(31)'], {}), '(1964, 7, 31)\n', (2375, 2388), False, 'from datetime import date\n'), ((2560, 2576), 'zlib.compress', 'zlib.compress', (['s'], {}), '(s)\n', (2573, 2576), False, 'import zlib\n'), ((2799, 2816), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (2814, 2816), False, 'import doctest\n'), ((2868, 2883), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2881, 2883), False, 'import unittest\n'), ((1258, 1269), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1267, 1269), False, 'import os\n'), ((1378, 1437), 're.findall', 're.findall', (['"""\\\\bf[a-z]*"""', '"""which foot or hand fell fastest"""'], {}), "('\\\\bf[a-z]*', 'which foot or hand fell fastest')\n", (1388, 1437), False, 'import re\n'), ((1511, 1534), 'math.cos', 'math.cos', (['(math.pi / 4.0)'], {}), '(math.pi / 4.0)\n', (1519, 1534), False, 'import math\n'), ((1546, 1563), 'math.log', 'math.log', (['(1024)', '(2)'], {}), '(1024, 2)\n', (1554, 1563), False, 'import math\n'), ((1592, 1634), 'random.choice', 'random.choice', (["['apple', 'pear', 'banana']"], {}), "(['apple', 'pear', 'banana'])\n", (1605, 1634), False, 'import random\n'), ((1721, 1736), 'random.random', 'random.random', ([], {}), '()\n', (1734, 1736), False, 'import random\n'), ((1766, 1785), 'random.randrange', 'random.randrange', (['(6)'], {}), '(6)\n', (1782, 1785), False, 'import random\n'), ((2213, 2239), 'datetime.date', 'datetime.date', (['(2013)', '(12)', '(3)'], {}), '(2013, 12, 3)\n', (2226, 2239), False, 'import datetime\n'), ((2605, 2623), 'zlib.decompress', 'zlib.decompress', (['t'], {}), '(t)\n', (2620, 2623), False, 'import zlib\n'), ((2635, 2648), 'zlib.crc32', 'zlib.crc32', (['s'], {}), '(s)\n', (2645, 2648), False, 'import zlib\n'), ((2679, 2713), 'timeit.Timer', 'Timer', (['"""t=a; a=b; b=t"""', '"""a=1; b=2"""'], {}), "('t=a; a=b; b=t', 'a=1; b=2')\n", (2684, 2713), False, 'from timeit import Timer\n'), ((2734, 2764), 'timeit.Timer', 'Timer', (['"""a,b = b,a"""', '"""a=1; b=2"""'], {}), "('a,b = b,a', 'a=1; b=2')\n", (2739, 2764), False, 'from timeit import Timer\n')] |
from containers.BinaryTree import BinaryTree, Node
from containers.BST import BST
def test__BST_super():
x = BST()
assert isinstance(x,BinaryTree)
# the very first thing to do whenever creating a data structure
# is to write a function to check if the invariant holds
# (in this case the BST property)
# and create test cases for whether that function works
def test__BST_is_bst_satisified0():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(1)
assert not bst.is_bst_satisfied()
def test__BST_is_bst_satisified1():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(-2)
bst.root.left.left = Node(-3)
bst.root.left.right = Node(-1)
bst.root.right = Node(2)
bst.root.right.left = Node(1)
bst.root.right.right = Node(-3)
assert not bst.is_bst_satisfied()
def test__BST_is_bst_satisfied2():
bst = BST()
bst.root = Node(-2)
bst.root.left = Node(-3)
bst.root.right = Node(-4)
assert not bst.is_bst_satisfied()
def test__BST_is_bst_satisified3():
bst = BST()
assert bst.is_bst_satisfied()
def test__BST_is_bst_satisified4():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(-1)
assert bst.is_bst_satisfied()
def test__BST_is_bst_satisified5():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(-2)
bst.root.left.left = Node(-3)
bst.root.left.right = Node(-1)
bst.root.right = Node(2)
bst.root.right.left = Node(1)
bst.root.right.right = Node(3)
assert bst.is_bst_satisfied()
def test__BST_is_bst_satisified6():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(-2)
bst.root.left.left = Node(-3)
bst.root.left.right = Node(-1)
bst.root.right = Node(2)
bst.root.right.left = Node(-1)
bst.root.right.right = Node(3)
assert not bst.is_bst_satisfied()
def test__BST_is_bst_satisified7():
bst = BST()
bst.root = Node(0)
bst.root.left = Node(-2)
bst.root.left.left = Node(-3)
bst.root.left.right = Node(1)
bst.root.right = Node(2)
bst.root.right.left = Node(1)
bst.root.right.right = Node(3)
assert not bst.is_bst_satisfied()
################################################################################
import random
import copy
from hypothesis import given
import hypothesis.strategies as st
ints = st.lists(st.integers())
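# hypothesis strategy: arbitrary lists of integers used as test input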
@given(xs=ints)
def test__BST_insert(xs):
xs = list(set(xs))
bst = BST()
for x in xs:
bst.insert(x)
assert x in bst.to_list('inorder')
assert bst.is_bst_satisfied()
@given(xs=ints)
def test__BST_insert_list(xs):
xs = list(set(xs))
bst = BST()
bst.insert_list(xs)
assert bst.is_bst_satisfied()
@given(xs=ints)
def test__BST___init__(xs):
xs = list(set(xs))
bst = BST(xs)
assert bst.is_bst_satisfied()
@given(xs=ints)
def test__BST___contains__1(xs):
'''
Checks that if a value is in the bst then __contains__ returns True
'''
xs = list(set(xs))
if len(xs)>0:
x = random.choice(xs)
bst = BST(xs)
assert x in bst
@given(xs=ints)
def test__BST___contains__2(xs):
'''
Checks that if a value is NOT in the bst then __contains__ returns False
'''
xs = list(set(xs))
if len(xs)>0:
while True:
x = random.uniform(min(xs)-1,max(xs)+1)
if x not in xs:
break
else:
x = 10
bst = BST(xs)
assert x not in bst
@given(xs=ints)
def test__BST_find_smallest(xs):
xs = list(set(xs))
if len(xs)>0:
x = min(xs)
bst = BST(xs)
assert x == bst.find_smallest()
@given(xs=ints)
def test__BST_find_largest(xs):
xs = list(set(xs))
if len(xs)>0:
x = max(xs)
bst = BST(xs)
assert x == bst.find_largest()
@given(xs=ints)
def test__BST_remove1(xs):
'''
This tests the remove function.
In order to test the remove function, we must be able to generate valid BSTs.
Therefore, you must have all the insert functionality completed before this test can pass.
'''
xs = list(set(xs))
bst = BST(xs)
while len(xs)>0:
x = random.choice(xs)
xs.remove(x)
assert x in bst
bst.remove(x)
assert x not in bst
assert bst.to_list('inorder')==sorted(xs)
assert bst.is_bst_satisfied()
@given(xs=ints)
def test__BST_remove2(xs):
'''
If we remove something from the BST that is not in the BST,
then the BST should remain unchanged.
'''
xs = list(set(xs))
bst = BST(xs)
y = 0
while y in xs:
y += 1
bst.remove(y)
assert bst.to_list('inorder')==sorted(xs)
@given(xs=ints, ys=ints)
def test__BST_remove_list1(xs,ys):
xs = list(set(xs))
bst = BST(xs)
bst.remove_list(ys)
for y in ys:
assert y not in bst
@given(xs=ints, ys=ints)
def test__BST_remove_list2(xs,ys):
xs = list(set(xs))
bst = BST(xs)
bst.remove_list(ys)
for y in ys:
if y in xs:
xs.remove(y)
assert bst.to_list('inorder') == sorted(xs)
@given(xs=ints,ys=ints)
def test__BST_remove_and_insert1(xs,ys):
'''
This test performs a mixture of both insertions and removals.
This ensures that there are no weird interactions between inserting and removing.
'''
xs = list(set(xs))
bst = BST(xs)
for y in ys:
bst.insert(y)
x = random.choice(bst.to_list('inorder'))
bst.remove(x)
assert bst.is_bst_satisfied()
@given(xs=ints,ys=ints)
def test__BST_remove_and_insert2(xs,ys):
'''
This test performs a mixture of both insertions and removals.
This ensures that there are no weird interactions between inserting and removing.
'''
xs = list(set(xs))
bst = BST(xs)
for y in ys:
bst.insert(y)
x = bst.find_largest()
bst.remove(x)
assert bst.is_bst_satisfied()
@given(xs=ints,ys=ints)
def test__BST_remove_and_insert3(xs,ys):
'''
This test performs a mixture of both insertions and removals.
This ensures that there are no weird interactions between inserting and removing.
'''
xs = list(set(xs))
bst = BST(xs)
for y in ys:
bst.insert(y)
x = bst.find_smallest()
bst.remove(x)
assert bst.is_bst_satisfied()
@given(xs=ints)
def test__BST_inorder_property(xs):
'''
The order we insert objects into a BST can affect the structure of the tree,
but it should NOT affect the list we get out from an inorder traversal.
(Recall that the inorder traversal of a BST should always be a sorted list.)
This test randomly shuffles the input list two different ways
and checks that both shufflings give the same output list.
This tests both the insertion functions and the traversal functions
    to ensure that there are no bad interactions between these functions.
'''
xs = list(set(xs))
xs1 = copy.copy(xs)
random.shuffle(xs1)
bst1 = BST(xs1)
xs2 = copy.copy(xs)
random.shuffle(xs2)
bst2 = BST(xs2)
assert bst1.to_list('inorder') == bst2.to_list('inorder')
@given(xs=ints)
def test__BST_eq(xs):
'''
This test is essentially the same as the previous one,
but tests the == operator specifically.
'''
xs = list(set(xs))
xs1 = copy.copy(xs)
random.shuffle(xs1)
bst1 = BST(xs1)
xs2 = copy.copy(xs)
random.shuffle(xs2)
bst2 = BST(xs2)
assert bst1 == bst2
| [
"random.choice",
"random.shuffle",
"hypothesis.strategies.integers",
"containers.BinaryTree.Node",
"containers.BST.BST",
"hypothesis.given",
"copy.copy"
] | [((2357, 2371), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (2362, 2371), False, 'from hypothesis import given\n'), ((2564, 2578), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (2569, 2578), False, 'from hypothesis import given\n'), ((2714, 2728), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (2719, 2728), False, 'from hypothesis import given\n'), ((2835, 2849), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (2840, 2849), False, 'from hypothesis import given\n'), ((3091, 3105), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (3096, 3105), False, 'from hypothesis import given\n'), ((3465, 3479), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (3470, 3479), False, 'from hypothesis import given\n'), ((3639, 3653), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (3644, 3653), False, 'from hypothesis import given\n'), ((3811, 3825), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (3816, 3825), False, 'from hypothesis import given\n'), ((4360, 4374), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (4365, 4374), False, 'from hypothesis import given\n'), ((4676, 4699), 'hypothesis.given', 'given', ([], {'xs': 'ints', 'ys': 'ints'}), '(xs=ints, ys=ints)\n', (4681, 4699), False, 'from hypothesis import given\n'), ((4848, 4871), 'hypothesis.given', 'given', ([], {'xs': 'ints', 'ys': 'ints'}), '(xs=ints, ys=ints)\n', (4853, 4871), False, 'from hypothesis import given\n'), ((5085, 5108), 'hypothesis.given', 'given', ([], {'xs': 'ints', 'ys': 'ints'}), '(xs=ints, ys=ints)\n', (5090, 5108), False, 'from hypothesis import given\n'), ((5510, 5533), 'hypothesis.given', 'given', ([], {'xs': 'ints', 'ys': 'ints'}), '(xs=ints, ys=ints)\n', (5515, 5533), False, 'from hypothesis import given\n'), ((5916, 5939), 'hypothesis.given', 'given', ([], {'xs': 'ints', 'ys': 'ints'}), '(xs=ints, ys=ints)\n', (5921, 5939), False, 'from hypothesis import given\n'), ((6323, 6337), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (6328, 6337), False, 'from hypothesis import given\n'), ((7135, 7149), 'hypothesis.given', 'given', ([], {'xs': 'ints'}), '(xs=ints)\n', (7140, 7149), False, 'from hypothesis import given\n'), ((115, 120), 'containers.BST.BST', 'BST', ([], {}), '()\n', (118, 120), False, 'from containers.BST import BST\n'), ((419, 424), 'containers.BST.BST', 'BST', ([], {}), '()\n', (422, 424), False, 'from containers.BST import BST\n'), ((440, 447), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (444, 447), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((468, 475), 'containers.BinaryTree.Node', 'Node', (['(1)'], {}), '(1)\n', (472, 475), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((561, 566), 'containers.BST.BST', 'BST', ([], {}), '()\n', (564, 566), False, 'from containers.BST import BST\n'), ((582, 589), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (586, 589), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((610, 618), 'containers.BinaryTree.Node', 'Node', (['(-2)'], {}), '(-2)\n', (614, 618), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((644, 652), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (648, 652), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((679, 687), 'containers.BinaryTree.Node', 'Node', (['(-1)'], {}), '(-1)\n', (683, 687), False, 'from containers.BinaryTree import 
BinaryTree, Node\n'), ((709, 716), 'containers.BinaryTree.Node', 'Node', (['(2)'], {}), '(2)\n', (713, 716), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((743, 750), 'containers.BinaryTree.Node', 'Node', (['(1)'], {}), '(1)\n', (747, 750), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((778, 786), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (782, 786), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((871, 876), 'containers.BST.BST', 'BST', ([], {}), '()\n', (874, 876), False, 'from containers.BST import BST\n'), ((892, 900), 'containers.BinaryTree.Node', 'Node', (['(-2)'], {}), '(-2)\n', (896, 900), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((921, 929), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (925, 929), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((951, 959), 'containers.BinaryTree.Node', 'Node', (['(-4)'], {}), '(-4)\n', (955, 959), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1045, 1050), 'containers.BST.BST', 'BST', ([], {}), '()\n', (1048, 1050), False, 'from containers.BST import BST\n'), ((1132, 1137), 'containers.BST.BST', 'BST', ([], {}), '()\n', (1135, 1137), False, 'from containers.BST import BST\n'), ((1153, 1160), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (1157, 1160), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1181, 1189), 'containers.BinaryTree.Node', 'Node', (['(-1)'], {}), '(-1)\n', (1185, 1189), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1271, 1276), 'containers.BST.BST', 'BST', ([], {}), '()\n', (1274, 1276), False, 'from containers.BST import BST\n'), ((1292, 1299), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (1296, 1299), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1320, 1328), 'containers.BinaryTree.Node', 'Node', (['(-2)'], {}), '(-2)\n', (1324, 1328), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1354, 1362), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (1358, 1362), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1389, 1397), 'containers.BinaryTree.Node', 'Node', (['(-1)'], {}), '(-1)\n', (1393, 1397), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1419, 1426), 'containers.BinaryTree.Node', 'Node', (['(2)'], {}), '(2)\n', (1423, 1426), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1453, 1460), 'containers.BinaryTree.Node', 'Node', (['(1)'], {}), '(1)\n', (1457, 1460), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1488, 1495), 'containers.BinaryTree.Node', 'Node', (['(3)'], {}), '(3)\n', (1492, 1495), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1577, 1582), 'containers.BST.BST', 'BST', ([], {}), '()\n', (1580, 1582), False, 'from containers.BST import BST\n'), ((1598, 1605), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (1602, 1605), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1626, 1634), 'containers.BinaryTree.Node', 'Node', (['(-2)'], {}), '(-2)\n', (1630, 1634), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1660, 1668), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (1664, 1668), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1695, 1703), 'containers.BinaryTree.Node', 'Node', (['(-1)'], {}), '(-1)\n', (1699, 1703), False, 'from containers.BinaryTree import BinaryTree, 
Node\n'), ((1725, 1732), 'containers.BinaryTree.Node', 'Node', (['(2)'], {}), '(2)\n', (1729, 1732), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1759, 1767), 'containers.BinaryTree.Node', 'Node', (['(-1)'], {}), '(-1)\n', (1763, 1767), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1795, 1802), 'containers.BinaryTree.Node', 'Node', (['(3)'], {}), '(3)\n', (1799, 1802), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1888, 1893), 'containers.BST.BST', 'BST', ([], {}), '()\n', (1891, 1893), False, 'from containers.BST import BST\n'), ((1909, 1916), 'containers.BinaryTree.Node', 'Node', (['(0)'], {}), '(0)\n', (1913, 1916), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1937, 1945), 'containers.BinaryTree.Node', 'Node', (['(-2)'], {}), '(-2)\n', (1941, 1945), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((1971, 1979), 'containers.BinaryTree.Node', 'Node', (['(-3)'], {}), '(-3)\n', (1975, 1979), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((2006, 2013), 'containers.BinaryTree.Node', 'Node', (['(1)'], {}), '(1)\n', (2010, 2013), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((2035, 2042), 'containers.BinaryTree.Node', 'Node', (['(2)'], {}), '(2)\n', (2039, 2042), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((2069, 2076), 'containers.BinaryTree.Node', 'Node', (['(1)'], {}), '(1)\n', (2073, 2076), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((2104, 2111), 'containers.BinaryTree.Node', 'Node', (['(3)'], {}), '(3)\n', (2108, 2111), False, 'from containers.BinaryTree import BinaryTree, Node\n'), ((2339, 2352), 'hypothesis.strategies.integers', 'st.integers', ([], {}), '()\n', (2350, 2352), True, 'import hypothesis.strategies as st\n'), ((2431, 2436), 'containers.BST.BST', 'BST', ([], {}), '()\n', (2434, 2436), False, 'from containers.BST import BST\n'), ((2643, 2648), 'containers.BST.BST', 'BST', ([], {}), '()\n', (2646, 2648), False, 'from containers.BST import BST\n'), ((2790, 2797), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (2793, 2797), False, 'from containers.BST import BST\n'), ((3430, 3437), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (3433, 3437), False, 'from containers.BST import BST\n'), ((4115, 4122), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (4118, 4122), False, 'from containers.BST import BST\n'), ((4557, 4564), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (4560, 4564), False, 'from containers.BST import BST\n'), ((4768, 4775), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (4771, 4775), False, 'from containers.BST import BST\n'), ((4940, 4947), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (4943, 4947), False, 'from containers.BST import BST\n'), ((5350, 5357), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (5353, 5357), False, 'from containers.BST import BST\n'), ((5775, 5782), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (5778, 5782), False, 'from containers.BST import BST\n'), ((6181, 6188), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (6184, 6188), False, 'from containers.BST import BST\n'), ((6938, 6951), 'copy.copy', 'copy.copy', (['xs'], {}), '(xs)\n', (6947, 6951), False, 'import copy\n'), ((6956, 6975), 'random.shuffle', 'random.shuffle', (['xs1'], {}), '(xs1)\n', (6970, 6975), False, 'import random\n'), ((6987, 6995), 'containers.BST.BST', 'BST', (['xs1'], {}), '(xs1)\n', (6990, 6995), False, 'from containers.BST import BST\n'), 
((7007, 7020), 'copy.copy', 'copy.copy', (['xs'], {}), '(xs)\n', (7016, 7020), False, 'import copy\n'), ((7025, 7044), 'random.shuffle', 'random.shuffle', (['xs2'], {}), '(xs2)\n', (7039, 7044), False, 'import random\n'), ((7056, 7064), 'containers.BST.BST', 'BST', (['xs2'], {}), '(xs2)\n', (7059, 7064), False, 'from containers.BST import BST\n'), ((7325, 7338), 'copy.copy', 'copy.copy', (['xs'], {}), '(xs)\n', (7334, 7338), False, 'import copy\n'), ((7343, 7362), 'random.shuffle', 'random.shuffle', (['xs1'], {}), '(xs1)\n', (7357, 7362), False, 'import random\n'), ((7374, 7382), 'containers.BST.BST', 'BST', (['xs1'], {}), '(xs1)\n', (7377, 7382), False, 'from containers.BST import BST\n'), ((7394, 7407), 'copy.copy', 'copy.copy', (['xs'], {}), '(xs)\n', (7403, 7407), False, 'import copy\n'), ((7412, 7431), 'random.shuffle', 'random.shuffle', (['xs2'], {}), '(xs2)\n', (7426, 7431), False, 'import random\n'), ((7443, 7451), 'containers.BST.BST', 'BST', (['xs2'], {}), '(xs2)\n', (7446, 7451), False, 'from containers.BST import BST\n'), ((3024, 3041), 'random.choice', 'random.choice', (['xs'], {}), '(xs)\n', (3037, 3041), False, 'import random\n'), ((3056, 3063), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (3059, 3063), False, 'from containers.BST import BST\n'), ((3588, 3595), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (3591, 3595), False, 'from containers.BST import BST\n'), ((3761, 3768), 'containers.BST.BST', 'BST', (['xs'], {}), '(xs)\n', (3764, 3768), False, 'from containers.BST import BST\n'), ((4156, 4173), 'random.choice', 'random.choice', (['xs'], {}), '(xs)\n', (4169, 4173), False, 'import random\n')] |
"""
Unit tests
"""
import unittest
from pyspark.sql import DataFrame, SparkSession
from pyspark.sql import functions as F
from metaframe import MetaFrame
class TestMetaFrame(unittest.TestCase):
"""MetaFrame unit tests"""
@classmethod
def setUpClass(cls):
cls.spark = SparkSession.builder.master("local[4]").getOrCreate()
def test_invent_dataframe(self):
"""
Test MetaFrame use cases
"""
df = self.spark.createDataFrame(
[
("p1", 6, "2019-12-31"),
("p2", 4, "2019-12-31"),
("p3", 10, "2019-12-31"),
("p4", 4, "2019-12-31"),
("p5", 3, "2019-12-31"),
("p6", 7, "2019-12-31"),
("p7", 18, "2019-12-31"),
("p8", 44, "2019-12-31"),
("p1", 6, "2020-01-01"),
("p2", 4, "2020-01-01"),
("p3", 10, "2020-01-01"),
("p4", 4, "2020-01-01"),
("p5", 3, "2020-01-01"),
("p6", 7, "2020-01-01"),
("p7", 18, "2020-01-01"),
("p8", 44, "2020-01-01"),
("p1", 16, "2020-01-02"),
("p2", 4, "2020-01-02"),
("p3", 3, "2020-01-02"),
("p4", 6, "2020-01-02"),
("p5", 7, "2020-01-02"),
("p6", 7, "2020-01-02"),
],
["product_id", "quantity", "date"],
)
metadata = {"foo": "bar"}
# Creation
df = MetaFrame(df, metadata)
self.assertIsInstance(df.df, DataFrame)
self.assertDictEqual(df.metadata, metadata)
# Non callables
self.assertListEqual(df.columns, ["product_id", "quantity", "date"])
self.assertIsInstance(df, MetaFrame)
self.assertDictEqual(df.metadata, metadata)
# Get item
self.assertEqual(str(df["product_id"]), str(F.col("product_id")))
self.assertIsInstance(df, MetaFrame)
self.assertDictEqual(df.metadata, metadata)
# Callables with no MetaFrame returns
df.show()
self.assertIsInstance(df, MetaFrame)
self.assertDictEqual(df.metadata, metadata)
        # Callables with MetaFrame returns
df = df.withColumn("new_col", F.lit(0))
self.assertIsInstance(df, MetaFrame)
self.assertDictEqual(df.metadata, metadata)
self.assertListEqual(df.columns, ["product_id", "quantity", "date", "new_col"])
# Selecting
df = df.select("product_id", "new_col")
self.assertDictEqual(df.metadata, metadata)
self.assertListEqual(df.columns, ["product_id", "new_col"])
# Set metadata
df = df.set_metadata(foo="baz")
self.assertDictEqual(df.metadata, {"foo": "baz"})
def test__set_pk_after(self):
"""
test _set_pk_after method of MetaFrame that automatically sets
primary key information after groupBy, dropDuplicates and distinct
"""
df = self.spark.createDataFrame(
[
("p1", 6, "2019-12-31"),
("p2", 4, "2019-12-31"),
("p3", 10, "2019-12-31"),
("p4", 4, "2019-12-31"),
("p5", 3, "2019-12-31"),
("p6", 7, "2019-12-31"),
("p7", 18, "2019-12-31"),
("p8", 44, "2019-12-31"),
("p1", 6, "2020-01-01"),
("p2", 4, "2020-01-01"),
("p3", 10, "2020-01-01"),
("p4", 4, "2020-01-01"),
("p5", 3, "2020-01-01"),
("p6", 7, "2020-01-01"),
("p7", 18, "2020-01-01"),
("p8", 44, "2020-01-01"),
("p1", 16, "2020-01-02"),
("p2", 4, "2020-01-02"),
("p3", 3, "2020-01-02"),
("p4", 6, "2020-01-02"),
("p5", 7, "2020-01-02"),
("p6", 7, "2020-01-02"),
],
["product_id", "quantity", "date"],
)
metadata = {"foo": "bar"}
df = MetaFrame(df, metadata)
# initially pk is empty
self.assertIsNone(df.primary_key)
# test groupBy & groupby
df_date: MetaFrame = df.groupBy("date").agg(F.sum("quantity").alias("quantity"))
self.assertEqual(df_date.primary_key, ["date"])
df_date: MetaFrame = df.groupby("date").agg(F.sum("quantity").alias("quantity"))
self.assertEqual(df_date.primary_key, ["date"])
# test dropDuplicates
df_products: MetaFrame = df.dropDuplicates(["product_id"])
self.assertEqual(df_products.primary_key, ["product_id"])
# test dropDuplicates with kwarg
df_products: MetaFrame = df.dropDuplicates(subset=["product_id"])
self.assertEqual(df_products.primary_key, ["product_id"])
# test dropDuplicates without arg
df_products: MetaFrame = df.select("product_id").dropDuplicates()
self.assertEqual(df_products.primary_key, ["product_id"])
# test drop_duplicates
df_products: MetaFrame = df.drop_duplicates(["product_id"])
self.assertEqual(df_products.primary_key, ["product_id"])
# test drop_duplicates with kwarg
df_products: MetaFrame = df.drop_duplicates(subset=["product_id"])
self.assertEqual(df_products.primary_key, ["product_id"])
# test drop_duplicates without arg
df_products: MetaFrame = df.select("product_id").drop_duplicates()
self.assertEqual(df_products.primary_key, ["product_id"])
# test distinct
df_products: MetaFrame = df.select("product_id").distinct()
self.assertEqual(df_products.primary_key, ["product_id"])
| [
"pyspark.sql.functions.lit",
"pyspark.sql.SparkSession.builder.master",
"pyspark.sql.functions.col",
"pyspark.sql.functions.sum",
"metaframe.MetaFrame"
] | [((1544, 1567), 'metaframe.MetaFrame', 'MetaFrame', (['df', 'metadata'], {}), '(df, metadata)\n', (1553, 1567), False, 'from metaframe import MetaFrame\n'), ((4090, 4113), 'metaframe.MetaFrame', 'MetaFrame', (['df', 'metadata'], {}), '(df, metadata)\n', (4099, 4113), False, 'from metaframe import MetaFrame\n'), ((2297, 2305), 'pyspark.sql.functions.lit', 'F.lit', (['(0)'], {}), '(0)\n', (2302, 2305), True, 'from pyspark.sql import functions as F\n'), ((292, 331), 'pyspark.sql.SparkSession.builder.master', 'SparkSession.builder.master', (['"""local[4]"""'], {}), "('local[4]')\n", (319, 331), False, 'from pyspark.sql import DataFrame, SparkSession\n'), ((1937, 1956), 'pyspark.sql.functions.col', 'F.col', (['"""product_id"""'], {}), "('product_id')\n", (1942, 1956), True, 'from pyspark.sql import functions as F\n'), ((4275, 4292), 'pyspark.sql.functions.sum', 'F.sum', (['"""quantity"""'], {}), "('quantity')\n", (4280, 4292), True, 'from pyspark.sql import functions as F\n'), ((4421, 4438), 'pyspark.sql.functions.sum', 'F.sum', (['"""quantity"""'], {}), "('quantity')\n", (4426, 4438), True, 'from pyspark.sql import functions as F\n')] |
from model.contact import Contact
import random
def test_delete_some_contact(app, db, check_ui):
if len(db.get_contact_list()) == 0:
app.contact.add_new(Contact(firstname="ivan", lastname="taranov", nickname="tara", company="book", address="lenina 1", homephone="555555", mobilephone="666666", workphone="777777", secondaryphone="888888", email="<EMAIL>", byear="1985"))
old_contacts = db.get_contact_list()
contact = random.choice(old_contacts)
app.contact.delete_contact_by_id(contact.id)
new_contacts = db.get_contact_list()
assert len(old_contacts) - 1 == len(new_contacts)
old_contacts.remove(contact)
assert old_contacts == new_contacts
if check_ui:
assert sorted(new_contacts, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(),
key=Contact.id_or_max)
| [
"random.choice",
"model.contact.Contact"
] | [((439, 466), 'random.choice', 'random.choice', (['old_contacts'], {}), '(old_contacts)\n', (452, 466), False, 'import random\n'), ((166, 391), 'model.contact.Contact', 'Contact', ([], {'firstname': '"""ivan"""', 'lastname': '"""taranov"""', 'nickname': '"""tara"""', 'company': '"""book"""', 'address': '"""lenina 1"""', 'homephone': '"""555555"""', 'mobilephone': '"""666666"""', 'workphone': '"""777777"""', 'secondaryphone': '"""888888"""', 'email': '"""<EMAIL>"""', 'byear': '"""1985"""'}), "(firstname='ivan', lastname='taranov', nickname='tara', company=\n 'book', address='lenina 1', homephone='555555', mobilephone='666666',\n workphone='777777', secondaryphone='888888', email='<EMAIL>', byear='1985')\n", (173, 391), False, 'from model.contact import Contact\n')] |
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fsolve
from ODEmethods.methods import sym_methods
from ODEmethods.symplectic import SymIntegrator
# Henon-Heiles galactic potential:
def hehe(t, y, par=None):
# y is of the form [x, vx, y, vy]
v = y[1::2]
ax1 = - y[0] - 2.*y[0]*y[2]
ay1 = - y[2] - np.power(y[0],2.) + np.power(y[2],2.)
return [v[0], ax1, v[1], ay1]
def ymax(y, e):
return 2.*e - y**2. + (2./3.)*y**3.
def poincare(si):
# poincare map for x=0
si = np.array(si)
y = []
for i in range(len(si)):
if (si[i,0] > 0. and si[i-1,0] <= 0.):
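            # upward crossing of x = 0 between steps i-1 and i: linearly interpolate the state to x = 0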
dx = si[i,0] - si[i-1,0]
y_poincare = si[i-1] + ((si[i] - si[i-1])/dx)*(0.-si[i-1,0])
y.append(y_poincare)
return np.array(y)
n = 20000
h = 0.2
eps = 1e-3
e_ar = [1./10., 1./8., (1./6.)-eps]
fig, axs = plt.subplots(1, len(e_ar))
plt.suptitle("Hénon-Heiles: Poincare section")
fig.set_size_inches(16, 5)
for i in range(len(e_ar)):
#print(i)
e = e_ar[i]
y_init = np.linspace(0., fsolve(ymax, 0.5, e), 20)
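    # initial x-velocity on the x = 0 section (vy = 0), from energy conservation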
v_init = np.sqrt(2.*e - np.power(y_init, 2.) + (2./3.)*np.power(y_init, 3.))
for y0, v0 in zip(y_init, v_init):
problem_si = SymIntegrator(sym_methods["VEFRL"], hehe, parameters=None)
si = problem_si.run(x0=0, y0=[0., v0, y0, 0.], stepnum=n, stepsize=h)
pmap = poincare(si[1]) # linear interpolation
axs[i].scatter(pmap[:,2], pmap[:,3], marker='.')
axs[i].set_title(r'$E=$'+str(e))
axs[i].set_xlabel(r'$y$')
axs[i].set_ylabel(r'$dy/dt$')
axs[i].set_aspect(1)
plt.subplots_adjust(left=0.1, right=0.95, bottom=0.1, top=0.9, wspace=0.25, hspace=0.3)
plt.savefig("henon_heiles.png")
plt.show() | [
"scipy.optimize.fsolve",
"matplotlib.pyplot.savefig",
"numpy.power",
"ODEmethods.symplectic.SymIntegrator",
"numpy.array",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.show"
] | [((898, 944), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['"""Hénon-Heiles: Poincare section"""'], {}), "('Hénon-Heiles: Poincare section')\n", (910, 944), True, 'import matplotlib.pyplot as plt\n'), ((1613, 1704), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0.1)', 'right': '(0.95)', 'bottom': '(0.1)', 'top': '(0.9)', 'wspace': '(0.25)', 'hspace': '(0.3)'}), '(left=0.1, right=0.95, bottom=0.1, top=0.9, wspace=0.25,\n hspace=0.3)\n', (1632, 1704), True, 'import matplotlib.pyplot as plt\n'), ((1701, 1732), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""henon_heiles.png"""'], {}), "('henon_heiles.png')\n", (1712, 1732), True, 'import matplotlib.pyplot as plt\n'), ((1733, 1743), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1741, 1743), True, 'import matplotlib.pyplot as plt\n'), ((527, 539), 'numpy.array', 'np.array', (['si'], {}), '(si)\n', (535, 539), True, 'import numpy as np\n'), ((781, 792), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (789, 792), True, 'import numpy as np\n'), ((363, 382), 'numpy.power', 'np.power', (['y[2]', '(2.0)'], {}), '(y[2], 2.0)\n', (371, 382), True, 'import numpy as np\n'), ((1059, 1079), 'scipy.optimize.fsolve', 'fsolve', (['ymax', '(0.5)', 'e'], {}), '(ymax, 0.5, e)\n', (1065, 1079), False, 'from scipy.optimize import fsolve\n'), ((1226, 1284), 'ODEmethods.symplectic.SymIntegrator', 'SymIntegrator', (["sym_methods['VEFRL']", 'hehe'], {'parameters': 'None'}), "(sym_methods['VEFRL'], hehe, parameters=None)\n", (1239, 1284), False, 'from ODEmethods.symplectic import SymIntegrator\n'), ((343, 362), 'numpy.power', 'np.power', (['y[0]', '(2.0)'], {}), '(y[0], 2.0)\n', (351, 362), True, 'import numpy as np\n'), ((1113, 1134), 'numpy.power', 'np.power', (['y_init', '(2.0)'], {}), '(y_init, 2.0)\n', (1121, 1134), True, 'import numpy as np\n'), ((1144, 1165), 'numpy.power', 'np.power', (['y_init', '(3.0)'], {}), '(y_init, 3.0)\n', (1152, 1165), True, 'import numpy as np\n')] |
# Copyright (C) 2014-2017 <NAME>, <NAME>, <NAME>, <NAME> (in alphabetic order)
#
# This file is part of OpenModal.
#
# OpenModal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# OpenModal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenModal. If not, see <http://www.gnu.org/licenses/>.
__author__ = 'Matjaz'
import sys, subprocess, os
try:
import DAQTask as dq
import daqprocess as dp
except NotImplementedError as nie:
dq = None
dp = None
from string import Template
import qtawesome as qta
# import DAQTask as dq
from PyQt5 import QtCore, QtGui, QtWidgets
import pyqtgraph as pg
import numpy as np
from OpenModal.gui.templates import COLOR_PALETTE
MAX_WINDOW_LENGTH = 1e9
class ExportSelector(QtWidgets.QWidget):
"""Measurement configuration window.
"""
def __init__(self, desktop_widget, status_bar, modaldata_object, *args, **kwargs):
super().__init__(*args, **kwargs)
self.status_bar = status_bar
self.modaldata_object = modaldata_object
self.desktop_widget = desktop_widget
self.data_types_list = ['nodes', 'lines', 'elements', 'measurements', 'analyses']
self.data_types_names = ['Nodes', 'Lines', 'Elements', 'Measurements', 'Analysis results']
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
p = self.palette()
p.setColor(self.backgroundRole(), QtCore.Qt.white)
self.setPalette(p)
self.setAutoFillBackground(True)
self.fields = dict()
self.save = QtWidgets.QPushButton('Done')
self.save.setObjectName('small')
# self.save.setDisabled(True)
self.dismiss = QtWidgets.QPushButton('Dismiss')
self.dismiss.setObjectName('small')
self.setGeometry(400, 50, 600, 800)
self.setContentsMargins(25, 0, 25, 25)
with open('gui/styles/style_template.css', 'r', encoding='utf-8') as fh:
src = Template(fh.read())
src = src.substitute(COLOR_PALETTE)
self.setStyleSheet(src)
hbox = QtWidgets.QHBoxLayout()
# hbox.addWidget(self.left_menu)
title_label = QtWidgets.QLabel('EXPORT DATA')
font = title_label.font()
font.setPointSize(8)
font.setFamily('Verdana')
title_label.setFont(font)
title_label.setContentsMargins(5, 0, 0, 25)
title_label.setObjectName('title_label')
models_group = QtWidgets.QGroupBox('Models')
models_group.setStyleSheet("QGroupBox {font-weight: bold;}")
models_grid = QtWidgets.QGridLayout()
models_grid.setContentsMargins(80, 20, 80, 20)
models_grid.setColumnStretch(1, 0)
models_grid.setColumnStretch(1, 2)
self.model_db = self.modaldata_object.tables['info']
models = ['{0} {1:.0f}'.format(model, model_id) for model, model_id in
zip(self.model_db.model_name, self.model_db.model_id)]
# models = ['Nosilec', 'Transformator', 'Jedro', 'Pralni stroj', 'Letalo']
self.model_checkbox_widgets = [QtWidgets.QCheckBox() for model in models]
model_label_widgets = [QtWidgets.QLabel(model) for model in models]
for i, (checkbox, label) in enumerate(zip(self.model_checkbox_widgets,model_label_widgets)):
models_grid.addWidget(checkbox, i//2, 0 + (i%2)*2)
models_grid.addWidget(label, i//2, 1 + (i%2)*2, alignment=QtCore.Qt.AlignLeft)
checkbox.setChecked(True)
models_group.setLayout(models_grid)
data_type_group = QtWidgets.QGroupBox('Data')
data_type_group.setStyleSheet("QGroupBox {font-weight: bold;}")
data_type_grid = QtWidgets.QGridLayout()
data_type_grid.setContentsMargins(80, 20, 80, 20)
data_type_grid.setColumnStretch(1, 0)
data_type_grid.setColumnStretch(1, 2)
data_types_keys = ['geometry', 'lines', 'elements_index', 'measurement_index', 'analysis_index']
data_types_populated = [True if self.modaldata_object.tables[key].size != 0 else False
for key in data_types_keys]
self.data_type_checkbox_widgets = [QtWidgets.QCheckBox() for data_type in self.data_types_names]
model_label_widgets = [QtWidgets.QLabel(data_type) for data_type in self.data_types_names]
for i, (checkbox, label) in enumerate(zip(self.data_type_checkbox_widgets,model_label_widgets)):
data_type_grid.addWidget(checkbox, i, 0)
data_type_grid.addWidget(label, i, 1, alignment=QtCore.Qt.AlignLeft)
if data_types_populated[i]:
checkbox.setChecked(True)
data_type_group.setLayout(data_type_grid)
other_group = QtWidgets.QGroupBox('Separate Files for Data Types (UFF)')
other_group.setStyleSheet("QGroupBox {font-weight: bold;}")
one_file_radio = QtWidgets.QRadioButton()
self.multiple_file_radio = QtWidgets.QRadioButton()
one_file_radio_label = QtWidgets.QLabel('No')
multiple_file_radio_label = QtWidgets.QLabel('Yes')
one_file_radio.setChecked(True)
h_files = QtWidgets.QGridLayout()
h_files.setContentsMargins(80, 20, 80, 20)
h_files.setColumnStretch(1, 0)
h_files.setColumnStretch(1, 2)
h_files.addWidget(self.multiple_file_radio, 0, 0)
h_files.addWidget(multiple_file_radio_label, 0, 1)
h_files.addWidget(one_file_radio, 0, 2)
h_files.addWidget(one_file_radio_label, 0, 3)
other_group.setLayout(h_files)
button_export_xls = QtWidgets.QPushButton(qta.icon('fa.line-chart', color='white', scale_factor=1.2),
' Export CSV')
button_export_xls.setObjectName('altpushbutton_')
button_export_xls.clicked.connect(self.ExportCSV)
button_export_xls_hbox = QtWidgets.QHBoxLayout()
button_export_xls_hbox.addStretch()
button_export_xls_hbox.addWidget(button_export_xls)
button_export_xls_hbox.addStretch()
button_export_unv = QtWidgets.QPushButton(qta.icon('fa.rocket', color='white', scale_factor=1.2),
' Export UFF')
button_export_unv.setObjectName('altpushbutton_')
button_export_unv.clicked.connect(self.ExportUff)
button_export_unv_hbox = QtWidgets.QHBoxLayout()
button_export_unv_hbox.addStretch()
button_export_unv_hbox.addWidget(button_export_unv)
button_export_unv_hbox.addStretch()
title_layout = QtWidgets.QHBoxLayout()
title_layout.addWidget(title_label)
title_layout.addStretch()
vbox = QtWidgets.QVBoxLayout()
vbox.addLayout(title_layout)
vbox.setContentsMargins(0, 1, 0, 0)
vbox.addLayout(hbox)
vbox.addWidget(models_group)
vbox.addWidget(data_type_group)
vbox.addWidget(other_group)
vbox.addStretch()
vbox.addLayout(button_export_xls_hbox)
vbox.addLayout(button_export_unv_hbox)
vbox.addStretch()
# button_layout = QtGui.QHBoxLayout()
# button_layout.addStretch()
# button_layout.addWidget(self.save)
# button_layout.addWidget(self.dismiss)
# vbox.addStretch()
# vbox.addLayout(button_layout)
vbox.setContentsMargins(20, 20, 20, 20)
vbox_outer = QtWidgets.QVBoxLayout()
vbox_outer.setContentsMargins(0, 0, 0, 0)
vbox_outer.addLayout(vbox)
vbox_outer.addWidget(QtWidgets.QSizeGrip(self.parent()), 0, QtCore.Qt.AlignBottom |QtCore.Qt.AlignRight)
self.setContentsMargins(0, 0, 0, 0)
self.setLayout(vbox_outer)
def paintEvent(self, event):
self.painter = QtGui.QPainter()
self.painter.begin(self)
self.painter.setBrush(QtCore.Qt.white)
self.painter.setPen(QtCore.Qt.lightGray)
# .. Draw a rectangle around the main window.
self.painter.drawRect(0, 0, self.width()-1, self.height()-1)
self.painter.fillRect(QtCore.QRect(1, 1, self.width()-2, 40), QtGui.QColor(245, 245, 245))
pen = QtGui.QPen()
pen.setWidth(2)
pen.setBrush(QtCore.Qt.gray)
pen.setCapStyle(QtCore.Qt.RoundCap)
pen.setJoinStyle(QtCore.Qt.RoundJoin)
self.painter.setPen(pen)
# close cross
self.painter.drawLine(self.width() - 30, 30, self.width() - 10, 10)
self.painter.drawLine(self.width() - 30, 10, self.width() - 10, 30)
self.painter.end()
def mouseMoveEvent(self, event):
if event.buttons() and QtCore.Qt.LeftButton:
self.move(event.globalPos() - self.mouse_drag_position)
event.accept()
def mousePressEvent(self, event):
add = 0
if event.button() == QtCore.Qt.LeftButton:
if (event.pos().x() < (self.width() - 10 - add)) and (event.pos().x() > (self.width()-30-add))\
and (event.pos().y() < (30+add)) and (event.pos().y() > (10+add)):
self.close()
self.mouse_drag_position = event.globalPos() - self.frameGeometry().topLeft()
def ExportUff(self):
""" File dialog for exporting uff files. """
# if variant == 'PySide':
# file_name, filtr = QtGui.QFileDialog.getSaveFileName(self, self.tr("Choose Folder"), "/.",
# QtGui.QFileDialog.Directory)
# elif variant == 'PyQt4':
file_name = QtWidgets.QFileDialog.getExistingDirectory(self, 'Select Directory')
# file_name = QtGui.QFileDialog.getSaveFileName(self, self.tr("Chose Folder"), "/.",
# QtGui.QFileDialog.Directory)
self.exportfile = file_name
model_ids = [model_id for model_id, check_box_field in zip(self.model_db.model_id, self.model_checkbox_widgets)
if check_box_field.isChecked()]
data_types = [data_type for data_type, check_box_field in
zip(self.data_types_list, self.data_type_checkbox_widgets) if check_box_field.isChecked()]
separate_files_flag = self.multiple_file_radio.isChecked()
print(model_ids)
self.status_bar.setBusy('root', 'exporting')
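        # run the export in a worker QThread so the GUI stays responsive; the busy flag is cleared when it finishes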
class IOThread(QtCore.QThread):
def __init__(self, modaldata, file_name, model_ids=[], data_types=[], separate_files_flag=False):
super().__init__()
self.modaldata_object = modaldata
self.file_name = file_name
self.model_ids = model_ids
self.data_types = data_types
self.separate_files_flag = separate_files_flag
def run(self):
self.modaldata_object.export_to_uff(self.file_name, self.model_ids, self.data_types, self.separate_files_flag)
self.thread = IOThread(self.modaldata_object, file_name, model_ids, data_types, separate_files_flag)
self.thread.finished.connect(lambda: self.status_bar.setNotBusy('root'))
self.thread.start()
self.hide()
def ExportCSV(self):
""" File dialog for exporting uff files. """
# if variant == 'PySide':
# file_name, filtr = QtGui.QFileDialog.getSaveFileName(self, self.tr("Select Directory"), "/.",
# QtGui.QFileDialog.Directory)
# elif variant == 'PyQt4':
file_name = QtWidgets.QFileDialog.getExistingDirectory(self, 'Select Directory')
# file_name = QtGui.QFileDialog.getSaveFileName(self, self.tr("Chose Folder"), "/.",
# QtGui.QFileDialog.Directory)
self.exportfile = file_name
model_ids = [model_id for model_id, check_box_field in zip(self.model_db.model_id, self.model_checkbox_widgets)
if check_box_field.isChecked()]
data_types = [data_type for data_type, check_box_field in
zip(self.data_types_list, self.data_type_checkbox_widgets) if check_box_field.isChecked()]
print(model_ids)
self.status_bar.setBusy('root', 'exporting')
class IOThread(QtCore.QThread):
def __init__(self, modaldata, file_name, model_ids=[], data_types=[]):
super().__init__()
self.modaldata_object = modaldata
self.file_name = file_name
self.model_ids = model_ids
self.data_types = data_types
def run(self):
self.modaldata_object.export_to_csv(self.file_name, self.model_ids, self.data_types)
self.thread = IOThread(self.modaldata_object, file_name, model_ids, data_types)
self.thread.finished.connect(lambda: self.status_bar.setNotBusy('root'))
self.thread.start()
self.hide()
| [
"PyQt5.QtWidgets.QFileDialog.getExistingDirectory",
"qtawesome.icon",
"PyQt5.QtGui.QPen",
"PyQt5.QtGui.QPainter",
"PyQt5.QtWidgets.QRadioButton",
"PyQt5.QtGui.QColor",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QGroupBox",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QCheckBox",
"PyQt5.QtWidgets.QPushButton"
] | [((1924, 1953), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['"""Done"""'], {}), "('Done')\n", (1945, 1953), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2057, 2089), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['"""Dismiss"""'], {}), "('Dismiss')\n", (2078, 2089), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2446, 2469), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (2467, 2469), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2534, 2565), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""EXPORT DATA"""'], {}), "('EXPORT DATA')\n", (2550, 2565), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2822, 2851), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['"""Models"""'], {}), "('Models')\n", (2841, 2851), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2943, 2966), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (2964, 2966), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3932, 3959), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['"""Data"""'], {}), "('Data')\n", (3951, 3959), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4057, 4080), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (4078, 4080), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5093, 5151), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['"""Separate Files for Data Types (UFF)"""'], {}), "('Separate Files for Data Types (UFF)')\n", (5112, 5151), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5246, 5270), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', ([], {}), '()\n', (5268, 5270), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5306, 5330), 'PyQt5.QtWidgets.QRadioButton', 'QtWidgets.QRadioButton', ([], {}), '()\n', (5328, 5330), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5362, 5384), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""No"""'], {}), "('No')\n", (5378, 5384), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5421, 5444), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""Yes"""'], {}), "('Yes')\n", (5437, 5444), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5504, 5527), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (5525, 5527), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6237, 6260), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (6258, 6260), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6726, 6749), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (6747, 6749), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6922, 6945), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (6943, 6945), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7040, 7063), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (7061, 7063), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7748, 7771), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (7769, 7771), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8109, 8125), 'PyQt5.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (8123, 8125), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8495, 8507), 'PyQt5.QtGui.QPen', 'QtGui.QPen', ([], {}), '()\n', (8505, 8507), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9869, 9937), 
'PyQt5.QtWidgets.QFileDialog.getExistingDirectory', 'QtWidgets.QFileDialog.getExistingDirectory', (['self', '"""Select Directory"""'], {}), "(self, 'Select Directory')\n", (9911, 9937), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11854, 11922), 'PyQt5.QtWidgets.QFileDialog.getExistingDirectory', 'QtWidgets.QFileDialog.getExistingDirectory', (['self', '"""Select Directory"""'], {}), "(self, 'Select Directory')\n", (11896, 11922), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3447, 3468), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', ([], {}), '()\n', (3466, 3468), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3521, 3544), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['model'], {}), '(model)\n', (3537, 3544), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4536, 4557), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', ([], {}), '()\n', (4555, 4557), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4629, 4656), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['data_type'], {}), '(data_type)\n', (4645, 4656), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5967, 6025), 'qtawesome.icon', 'qta.icon', (['"""fa.line-chart"""'], {'color': '"""white"""', 'scale_factor': '(1.2)'}), "('fa.line-chart', color='white', scale_factor=1.2)\n", (5975, 6025), True, 'import qtawesome as qta\n'), ((6460, 6514), 'qtawesome.icon', 'qta.icon', (['"""fa.rocket"""'], {'color': '"""white"""', 'scale_factor': '(1.2)'}), "('fa.rocket', color='white', scale_factor=1.2)\n", (6468, 6514), True, 'import qtawesome as qta\n'), ((8451, 8478), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(245)', '(245)', '(245)'], {}), '(245, 245, 245)\n', (8463, 8478), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetSceneResult',
'AwaitableGetSceneResult',
'get_scene',
'get_scene_output',
]
@pulumi.output_type
class GetSceneResult:
def __init__(__self__, arn=None, capabilities=None, content_location=None, creation_date_time=None, description=None, tags=None, update_date_time=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if capabilities and not isinstance(capabilities, list):
raise TypeError("Expected argument 'capabilities' to be a list")
pulumi.set(__self__, "capabilities", capabilities)
if content_location and not isinstance(content_location, str):
raise TypeError("Expected argument 'content_location' to be a str")
pulumi.set(__self__, "content_location", content_location)
if creation_date_time and not isinstance(creation_date_time, str):
raise TypeError("Expected argument 'creation_date_time' to be a str")
pulumi.set(__self__, "creation_date_time", creation_date_time)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if update_date_time and not isinstance(update_date_time, str):
raise TypeError("Expected argument 'update_date_time' to be a str")
pulumi.set(__self__, "update_date_time", update_date_time)
@property
@pulumi.getter
def arn(self) -> Optional[str]:
"""
The ARN of the scene.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter
def capabilities(self) -> Optional[Sequence[str]]:
"""
A list of capabilities that the scene uses to render.
"""
return pulumi.get(self, "capabilities")
@property
@pulumi.getter(name="contentLocation")
def content_location(self) -> Optional[str]:
"""
The relative path that specifies the location of the content definition file.
"""
return pulumi.get(self, "content_location")
@property
@pulumi.getter(name="creationDateTime")
def creation_date_time(self) -> Optional[str]:
"""
The date and time when the scene was created.
"""
return pulumi.get(self, "creation_date_time")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The description of the scene.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def tags(self) -> Optional[Any]:
"""
A key-value pair to associate with a resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="updateDateTime")
def update_date_time(self) -> Optional[str]:
"""
The date and time of the current update.
"""
return pulumi.get(self, "update_date_time")
class AwaitableGetSceneResult(GetSceneResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSceneResult(
arn=self.arn,
capabilities=self.capabilities,
content_location=self.content_location,
creation_date_time=self.creation_date_time,
description=self.description,
tags=self.tags,
update_date_time=self.update_date_time)
def get_scene(scene_id: Optional[str] = None,
workspace_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSceneResult:
"""
Resource schema for AWS::IoTTwinMaker::Scene
:param str scene_id: The ID of the scene.
:param str workspace_id: The ID of the scene.
"""
__args__ = dict()
__args__['sceneId'] = scene_id
__args__['workspaceId'] = workspace_id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws-native:iottwinmaker:getScene', __args__, opts=opts, typ=GetSceneResult).value
return AwaitableGetSceneResult(
arn=__ret__.arn,
capabilities=__ret__.capabilities,
content_location=__ret__.content_location,
creation_date_time=__ret__.creation_date_time,
description=__ret__.description,
tags=__ret__.tags,
update_date_time=__ret__.update_date_time)
@_utilities.lift_output_func(get_scene)
def get_scene_output(scene_id: Optional[pulumi.Input[str]] = None,
workspace_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSceneResult]:
"""
Resource schema for AWS::IoTTwinMaker::Scene
:param str scene_id: The ID of the scene.
:param str workspace_id: The ID of the scene.
"""
...
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] | [((2401, 2438), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""contentLocation"""'}), "(name='contentLocation')\n", (2414, 2438), False, 'import pulumi\n'), ((2670, 2708), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""creationDateTime"""'}), "(name='creationDateTime')\n", (2683, 2708), False, 'import pulumi\n'), ((3289, 3325), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""updateDateTime"""'}), "(name='updateDateTime')\n", (3302, 3325), False, 'import pulumi\n'), ((745, 777), 'pulumi.set', 'pulumi.set', (['__self__', '"""arn"""', 'arn'], {}), "(__self__, 'arn', arn)\n", (755, 777), False, 'import pulumi\n'), ((927, 977), 'pulumi.set', 'pulumi.set', (['__self__', '"""capabilities"""', 'capabilities'], {}), "(__self__, 'capabilities', capabilities)\n", (937, 977), False, 'import pulumi\n'), ((1137, 1195), 'pulumi.set', 'pulumi.set', (['__self__', '"""content_location"""', 'content_location'], {}), "(__self__, 'content_location', content_location)\n", (1147, 1195), False, 'import pulumi\n'), ((1361, 1423), 'pulumi.set', 'pulumi.set', (['__self__', '"""creation_date_time"""', 'creation_date_time'], {}), "(__self__, 'creation_date_time', creation_date_time)\n", (1371, 1423), False, 'import pulumi\n'), ((1568, 1616), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (1578, 1616), False, 'import pulumi\n'), ((1742, 1776), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (1752, 1776), False, 'import pulumi\n'), ((1936, 1994), 'pulumi.set', 'pulumi.set', (['__self__', '"""update_date_time"""', 'update_date_time'], {}), "(__self__, 'update_date_time', update_date_time)\n", (1946, 1994), False, 'import pulumi\n'), ((2134, 2157), 'pulumi.get', 'pulumi.get', (['self', '"""arn"""'], {}), "(self, 'arn')\n", (2144, 2157), False, 'import pulumi\n'), ((2348, 2380), 'pulumi.get', 'pulumi.get', (['self', '"""capabilities"""'], {}), "(self, 'capabilities')\n", (2358, 2380), False, 'import pulumi\n'), ((2613, 2649), 'pulumi.get', 'pulumi.get', (['self', '"""content_location"""'], {}), "(self, 'content_location')\n", (2623, 2649), False, 'import pulumi\n'), ((2853, 2891), 'pulumi.get', 'pulumi.get', (['self', '"""creation_date_time"""'], {}), "(self, 'creation_date_time')\n", (2863, 2891), False, 'import pulumi\n'), ((3047, 3078), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (3057, 3078), False, 'import pulumi\n'), ((3244, 3268), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (3254, 3268), False, 'import pulumi\n'), ((3463, 3499), 'pulumi.get', 'pulumi.get', (['self', '"""update_date_time"""'], {}), "(self, 'update_date_time')\n", (3473, 3499), False, 'import pulumi\n'), ((4472, 4494), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (4492, 4494), False, 'import pulumi\n'), ((4586, 4689), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""aws-native:iottwinmaker:getScene"""', '__args__'], {'opts': 'opts', 'typ': 'GetSceneResult'}), "('aws-native:iottwinmaker:getScene', __args__, opts=\n opts, typ=GetSceneResult)\n", (4607, 4689), False, 'import pulumi\n')] |
#Standard python libraries
import os
#Dependencies
import numpy as np
import yaml
import matplotlib.pyplot as plt
from pyfftw.interfaces.numpy_fft import fft, fftshift, ifft, ifftshift, fftfreq
#TransientAbsorption implemented using UF2
from ultrafastultrafast.signals import TransientAbsorption
"""The following definitions of I4_mat and kdelvec are based
upon the formulas given in Appendix B of Molecular Quantum
Electrodynamics, by <NAME>
"""
I4_mat = np.array([[4,-1,-1],[-1,4,-1],[-1,-1,4]])/30
def kdel(x,y):
"""Kronecker Delta"""
if x == y:
return 1
else:
return 0
def kdel2(a,b,c,d):
"""Product of 2 Kronecker Deltas"""
return kdel(a,b)*kdel(c,d)
def kdelvec(i,j,k,l):
"""Length 3 vector of Kronecker Delta products, as defined in """
vec = [kdel2(i,j,k,l),
kdel2(i,k,j,l),
kdel2(i,l,j,k)]
return np.array(vec)
class TransientAbsorptionIsotropicAverage(object):
"""This class performs the isotropic average of the 4th order tensor
which is the material response produced by 4-wave mixing process"""
def __init__(self,parameter_file_path,efield_polarization,*, num_conv_points=138,
initial_state=0,dt=0.1,total_num_time_points = 3686):
# This is the object that will actually calculate all of the signals
self.TA = TransientAbsorption(parameter_file_path,
num_conv_points=num_conv_points,
initial_state=initial_state, dt=dt,
total_num_time_points=total_num_time_points)
# Lab frame polarization sequence for pulses
self.efield_polarization = efield_polarization
# Working directory
self.base_path = self.TA.base_path
def set_homogeneous_linewidth(self,*args,**kwargs):
self.TA.set_homogeneous_linewidth(*args,**kwargs)
def set_inhomogeneous_linewidth(self,*args,**kwargs):
self.TA.set_inhomogeneous_linewidth(*args,**kwargs)
def recenter(self,*args,**kwargs):
self.TA.recenter(*args,**kwargs)
def set_pulse_shapes(self,*args,**kwargs):
# Pass pulse shapes on to the self.TA object
self.TA.set_pulse_shapes(*args,**kwargs)
def calculate_spectra(self,delay_times):
# The isotropic averaging is performed based upon the real orientation of the lab frame pulses
# The calculation will differ depending on whether the pump and probe are aligned or crossed
left_vec = kdelvec(*self.efield_polarization)
xyz = ['x','y','z']
pol_options = []
for i in range(3):
# Check to see if the dipole operator has any non-zero components along the given
# molecular frame axis, if the dipole exists only in the x-y plane, for example,
# then we can avoid doing quite a few unnecessary calculations!
if not np.allclose(self.TA.mu_GSM_to_SEM[:,:,i],0):
pol_options.append(xyz[i])
signal = np.zeros((self.TA.w.size,delay_times.size))
for i in pol_options:
for j in pol_options:
for k in pol_options:
for l in pol_options:
# generate the vector of kronecker delta products
right_vec = kdelvec(i,j,k,l)
if np.allclose(right_vec,0):
# If the vector is 0, don't bother!
pass
else:
# If not, set the polarization sequence, do the calculation, and
# add the weight given by the isotropic weight matrix, I4_mat
# Note the the polarization sequences are not the lab frame
# polarization sequence of the pulses.
self.TA.set_polarization_sequence([i,j,k,l])
weight = I4_mat.dot(right_vec)
weight = np.dot(left_vec,weight)
signal += weight * self.TA.calculate_pump_probe_spectra_vs_delay_time(delay_times)
# Full isotorpically averaged signal
self.signal_vs_delay_times = signal
self.delay_times = delay_times
self.w = self.TA.w
# Center frequency of pulses in the RWA
self.center = self.TA.center
return signal
def save(self,**kwargs):
self.save_pump_probe_spectra_vs_delay_time(**kwargs)
def save_pump_probe_spectra_vs_delay_time(self,*,save_file_name='default'):
if save_file_name == 'default':
save_name = os.path.join(self.base_path,'TA_spectra_iso_ave.npz')
else:
save_name = os.path.join(self.base_path,save_file_name)
np.savez(save_name,signal = self.signal_vs_delay_times, delay_times = self.delay_times, frequencies = self.w, pulse_center = self.center)
def load_pump_probe_spectra_vs_delay_time(self):
load_name = self.base_path + 'TA_spectra_iso_ave.npz'
arch = np.load(load_name)
self.signal_vs_delay_times = arch['signal']
self.delay_times = arch['delay_times']
self.w = arch['frequencies']
try:
self.center = arch['pulse_center']
except KeyError:
warnings.warn('Pulse center was not saved in archive, setting center = 0')
self.center = 0
def plot_pump_probe_spectra(self,*,frequency_range=[-1000,1000], subtract_DC = True, create_figure=True,
color_range = 'auto',draw_colorbar = True,save_fig=True):
"""Plots the transient absorption spectra with detection frequency on the
y-axis and delay time on the x-axis.
Args:
frequency_range (list): sets the min (list[0]) and max (list[1]) detection frequency for y-axis
subtract_DC (bool): if True subtract the DC component of the TA
color_range (list): sets the min (list[0]) and max (list[1]) value for the colorbar
draw_colorbar (bool): if True add a colorbar to the plot
save_fig (bool): if True save the figure that is produced
"""
# Cut out unwanted detection frequency points
w_ind = np.where((self.w > frequency_range[0]) & (self.w < frequency_range[1]))[0]
w = self.w[w_ind]
sig = self.signal_vs_delay_times[w_ind,:]
if subtract_DC:
sig_fft = fft(sig,axis=1)
sig_fft[:,0] = 0
sig = np.real(ifft(sig_fft))
ww, tt = np.meshgrid(self.delay_times,w)
if create_figure:
plt.figure()
if color_range == 'auto':
plt.pcolormesh(ww,tt,sig)
else:
plt.pcolormesh(ww,tt,sig,vmin=color_range[0],vmax=color_range[1])
if draw_colorbar:
plt.colorbar()
plt.xlabel('Delay time ($\omega_0^{-1}$)',fontsize=16)
plt.ylabel('Detection Frequency ($\omega_0$)',fontsize=16)
if save_fig:
plt.savefig(self.base_path + 'TA_spectra_iso_ave')
if __name__=='__main__':
print(kdelvec('x','x','x','x','x','x'))
print(kdelvec('x','x','x','x','y','y'))
print(kdelvec('x','x','x','y','y','y'))
right = kdelvec('x','x','x','x','y','y')
left = kdelvec('x','x','x','x','x','x')
print(left)
rightprod = I6_mat.dot(right)
print(rightprod)
prod = np.dot(left,rightprod)
print(prod)
| [
"numpy.savez",
"ultrafastultrafast.signals.TransientAbsorption",
"matplotlib.pyplot.savefig",
"numpy.allclose",
"pyfftw.interfaces.numpy_fft.ifft",
"matplotlib.pyplot.ylabel",
"numpy.where",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.colorbar",
"os.path.join",
"matplotlib.pyplot.pcolormesh",
"numpy.array",
"numpy.dot",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.meshgrid",
"numpy.load",
"pyfftw.interfaces.numpy_fft.fft"
] | [((461, 510), 'numpy.array', 'np.array', (['[[4, -1, -1], [-1, 4, -1], [-1, -1, 4]]'], {}), '([[4, -1, -1], [-1, 4, -1], [-1, -1, 4]])\n', (469, 510), True, 'import numpy as np\n'), ((884, 897), 'numpy.array', 'np.array', (['vec'], {}), '(vec)\n', (892, 897), True, 'import numpy as np\n'), ((7505, 7528), 'numpy.dot', 'np.dot', (['left', 'rightprod'], {}), '(left, rightprod)\n', (7511, 7528), True, 'import numpy as np\n'), ((1351, 1514), 'ultrafastultrafast.signals.TransientAbsorption', 'TransientAbsorption', (['parameter_file_path'], {'num_conv_points': 'num_conv_points', 'initial_state': 'initial_state', 'dt': 'dt', 'total_num_time_points': 'total_num_time_points'}), '(parameter_file_path, num_conv_points=num_conv_points,\n initial_state=initial_state, dt=dt, total_num_time_points=\n total_num_time_points)\n', (1370, 1514), False, 'from ultrafastultrafast.signals import TransientAbsorption\n'), ((3046, 3090), 'numpy.zeros', 'np.zeros', (['(self.TA.w.size, delay_times.size)'], {}), '((self.TA.w.size, delay_times.size))\n', (3054, 3090), True, 'import numpy as np\n'), ((4865, 5000), 'numpy.savez', 'np.savez', (['save_name'], {'signal': 'self.signal_vs_delay_times', 'delay_times': 'self.delay_times', 'frequencies': 'self.w', 'pulse_center': 'self.center'}), '(save_name, signal=self.signal_vs_delay_times, delay_times=self.\n delay_times, frequencies=self.w, pulse_center=self.center)\n', (4873, 5000), True, 'import numpy as np\n'), ((5142, 5160), 'numpy.load', 'np.load', (['load_name'], {}), '(load_name)\n', (5149, 5160), True, 'import numpy as np\n'), ((6624, 6656), 'numpy.meshgrid', 'np.meshgrid', (['self.delay_times', 'w'], {}), '(self.delay_times, w)\n', (6635, 6656), True, 'import numpy as np\n'), ((6932, 6988), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Delay time ($\\\\omega_0^{-1}$)"""'], {'fontsize': '(16)'}), "('Delay time ($\\\\omega_0^{-1}$)', fontsize=16)\n", (6942, 6988), True, 'import matplotlib.pyplot as plt\n'), ((6995, 7055), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Detection Frequency ($\\\\omega_0$)"""'], {'fontsize': '(16)'}), "('Detection Frequency ($\\\\omega_0$)', fontsize=16)\n", (7005, 7055), True, 'import matplotlib.pyplot as plt\n'), ((4721, 4775), 'os.path.join', 'os.path.join', (['self.base_path', '"""TA_spectra_iso_ave.npz"""'], {}), "(self.base_path, 'TA_spectra_iso_ave.npz')\n", (4733, 4775), False, 'import os\n'), ((4813, 4857), 'os.path.join', 'os.path.join', (['self.base_path', 'save_file_name'], {}), '(self.base_path, save_file_name)\n', (4825, 4857), False, 'import os\n'), ((6323, 6394), 'numpy.where', 'np.where', (['((self.w > frequency_range[0]) & (self.w < frequency_range[1]))'], {}), '((self.w > frequency_range[0]) & (self.w < frequency_range[1]))\n', (6331, 6394), True, 'import numpy as np\n'), ((6521, 6537), 'pyfftw.interfaces.numpy_fft.fft', 'fft', (['sig'], {'axis': '(1)'}), '(sig, axis=1)\n', (6524, 6537), False, 'from pyfftw.interfaces.numpy_fft import fft, fftshift, ifft, ifftshift, fftfreq\n'), ((6694, 6706), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6704, 6706), True, 'import matplotlib.pyplot as plt\n'), ((6753, 6780), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['ww', 'tt', 'sig'], {}), '(ww, tt, sig)\n', (6767, 6780), True, 'import matplotlib.pyplot as plt\n'), ((6805, 6874), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['ww', 'tt', 'sig'], {'vmin': 'color_range[0]', 'vmax': 'color_range[1]'}), '(ww, tt, sig, vmin=color_range[0], vmax=color_range[1])\n', (6819, 6874), True, 'import 
matplotlib.pyplot as plt\n'), ((6909, 6923), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (6921, 6923), True, 'import matplotlib.pyplot as plt\n'), ((7087, 7137), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(self.base_path + 'TA_spectra_iso_ave')"], {}), "(self.base_path + 'TA_spectra_iso_ave')\n", (7098, 7137), True, 'import matplotlib.pyplot as plt\n'), ((2932, 2978), 'numpy.allclose', 'np.allclose', (['self.TA.mu_GSM_to_SEM[:, :, i]', '(0)'], {}), '(self.TA.mu_GSM_to_SEM[:, :, i], 0)\n', (2943, 2978), True, 'import numpy as np\n'), ((6592, 6605), 'pyfftw.interfaces.numpy_fft.ifft', 'ifft', (['sig_fft'], {}), '(sig_fft)\n', (6596, 6605), False, 'from pyfftw.interfaces.numpy_fft import fft, fftshift, ifft, ifftshift, fftfreq\n'), ((3389, 3414), 'numpy.allclose', 'np.allclose', (['right_vec', '(0)'], {}), '(right_vec, 0)\n', (3400, 3414), True, 'import numpy as np\n'), ((4049, 4073), 'numpy.dot', 'np.dot', (['left_vec', 'weight'], {}), '(left_vec, weight)\n', (4055, 4073), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import pytest
import matplotlib
matplotlib.use('Agg')
import numpy as np # noqa
import pandas as pd # noqa
import pandas_ml as pdml # noqa
import pandas_ml.util.testing as tm # noqa
import sklearn.datasets as datasets # noqa
import xgboost as xgb # noqa
class TestXGBoostPlotting(tm.TestCase):
def test_plotting(self):
iris = datasets.load_iris()
df = pdml.ModelFrame(iris)
df.fit(df.svm.SVC())
# raises if df.estimator is not XGBModel
with pytest.raises(ValueError):
df.xgb.plot_importance()
with pytest.raises(ValueError):
df.xgb.to_graphviz()
with pytest.raises(ValueError):
df.xgb.plot_tree()
df.fit(df.xgb.XGBClassifier())
from matplotlib.axes import Axes
from graphviz import Digraph
try:
ax = df.xgb.plot_importance()
except ImportError:
import nose
# matplotlib.use doesn't work on Travis
# PYTHON=3.4 PANDAS=0.17.1 SKLEARN=0.16.1
raise nose.SkipTest()
self.assertIsInstance(ax, Axes)
assert ax.get_title() == 'Feature importance'
assert ax.get_xlabel() == 'F score'
assert ax.get_ylabel() == 'Features'
assert len(ax.patches) == 4
g = df.xgb.to_graphviz(num_trees=0)
self.assertIsInstance(g, Digraph)
ax = df.xgb.plot_tree(num_trees=0)
self.assertIsInstance(ax, Axes)
| [
"sklearn.datasets.load_iris",
"pandas_ml.ModelFrame",
"matplotlib.use",
"nose.SkipTest",
"pytest.raises"
] | [((55, 76), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (69, 76), False, 'import matplotlib\n'), ((486, 506), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {}), '()\n', (504, 506), True, 'import sklearn.datasets as datasets\n'), ((520, 541), 'pandas_ml.ModelFrame', 'pdml.ModelFrame', (['iris'], {}), '(iris)\n', (535, 541), True, 'import pandas_ml as pdml\n'), ((635, 660), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (648, 660), False, 'import pytest\n'), ((713, 738), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (726, 738), False, 'import pytest\n'), ((787, 812), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (800, 812), False, 'import pytest\n'), ((1196, 1211), 'nose.SkipTest', 'nose.SkipTest', ([], {}), '()\n', (1209, 1211), False, 'import nose\n')] |
from django.db import models
class Blog(models.Model):
name = models.CharField(max_length=256, blank=False)
latest_post = models.IntegerField(default=0)
def __str__(self):
return self.name
class Tag(models.Model):
tag = models.CharField(max_length=256)
def __str__(self):
return self.tag
class Gif(models.Model):
blog = models.ForeignKey(Blog)
tag = models.ManyToManyField(Tag)
url = models.URLField()
def __str__(self):
return self.url | [
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.ManyToManyField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((68, 113), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)', 'blank': '(False)'}), '(max_length=256, blank=False)\n', (84, 113), False, 'from django.db import models\n'), ((132, 162), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (151, 162), False, 'from django.db import models\n'), ((249, 281), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (265, 281), False, 'from django.db import models\n'), ((368, 391), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Blog'], {}), '(Blog)\n', (385, 391), False, 'from django.db import models\n'), ((402, 429), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Tag'], {}), '(Tag)\n', (424, 429), False, 'from django.db import models\n'), ((440, 457), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (455, 457), False, 'from django.db import models\n')] |
from infrastructure import *
from indputils import *
from gurobipy import *
from indp import *
import string
import networkx as nx
import matplotlib.pyplot as plt
import copy
import random
import time
import sys
import scipy.io
try:
import matlab.engine
except ModuleNotFoundError:
print("Can't find module 'matlab.engine'")
def apply_recovery(N,indp_results,t):
for action in indp_results[t]['actions']:
if "/" in action:
# Edge recovery action.
data=action.split("/")
src=tuple([int(x) for x in data[0].split(".")])
dst=tuple([int(x) for x in data[1].split(".")])
N.G[src][dst]['data']['inf_data'].functionality=1.0
else:
# Node recovery action.
node=tuple([int(x) for x in action.split(".")])
#print "Applying recovery:",node
N.G.nodes[node]['data']['inf_data'].repaired=1.0
N.G.nodes[node]['data']['inf_data'].functionality=1.0
def run_mh(params, layers=[1,2,3], controlled_layers=[], functionality={},T=1, validate=False,
save=True,suffix="", forced_actions=False, saveModel=False, print_cmd_line=True,
dynamic_params=None, co_location=True):
""" Runs an INDP problem with specified parameters. Outputs to directory specified in params['OUTPUT_DIR'].
:param params: Global parameters.
:param layers: Layers to consider in the infrastructure network.
:param T: Number of timesteps to optimize over.
:param validate: Validate solution.
"""
# Initialize failure scenario.
InterdepNet=None
if "N" not in params:
InterdepNet=initialize_network(BASE_DIR="../data/INDP_7-20-2015/",sim_number=params['SIM_NUMBER'],magnitude=params["MAGNITUDE"])
else:
InterdepNet=params["N"]
if "NUM_ITERATIONS" not in params:
params["NUM_ITERATIONS"] = 1
if not controlled_layers:
controlled_layers = layers
v_r=params["V"]
if isinstance(v_r, (int)):
outDirSuffixRes = str(v_r)
else:
outDirSuffixRes = str(sum([val for _, val in v_r.items()]))+'_fixed_layer_Cap'
indp_results=INDPResults(params["L"])
if T == 1:
print("--Running iterative MH.")
if print_cmd_line:
print("Num iters=",params["NUM_ITERATIONS"])
# Run INDP for 1 time step (original INDP).
output_dir=params["OUTPUT_DIR"]+'_L'+str(len(layers))+'_m'+str(params["MAGNITUDE"])+"_v"+outDirSuffixRes
# Initial calculations.
if dynamic_params:
original_N = copy.deepcopy(InterdepNet) #!!! deepcopy
dynamic_parameters(InterdepNet, original_N, 0, dynamic_params)
results=indp(InterdepNet,0,1,layers,controlled_layers=controlled_layers,
functionality=functionality, co_location=co_location)
indp_results=results[1]
indp_results.add_components(0,INDPComponents.calculate_components(results[0],InterdepNet,layers=controlled_layers))
for i in range(params["NUM_ITERATIONS"]):
print("-Time Step (MH)",i+1,"/",params["NUM_ITERATIONS"])
if dynamic_params:
dynamic_parameters(InterdepNet, original_N, i+1, dynamic_params)
results=indp(InterdepNet, v_r, T, layers, controlled_layers=controlled_layers,
forced_actions=forced_actions, co_location=co_location)
### Extract matrices and vectors
m = results[0]
var_index = {v.VarName.replace("(", "").replace(")", "").replace(",", "_").\
replace(" ", "").replace("+", "_p").replace("-", "_m"):\
i for i, v in enumerate(m.getVars())}
constr_rhs= {c.ConstrName.replace("(", "").replace(")", "").replace(",", "_").\
replace(" ", "").replace(".", ""): c.RHS for i, c in enumerate(m.getConstrs())}
constr_sense= {c.ConstrName.replace("(", "").replace(")", "").replace(",", "_").\
replace(" ", "").replace(".", ""): c.sense for i, c in enumerate(m.getConstrs())}
obj_coeffs = m.getAttr('Obj', m.getVars())
A = m.getA()
opt_sol = {}
for v in m.getVars():
opt_sol[v.varName.replace("(", "").replace(")", "").replace(",", "_").\
replace(" ", "").replace("+", "_p").replace("-", "_m")]= v.x
scipy.io.savemat('./Metaheuristics/arrdata.mat', mdict={'A': A})
### Run GA in Matlab
eng = matlab.engine.start_matlab("-desktop") #!!! Send as an argument for debugging in MATLAB: "-desktop"
eng.cd('./Metaheuristics/')
eng.eval('dbstop in main.m at 3', nargout=0) #!!!
result_mh = eng.main(var_index, constr_rhs, constr_sense, obj_coeffs, opt_sol)
return result_mh #!!!
# if saveModel:
# save_INDP_model_to_file(results[0],output_dir+"/Model",i+1)
# # Modify network to account for recovery and calculate components.
# apply_recovery(InterdepNet,indp_results,i+1)
# # Save results of current simulation.
# if save:
# if not os.path.exists(output_dir):
# os.makedirs(output_dir)
# indp_results.to_csv(output_dir,params["SIM_NUMBER"],suffix=suffix)
# if not os.path.exists(output_dir+'/agents'):
# os.makedirs(output_dir+'/agents')
# indp_results.to_csv_layer(output_dir+'/agents',params["SIM_NUMBER"],suffix=suffix)
# return indp_results
| [
"copy.deepcopy"
] | [((2562, 2588), 'copy.deepcopy', 'copy.deepcopy', (['InterdepNet'], {}), '(InterdepNet)\n', (2575, 2588), False, 'import copy\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: bengtl
"""
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import numpy
extensions = [Extension("spikebit.encoder", ["spikebit/encoder.pyx"],
include_dirs=[numpy.get_include()]),
Extension("spikebit.patterncheck",
["spikebit/patterncheck.pyx"],
include_dirs=[numpy.get_include()]),
Extension("spikebit.simrun",
["spikebit/simrun.pyx"],
include_dirs=[numpy.get_include()])]
setup(name='spikebit',
description='Electrophysiology data integration',
version='0.24',
install_requires=['mpi4py>=3.0', 'Cython>=0.27', 'numpy>=1.13',
'h5py>=2.7'],
author='<NAME>',
author_email='<EMAIL>',
py_modules=['spikebit'],
platforms='linux',
packages=find_packages(exclude=['test']),
license='MIT',
ext_modules=cythonize(extensions),
include_dirs=[numpy.get_include()],
entry_points={
'console_scripts':
['spikebit-server=spikebit.command_line:server',
'spikebit-singleserver=spikebit.serverwrapper:main',
'spikebit-client=spikebit.command_line:client'],
})
| [
"Cython.Build.cythonize",
"setuptools.find_packages",
"numpy.get_include"
] | [((990, 1021), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test']"}), "(exclude=['test'])\n", (1003, 1021), False, 'from setuptools import setup, find_packages\n'), ((1062, 1083), 'Cython.Build.cythonize', 'cythonize', (['extensions'], {}), '(extensions)\n', (1071, 1083), False, 'from Cython.Build import cythonize\n'), ((1105, 1124), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (1122, 1124), False, 'import numpy\n'), ((315, 334), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (332, 334), False, 'import numpy\n'), ((480, 499), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (497, 499), False, 'import numpy\n'), ((633, 652), 'numpy.get_include', 'numpy.get_include', ([], {}), '()\n', (650, 652), False, 'import numpy\n')] |
import sys
sys.path.append('.')
sys.path.append('D:\\Program Files\\caffe_windows_exe_cup_only_x64')
import caffe
import cv2
import numpy as np
import os
from math import *
deploy = 'models/bbox_regression.prototxt'
caffemodel = 'models/bbox_regression.caffemodel'
net = caffe.Net(deploy, caffemodel, caffe.TEST)
caffe.set_mode_cpu()
#caffe.set_device(0)
def view_bar(num, total):
rate = float(num) / total
rate_num = int(rate * 100)
r = '\r[%s%s]%d%% (%d/%d)' % ("#" * rate_num, " " *
(100 - rate_num), rate_num, num, total)
sys.stdout.write(r)
sys.stdout.flush()
def preprocess(img, size, channel_mean=[104, 117, 123]):
img_new = np.asarray(img, dtype=float)
img_new = cv2.resize(img_new, size)
for i in range(img_new.shape[-1]):
img_new[:, :, i] = img_new[:, :, i] - channel_mean[i]
img_new = np.transpose(img_new, (2, 0, 1))
return img_new
def bboxRegression(img, bboxes):
origin_h, origin_w, ch = img.shape
net.blobs['data'].reshape(len(bboxes), 3, 106, 106)
idx = 0
for bbox in bboxes:
x1 = int(bbox[0])
y1 = int(bbox[1])
x2 = int(bbox[2])
y2 = int(bbox[3])
#x2=max(x1+1,x2)
#y2=max(y1+1,y2)
if (x1>origin_w or x2>origin_w or y1>origin_h or y2>origin_h or x1<0 or x2<0 or y1<0 or y2<0 or x2<=x1 or y2<=y1):
continue
#print("Original image size : "+str(img.shape))
#print("crop window:"+str([x1,y1,x2,y2]))
crop_img = img[y1:y2,x1:x2,:]
#print(str(crop_img.shape))
net.blobs['data'].data[idx] = preprocess(crop_img, (106, 106))
idx += 1
out = net.forward()['ip2']
for i in range(0, len(bboxes)):
x1, y1, x2, y2 = bboxes[i]
w = x2 - x1 + 1
h = y2 - y1 + 1
regressor = out[i, :]
for j in range(len(regressor)):
if regressor[j] > 0:
regressor[j] += 1
regressor[j] = log(regressor[j]) * 0.2
else:
regressor[j] = 1 - regressor[j]
regressor[j] = -log(regressor[j]) * 0.2
bboxes[i, 0] = int(round(max(0, x1 + w * regressor[0])))
bboxes[i, 1] = int(round(max(0, y1 + h * regressor[1])))
bboxes[i, 2] = int(round(min(origin_w, x2 + w * regressor[2])))
bboxes[i, 3] = int(round(min(origin_h, y2 + h * regressor[3])))
#print(regressor)
return bboxes
def writeLabel(filePonter,imageName,Boxes):
label=[]
filePonter.write(imageName+" ")
b=Boxes.reshape(-1).tolist()
for pos in b:
label.append(pos)
filePonter.write(str(label).strip("[]").replace(",",' '))
filePonter.write("\n")
img_root = 'E:/Data/FaceG/IR_labelled/Training/'
infos = [line.strip('\n') for line in open("E:/Data/FaceG/IR_labelled/IR_Training_label.txt")]
N=len(infos)
view_idx=0
f=open("E:/Data/FaceG/IR_labelled/IR_Training_Regressed_label.txt",'w')
for info in infos:
view_idx=view_idx+1
info_strlist = info.split(',')
img_name =img_root+info_strlist[0]+".jpg"
#if not 'graduation_photo' in img_name:
# continue
print(img_name)
img = cv2.imread(img_name)
if img is None:
print("NULL Images "+img_name)
continue
info_strlist=[xx for xx in info_strlist if xx != ""]
bboxes = np.array(info_strlist[1:], dtype=np.float).reshape(-1, 4)
bboxes_new = bboxRegression(img, np.copy(bboxes))
writeLabel(f,info_strlist[0].strip('\''),bboxes_new)
view_bar(view_idx, N)
draw = img.copy()
for i in range(len(bboxes)):
x1, y1, x2, y2 = bboxes[i, :]
# w = x2 - x1 + 1
# h = y2 - y1 + 1
#cv2.putText(draw, str('%.6f' % rectangle[-1]), (int(rectangle[0]), int(
# rectangle[1])), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0))
#cv2.rectangle(draw, (int(bboxes[i, 0]), int(bboxes[i, 1])), (int(
# bboxes[i, 2]), int(bboxes[i, 3])), (255, 0, 0), 1)
cv2.rectangle(draw, (int(bboxes[i, 0]), int(bboxes[i, 1])), (int(
bboxes[i, 2]), int(bboxes[i, 3])), (0, 255, 0), 2)
cv2.rectangle(draw, (int(bboxes_new[i, 0]), int(bboxes_new[i, 1])), (int(
bboxes_new[i, 2]), int(bboxes_new[i, 3])), (0, 0, 255), 2)
# for j in range(0, 106):
# cv2.circle(draw, (int(key_pts[i, 2 * j] * w / float(106) + x1),
# int(key_pts[i, 2 * j + 1] * h / float(106) + y1)), 1, (0, 255, 0), 1)
#cv2.imshow("result", draw)
#cv2.waitKey(0)
outdir="E:/Data/FaceG/IR_labelled/RegressedImages/"+str(info_strlist[0]+".jpg").strip('\'')
cv2.imwrite(outdir, draw)
f.close()
| [
"cv2.imwrite",
"numpy.copy",
"cv2.imread",
"cv2.resize",
"numpy.asarray",
"numpy.array",
"caffe.Net",
"caffe.set_mode_cpu",
"sys.stdout.flush",
"numpy.transpose",
"sys.path.append",
"sys.stdout.write"
] | [((11, 31), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (26, 31), False, 'import sys\n'), ((32, 100), 'sys.path.append', 'sys.path.append', (['"""D:\\\\Program Files\\\\caffe_windows_exe_cup_only_x64"""'], {}), "('D:\\\\Program Files\\\\caffe_windows_exe_cup_only_x64')\n", (47, 100), False, 'import sys\n'), ((272, 313), 'caffe.Net', 'caffe.Net', (['deploy', 'caffemodel', 'caffe.TEST'], {}), '(deploy, caffemodel, caffe.TEST)\n', (281, 313), False, 'import caffe\n'), ((316, 336), 'caffe.set_mode_cpu', 'caffe.set_mode_cpu', ([], {}), '()\n', (334, 336), False, 'import caffe\n'), ((582, 601), 'sys.stdout.write', 'sys.stdout.write', (['r'], {}), '(r)\n', (598, 601), False, 'import sys\n'), ((606, 624), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (622, 624), False, 'import sys\n'), ((699, 727), 'numpy.asarray', 'np.asarray', (['img'], {'dtype': 'float'}), '(img, dtype=float)\n', (709, 727), True, 'import numpy as np\n'), ((742, 767), 'cv2.resize', 'cv2.resize', (['img_new', 'size'], {}), '(img_new, size)\n', (752, 767), False, 'import cv2\n'), ((883, 915), 'numpy.transpose', 'np.transpose', (['img_new', '(2, 0, 1)'], {}), '(img_new, (2, 0, 1))\n', (895, 915), True, 'import numpy as np\n'), ((3197, 3217), 'cv2.imread', 'cv2.imread', (['img_name'], {}), '(img_name)\n', (3207, 3217), False, 'import cv2\n'), ((4763, 4788), 'cv2.imwrite', 'cv2.imwrite', (['outdir', 'draw'], {}), '(outdir, draw)\n', (4774, 4788), False, 'import cv2\n'), ((3483, 3498), 'numpy.copy', 'np.copy', (['bboxes'], {}), '(bboxes)\n', (3490, 3498), True, 'import numpy as np\n'), ((3384, 3426), 'numpy.array', 'np.array', (['info_strlist[1:]'], {'dtype': 'np.float'}), '(info_strlist[1:], dtype=np.float)\n', (3392, 3426), True, 'import numpy as np\n')] |
from utils import resume_training, read_vocab, Tokenizer
from models import EncoderRNN, SelfMonitoring
import torch
import numpy as np
from parser import parser
from gibson2.core.simulator import Simulator
from gibson2.core.physics.scene import BuildingScene, StadiumScene
from gibson2.core.physics.robot_locomotors import Turtlebot
from gibson2.utils.utils import parse_config
from torchvision import models, transforms
import torch
import torch.nn.functional as F
import torch.distributions as D
import matplotlib.pyplot as plt
import time
from make_headings import heading_elevation_feat
# For now: (scene_id, traj_id, instr_idx, instr, starting coords)
good_path_tuples = [
("X7HyMhZNoso", 3134, 0, "Exit the room, turn right and walk down the hallway, turn right after the banister, walk straight, turn right and walk down three steps and stop.", [1.74891996383667, -10.484199523925781, 4.725299835205078]),
("zsNo4HB9uLZ", 122, 1, "Turn around 180 degrees. Go down the hallway to the right. Walk through the kitchen, past the island and stop as soon as you reach the table.", [1.6058199405670166, 3.821429967880249, 1.5538100004196167]),
("x8F5xyUWy9e", 3359, 1, "Turn left and exit the room. Walk straight across and enter the double doors at the end of the hallway. Stop once inside the door.", [-7.6413397789001465, 0.759335994720459, 2.5069499015808105]),
("ZMojNkEp431", 2899, 1, "Walk around the right side of the table, through the pillars into the larger room. Now go around the left side of the long table and stop in the middle of the room with the ping pong table to your right. ", [6.870639801025391, 6.311359882354736, 1.488450050354004]),
("1pXnuDYAj8r", 3821, 2, "Walk out from behind the piano and towards the dining room table. Once you reach the table, turn left and enter the next room with a table. Once in that room, turn left and then stop in front of the mantle. ", [13.605999946594238, 0.5888980031013489, 1.3177499771118164]),
# OOM
("B6ByNegPMKs", 6757, 1, "Walk down the hall towards the exit sign and turn right. Walk into the first door on the left and stop. ", [50.94770050048828, -21.33690071105957, 1.4572299718856812]),
("5q7pvUzZiYa", 5057, 2, "Turn around and walk into the bedroom. Walk out of the bedroom into the hallway. Stop just outside the bedroom. ", [15.51509952545166, -0.6587600111961365, 1.596619963645935]),
("mJXqzFtmKg4", 2235, 0, "Walk out of the kitchen and past the hallway door. Walk into the dining room and turn right. Stop by the piano. ", [-3.7005701065063477, 4.07436990737915, 1.523900032043457]),
("V2XKFyX4ASd", 3474, 0, "Exit the bathroom. Walk forward and go down the stairs. Stop four steps from the bottom. ", [8.059459686279297, -0.07581040263175964, 4.145989894866943]),
("XcA2TqTSSAj", 2907, 1, "Go down the hallway where the bathroom is located and into the bedroom with the dark lacquer wooden floor. ", [4.149159908294678, 2.2838799953460693, 4.6429901123046875]),
("V2XKFyX4ASd", 1726, 1, "Turn right and walk across the bed. Turn slightly left and exit the bedroom. Walk towards the sofa and wait there. ", [5.758540153503418, 6.962540149688721, 4.039840221405029])
]
gibson_path_tuples = [
("Pablo", 0, 0, "Walk past the sofa into the hallway. Keep going until the end of the hall and stop in front of the door.", [0, 0, 1])
]
im_per_ob = 36
# Batch size of images going through imagenet model
B_S = 4
assert(im_per_ob % B_S == 0)
# Model stuff
opts = parser.parse_args()
# opts.resume = "best"
# Text processing
vocab = read_vocab(opts.train_vocab)
base_vocab = ['<PAD>', '<START>', '<EOS>', '<UNK>']
padding_idx = base_vocab.index('<PAD>')
tok = Tokenizer(opts.remove_punctuation == 1, opts.reversed == 1, vocab=vocab, encoding_length=80)
batch_size = 1
trajectory_to_play = [good_path_tuples[5]]
policy_model_kwargs = {
'opts':opts,
'img_fc_dim': opts.img_fc_dim,
'img_fc_use_batchnorm': opts.img_fc_use_batchnorm == 1,
'img_dropout': opts.img_dropout,
'img_feat_input_dim': opts.img_feat_input_dim,
'rnn_hidden_size': opts.rnn_hidden_size,
'rnn_dropout': opts.rnn_dropout,
'max_len': opts.max_cap_length,
'max_navigable': opts.max_navigable
}
encoder_kwargs = {
'opts': opts,
'vocab_size': len(vocab),
'embedding_size': opts.word_embedding_size,
'hidden_size': opts.rnn_hidden_size,
'padding_idx': padding_idx,
'dropout_ratio': opts.rnn_dropout,
'bidirectional': opts.bidirectional == 1,
'num_layers': opts.rnn_num_layers
}
# Model setup
torch.no_grad()
model = SelfMonitoring(**policy_model_kwargs).cuda()
encoder = EncoderRNN(**encoder_kwargs).cuda()
params = list(encoder.parameters()) + list(model.parameters())
optimizer = torch.optim.Adam(params, lr=opts.learning_rate)
resume_training(opts, model, encoder, optimizer)
model.eval()
# model.device = torch.device("cpu")
encoder.eval()
# encoder.device = torch.device("cpu")
resnet = models.resnet152(pretrained=True)
resnet.eval()
resnet.cuda()
# Gibson setup
config = parse_config('ped.yaml')
def transform_img(im):
''' Prep gibson rgb input for pytorch model '''
# RGB pixel mean - from feature precomputing script
im = im[60:540, :, :3].copy()
preprocess = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])
input_tensor = preprocess(im)
blob = np.zeros((1, 3, im.shape[0], im.shape[1]), dtype=np.float32)
blob[0, :, :, :] = input_tensor
return blob
def _select_action(logit, ended, feedback='argmax', is_prob=False, fix_action_ended=True):
logit_cpu = logit.clone().cpu()
if is_prob:
probs = logit_cpu
else:
probs = F.softmax(logit_cpu, 1)
if feedback == 'argmax':
_, action = probs.max(1) # student forcing - argmax
action = action.detach()
elif feedback == 'sample':
# sampling an action from model
m = D.Categorical(probs)
action = m.sample()
else:
raise ValueError('Invalid feedback option: {}'.format(feedback))
# set action to 0 if already ended
if fix_action_ended:
for i, _ended in enumerate(ended):
if _ended:
action[i] = 0
return action
def rollout(traj, headless=False):
SCENE, traj_id, instr_id, instr, starting_coords = traj
# instr = "Keep going and don't stop"
# starting_coords = [0, 0, 0]
seq = tok.encode_sentence(instr)
tokens = tok.split_sentence(instr)
seq_lengths = [np.argmax(seq == padding_idx, axis=0)]
seq = torch.from_numpy(np.expand_dims(seq, 0)).cuda()
# seq_lengths[seq_lengths == 0] = seq.shape[1] # Full length
ctx, h_t, c_t, ctx_mask = encoder(seq, seq_lengths)
question = h_t
pre_feat = torch.zeros(batch_size, opts.img_feat_input_dim).cuda()
pre_ctx_attend = torch.zeros(batch_size, opts.rnn_hidden_size).cuda()
# Gibson stuff
# 72 fov for 600, 60 for 480
# mode = gui for debug, headless for run
s = Simulator(mode='gui', resolution=640, fov=75, panorama=True)
scene = BuildingScene(SCENE)
# scene = StadiumScene()
ids = s.import_scene(scene)
robot = Turtlebot(config)
ped_id = s.import_robot(robot)
heading_feat_tensor = torch.Tensor(heading_elevation_feat()).view([im_per_ob, 128]).cuda()
s.step()
robot.set_position(starting_coords)
def apply_action(bot: robot, action_idx: int, depth_ok: list, headless=False) -> bool:
print(action_idx)
# action_idx is expected to be 0-13, TODO: make nicer...
if action_idx == 0 or action_idx > 12 or not depth_ok[action_idx - 1]:
print("STOP")
return True
action_idx -= 1
#if action_idx < 3 or (action_idx < 12 and action_idx > 9):
bot.turn_right(0.5235988 * action_idx)
s.step()
if(not headless):
time.sleep(0.2)
bot.move_forward(0.5)
return False
# else:
# if action_idx < 7:
# bot.turn_left(1.57)
# else:
# bot.turn_right(1.57)
bot_is_running = True
while bot_is_running:
s.step()
gib_out = s.renderer.render_robot_cameras(modes=('rgb', '3d'))
rgb = gib_out[::2]
depth = np.array(gib_out[1::2])
processed_rgb = list(map(transform_img, rgb))
batch_obs = np.concatenate(processed_rgb)
imgnet_input = torch.Tensor(batch_obs).cuda()
imgnet_output = torch.zeros([im_per_ob, 2048]).cuda()
# depth processing and filtering
# depth: [36, ]
depth *= depth
depth = depth[:, :, :, :3].sum(axis=3)
depth = np.sqrt(depth)
# filter out 0 distances that are presumably from infinity dist
depth[depth < 0.0001] = 10
# TODO: generalize to non-horizontal moves
depth_ok = depth[12:24, 200:440, 160:480].min(axis=2).min(axis=1)
fig=plt.figure(figsize=(8, 2))
for n, i in enumerate([0, 3, 6, 9]):
fig.add_subplot(1, 4, n + 1)
plt.imshow(depth[12 + i])
plt.show()
# depth_ok *= depth_ok > 1
print(depth_ok)
depth_ok = depth_ok > 0.8
print(depth_ok)
# Each observation has 36 inputs
# We pass rgb images through frozen embedder
for i in range(im_per_ob // B_S):
def hook_fn(m, last_input, o):
imgnet_output[i*B_S:(i+1)*B_S, :] = \
o.detach().squeeze(2).squeeze(2)
imgnet_input[B_S * i : B_S * (i + 1)]
# imgnet_output[B_S * i : B_S * (i + 1)] = resnet(minibatch).detach()
imgnet_output = torch.cat([imgnet_output, heading_feat_tensor], 1)
pano_img_feat = imgnet_output.view([1, im_per_ob, 2176])
navigable_feat = torch.zeros([1, 16, 2176]).cuda()
navigable_feat[0, 1:13] = imgnet_output[12:24] * torch.Tensor(depth_ok).cuda().view(12, 1)
# TODO: make nicer as stated above
navigable_index = [list(map(int, depth_ok))]
print(navigable_index)
# NB: depth_ok replaces navigable_index
h_t, c_t, pre_ctx_attend, img_attn, ctx_attn, logit, value, navigable_mask = model(
pano_img_feat, navigable_feat, pre_feat, question, h_t, c_t, ctx,
pre_ctx_attend, navigable_index, ctx_mask)
print("ATTN")
print(ctx_attn[0])
print(img_attn[0])
plt.bar(range(len(tokens)), ctx_attn.detach().cpu()[0][:len(tokens)])
plt.xticks(range(len(tokens)), tokens)
plt.show()
plt.bar(range(16), img_attn.detach().cpu()[0])
plt.show()
print("NMASK")
print(navigable_mask)
logit.data.masked_fill_((navigable_mask == 0).data, -float('inf'))
m = torch.Tensor([[False] + list(map(lambda b: not b, navigable_index[0])) + [False, False, False]], dtype=bool).cuda()
logit.data.masked_fill_(m, -float('inf'))
action = _select_action(logit, [False])
ended = apply_action(robot, action[0], depth_ok)
bot_is_running = not ended or not headless
if not headless:
time.sleep(.3)
if __name__ == "__main__":
for traj in trajectory_to_play:
rollout(traj) | [
"numpy.sqrt",
"torch.distributions.Categorical",
"utils.resume_training",
"time.sleep",
"numpy.array",
"torch.nn.functional.softmax",
"matplotlib.pyplot.imshow",
"gibson2.utils.utils.parse_config",
"models.EncoderRNN",
"numpy.concatenate",
"torchvision.transforms.ToTensor",
"make_headings.heading_elevation_feat",
"gibson2.core.physics.robot_locomotors.Turtlebot",
"models.SelfMonitoring",
"gibson2.core.simulator.Simulator",
"torch.Tensor",
"numpy.argmax",
"gibson2.core.physics.scene.BuildingScene",
"torchvision.models.resnet152",
"utils.Tokenizer",
"torchvision.transforms.Normalize",
"torch.cat",
"matplotlib.pyplot.show",
"torch.optim.Adam",
"utils.read_vocab",
"numpy.zeros",
"matplotlib.pyplot.figure",
"parser.parser.parse_args",
"numpy.expand_dims",
"torch.no_grad",
"torch.zeros"
] | [((3502, 3521), 'parser.parser.parse_args', 'parser.parse_args', ([], {}), '()\n', (3519, 3521), False, 'from parser import parser\n'), ((3572, 3600), 'utils.read_vocab', 'read_vocab', (['opts.train_vocab'], {}), '(opts.train_vocab)\n', (3582, 3600), False, 'from utils import resume_training, read_vocab, Tokenizer\n'), ((3699, 3795), 'utils.Tokenizer', 'Tokenizer', (['(opts.remove_punctuation == 1)', '(opts.reversed == 1)'], {'vocab': 'vocab', 'encoding_length': '(80)'}), '(opts.remove_punctuation == 1, opts.reversed == 1, vocab=vocab,\n encoding_length=80)\n', (3708, 3795), False, 'from utils import resume_training, read_vocab, Tokenizer\n'), ((4606, 4621), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4619, 4621), False, 'import torch\n'), ((4796, 4843), 'torch.optim.Adam', 'torch.optim.Adam', (['params'], {'lr': 'opts.learning_rate'}), '(params, lr=opts.learning_rate)\n', (4812, 4843), False, 'import torch\n'), ((4844, 4892), 'utils.resume_training', 'resume_training', (['opts', 'model', 'encoder', 'optimizer'], {}), '(opts, model, encoder, optimizer)\n', (4859, 4892), False, 'from utils import resume_training, read_vocab, Tokenizer\n'), ((5006, 5039), 'torchvision.models.resnet152', 'models.resnet152', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (5022, 5039), False, 'from torchvision import models, transforms\n'), ((5093, 5117), 'gibson2.utils.utils.parse_config', 'parse_config', (['"""ped.yaml"""'], {}), "('ped.yaml')\n", (5105, 5117), False, 'from gibson2.utils.utils import parse_config\n'), ((5490, 5550), 'numpy.zeros', 'np.zeros', (['(1, 3, im.shape[0], im.shape[1])'], {'dtype': 'np.float32'}), '((1, 3, im.shape[0], im.shape[1]), dtype=np.float32)\n', (5498, 5550), True, 'import numpy as np\n'), ((7104, 7164), 'gibson2.core.simulator.Simulator', 'Simulator', ([], {'mode': '"""gui"""', 'resolution': '(640)', 'fov': '(75)', 'panorama': '(True)'}), "(mode='gui', resolution=640, fov=75, panorama=True)\n", (7113, 7164), False, 'from gibson2.core.simulator import Simulator\n'), ((7177, 7197), 'gibson2.core.physics.scene.BuildingScene', 'BuildingScene', (['SCENE'], {}), '(SCENE)\n', (7190, 7197), False, 'from gibson2.core.physics.scene import BuildingScene, StadiumScene\n'), ((7271, 7288), 'gibson2.core.physics.robot_locomotors.Turtlebot', 'Turtlebot', (['config'], {}), '(config)\n', (7280, 7288), False, 'from gibson2.core.physics.robot_locomotors import Turtlebot\n'), ((4630, 4667), 'models.SelfMonitoring', 'SelfMonitoring', ([], {}), '(**policy_model_kwargs)\n', (4644, 4667), False, 'from models import EncoderRNN, SelfMonitoring\n'), ((4685, 4713), 'models.EncoderRNN', 'EncoderRNN', ([], {}), '(**encoder_kwargs)\n', (4695, 4713), False, 'from models import EncoderRNN, SelfMonitoring\n'), ((5800, 5823), 'torch.nn.functional.softmax', 'F.softmax', (['logit_cpu', '(1)'], {}), '(logit_cpu, 1)\n', (5809, 5823), True, 'import torch.nn.functional as F\n'), ((6612, 6649), 'numpy.argmax', 'np.argmax', (['(seq == padding_idx)'], {'axis': '(0)'}), '(seq == padding_idx, axis=0)\n', (6621, 6649), True, 'import numpy as np\n'), ((8378, 8401), 'numpy.array', 'np.array', (['gib_out[1::2]'], {}), '(gib_out[1::2])\n', (8386, 8401), True, 'import numpy as np\n'), ((8477, 8506), 'numpy.concatenate', 'np.concatenate', (['processed_rgb'], {}), '(processed_rgb)\n', (8491, 8506), True, 'import numpy as np\n'), ((8775, 8789), 'numpy.sqrt', 'np.sqrt', (['depth'], {}), '(depth)\n', (8782, 8789), True, 'import numpy as np\n'), ((9035, 9061), 'matplotlib.pyplot.figure', 'plt.figure', ([], 
{'figsize': '(8, 2)'}), '(figsize=(8, 2))\n', (9045, 9061), True, 'import matplotlib.pyplot as plt\n'), ((9194, 9204), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9202, 9204), True, 'import matplotlib.pyplot as plt\n'), ((9766, 9816), 'torch.cat', 'torch.cat', (['[imgnet_output, heading_feat_tensor]', '(1)'], {}), '([imgnet_output, heading_feat_tensor], 1)\n', (9775, 9816), False, 'import torch\n'), ((10653, 10663), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10661, 10663), True, 'import matplotlib.pyplot as plt\n'), ((10727, 10737), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10735, 10737), True, 'import matplotlib.pyplot as plt\n'), ((5330, 5351), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (5349, 5351), False, 'from torchvision import models, transforms\n'), ((5361, 5436), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (5381, 5436), False, 'from torchvision import models, transforms\n'), ((6031, 6051), 'torch.distributions.Categorical', 'D.Categorical', (['probs'], {}), '(probs)\n', (6044, 6051), True, 'import torch.distributions as D\n'), ((6867, 6915), 'torch.zeros', 'torch.zeros', (['batch_size', 'opts.img_feat_input_dim'], {}), '(batch_size, opts.img_feat_input_dim)\n', (6878, 6915), False, 'import torch\n'), ((6944, 6989), 'torch.zeros', 'torch.zeros', (['batch_size', 'opts.rnn_hidden_size'], {}), '(batch_size, opts.rnn_hidden_size)\n', (6955, 6989), False, 'import torch\n'), ((7979, 7994), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (7989, 7994), False, 'import time\n'), ((9160, 9185), 'matplotlib.pyplot.imshow', 'plt.imshow', (['depth[12 + i]'], {}), '(depth[12 + i])\n', (9170, 9185), True, 'import matplotlib.pyplot as plt\n'), ((11239, 11254), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (11249, 11254), False, 'import time\n'), ((6678, 6700), 'numpy.expand_dims', 'np.expand_dims', (['seq', '(0)'], {}), '(seq, 0)\n', (6692, 6700), True, 'import numpy as np\n'), ((8530, 8553), 'torch.Tensor', 'torch.Tensor', (['batch_obs'], {}), '(batch_obs)\n', (8542, 8553), False, 'import torch\n'), ((8585, 8615), 'torch.zeros', 'torch.zeros', (['[im_per_ob, 2048]'], {}), '([im_per_ob, 2048])\n', (8596, 8615), False, 'import torch\n'), ((9908, 9934), 'torch.zeros', 'torch.zeros', (['[1, 16, 2176]'], {}), '([1, 16, 2176])\n', (9919, 9934), False, 'import torch\n'), ((7363, 7387), 'make_headings.heading_elevation_feat', 'heading_elevation_feat', ([], {}), '()\n', (7385, 7387), False, 'from make_headings import heading_elevation_feat\n'), ((9999, 10021), 'torch.Tensor', 'torch.Tensor', (['depth_ok'], {}), '(depth_ok)\n', (10011, 10021), False, 'import torch\n')] |