code | apis | extract_api
---|---|---
from aiogram.types import CallbackQuery
from keyboards.inline.lang_choose import lang_choose_kb
from loader import dp
@dp.callback_query_handler(text="Меню вибора мови")
async def ukr(call: CallbackQuery):
await call.message.edit_text("Оберіть будь ласка мову для спілкування👇")
await call.message.edit_reply_markup(reply_markup=lang_choose_kb)
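# The handler above edits the message to "Оберіть будь ласка мову для спілкування👇"
# ("please choose a language to communicate in") and attaches lang_choose_kb from
# keyboards/inline/lang_choose.py, which is not shown here. A minimal sketch of what
# that keyboard module might contain (aiogram 2.x style); the button labels and
# callback_data values below are assumptions, not the project's actual ones.
from aiogram.types import InlineKeyboardMarkup, InlineKeyboardButton

lang_choose_kb = InlineKeyboardMarkup(row_width=2)
lang_choose_kb.add(
    InlineKeyboardButton(text="Українська", callback_data="lang_uk"),
    InlineKeyboardButton(text="English", callback_data="lang_en"),
)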
| [
"loader.dp.callback_query_handler"
] | [((121, 171), 'loader.dp.callback_query_handler', 'dp.callback_query_handler', ([], {'text': '"""Меню вибора мови"""'}), "(text='Меню вибора мови')\n", (146, 171), False, 'from loader import dp\n')] |
""" Use various estimation formula of Jasper to determine
energy transfer parameters: sigma, epsilon, and alpha
"""
import itertools
import numpy
from phydat import phycon
import automol.geom
import automol.inchi
import automol.graph
from automol.graph import FunctionalGroup
from automol.etrans._par import LJ_DCT
from automol.etrans._par import LJ_EST_DCT
from automol.etrans._par import Z_ALPHA_EST_DCT
from automol.etrans._par import D0_GRP_LST
from automol.etrans._par import ZROT_DCT
from automol.etrans._fxn import troe_lj_collision_frequency
# CALCULATE THE EFFECTIVE ALPHA VALUE
def alpha(n_eff, eps, sig, mass1, mass2, collider_set,
empirical_factor=2.0):
""" Calculate the alpha param using the method Jasper, et al.
:param n_eff: number of effective rotors
:type n_eff: int
:param zlj_dct: lennard-jones collision frequencies (cm-1?)
:type zlj_dct: dict[float: float]
:param empirical_factor: correction for using 1DME versus 2DM2
:type empirical_factor: float
"""
# Calculate the Lennard-Jones frequencies
red_mass = ((mass1 * mass2) / (mass1 + mass2))
# Calculate Zalpha(Neff) at T = 300, 1000, 2000 K
z_alphas_n_eff = _calculate_z_alpha_terms(n_eff, collider_set)
# Calculate alpha = Zalpha(Neff) / Z(N) at T = 300, 1000, 2000 K
# Empirical correction factor of (1/2) used for 1D Master Equations
alpha_dct = {}
for temp, z_alpha_n_eff in z_alphas_n_eff.items():
zlj = troe_lj_collision_frequency(eps, sig, red_mass, temp)
        zlj *= 100**3  # convert m^3/s to cm^3/s for use below
alpha_dct[temp] = (z_alpha_n_eff / zlj) / empirical_factor
# Determine alpha and n for the e-down model
edown_alpha, edown_n = _calculate_energy_down_exponent(alpha_dct)
return edown_alpha, edown_n
def _calculate_z_alpha_terms(n_eff, collider_set):
""" Calculate the [Z*alpha](N_eff)
"""
def _z_alpha(n_eff, coeffs):
""" calculate an effective Z*alpha parameter
"""
return ((coeffs[0] * n_eff**(3) +
coeffs[1] * n_eff**(2) +
coeffs[2] * n_eff**(1) +
coeffs[3]) / 1.0e9)
# Read the proper coefficients from the moldriver dct
coeff_dct = Z_ALPHA_EST_DCT.get(collider_set, None)
if coeff_dct is not None:
# Calculate the three alpha terms
z_alpha_dct = {}
for temp, coeffs in coeff_dct.items():
z_alpha_dct[temp] = _z_alpha(n_eff, coeffs)
return z_alpha_dct
def _calculate_energy_down_exponent(alpha_dct):
""" Calculate power n, for model: E_down = E_down_300 * (T/300)**n
Does a least-squares for n to solve the linear equation
ln(E_down/E_down_300) = [ln(T/300)] * n
:param alpha_dct: temperature-dependent alpha parameters
:type alpha_dct: dict[float: float]
"""
assert 300 in alpha_dct, (
'Must have 300 K in alphas'
)
# Set the edown alpha to the value at 300 K
edown_alpha = alpha_dct[300]
# Build vectors and matrices used for the fitting
temps = numpy.array(list(alpha_dct.keys()), dtype=numpy.float64)
alphas = numpy.array(list(alpha_dct.values()), dtype=numpy.float64)
n_vec = numpy.log(temps / 300.0)
coeff_mat = numpy.array([n_vec], dtype=numpy.float64)
coeff_mat = coeff_mat.transpose()
edown_vec = numpy.log(alphas / edown_alpha)
# Perform the least-squares fit
theta = numpy.linalg.lstsq(coeff_mat, edown_vec, rcond=None)[0]
    # Set the edown n value to the fitting parameter
edown_n = theta[0]
return edown_alpha, edown_n
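# Illustrative check of the fit above (the alpha values are invented for the
# example, not taken from any collider set): for
#     alpha_dct = {300: 100.0, 1000: 200.0, 2000: 300.0}
# the function returns edown_alpha = 100.0 and n as the zero-intercept
# least-squares slope of ln(alpha/100) versus ln(T/300), i.e.
#     n = sum(x*y) / sum(x*x) ~= 0.58,
# so the fitted model E_down ~= 100 * (T/300)**0.58 approximately reproduces
# the three input alpha values.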
# CALCULATE THE EFFECTIVE LENNARD-JONES SIGMA AND EPSILON
def lennard_jones_params(n_heavy, collider_set):
""" Returns in angstrom and cm-1.
:param n_heavy: Number of heavy atoms for a species
:type n_heavy: int
"""
def _lj(n_heavy, coeff1, coeff2):
""" calculate Lennard-Jones parameter using estimation
            coefficients
"""
return coeff1 * n_heavy**(coeff2)
# See if collider set is in dict with exact numbers
    # If not, read the estimation coefficients from the EST dct and calculate the params
params = LJ_DCT.get(collider_set)
if params is not None:
sig, eps = params
else:
coeffs = LJ_EST_DCT.get(collider_set, None)
if coeffs is not None:
# Calculate the effective sigma and epsilon values
sig = _lj(n_heavy, coeffs[0], coeffs[1])
eps = _lj(n_heavy, coeffs[2], coeffs[3])
else:
sig, eps = None, None
# Convert the units to what they should be internally
if sig is not None:
sig *= phycon.ANG2BOHR
if eps is not None:
eps *= phycon.WAVEN2EH
return sig, eps
# DETERMINE N_EFF USED FOR ALPHA AND LJ PARAM CALCULATIONS
def effective_rotor_count(geo):
""" Calculate an effective N parameter using the given parametrization.
:param geo: geometry (Bohr)
:type geo: automol geometry data structure
:rtype: float
"""
    # Convert the geo to a graph
gra = automol.geom.graph(geo)
symbs = automol.geom.symbols(geo)
# Count the rotors
(n_pp, n_ps, n_pt, n_pq,
n_ss, n_st, n_sq,
n_tt, n_tq,
n_qq,
n_co, n_oo,
n_ss_ring, n_rings) = _rotor_counts(gra, symbs)
# print(' - Rotor Counts for N_eff:')
# print(' N_pp:{}, N_ps:{}, N_pt:{}, N_pq:{}'.format(
# n_pp, n_ps, n_pt, n_pq))
# print(' N_ss:{}, N_st:{}, N_sq:{}'.format(n_ss, n_st, n_sq))
# print(' N_tt:{}, N_tq:{}'.format(n_tt, n_tq))
# print(' N_qq:{}'.format(n_qq))
# print(' N_co:{}, N_oo:{}'.format(n_co, n_oo))
# print(' N_ss_ring:{}, N_rings:{}'.format(n_ss_ring, n_rings))
# Use the rotor counts and the coefficients to calculate Neff
c_pp_ps_ss, c_pt_st, c_pq_sq = 1.0, 2.0/3.0, 1.0/3.0
c_tt_tq_qq, c_co_oo, c_ss_ring = 0.0, 1.0/3.0, 1.0/2.0
n_eff = 1.0 + (
c_pp_ps_ss * (n_pp + n_ps + n_ss) +
c_pt_st * (n_pt + n_st) +
c_pq_sq * (n_pq + n_sq) +
c_tt_tq_qq * (n_tt + n_tq + n_qq) +
c_co_oo * (n_co + n_oo) +
c_ss_ring * n_ss_ring - n_rings
)
return n_eff
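# Hand-worked illustration of the formula above (not program output): for
# n-butane (CH3-CH2-CH2-CH3) the bond loop in _rotor_counts below finds two
# primary-secondary C-C bonds (n_ps = 2) and one secondary-secondary C-C bond
# (n_ss = 1) with no rings, giving n_eff = 1 + 1.0*(0 + 2 + 1) = 4.0.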
def _rotor_counts(gra, symbs):
""" Count up various types of bonds for a structure.
:param gra: molecular graph of species
:type gra: automol graph data structure
:param symbs: atomic symbols of species
:type symbs: tuple(str)
:rtype: tuple(float)
"""
# Initialize the rotor counts
n_pp, n_ps, n_pt, n_pq = 0, 0, 0, 0
n_ss, n_st, n_sq = 0, 0, 0
n_tt, n_tq = 0, 0
n_qq = 0
n_co, n_oo = 0, 0
n_ss_ring, n_rings = 0, 0
# Get the rings and the number
rings = automol.graph.rings(gra)
ring_keys = set(itertools.chain(*automol.graph.rings_atom_keys(gra)))
n_rings = len(rings)
    # Loop over the bonds and count each bond type
neighbors = automol.graph.atoms_neighbor_atom_keys(gra)
for bnd in automol.graph.bond_keys(gra):
key1, key2 = bnd
spair = (symbs[key1], symbs[key2])
if spair == ('C', 'C'):
# Figure out which neighbors are not hydrogen and count the number
atom1_neighbors = neighbors[key1]
numc1 = 0
for neighbor1 in atom1_neighbors:
if symbs[neighbor1] != 'H':
numc1 += 1
atom2_neighbors = neighbors[key2]
numc2 = 0
for neighbor2 in atom2_neighbors:
if symbs[neighbor2] != 'H':
numc2 += 1
# Determine appropriate term to increment
npair = (numc1, numc2)
if npair == (1, 1):
n_pp += 1
elif npair in ((1, 2), (2, 1)):
n_ps += 1
elif npair in ((1, 3), (3, 1)):
n_pt += 1
elif npair in ((1, 4), (4, 1)):
n_pq += 1
elif npair == (2, 2):
if {key1, key2} <= ring_keys:
n_ss_ring += 1
else:
n_ss += 1
elif npair in ((2, 3), (3, 2)):
n_st += 1
elif npair in ((2, 4), (4, 2)):
n_sq += 1
elif npair == (3, 3):
n_tt += 1
elif npair in ((3, 4), (4, 3)):
n_tq += 1
elif npair == (4, 4):
n_qq += 1
elif spair in (('C', 'O'), ('O', 'C')):
n_co += 1
elif spair == ('O', 'O'):
n_oo += 1
# Compile counts into a tuple
return (n_pp, n_ps, n_pt, n_pq,
n_ss, n_st, n_sq,
n_tt, n_tq,
n_qq,
n_co, n_oo,
n_ss_ring, n_rings)
# Rotational relaxation number
def rotational_relaxation_number(tgt_ich):
""" Get the rotational relaxation number at 298 K for a given species.
Currently, this is read from internal library or set to 1.
:param tgt_ich: InChI string of species
:type tgt_ich: str
:rtype: float
"""
return ZROT_DCT.get(tgt_ich, 1.0)
# Determine which effective model series to use
def determine_collision_model_series(tgt_ich, bath_ich, collid_param):
""" For the collision between a given tgt and bath species, determine
which effective series would be the most suitable model for
determining the energy transfer parameters
:param collid_param: select 'lj' or 'alpha' for parameter to assign
"""
def _identify_effective_model(tgt_ich, bath_ich):
""" When values cannot be assigned for a collision, try and
identify the best representative series to use for estimation
"""
# Build the graph
tgt_gra = automol.geom.graph(automol.inchi.geometry(tgt_ich))
        # Identify the target model
if automol.graph.radical_species(tgt_gra):
# Determine if peroxy,hydroperoxy groups present to use RO2 series
# otherwise just use alkyl radical series
fgrp_cnt_dct = automol.graph.functional_group_count_dct(tgt_gra)
fgrps = set(fgrp for fgrp, count in fgrp_cnt_dct.items()
if count > 0)
print('fgrps test', fgrps)
_ro2_fgrps = {FunctionalGroup.PEROXY, FunctionalGroup.HYDROPEROXY}
if _ro2_fgrps & fgrps:
tgt_model = 'peroxy'
else:
tgt_model = '1-alkyl'
elif automol.graph.hydrocarbon_species(tgt_gra):
tgt_model = 'n-alkane'
else:
# Set priority based on bond-dissociation energies
# Loop through D0 dct (ordered by ene) and try to find func. grp
tgt_model = None
fgrp_cnt_dct = automol.graph.functional_group_count_dct(tgt_gra)
fgrps = set(fgrp for fgrp, count in fgrp_cnt_dct.items()
if count > 0)
print('fgrps test', fgrps)
for (fgrp, model) in D0_GRP_LST:
if fgrp in fgrps:
tgt_model = model
break
# Set target model to alkanes if nothing found
if tgt_model is None:
tgt_model = 'n-alkane'
return frozenset({tgt_model, bath_ich})
# First check if one should use standard numbers instead of estimating
collider_set = frozenset({tgt_ich, bath_ich})
# Go through a procedure to determine which model to use
if collid_param == 'lj':
if collider_set not in LJ_DCT:
# try to identify a model where possible
collider_set = _identify_effective_model(tgt_ich, bath_ich)
elif collid_param == 'alpha':
if {'InChI=1S/H2/h1H', 'InChI=1S/H'} & collider_set:
# Model cannot be set for these common situations
            # of H and H2 colliding with non-Ar/N2 bath gases
collider_set = None
else:
# Last try to identify a model where possible
collider_set = _identify_effective_model(tgt_ich, bath_ich)
return collider_set
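# A minimal sketch of how the public helpers above chain together for a single
# target/bath pair. The InChI strings, masses, and heavy-atom count below are
# placeholder assumptions chosen only to illustrate the call sequence; the
# estimators may return None when a collider set has no tabulated coefficients.
def _example_energy_transfer_params():
    """ Illustrative only: ethane colliding with argon """
    tgt_ich = 'InChI=1S/C2H6/c1-2/h1-2H3'   # ethane (assumed target)
    bath_ich = 'InChI=1S/Ar'                 # argon (assumed bath gas)
    lj_set = determine_collision_model_series(tgt_ich, bath_ich, 'lj')
    sig, eps = lennard_jones_params(n_heavy=2, collider_set=lj_set)
    alpha_set = determine_collision_model_series(tgt_ich, bath_ich, 'alpha')
    n_eff = effective_rotor_count(automol.inchi.geometry(tgt_ich))
    edown_alpha, edown_n = alpha(
        n_eff, eps, sig, mass1=30.07, mass2=39.95, collider_set=alpha_set)
    return sig, eps, edown_alpha, edown_n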
| [
"automol.etrans._par.LJ_DCT.get",
"numpy.log",
"automol.etrans._par.LJ_EST_DCT.get",
"automol.etrans._par.ZROT_DCT.get",
"numpy.array",
"automol.etrans._fxn.troe_lj_collision_frequency",
"numpy.linalg.lstsq",
"automol.etrans._par.Z_ALPHA_EST_DCT.get"
] | [((2268, 2307), 'automol.etrans._par.Z_ALPHA_EST_DCT.get', 'Z_ALPHA_EST_DCT.get', (['collider_set', 'None'], {}), '(collider_set, None)\n', (2287, 2307), False, 'from automol.etrans._par import Z_ALPHA_EST_DCT\n'), ((3249, 3273), 'numpy.log', 'numpy.log', (['(temps / 300.0)'], {}), '(temps / 300.0)\n', (3258, 3273), False, 'import numpy\n'), ((3290, 3331), 'numpy.array', 'numpy.array', (['[n_vec]'], {'dtype': 'numpy.float64'}), '([n_vec], dtype=numpy.float64)\n', (3301, 3331), False, 'import numpy\n'), ((3387, 3418), 'numpy.log', 'numpy.log', (['(alphas / edown_alpha)'], {}), '(alphas / edown_alpha)\n', (3396, 3418), False, 'import numpy\n'), ((4206, 4230), 'automol.etrans._par.LJ_DCT.get', 'LJ_DCT.get', (['collider_set'], {}), '(collider_set)\n', (4216, 4230), False, 'from automol.etrans._par import LJ_DCT\n'), ((9160, 9186), 'automol.etrans._par.ZROT_DCT.get', 'ZROT_DCT.get', (['tgt_ich', '(1.0)'], {}), '(tgt_ich, 1.0)\n', (9172, 9186), False, 'from automol.etrans._par import ZROT_DCT\n'), ((1501, 1554), 'automol.etrans._fxn.troe_lj_collision_frequency', 'troe_lj_collision_frequency', (['eps', 'sig', 'red_mass', 'temp'], {}), '(eps, sig, red_mass, temp)\n', (1528, 1554), False, 'from automol.etrans._fxn import troe_lj_collision_frequency\n'), ((3468, 3520), 'numpy.linalg.lstsq', 'numpy.linalg.lstsq', (['coeff_mat', 'edown_vec'], {'rcond': 'None'}), '(coeff_mat, edown_vec, rcond=None)\n', (3486, 3520), False, 'import numpy\n'), ((4311, 4345), 'automol.etrans._par.LJ_EST_DCT.get', 'LJ_EST_DCT.get', (['collider_set', 'None'], {}), '(collider_set, None)\n', (4325, 4345), False, 'from automol.etrans._par import LJ_EST_DCT\n')] |
#!/usr/bin/python3
# coding=utf8
import sys
import cv2
import math
import time
import import_path
import Camera
import threading
import kinematics
import numpy as np
from LABConfig import *
import HiwonderSDK.Misc as Misc
import HiwonderSDK.Board as Board
ik = kinematics.IK()
debug = False
HWSONAR = None
if sys.version_info.major == 2:
print('Please run this program with python3!')
sys.exit(0)
range_rgb = {
'red': (0, 0, 255),
'blue': (255, 0, 0),
'green': (0, 255, 0),
'black': (0, 0, 0),
'white': (255, 255, 255),
}
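# `color_range` is pulled in by `from LABConfig import *` above and is not part
# of this file. run() below expects it to map a colour name to a (lower, upper)
# pair of LAB bounds usable with cv2.inRange. A rough sketch of its shape; the
# numeric bounds are placeholders, not Hiwonder's calibrated values:
#
# color_range = {
#     'red':   ((0, 151, 100), (255, 255, 255)),
#     'green': ((0, 0, 0),     (255, 115, 255)),
#     'blue':  ((0, 0, 0),     (255, 255, 110)),
#     'black': ((0, 0, 0),     (56, 255, 255)),
#     'white': ((193, 0, 0),   (255, 250, 255)),
# }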
# Find the contour with the largest area
# The parameter is list of contours to be compared
def getAreaMaxContour(contours):
contour_area_temp = 0
contour_area_max = 0
area_max_contour = None
max_area = 0
for c in contours: # Traverse all contours
contour_area_temp = math.fabs(cv2.contourArea(c)) # Calculate the contour area
if contour_area_temp > contour_area_max:
contour_area_max = contour_area_temp
            if contour_area_temp >= 100:  # Only contours larger than this threshold are considered valid, to filter out noise
area_max_contour = c
max_area = contour_area_temp
    return area_max_contour, max_area  # Return the contour with the largest area and that area
# Initial position
def initMove():
HWSONAR.setRGBMode(0)
HWSONAR.setRGB(1, (0, 0, 0))
HWSONAR.setRGB(2, (0, 0, 0))
Board.setPWMServoPulse(1, 1500, 500)
Board.setPWMServoPulse(2, servo2, 500)
color_list = []
detect_color = 'None'
action_finish = True
draw_color = range_rgb["black"]
# Variable reset
def reset():
global draw_color
global color_list
global detect_color
global action_finish
color_list = []
detect_color = 'None'
action_finish = True
draw_color = range_rgb["black"]
# app initialize call
def init():
    global __isRunning
    print("ColorDetect Init")
    initMove()
    __isRunning = False
# app start the games call
def start():
global __isRunning
reset()
__isRunning = True
print("ColorDetect Start")
# app stop the games call
def stop():
global __isRunning
__isRunning = False
print("ColorDetect Stop")
# app exit games call
def exit():
global __isRunning
__isRunning = False
ik.stand(ik.initial_pos)
print("ColorDetect Exit")
def move():
global draw_color
global detect_color
global action_finish
while True:
if debug:
return
if __isRunning:
if detect_color != 'None':
action_finish = False
if detect_color == 'red':
Board.setPWMServoPulse(1, 1800, 200)
time.sleep(0.2)
Board.setPWMServoPulse(1, 1200, 200)
time.sleep(0.2)
Board.setPWMServoPulse(1, 1800, 200)
time.sleep(0.2)
Board.setPWMServoPulse(1, 1200, 200)
time.sleep(0.2)
Board.setPWMServoPulse(1, 1500, 100)
time.sleep(0.1)
detect_color = 'None'
draw_color = range_rgb["black"]
time.sleep(1)
elif detect_color == 'green' or detect_color == 'blue':
Board.setPWMServoPulse(2, 1800, 200)
time.sleep(0.2)
Board.setPWMServoPulse(2, 1200, 200)
time.sleep(0.2)
Board.setPWMServoPulse(2, 1800, 200)
time.sleep(0.2)
Board.setPWMServoPulse(2, 1200, 200)
time.sleep(0.2)
Board.setPWMServoPulse(2, 1500, 100)
time.sleep(0.1)
detect_color = 'None'
draw_color = range_rgb["black"]
time.sleep(1)
else:
time.sleep(0.01)
action_finish = True
detect_color = 'None'
else:
time.sleep(0.01)
else:
time.sleep(0.01)
# Run Thread
th = threading.Thread(target=move)
th.setDaemon(True)
th.start()
size = (320, 240)
def run(img):
global draw_color
global color_list
global detect_color
global action_finish
img_copy = img.copy()
img_h, img_w = img.shape[:2]
if not __isRunning:
return img
frame_resize = cv2.resize(img_copy, size, interpolation=cv2.INTER_NEAREST)
frame_gb = cv2.GaussianBlur(frame_resize, (3, 3), 3)
    frame_lab = cv2.cvtColor(frame_gb, cv2.COLOR_BGR2LAB)  # Convert the image to LAB space
max_area = 0
color_area_max = None
areaMaxContour_max = 0
if action_finish:
for i in color_range:
if i != 'black' and i != 'white':
frame_mask = cv2.inRange(frame_lab, color_range[i][0], color_range[i][1]) # The original image and mask are bitwise operated
                eroded = cv2.erode(frame_mask, cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3)))  # Erosion
                dilated = cv2.dilate(eroded, cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3)))  # Dilation
if debug:
cv2.imshow(i, dilated)
contours = cv2.findContours(dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[-2] # Find out the outline
areaMaxContour, area_max = getAreaMaxContour(contours) # Find the maximum contour
if areaMaxContour is not None:
if area_max > max_area:# Find maximum area
max_area = area_max
color_area_max = i
areaMaxContour_max = areaMaxContour
if max_area > 100: # Have found the maximum area
            ((centerX, centerY), radius) = cv2.minEnclosingCircle(areaMaxContour_max)  # Get the minimum enclosing circle
centerX = int(Misc.map(centerX, 0, size[0], 0, img_w))
centerY = int(Misc.map(centerY, 0, size[1], 0, img_h))
radius = int(Misc.map(radius, 0, size[0], 0, img_w))
cv2.circle(img, (centerX, centerY), radius, range_rgb[color_area_max], 2)# Draw circle
if color_area_max == 'red': # Red biggest
color = 1
elif color_area_max == 'green': # Green biggest
color = 2
elif color_area_max == 'blue': # Blue largest
color = 3
else:
color = 0
color_list.append(color)
            if len(color_list) == 3:  # Judge over multiple frames
# Average
color = int(round(np.mean(np.array(color_list))))
color_list = []
if color == 1:
detect_color = 'red'
draw_color = range_rgb["red"]
elif color == 2:
detect_color = 'green'
draw_color = range_rgb["green"]
elif color == 3:
detect_color = 'blue'
draw_color = range_rgb["blue"]
else:
detect_color = 'None'
draw_color = range_rgb["black"]
else:
detect_color = 'None'
draw_color = range_rgb["black"]
cv2.putText(img, "Color: " + detect_color, (10, img.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.65, draw_color, 2)
return img
if __name__ == '__main__':
import HiwonderSDK.Sonar as Sonar
from CameraCalibration.CalibrationConfig import *
# Loading parameter
param_data = np.load(calibration_param_path + '.npz')
# Get parameter
mtx = param_data['mtx_array']
dist = param_data['dist_array']
newcameramtx, _ = cv2.getOptimalNewCameraMatrix(mtx, dist, (640, 480), 0, (640, 480))
mapx, mapy = cv2.initUndistortRectifyMap(mtx, dist, None, newcameramtx, (640, 480), 5)
debug = False
if debug:
print('Debug Mode')
HWSONAR = Sonar.Sonar()
init()
start()
my_camera = Camera.Camera()
my_camera.camera_open()
while True:
img = my_camera.frame
if img is not None:
frame = img.copy()
frame = cv2.remap(frame, mapx, mapy, cv2.INTER_LINEAR) # Distortion correction
Frame = run(frame)
cv2.imshow('Frame', Frame)
key = cv2.waitKey(1)
if key == 27:
break
else:
time.sleep(0.01)
my_camera.camera_close()
cv2.destroyAllWindows()
| [
"cv2.initUndistortRectifyMap",
"HiwonderSDK.Board.setPWMServoPulse",
"cv2.remap",
"time.sleep",
"cv2.imshow",
"numpy.array",
"HiwonderSDK.Sonar.Sonar",
"cv2.destroyAllWindows",
"sys.exit",
"Camera.Camera",
"cv2.contourArea",
"HiwonderSDK.Misc.map",
"cv2.waitKey",
"cv2.minEnclosingCircle",
"cv2.putText",
"cv2.getOptimalNewCameraMatrix",
"cv2.circle",
"cv2.cvtColor",
"cv2.resize",
"cv2.GaussianBlur",
"kinematics.IK",
"cv2.inRange",
"cv2.findContours",
"threading.Thread",
"numpy.load",
"cv2.getStructuringElement"
] | [((262, 277), 'kinematics.IK', 'kinematics.IK', ([], {}), '()\n', (275, 277), False, 'import kinematics\n'), ((4215, 4244), 'threading.Thread', 'threading.Thread', ([], {'target': 'move'}), '(target=move)\n', (4231, 4244), False, 'import threading\n'), ((396, 407), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (404, 407), False, 'import sys\n'), ((1460, 1496), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1500)', '(500)'], {}), '(1, 1500, 500)\n', (1482, 1496), True, 'import HiwonderSDK.Board as Board\n'), ((1501, 1539), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', 'servo2', '(500)'], {}), '(2, servo2, 500)\n', (1523, 1539), True, 'import HiwonderSDK.Board as Board\n'), ((4529, 4588), 'cv2.resize', 'cv2.resize', (['img_copy', 'size'], {'interpolation': 'cv2.INTER_NEAREST'}), '(img_copy, size, interpolation=cv2.INTER_NEAREST)\n', (4539, 4588), False, 'import cv2\n'), ((4604, 4645), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['frame_resize', '(3, 3)', '(3)'], {}), '(frame_resize, (3, 3), 3)\n', (4620, 4645), False, 'import cv2\n'), ((4668, 4709), 'cv2.cvtColor', 'cv2.cvtColor', (['frame_gb', 'cv2.COLOR_BGR2LAB'], {}), '(frame_gb, cv2.COLOR_BGR2LAB)\n', (4680, 4709), False, 'import cv2\n'), ((7462, 7581), 'cv2.putText', 'cv2.putText', (['img', "('Color: ' + detect_color)", '(10, img.shape[0] - 10)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.65)', 'draw_color', '(2)'], {}), "(img, 'Color: ' + detect_color, (10, img.shape[0] - 10), cv2.\n FONT_HERSHEY_SIMPLEX, 0.65, draw_color, 2)\n", (7473, 7581), False, 'import cv2\n'), ((7763, 7803), 'numpy.load', 'np.load', (["(calibration_param_path + '.npz')"], {}), "(calibration_param_path + '.npz')\n", (7770, 7803), True, 'import numpy as np\n'), ((7917, 7984), 'cv2.getOptimalNewCameraMatrix', 'cv2.getOptimalNewCameraMatrix', (['mtx', 'dist', '(640, 480)', '(0)', '(640, 480)'], {}), '(mtx, dist, (640, 480), 0, (640, 480))\n', (7946, 7984), False, 'import cv2\n'), ((8002, 8075), 'cv2.initUndistortRectifyMap', 'cv2.initUndistortRectifyMap', (['mtx', 'dist', 'None', 'newcameramtx', '(640, 480)', '(5)'], {}), '(mtx, dist, None, newcameramtx, (640, 480), 5)\n', (8029, 8075), False, 'import cv2\n'), ((8156, 8169), 'HiwonderSDK.Sonar.Sonar', 'Sonar.Sonar', ([], {}), '()\n', (8167, 8169), True, 'import HiwonderSDK.Sonar as Sonar\n'), ((8209, 8224), 'Camera.Camera', 'Camera.Camera', ([], {}), '()\n', (8222, 8224), False, 'import Camera\n'), ((8677, 8700), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (8698, 8700), False, 'import cv2\n'), ((864, 882), 'cv2.contourArea', 'cv2.contourArea', (['c'], {}), '(c)\n', (879, 882), False, 'import cv2\n'), ((4179, 4195), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (4189, 4195), False, 'import time\n'), ((5932, 5974), 'cv2.minEnclosingCircle', 'cv2.minEnclosingCircle', (['areaMaxContour_max'], {}), '(areaMaxContour_max)\n', (5954, 5974), False, 'import cv2\n'), ((6241, 6314), 'cv2.circle', 'cv2.circle', (['img', '(centerX, centerY)', 'radius', 'range_rgb[color_area_max]', '(2)'], {}), '(img, (centerX, centerY), radius, range_rgb[color_area_max], 2)\n', (6251, 6314), False, 'import cv2\n'), ((8378, 8424), 'cv2.remap', 'cv2.remap', (['frame', 'mapx', 'mapy', 'cv2.INTER_LINEAR'], {}), '(frame, mapx, mapy, cv2.INTER_LINEAR)\n', (8387, 8424), False, 'import cv2\n'), ((8493, 8519), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'Frame'], {}), "('Frame', Frame)\n", (8503, 8519), False, 'import cv2\n'), ((8538, 8552), 'cv2.waitKey', 'cv2.waitKey', 
(['(1)'], {}), '(1)\n', (8549, 8552), False, 'import cv2\n'), ((8627, 8643), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (8637, 8643), False, 'import time\n'), ((4136, 4152), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (4146, 4152), False, 'import time\n'), ((4950, 5010), 'cv2.inRange', 'cv2.inRange', (['frame_lab', 'color_range[i][0]', 'color_range[i][1]'], {}), '(frame_lab, color_range[i][0], color_range[i][1])\n', (4961, 5010), False, 'import cv2\n'), ((6044, 6083), 'HiwonderSDK.Misc.map', 'Misc.map', (['centerX', '(0)', 'size[0]', '(0)', 'img_w'], {}), '(centerX, 0, size[0], 0, img_w)\n', (6052, 6083), True, 'import HiwonderSDK.Misc as Misc\n'), ((6111, 6150), 'HiwonderSDK.Misc.map', 'Misc.map', (['centerY', '(0)', 'size[1]', '(0)', 'img_h'], {}), '(centerY, 0, size[1], 0, img_h)\n', (6119, 6150), True, 'import HiwonderSDK.Misc as Misc\n'), ((6177, 6215), 'HiwonderSDK.Misc.map', 'Misc.map', (['radius', '(0)', 'size[0]', '(0)', 'img_w'], {}), '(radius, 0, size[0], 0, img_w)\n', (6185, 6215), True, 'import HiwonderSDK.Misc as Misc\n'), ((2659, 2695), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1800)', '(200)'], {}), '(1, 1800, 200)\n', (2681, 2695), True, 'import HiwonderSDK.Board as Board\n'), ((2716, 2731), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2726, 2731), False, 'import time\n'), ((2752, 2788), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1200)', '(200)'], {}), '(1, 1200, 200)\n', (2774, 2788), True, 'import HiwonderSDK.Board as Board\n'), ((2809, 2824), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2819, 2824), False, 'import time\n'), ((2845, 2881), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1800)', '(200)'], {}), '(1, 1800, 200)\n', (2867, 2881), True, 'import HiwonderSDK.Board as Board\n'), ((2902, 2917), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2912, 2917), False, 'import time\n'), ((2938, 2974), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1200)', '(200)'], {}), '(1, 1200, 200)\n', (2960, 2974), True, 'import HiwonderSDK.Board as Board\n'), ((2995, 3010), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3005, 3010), False, 'import time\n'), ((3031, 3067), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(1)', '(1500)', '(100)'], {}), '(1, 1500, 100)\n', (3053, 3067), True, 'import HiwonderSDK.Board as Board\n'), ((3088, 3103), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3098, 3103), False, 'import time\n'), ((3238, 3251), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3248, 3251), False, 'import time\n'), ((5110, 5159), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_RECT', '(3, 3)'], {}), '(cv2.MORPH_RECT, (3, 3))\n', (5135, 5159), False, 'import cv2\n'), ((5219, 5268), 'cv2.getStructuringElement', 'cv2.getStructuringElement', (['cv2.MORPH_RECT', '(3, 3)'], {}), '(cv2.MORPH_RECT, (3, 3))\n', (5244, 5268), False, 'import cv2\n'), ((5328, 5350), 'cv2.imshow', 'cv2.imshow', (['i', 'dilated'], {}), '(i, dilated)\n', (5338, 5350), False, 'import cv2\n'), ((5378, 5445), 'cv2.findContours', 'cv2.findContours', (['dilated', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_NONE'], {}), '(dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n', (5394, 5445), False, 'import cv2\n'), ((3344, 3380), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', '(1800)', '(200)'], {}), '(2, 1800, 200)\n', (3366, 3380), True, 
'import HiwonderSDK.Board as Board\n'), ((3401, 3416), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3411, 3416), False, 'import time\n'), ((3437, 3473), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', '(1200)', '(200)'], {}), '(2, 1200, 200)\n', (3459, 3473), True, 'import HiwonderSDK.Board as Board\n'), ((3494, 3509), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3504, 3509), False, 'import time\n'), ((3530, 3566), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', '(1800)', '(200)'], {}), '(2, 1800, 200)\n', (3552, 3566), True, 'import HiwonderSDK.Board as Board\n'), ((3587, 3602), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3597, 3602), False, 'import time\n'), ((3623, 3659), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', '(1200)', '(200)'], {}), '(2, 1200, 200)\n', (3645, 3659), True, 'import HiwonderSDK.Board as Board\n'), ((3680, 3695), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3690, 3695), False, 'import time\n'), ((3716, 3752), 'HiwonderSDK.Board.setPWMServoPulse', 'Board.setPWMServoPulse', (['(2)', '(1500)', '(100)'], {}), '(2, 1500, 100)\n', (3738, 3752), True, 'import HiwonderSDK.Board as Board\n'), ((3773, 3788), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3783, 3788), False, 'import time\n'), ((3923, 3936), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3933, 3936), False, 'import time\n'), ((3979, 3995), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (3989, 3995), False, 'import time\n'), ((6790, 6810), 'numpy.array', 'np.array', (['color_list'], {}), '(color_list)\n', (6798, 6810), True, 'import numpy as np\n')] |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import ast
import multiprocessing
import numpy as np
import os
from functools import partial
import contextlib
import time
import paddle.fluid.profiler as profiler
import paddle
import paddle.fluid as fluid
import forward_model
import reader
import sys
from config import *
from forward_model import wrap_encoder as encoder
from forward_model import wrap_decoder as decoder
from forward_model import forward_fast_decode
from dense_model import dense_fast_decode
from relative_model import relative_fast_decode
from forward_model import forward_position_encoding_init
from reader import *
def parse_args():
"""
parse_args
"""
parser = argparse.ArgumentParser("Training for Transformer.")
parser.add_argument(
"--val_file_pattern",
type=str,
required=True,
help="The pattern to match test data files.")
parser.add_argument(
"--batch_size",
type=int,
default=50,
help="The number of examples in one run for sequence generation.")
parser.add_argument(
"--pool_size",
type=int,
default=10000,
help="The buffer size to pool data.")
parser.add_argument(
"--special_token",
type=str,
default=["<s>", "<e>", "<unk>"],
nargs=3,
help="The <bos>, <eos> and <unk> tokens in the dictionary.")
parser.add_argument(
"--token_delimiter",
type=lambda x: str(x.encode().decode("unicode-escape")),
default=" ",
help="The delimiter used to split tokens in source or target sentences. "
"For EN-DE BPE data we provided, use spaces as token delimiter. ")
parser.add_argument(
"--use_mem_opt",
type=ast.literal_eval,
default=True,
help="The flag indicating whether to use memory optimization.")
parser.add_argument(
"--use_py_reader",
type=ast.literal_eval,
default=False,
help="The flag indicating whether to use py_reader.")
parser.add_argument(
"--use_parallel_exe",
type=ast.literal_eval,
default=False,
help="The flag indicating whether to use ParallelExecutor.")
parser.add_argument(
"--use_candidate",
type=ast.literal_eval,
default=False,
help="The flag indicating whether to use candidates.")
parser.add_argument(
"--common_ids",
type=str,
default="",
help="The file path of common ids.")
parser.add_argument(
'opts',
help='See config.py for all options',
default=None,
nargs=argparse.REMAINDER)
parser.add_argument(
"--use_delay_load",
type=ast.literal_eval,
default=True,
help=
"The flag indicating whether to load all data into memories at once.")
parser.add_argument(
"--vocab_size",
type=str,
required=True,
help="Size of Vocab.")
parser.add_argument(
"--infer_batch_size",
type=int,
help="Infer batch_size")
parser.add_argument(
"--decode_alpha",
type=float,
help="decode_alpha")
args = parser.parse_args()
# Append args related to dict
#src_dict = reader.DataReader.load_dict(args.src_vocab_fpath)
#trg_dict = reader.DataReader.load_dict(args.trg_vocab_fpath)
#dict_args = [
# "src_vocab_size", str(len(src_dict)), "trg_vocab_size",
# str(len(trg_dict)), "bos_idx", str(src_dict[args.special_token[0]]),
# "eos_idx", str(src_dict[args.special_token[1]]), "unk_idx",
# str(src_dict[args.special_token[2]])
#]
voc_size = args.vocab_size
dict_args = [
"src_vocab_size", voc_size,
"trg_vocab_size", voc_size,
"bos_idx", str(0),
"eos_idx", str(1),
"unk_idx", str(int(voc_size) - 1)
]
merge_cfg_from_list(args.opts + dict_args,
[InferTaskConfig, ModelHyperParams])
return args
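# Example invocation (illustrative only: the script name, paths and numeric
# values are placeholders, and the trailing key/value pairs are the extra
# `opts` forwarded to merge_cfg_from_list for InferTaskConfig/ModelHyperParams):
#
#   python infer.py \
#       --val_file_pattern data/test.src \
#       --vocab_size 32000 \
#       --infer_batch_size 16 \
#       --decode_alpha 0.6 \
#       beam_size 4 max_out_len 256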
def post_process_seq(seq,
bos_idx=ModelHyperParams.bos_idx,
eos_idx=ModelHyperParams.eos_idx,
output_bos=InferTaskConfig.output_bos,
output_eos=InferTaskConfig.output_eos):
"""
Post-process the beam-search decoded sequence. Truncate from the first
<eos> and remove the <bos> and <eos> tokens currently.
"""
eos_pos = len(seq) - 1
for i, idx in enumerate(seq):
if idx == eos_idx:
eos_pos = i
break
seq = [
idx for idx in seq[:eos_pos + 1]
if (output_bos or idx != bos_idx) and (output_eos or idx != eos_idx)
]
return seq
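# Hand-traced example of the truncation above (not program output): with
# bos_idx=0, eos_idx=1, output_bos=False and output_eos=False,
#   post_process_seq([0, 5, 7, 1, 9])
# finds the first <eos> at position 3, keeps [0, 5, 7, 1], then filters out the
# <bos>/<eos> ids, returning [5, 7].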
def prepare_batch_input(insts, data_input_names, src_pad_idx, bos_idx, n_head,
d_model):
"""
Put all padded data needed by beam search decoder into a dict.
"""
src_word, src_pos, src_slf_attn_bias, src_max_len = pad_batch_data(
[inst[0] for inst in insts], src_pad_idx, n_head, is_target=False)
source_length = np.asarray([src_max_len], dtype="int64")
src_word = src_word.reshape(-1, src_max_len, 1)
src_pos = src_pos.reshape(-1, src_max_len, 1)
data_input_dict = dict(
zip(data_input_names, [
src_word, src_pos, src_slf_attn_bias, source_length
]))
return data_input_dict
def prepare_feed_dict_list(data_generator, count):
"""
Prepare the list of feed dict for multi-devices.
"""
feed_dict_list = []
if data_generator is not None: # use_py_reader == False
data_input_names = encoder_data_input_fields + fast_decoder_data_input_fields
data = next(data_generator)
for idx, data_buffer in enumerate(data):
data_input_dict = prepare_batch_input(
data_buffer, data_input_names, ModelHyperParams.bos_idx,
ModelHyperParams.bos_idx, ModelHyperParams.n_head,
ModelHyperParams.d_model)
feed_dict_list.append(data_input_dict)
return feed_dict_list if len(feed_dict_list) == count else None
def prepare_dense_feed_dict_list(data_generator, count):
"""
Prepare the list of feed dict for multi-devices.
"""
feed_dict_list = []
if data_generator is not None: # use_py_reader == False
data_input_names = dense_encoder_data_input_fields + fast_decoder_data_input_fields
data = next(data_generator)
for idx, data_buffer in enumerate(data):
data_input_dict = prepare_batch_input(
data_buffer, data_input_names, DenseModelHyperParams.bos_idx,
DenseModelHyperParams.bos_idx, DenseModelHyperParams.n_head,
DenseModelHyperParams.d_model)
feed_dict_list.append(data_input_dict)
return feed_dict_list if len(feed_dict_list) == count else None
def prepare_infer_feed_dict_list(data_generator, count):
feed_dict_list = []
if data_generator is not None: # use_py_reader == False
data_input_names = encoder_data_input_fields + fast_decoder_data_input_fields
dense_data_input_names = dense_encoder_data_input_fields + fast_decoder_data_input_fields
data = next(data_generator)
for idx, data_buffer in enumerate(data):
dense_data_input_dict = prepare_batch_input(
data_buffer, dense_data_input_names, DenseModelHyperParams.bos_idx,
DenseModelHyperParams.bos_idx, DenseModelHyperParams.n_head,
DenseModelHyperParams.d_model)
data_input_dict = prepare_batch_input(data_buffer, data_input_names,
ModelHyperParams.bos_idx, ModelHyperParams.bos_idx,
ModelHyperParams.n_head, ModelHyperParams.d_model)
for key in dense_data_input_dict:
if key not in data_input_dict:
data_input_dict[key] = dense_data_input_dict[key]
feed_dict_list.append(data_input_dict)
return feed_dict_list if len(feed_dict_list) == count else None
def get_trans_res(batch_size, out_list, final_list):
"""
Get trans
"""
    for index in range(batch_size):
seq = out_list[index][0] #top1 seq
if 1 not in seq:
res = seq[1:-1]
else:
res = seq[1:seq.index(1)]
res = map(str, res)
final_list.append(" ".join(res))
def fast_infer(args):
"""
Inference by beam search decoder based solely on Fluid operators.
"""
test_prog = fluid.Program()
startup_prog = fluid.Program()
#with fluid.program_guard(test_prog, startup_prog):
with fluid.unique_name.guard("new_forward"):
out_ids1, out_scores1 = forward_fast_decode(
ModelHyperParams.src_vocab_size,
ModelHyperParams.trg_vocab_size,
ModelHyperParams.max_length + 50,
ModelHyperParams.n_layer,
ModelHyperParams.n_head,
ModelHyperParams.d_key,
ModelHyperParams.d_value,
ModelHyperParams.d_model,
ModelHyperParams.d_inner_hid,
ModelHyperParams.prepostprocess_dropout,
ModelHyperParams.attention_dropout,
ModelHyperParams.relu_dropout,
ModelHyperParams.preprocess_cmd,
ModelHyperParams.postprocess_cmd,
ModelHyperParams.weight_sharing,
ModelHyperParams.embedding_sharing,
InferTaskConfig.beam_size,
args.infer_batch_size,
InferTaskConfig.max_out_len,
args.decode_alpha,
ModelHyperParams.eos_idx,
params_type="new"
)
with fluid.unique_name.guard("new_relative_position"):
out_ids2, out_scores2 = relative_fast_decode(
ModelHyperParams.src_vocab_size,
ModelHyperParams.trg_vocab_size,
ModelHyperParams.max_length + 50,
ModelHyperParams.n_layer,
ModelHyperParams.n_head,
ModelHyperParams.d_key,
ModelHyperParams.d_value,
ModelHyperParams.d_model,
ModelHyperParams.d_inner_hid,
ModelHyperParams.prepostprocess_dropout,
ModelHyperParams.attention_dropout,
ModelHyperParams.relu_dropout,
ModelHyperParams.preprocess_cmd,
ModelHyperParams.postprocess_cmd,
ModelHyperParams.weight_sharing,
ModelHyperParams.embedding_sharing,
InferTaskConfig.beam_size,
args.infer_batch_size,
InferTaskConfig.max_out_len,
args.decode_alpha,
ModelHyperParams.eos_idx,
params_type="new"
)
DenseModelHyperParams.src_vocab_size = ModelHyperParams.src_vocab_size
DenseModelHyperParams.trg_vocab_size = ModelHyperParams.trg_vocab_size
DenseModelHyperParams.weight_sharing = ModelHyperParams.weight_sharing
DenseModelHyperParams.embedding_sharing = ModelHyperParams.embedding_sharing
with fluid.unique_name.guard("new_dense"):
out_ids3, out_scores3 = dense_fast_decode(
DenseModelHyperParams.src_vocab_size,
DenseModelHyperParams.trg_vocab_size,
DenseModelHyperParams.max_length + 50,
DenseModelHyperParams.n_layer,
DenseModelHyperParams.enc_n_layer,
DenseModelHyperParams.n_head,
DenseModelHyperParams.d_key,
DenseModelHyperParams.d_value,
DenseModelHyperParams.d_model,
DenseModelHyperParams.d_inner_hid,
DenseModelHyperParams.prepostprocess_dropout,
DenseModelHyperParams.attention_dropout,
DenseModelHyperParams.relu_dropout,
DenseModelHyperParams.preprocess_cmd,
DenseModelHyperParams.postprocess_cmd,
DenseModelHyperParams.weight_sharing,
DenseModelHyperParams.embedding_sharing,
InferTaskConfig.beam_size,
args.infer_batch_size,
InferTaskConfig.max_out_len,
args.decode_alpha,
ModelHyperParams.eos_idx,
params_type="new"
)
test_prog = fluid.default_main_program().clone(for_test=True)
# This is used here to set dropout to the test mode.
if InferTaskConfig.use_gpu:
place = fluid.CUDAPlace(0)
dev_count = fluid.core.get_cuda_device_count()
else:
place = fluid.CPUPlace()
dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count()))
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
fluid.io.load_params(
exe,
InferTaskConfig.model_path,
main_program=test_prog)
if args.use_mem_opt:
fluid.memory_optimize(test_prog)
exec_strategy = fluid.ExecutionStrategy()
# For faster executor
exec_strategy.use_experimental_executor = True
exec_strategy.num_threads = 1
build_strategy = fluid.BuildStrategy()
# data reader settings for inference
args.use_token_batch = False
#args.sort_type = reader.SortType.NONE
args.shuffle = False
args.shuffle_batch = False
dev_count = 1
lines_cnt = len(open(args.val_file_pattern, 'r').readlines())
data_reader = line_reader(args.val_file_pattern, args.infer_batch_size, dev_count,
token_delimiter=args.token_delimiter,
max_len=ModelHyperParams.max_length,
parse_line=parse_src_line)
test_data = prepare_data_generator(
args,
is_test=True,
count=dev_count,
pyreader=None,
batch_size=args.infer_batch_size, data_reader=data_reader)
data_generator = test_data()
iter_num = 0
if not os.path.exists("trans"):
os.mkdir("trans")
model_name = InferTaskConfig.model_path.split("/")[-1]
forward_res = open(os.path.join("trans", "forward_%s" % model_name), 'w')
relative_res = open(os.path.join("trans", "relative_%s" % model_name), 'w')
dense_res = open(os.path.join("trans", "dense_%s" % model_name), 'w')
forward_list = []
relative_list = []
dense_list = []
with profile_context(False):
while True:
try:
feed_dict_list = prepare_infer_feed_dict_list(data_generator, dev_count)
forward_seq_ids, relative_seq_ids, dense_seq_ids = exe.run(
program=test_prog,
fetch_list=[out_ids1.name, out_ids2.name, out_ids3.name],
feed=feed_dict_list[0]
if feed_dict_list is not None else None,
return_numpy=False,
use_program_cache=False)
fseq_ids = np.asarray(forward_seq_ids).tolist()
rseq_ids = np.asarray(relative_seq_ids).tolist()
dseq_ids = np.asarray(dense_seq_ids).tolist()
get_trans_res(args.infer_batch_size, fseq_ids, forward_list)
get_trans_res(args.infer_batch_size, rseq_ids, relative_list)
get_trans_res(args.infer_batch_size, dseq_ids, dense_list)
except (StopIteration, fluid.core.EOFException):
break
forward_list = forward_list[:lines_cnt]
relative_list = relative_list[:lines_cnt]
dense_list = dense_list[:lines_cnt]
forward_res.writelines("\n".join(forward_list))
forward_res.flush()
forward_res.close()
relative_res.writelines("\n".join(relative_list))
relative_res.flush()
relative_res.close()
dense_res.writelines("\n".join(dense_list))
dense_res.flush()
dense_res.close()
@contextlib.contextmanager
def profile_context(profile=True):
"""
profile_context
"""
if profile:
with profiler.profiler('All', 'total', './profile_dir/profile_file_tmp'):
yield
else:
yield
if __name__ == "__main__":
args = parse_args()
fast_infer(args)
| [
"multiprocessing.cpu_count",
"paddle.fluid.Executor",
"paddle.fluid.ExecutionStrategy",
"os.path.exists",
"forward_model.forward_fast_decode",
"argparse.ArgumentParser",
"paddle.fluid.default_startup_program",
"numpy.asarray",
"paddle.fluid.default_main_program",
"os.mkdir",
"paddle.fluid.Program",
"paddle.fluid.BuildStrategy",
"paddle.fluid.CPUPlace",
"paddle.fluid.core.get_cuda_device_count",
"paddle.fluid.memory_optimize",
"paddle.fluid.io.load_params",
"relative_model.relative_fast_decode",
"dense_model.dense_fast_decode",
"os.path.join",
"paddle.fluid.unique_name.guard",
"paddle.fluid.CUDAPlace",
"paddle.fluid.profiler.profiler"
] | [((1288, 1340), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Training for Transformer."""'], {}), "('Training for Transformer.')\n", (1311, 1340), False, 'import argparse\n'), ((5661, 5701), 'numpy.asarray', 'np.asarray', (['[src_max_len]'], {'dtype': '"""int64"""'}), "([src_max_len], dtype='int64')\n", (5671, 5701), True, 'import numpy as np\n'), ((9138, 9153), 'paddle.fluid.Program', 'fluid.Program', ([], {}), '()\n', (9151, 9153), True, 'import paddle.fluid as fluid\n'), ((9173, 9188), 'paddle.fluid.Program', 'fluid.Program', ([], {}), '()\n', (9186, 9188), True, 'import paddle.fluid as fluid\n'), ((13142, 13163), 'paddle.fluid.Executor', 'fluid.Executor', (['place'], {}), '(place)\n', (13156, 13163), True, 'import paddle.fluid as fluid\n'), ((13214, 13291), 'paddle.fluid.io.load_params', 'fluid.io.load_params', (['exe', 'InferTaskConfig.model_path'], {'main_program': 'test_prog'}), '(exe, InferTaskConfig.model_path, main_program=test_prog)\n', (13234, 13291), True, 'import paddle.fluid as fluid\n'), ((13406, 13431), 'paddle.fluid.ExecutionStrategy', 'fluid.ExecutionStrategy', ([], {}), '()\n', (13429, 13431), True, 'import paddle.fluid as fluid\n'), ((13564, 13585), 'paddle.fluid.BuildStrategy', 'fluid.BuildStrategy', ([], {}), '()\n', (13583, 13585), True, 'import paddle.fluid as fluid\n'), ((9255, 9293), 'paddle.fluid.unique_name.guard', 'fluid.unique_name.guard', (['"""new_forward"""'], {}), "('new_forward')\n", (9278, 9293), True, 'import paddle.fluid as fluid\n'), ((9327, 10034), 'forward_model.forward_fast_decode', 'forward_fast_decode', (['ModelHyperParams.src_vocab_size', 'ModelHyperParams.trg_vocab_size', '(ModelHyperParams.max_length + 50)', 'ModelHyperParams.n_layer', 'ModelHyperParams.n_head', 'ModelHyperParams.d_key', 'ModelHyperParams.d_value', 'ModelHyperParams.d_model', 'ModelHyperParams.d_inner_hid', 'ModelHyperParams.prepostprocess_dropout', 'ModelHyperParams.attention_dropout', 'ModelHyperParams.relu_dropout', 'ModelHyperParams.preprocess_cmd', 'ModelHyperParams.postprocess_cmd', 'ModelHyperParams.weight_sharing', 'ModelHyperParams.embedding_sharing', 'InferTaskConfig.beam_size', 'args.infer_batch_size', 'InferTaskConfig.max_out_len', 'args.decode_alpha', 'ModelHyperParams.eos_idx'], {'params_type': '"""new"""'}), "(ModelHyperParams.src_vocab_size, ModelHyperParams.\n trg_vocab_size, ModelHyperParams.max_length + 50, ModelHyperParams.\n n_layer, ModelHyperParams.n_head, ModelHyperParams.d_key,\n ModelHyperParams.d_value, ModelHyperParams.d_model, ModelHyperParams.\n d_inner_hid, ModelHyperParams.prepostprocess_dropout, ModelHyperParams.\n attention_dropout, ModelHyperParams.relu_dropout, ModelHyperParams.\n preprocess_cmd, ModelHyperParams.postprocess_cmd, ModelHyperParams.\n weight_sharing, ModelHyperParams.embedding_sharing, InferTaskConfig.\n beam_size, args.infer_batch_size, InferTaskConfig.max_out_len, args.\n decode_alpha, ModelHyperParams.eos_idx, params_type='new')\n", (9346, 10034), False, 'from forward_model import forward_fast_decode\n'), ((10283, 10331), 'paddle.fluid.unique_name.guard', 'fluid.unique_name.guard', (['"""new_relative_position"""'], {}), "('new_relative_position')\n", (10306, 10331), True, 'import paddle.fluid as fluid\n'), ((10365, 11073), 'relative_model.relative_fast_decode', 'relative_fast_decode', (['ModelHyperParams.src_vocab_size', 'ModelHyperParams.trg_vocab_size', '(ModelHyperParams.max_length + 50)', 'ModelHyperParams.n_layer', 'ModelHyperParams.n_head', 'ModelHyperParams.d_key', 'ModelHyperParams.d_value', 
'ModelHyperParams.d_model', 'ModelHyperParams.d_inner_hid', 'ModelHyperParams.prepostprocess_dropout', 'ModelHyperParams.attention_dropout', 'ModelHyperParams.relu_dropout', 'ModelHyperParams.preprocess_cmd', 'ModelHyperParams.postprocess_cmd', 'ModelHyperParams.weight_sharing', 'ModelHyperParams.embedding_sharing', 'InferTaskConfig.beam_size', 'args.infer_batch_size', 'InferTaskConfig.max_out_len', 'args.decode_alpha', 'ModelHyperParams.eos_idx'], {'params_type': '"""new"""'}), "(ModelHyperParams.src_vocab_size, ModelHyperParams.\n trg_vocab_size, ModelHyperParams.max_length + 50, ModelHyperParams.\n n_layer, ModelHyperParams.n_head, ModelHyperParams.d_key,\n ModelHyperParams.d_value, ModelHyperParams.d_model, ModelHyperParams.\n d_inner_hid, ModelHyperParams.prepostprocess_dropout, ModelHyperParams.\n attention_dropout, ModelHyperParams.relu_dropout, ModelHyperParams.\n preprocess_cmd, ModelHyperParams.postprocess_cmd, ModelHyperParams.\n weight_sharing, ModelHyperParams.embedding_sharing, InferTaskConfig.\n beam_size, args.infer_batch_size, InferTaskConfig.max_out_len, args.\n decode_alpha, ModelHyperParams.eos_idx, params_type='new')\n", (10385, 11073), False, 'from relative_model import relative_fast_decode\n'), ((11625, 11661), 'paddle.fluid.unique_name.guard', 'fluid.unique_name.guard', (['"""new_dense"""'], {}), "('new_dense')\n", (11648, 11661), True, 'import paddle.fluid as fluid\n'), ((11695, 12523), 'dense_model.dense_fast_decode', 'dense_fast_decode', (['DenseModelHyperParams.src_vocab_size', 'DenseModelHyperParams.trg_vocab_size', '(DenseModelHyperParams.max_length + 50)', 'DenseModelHyperParams.n_layer', 'DenseModelHyperParams.enc_n_layer', 'DenseModelHyperParams.n_head', 'DenseModelHyperParams.d_key', 'DenseModelHyperParams.d_value', 'DenseModelHyperParams.d_model', 'DenseModelHyperParams.d_inner_hid', 'DenseModelHyperParams.prepostprocess_dropout', 'DenseModelHyperParams.attention_dropout', 'DenseModelHyperParams.relu_dropout', 'DenseModelHyperParams.preprocess_cmd', 'DenseModelHyperParams.postprocess_cmd', 'DenseModelHyperParams.weight_sharing', 'DenseModelHyperParams.embedding_sharing', 'InferTaskConfig.beam_size', 'args.infer_batch_size', 'InferTaskConfig.max_out_len', 'args.decode_alpha', 'ModelHyperParams.eos_idx'], {'params_type': '"""new"""'}), "(DenseModelHyperParams.src_vocab_size,\n DenseModelHyperParams.trg_vocab_size, DenseModelHyperParams.max_length +\n 50, DenseModelHyperParams.n_layer, DenseModelHyperParams.enc_n_layer,\n DenseModelHyperParams.n_head, DenseModelHyperParams.d_key,\n DenseModelHyperParams.d_value, DenseModelHyperParams.d_model,\n DenseModelHyperParams.d_inner_hid, DenseModelHyperParams.\n prepostprocess_dropout, DenseModelHyperParams.attention_dropout,\n DenseModelHyperParams.relu_dropout, DenseModelHyperParams.\n preprocess_cmd, DenseModelHyperParams.postprocess_cmd,\n DenseModelHyperParams.weight_sharing, DenseModelHyperParams.\n embedding_sharing, InferTaskConfig.beam_size, args.infer_batch_size,\n InferTaskConfig.max_out_len, args.decode_alpha, ModelHyperParams.\n eos_idx, params_type='new')\n", (11712, 12523), False, 'from dense_model import dense_fast_decode\n'), ((12935, 12953), 'paddle.fluid.CUDAPlace', 'fluid.CUDAPlace', (['(0)'], {}), '(0)\n', (12950, 12953), True, 'import paddle.fluid as fluid\n'), ((12974, 13008), 'paddle.fluid.core.get_cuda_device_count', 'fluid.core.get_cuda_device_count', ([], {}), '()\n', (13006, 13008), True, 'import paddle.fluid as fluid\n'), ((13035, 13051), 'paddle.fluid.CPUPlace', 'fluid.CPUPlace', ([], 
{}), '()\n', (13049, 13051), True, 'import paddle.fluid as fluid\n'), ((13176, 13207), 'paddle.fluid.default_startup_program', 'fluid.default_startup_program', ([], {}), '()\n', (13205, 13207), True, 'import paddle.fluid as fluid\n'), ((13352, 13384), 'paddle.fluid.memory_optimize', 'fluid.memory_optimize', (['test_prog'], {}), '(test_prog)\n', (13373, 13384), True, 'import paddle.fluid as fluid\n'), ((14361, 14384), 'os.path.exists', 'os.path.exists', (['"""trans"""'], {}), "('trans')\n", (14375, 14384), False, 'import os\n'), ((14394, 14411), 'os.mkdir', 'os.mkdir', (['"""trans"""'], {}), "('trans')\n", (14402, 14411), False, 'import os\n'), ((14499, 14547), 'os.path.join', 'os.path.join', (['"""trans"""', "('forward_%s' % model_name)"], {}), "('trans', 'forward_%s' % model_name)\n", (14511, 14547), False, 'import os\n'), ((14578, 14627), 'os.path.join', 'os.path.join', (['"""trans"""', "('relative_%s' % model_name)"], {}), "('trans', 'relative_%s' % model_name)\n", (14590, 14627), False, 'import os\n'), ((14655, 14701), 'os.path.join', 'os.path.join', (['"""trans"""', "('dense_%s' % model_name)"], {}), "('trans', 'dense_%s' % model_name)\n", (14667, 14701), False, 'import os\n'), ((12779, 12807), 'paddle.fluid.default_main_program', 'fluid.default_main_program', ([], {}), '()\n', (12805, 12807), True, 'import paddle.fluid as fluid\n'), ((16466, 16533), 'paddle.fluid.profiler.profiler', 'profiler.profiler', (['"""All"""', '"""total"""', '"""./profile_dir/profile_file_tmp"""'], {}), "('All', 'total', './profile_dir/profile_file_tmp')\n", (16483, 16533), True, 'import paddle.fluid.profiler as profiler\n'), ((13102, 13129), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (13127, 13129), False, 'import multiprocessing\n'), ((15344, 15371), 'numpy.asarray', 'np.asarray', (['forward_seq_ids'], {}), '(forward_seq_ids)\n', (15354, 15371), True, 'import numpy as np\n'), ((15408, 15436), 'numpy.asarray', 'np.asarray', (['relative_seq_ids'], {}), '(relative_seq_ids)\n', (15418, 15436), True, 'import numpy as np\n'), ((15473, 15498), 'numpy.asarray', 'np.asarray', (['dense_seq_ids'], {}), '(dense_seq_ids)\n', (15483, 15498), True, 'import numpy as np\n')] |
from yats.sorted_set import SortedSet
from yats.tweet import Tweet
class TweetSet(SortedSet):
def __init__(self, data=[]):
if not data or isinstance(data, list):
super().__init__(data)
else:
super().__init__([])
tweets_raw = data["tweets"]
users_raw = data["users"]
for id, content in tweets_raw.items():
tweet = Tweet(content, users_raw)
self.add(tweet)
def __repr__(self):
return f"<yats.TweetSet:{super().__len__()}_tweets>"
| [
"yats.tweet.Tweet"
] | [((413, 438), 'yats.tweet.Tweet', 'Tweet', (['content', 'users_raw'], {}), '(content, users_raw)\n', (418, 438), False, 'from yats.tweet import Tweet\n')] |
import logging
from functools import lru_cache
from typing import Union
from magnus import defaults, exceptions, utils
logger = logging.getLogger(defaults.NAME)
class BaseSecrets:
"""
A base class for Secrets Handler.
All implementations should extend this class.
Note: As a general guideline, do not extract anything from the config to set class level attributes.
Integration patterns modify the config after init to change behaviors.
Access config properties using getters/property of the class.
Raises:
NotImplementedError: Base class and not implemented
"""
service_name = ''
def __init__(self, config: dict, **kwargs): # pylint: disable=unused-argument
self.config = config or {}
def get(self, name: str = None, **kwargs) -> Union[str, dict]:
"""
Return the secret by name.
        If no name is given, return all the secrets.
Args:
name (str): The name of the secret to return.
Raises:
NotImplementedError: Base class and hence not implemented.
"""
raise NotImplementedError
class DoNothingSecretManager(BaseSecrets):
"""
Does nothing secret manager
"""
service_name = 'do-nothing'
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
self.secrets = {}
def get(self, name: str = None, **kwargs) -> Union[str, dict]:
"""
If a name is provided, return None else return empty dict.
Args:
name (str): The name of the secret to retrieve
Raises:
Exception: If the secret by the name is not found.
Returns:
[type]: [description]
"""
if name:
return ''
return {}
class DotEnvSecrets(BaseSecrets):
"""
A secret manager which uses .env files for secrets.
We recommend this secrets manager only for local development and should not be used for anything close to
production.
"""
service_name = 'dotenv'
def __init__(self, config, **kwargs):
super().__init__(config, **kwargs)
self.secrets = {}
def get_secrets_location(self):
"""
Return the location of the .env file.
If the user has not over-ridden it, it defaults to .env file in the project root.
Returns:
str: The location of the secrets file
"""
secrets_location = defaults.DOTENV_FILE_LOCATION
if self.config and 'location' in self.config:
secrets_location = self.config['location'] or secrets_location
return secrets_location
def _load_secrets(self):
"""
We assume that a dotenv file is of format,
key=value -> secrets[key]='value'
key1=value1# comment -> secrets[key1]='value1'
key2=value2 # comment. -> secrets[key2]='value2'
        We strip the trailing newline from the secret value.
Raises:
Exception: If the file at secrets_location is not found.
Exception: If the secrets are not formatted correctly.
"""
secrets_location = self.get_secrets_location()
if not utils.does_file_exist(secrets_location):
raise Exception(f'Did not find the secrets file in {secrets_location}')
with open(secrets_location, 'r') as fr:
for secret_line in fr:
secret_line = secret_line.split('#')[0] # To remove any comments the user might have put
data = secret_line.split('=')
if len(data) != 2:
raise Exception('A secret should be of format, secret_name=secret_value[# any comment]')
key, value = data
self.secrets[key] = value.strip('\n')
def get(self, name: str = None, **kwargs) -> Union[str, dict]:
"""
        Get a secret by name from the secrets file.
        If no name is provided, return all secrets as a dict.
        Args:
            name (str): The name of the secret to retrieve
        Raises:
            SecretNotFoundError: If the secret by the name is not found.
        Returns:
            Union[str, dict]: The secret value, or a dict of all secrets.
"""
self._load_secrets()
if not name:
return self.secrets
if name in self.secrets:
return self.secrets[name]
secrets_location = self.get_secrets_location()
raise exceptions.SecretNotFoundError(secret_name=name, secret_setting=secrets_location)
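# A minimal usage sketch for the dotenv manager above; the file location and
# secret names are illustrative only. Given a file ".env" containing:
#
#   aws_key=abc123
#   db_password=s3cret  # inline comments after a value are stripped
#
# the manager behaves like this:
#
#   secrets = DotEnvSecrets(config={'location': '.env'})
#   secrets.get('aws_key')    # -> 'abc123'
#   secrets.get()             # -> {'aws_key': 'abc123', 'db_password': ...}
#   secrets.get('missing')    # raises exceptions.SecretNotFoundError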
| [
"logging.getLogger",
"magnus.utils.does_file_exist",
"magnus.exceptions.SecretNotFoundError"
] | [((130, 162), 'logging.getLogger', 'logging.getLogger', (['defaults.NAME'], {}), '(defaults.NAME)\n', (147, 162), False, 'import logging\n'), ((4443, 4529), 'magnus.exceptions.SecretNotFoundError', 'exceptions.SecretNotFoundError', ([], {'secret_name': 'name', 'secret_setting': 'secrets_location'}), '(secret_name=name, secret_setting=\n secrets_location)\n', (4473, 4529), False, 'from magnus import defaults, exceptions, utils\n'), ((3228, 3267), 'magnus.utils.does_file_exist', 'utils.does_file_exist', (['secrets_location'], {}), '(secrets_location)\n', (3249, 3267), False, 'from magnus import defaults, exceptions, utils\n')] |
from dyndns import log
from dyndns.log import UpdatesDB, DateTime
import _helper
import datetime
import unittest
def clean_log_file(log_file_path):
log_file = open(log_file_path, 'w')
log_file.write('')
log_file.close()
class TestMethodMsg(unittest.TestCase):
def setUp(self):
self.log_file = log.log_file
clean_log_file(self.log_file)
def test_msg(self):
self.assertEqual(log.msg('lol', 'UNCHANGED'), 'UNCHANGED: lol\n')
def test_log_file(self):
        log.msg('lol', 'UNCHANGED')
        with open(log.log_file, 'r') as log_file:
            result = log_file.read()
self.assertIn('UNCHANGED', result)
self.assertIn('lol', result)
class TestClassDateTime(unittest.TestCase):
def setUp(self):
self.dt = DateTime('2018-06-23 07:49:58.694510')
def test_init(self):
self.assertEqual(str(self.dt.datetime), '2018-06-23 07:49:58.694510')
def test_iso8601(self):
self.assertEqual(self.dt.iso8601(), '2018-06-23 07:49:58.694510')
def test_iso8601_short(self):
self.assertEqual(self.dt.iso8601_short(), '2018-06-23 07:49:58')
class TestClassUpdateDB(unittest.TestCase):
def setUp(self):
_helper.remove_updates_db()
def test_init(self):
db = UpdatesDB()
self.assertTrue(db.db_file)
def test_method_log_update(self):
db = UpdatesDB()
db.log_update(True, 'www.example.com', 'a', '172.16.58.3')
db.cursor.execute('SELECT * FROM updates;')
rows = db.cursor.fetchall()
row = rows[0]
dt = DateTime(row[0])
self.assertEqual(dt.datetime.year, datetime.datetime.now().year)
self.assertEqual(row[1], 1)
self.assertEqual(row[2], 'www.example.com')
self.assertEqual(row[3], 'a')
self.assertEqual(row[4], '172.16.58.3')
db.cursor.execute('SELECT fqdn FROM fqdns;')
row = db.cursor.fetchone()
self.assertEqual(row[0], 'www.example.com')
# Add second entry
db.log_update(True, 'www.example.com', 'a', '172.16.58.3')
# fqdn gets entered only one time
db.cursor.execute('SELECT fqdn FROM fqdns;')
rows = db.cursor.fetchall()
self.assertEqual(len(rows), 1)
db.cursor.execute('SELECT * FROM updates;')
rows = db.cursor.fetchall()
self.assertEqual(len(rows), 2)
def test_method_get_fqdns(self):
db = _helper.get_updates_db()
self.assertEqual(db.get_fqdns(),
['a.example.com', 'b.example.com', 'c.example.com'])
def test_method_is_fqdn_stored(self):
db = UpdatesDB()
self.assertFalse(db._is_fqdn_stored('example.com'))
db.log_update(True, 'example.com', 'a', '172.16.58.3')
self.assertTrue(db._is_fqdn_stored('example.com'))
def test_method_get_updates_by_fqdn(self):
db = _helper.get_updates_db()
result = db.get_updates_by_fqdn('a.example.com')
self.assertTrue(result[0]['update_time'])
self.assertEqual(result[0]['fqdn'], 'a.example.com')
self.assertEqual(result[0]['record_type'], 'a')
self.assertEqual(result[0]['ip'], '1.2.3.4')
if __name__ == '__main__':
unittest.main()
| [
"_helper.get_updates_db",
"dyndns.log.UpdatesDB",
"dyndns.log.msg",
"_helper.remove_updates_db",
"datetime.datetime.now",
"unittest.main",
"dyndns.log.DateTime"
] | [((3220, 3235), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3233, 3235), False, 'import unittest\n'), ((510, 537), 'dyndns.log.msg', 'log.msg', (['"""lol"""', '"""UNCHANGED"""'], {}), "('lol', 'UNCHANGED')\n", (517, 537), False, 'from dyndns import log\n'), ((780, 818), 'dyndns.log.DateTime', 'DateTime', (['"""2018-06-23 07:49:58.694510"""'], {}), "('2018-06-23 07:49:58.694510')\n", (788, 818), False, 'from dyndns.log import UpdatesDB, DateTime\n'), ((1210, 1237), '_helper.remove_updates_db', '_helper.remove_updates_db', ([], {}), '()\n', (1235, 1237), False, 'import _helper\n'), ((1277, 1288), 'dyndns.log.UpdatesDB', 'UpdatesDB', ([], {}), '()\n', (1286, 1288), False, 'from dyndns.log import UpdatesDB, DateTime\n'), ((1377, 1388), 'dyndns.log.UpdatesDB', 'UpdatesDB', ([], {}), '()\n', (1386, 1388), False, 'from dyndns.log import UpdatesDB, DateTime\n'), ((1580, 1596), 'dyndns.log.DateTime', 'DateTime', (['row[0]'], {}), '(row[0])\n', (1588, 1596), False, 'from dyndns.log import UpdatesDB, DateTime\n'), ((2430, 2454), '_helper.get_updates_db', '_helper.get_updates_db', ([], {}), '()\n', (2452, 2454), False, 'import _helper\n'), ((2630, 2641), 'dyndns.log.UpdatesDB', 'UpdatesDB', ([], {}), '()\n', (2639, 2641), False, 'from dyndns.log import UpdatesDB, DateTime\n'), ((2885, 2909), '_helper.get_updates_db', '_helper.get_updates_db', ([], {}), '()\n', (2907, 2909), False, 'import _helper\n'), ((423, 450), 'dyndns.log.msg', 'log.msg', (['"""lol"""', '"""UNCHANGED"""'], {}), "('lol', 'UNCHANGED')\n", (430, 450), False, 'from dyndns import log\n'), ((1640, 1663), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1661, 1663), False, 'import datetime\n')] |
#!/usr/bin/env python
"""Test suite for Stream XML Writer module"""
# Copyright (c) 2009-2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from io import BytesIO
from streamxmlwriter import XMLWriter, XMLSyntaxError, tostring
class XMLWriterTestCase(unittest.TestCase):
def assertOutput(self, writer, output):
self.assertEqual(writer.file.getvalue(), output)
class TestXMLWriter(XMLWriterTestCase):
def test_single_element(self):
w = XMLWriter(BytesIO())
w.start("foo")
w.end()
self.assertOutput(w, b"<foo />")
def test_text_data(self):
w = XMLWriter(BytesIO())
w.start("foo")
w.data("bar")
w.end()
self.assertOutput(w, b"<foo>bar</foo>")
def test_single_attribute(self):
w = XMLWriter(BytesIO())
w.start("foo", {"bar": "baz"})
w.end()
self.assertOutput(w, b'<foo bar="baz" />')
def test_sorted_attributes(self):
w = XMLWriter(BytesIO())
w.start("foo", {"bar": "bar", "baz": "baz"})
w.end()
self.assertOutput(w, b'<foo bar="bar" baz="baz" />')
def test_escape_attributes(self):
w = XMLWriter(BytesIO())
w.start("foo", {"bar": '<>&"'})
w.end()
        self.assertOutput(w, b'<foo bar="&lt;&gt;&amp;&quot;" />')
def test_escape_character_data(self):
w = XMLWriter(BytesIO())
w.start("foo")
w.data("<>&")
w.end()
self.assertOutput(w, b"<foo><>&</foo>")
def test_file_encoding(self):
ts = [
({}, b"<foo>\xc3\xa5\xc3\xa4\xc3\xb6\xe2\x98\x83\xe2\x9d\xa4</foo>"),
({"encoding": "us-ascii"}, b"<foo>åäö☃❤</foo>"),
(
{"encoding": "iso-8859-1"},
b"<?xml version='1.0' encoding='iso-8859-1'?>"
b"<foo>\xe5\xe4\xf6☃❤</foo>",
),
(
{"encoding": "utf-8"},
b"<foo>\xc3\xa5\xc3\xa4\xc3\xb6\xe2\x98\x83\xe2\x9d\xa4</foo>",
),
]
for (kwargs, output) in ts:
w = XMLWriter(BytesIO(), **kwargs)
w.start("foo")
w.data(u"\xe5\xe4\xf6\u2603\u2764")
w.end()
self.assertEqual(w.file.getvalue(), output)
def test_close(self):
w = XMLWriter(BytesIO())
w.start("a")
w.start("b")
w.close()
self.assertOutput(w, b"<a><b /></a>")
def test_declaration_late_raises_syntaxerror(self):
w = XMLWriter(BytesIO())
w.start("a")
self.assertRaises(XMLSyntaxError, w.declaration)
def test_ignore_double_declaration(self):
w = XMLWriter(BytesIO())
w.declaration()
w.declaration()
w.close()
self.assertOutput(w, b"<?xml version='1.0' encoding='utf-8'?>")
def test_abbrev_empty(self):
w = XMLWriter(BytesIO(), abbrev_empty=False)
w.start("a")
w.close()
self.assertOutput(w, b"<a></a>")
def test_named_end(self):
w = XMLWriter(BytesIO())
w.start("a")
w.end("a")
w.close()
self.assertTrue(True)
class TestPrettyPrinting(XMLWriterTestCase):
def test_simple(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.start("a")
w.start("b")
w.data("foo")
w.end()
w.start("b")
w.data("bar")
w.end()
w.start("b")
w.start("c")
w.close()
self.assertOutput(
w,
b"""\
<a>
<b>foo</b>
<b>bar</b>
<b>
<c />
</b>
</a>""",
)
def test_comment(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.start("a")
w.comment("comment")
w.start("b")
w.close()
self.assertOutput(w, b"<a>\n <!--comment-->\n <b />\n</a>")
def test_comment_before_root(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.comment("comment")
w.start("a")
w.close()
self.assertOutput(w, b"<!--comment-->\n<a />")
def test_comment_after_root(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.start("a")
w.end()
w.comment("comment")
w.close()
self.assertOutput(w, b"<a />\n<!--comment-->")
def test_pi(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.start("a")
w.pi("foo", "bar")
w.start("b")
w.close()
self.assertOutput(w, b"<a>\n <?foo bar?>\n <b />\n</a>")
def test_pi_before_root(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.pi("foo", "bar")
w.start("a")
w.close()
self.assertOutput(w, b"<?foo bar?>\n<a />")
def test_pi_after_root(self):
w = XMLWriter(BytesIO(), pretty_print=True)
w.start("a")
w.end()
w.pi("foo", "bar")
w.close()
self.assertOutput(w, b"<a />\n<?foo bar?>")
class TestNamespaces(XMLWriterTestCase):
def test_simple(self):
w = XMLWriter(BytesIO())
w.start_ns("", "http://example.org/ns")
w.start("{http://example.org/ns}foo")
w.close()
self.assertOutput(w, b'<foo xmlns="http://example.org/ns" />')
def test_attribute(self):
w = XMLWriter(BytesIO())
w.start_ns("a", "http://example.org/ns")
w.start("foo", {"{http://example.org/ns}bar": "baz"})
w.close()
self.assertOutput(w, b'<foo xmlns:a="http://example.org/ns" a:bar="baz" />')
def test_prefixed_element(self):
w = XMLWriter(BytesIO())
w.start_ns("a", "http://example.org/ns")
w.start("{http://example.org/ns}foo")
w.close()
self.assertOutput(w, b'<a:foo xmlns:a="http://example.org/ns" />')
def test_default_unbinding(self):
w = XMLWriter(BytesIO())
w.start_ns("", "http://example.org/ns")
w.start("{http://example.org/ns}foo")
w.start_ns("", "")
w.start("foo")
w.close()
self.assertOutput(
w, b'<foo xmlns="http://example.org/ns">' b'<foo xmlns="" /></foo>'
)
def test_prefix_rebinding(self):
w = XMLWriter(BytesIO())
w.start_ns("a", "http://example.org/ns")
w.start("{http://example.org/ns}foo")
w.start_ns("a", "http://example.org/ns2")
w.start("{http://example.org/ns2}foo")
w.close()
self.assertOutput(
w,
b'<a:foo xmlns:a="http://example.org/ns">'
b'<a:foo xmlns:a="http://example.org/ns2" />'
b"</a:foo>",
)
def test_attributes_same_local_name(self):
w = XMLWriter(BytesIO())
w.start_ns("a", "http://example.org/ns1")
w.start_ns("b", "http://example.org/ns2")
w.start("foo")
w.start(
"bar",
{"{http://example.org/ns1}attr": "1", "{http://example.org/ns2}attr": "2"},
)
w.close()
self.assertOutput(
w,
b'<foo xmlns:a="http://example.org/ns1"'
b' xmlns:b="http://example.org/ns2">'
b'<bar a:attr="1" b:attr="2" />'
b"</foo>",
)
def test_attributes_same_local_one_prefixed(self):
w = XMLWriter(BytesIO())
w.start_ns("a", "http://example.org/ns")
w.start("foo")
w.start("bar", {"{http://example.org/ns}attr": "1", "attr": "2"})
w.close()
self.assertOutput(
w,
b'<foo xmlns:a="http://example.org/ns">'
b'<bar attr="2" a:attr="1" />'
b"</foo>",
)
def test_attributes_same_local_one_prefixed_one_default(self):
w = XMLWriter(BytesIO())
w.start_ns("", "http://example.org/ns1")
w.start_ns("a", "http://example.org/ns2")
w.start("{http://example.org/ns1}foo")
w.start(
"{http://example.org/ns1}bar",
{"{http://example.org/ns1}attr": "1", "{http://example.org/ns2}attr": "2"},
)
w.close()
self.assertOutput(
w,
b'<foo xmlns="http://example.org/ns1"'
b' xmlns:a="http://example.org/ns2">'
b'<bar attr="1" a:attr="2" />'
b"</foo>",
)
class TestIterwrite(XMLWriterTestCase):
def test_basic(self):
from lxml import etree
from io import BytesIO
w = XMLWriter(BytesIO())
xml = b"""\
<!--comment before--><?pi before?><foo xmlns="http://example.org/ns1">
<?a pi?>
<bar xmlns:b="http://example.org/ns2">
<?pi inside?>some text
<baz attr="1" b:attr="2" />
oh dear<!--comment inside -->text here too
</bar>
</foo><?pi after?><!--comment after-->"""
events = ("start", "end", "start-ns", "end-ns", "pi", "comment")
w.iterwrite(etree.iterparse(BytesIO(xml), events))
w.close()
self.assertOutput(w, xml)
def test_chunked_text(self):
from lxml import etree
from io import BytesIO
for padding in (16382, 32755):
padding = b" " * padding
w = XMLWriter(BytesIO())
xml = b"%s<doc><foo>hello</foo></doc>" % padding
events = ("start", "end")
w.iterwrite(etree.iterparse(BytesIO(xml), events))
w.close()
self.assertOutput(w, xml.strip())
class TestToString(XMLWriterTestCase):
def test_basic(self):
from lxml import etree
elem = etree.Element("foo", bar="baz")
elem.text = "something"
elem.tail = "whatnot"
xml = tostring(elem)
self.assertEqual(xml, b'<foo bar="baz">something</foo>whatnot')
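# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the test suite): the writer API exercised by
# the tests above, end to end, without pretty printing.
def _example_write():
    out = BytesIO()
    w = XMLWriter(out)
    w.start("doc", {"version": "1"})
    w.data("hello")
    w.close()
    return out.getvalue()  # b'<doc version="1">hello</doc>'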
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"lxml.etree.Element",
"io.BytesIO",
"streamxmlwriter.tostring"
] | [((10739, 10754), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10752, 10754), False, 'import unittest\n'), ((10511, 10542), 'lxml.etree.Element', 'etree.Element', (['"""foo"""'], {'bar': '"""baz"""'}), "('foo', bar='baz')\n", (10524, 10542), False, 'from lxml import etree\n'), ((10619, 10633), 'streamxmlwriter.tostring', 'tostring', (['elem'], {}), '(elem)\n', (10627, 10633), False, 'from streamxmlwriter import XMLWriter, XMLSyntaxError, tostring\n'), ((1509, 1518), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1516, 1518), False, 'from io import BytesIO\n'), ((1653, 1662), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1660, 1662), False, 'from io import BytesIO\n'), ((1833, 1842), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1840, 1842), False, 'from io import BytesIO\n'), ((2011, 2020), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2018, 2020), False, 'from io import BytesIO\n'), ((2213, 2222), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2220, 2222), False, 'from io import BytesIO\n'), ((2409, 2418), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2416, 2418), False, 'from io import BytesIO\n'), ((3396, 3405), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3403, 3405), False, 'from io import BytesIO\n'), ((3592, 3601), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3599, 3601), False, 'from io import BytesIO\n'), ((3750, 3759), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3757, 3759), False, 'from io import BytesIO\n'), ((3955, 3964), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3962, 3964), False, 'from io import BytesIO\n'), ((4119, 4128), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4126, 4128), False, 'from io import BytesIO\n'), ((4314, 4323), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4321, 4323), False, 'from io import BytesIO\n'), ((4726, 4735), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4733, 4735), False, 'from io import BytesIO\n'), ((4978, 4987), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (4985, 4987), False, 'from io import BytesIO\n'), ((5193, 5202), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5200, 5202), False, 'from io import BytesIO\n'), ((5408, 5417), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5415, 5417), False, 'from io import BytesIO\n'), ((5650, 5659), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5657, 5659), False, 'from io import BytesIO\n'), ((5855, 5864), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (5862, 5864), False, 'from io import BytesIO\n'), ((6111, 6120), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6118, 6120), False, 'from io import BytesIO\n'), ((6358, 6367), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6365, 6367), False, 'from io import BytesIO\n'), ((6643, 6652), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6650, 6652), False, 'from io import BytesIO\n'), ((6903, 6912), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (6910, 6912), False, 'from io import BytesIO\n'), ((7253, 7262), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (7260, 7262), False, 'from io import BytesIO\n'), ((7734, 7743), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (7741, 7743), False, 'from io import BytesIO\n'), ((8321, 8330), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (8328, 8330), False, 'from io import BytesIO\n'), ((8757, 8766), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (8764, 8766), False, 'from io import BytesIO\n'), ((9462, 9471), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (9469, 9471), False, 'from io import BytesIO\n'), ((3175, 3184), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (3182, 3184), False, 'from io import BytesIO\n'), ((9882, 9894), 'io.BytesIO', 'BytesIO', (['xml'], {}), 
'(xml)\n', (9889, 9894), False, 'from io import BytesIO\n'), ((10156, 10165), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (10163, 10165), False, 'from io import BytesIO\n'), ((10306, 10318), 'io.BytesIO', 'BytesIO', (['xml'], {}), '(xml)\n', (10313, 10318), False, 'from io import BytesIO\n')] |
# -*- encoding: utf-8 -*-
import json
import django.utils.timezone
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from guardian.decorators import permission_required
from apps.authentication.models import OnlineUser as User
from apps.dashboard.tools import get_base_context, has_access
from apps.marks.dashboard.forms import MarkForm
from apps.marks.models import Mark, MarkUser
@login_required
@permission_required('marks.view_mark', return_403=True)
def index(request):
"""
Marks overview
"""
# Check access
if not has_access(request):
raise PermissionDenied
# Get context
context = get_base_context(request)
# Find all marks and do additional fixes
marks_collection = []
marks = Mark.objects.all().order_by('-added_date')
for mark in marks:
marks_temp = mark
marks_temp.users_num = len(mark.given_to.all())
marks_temp.category_clean = mark.get_category_display()
marks_collection.append(marks_temp)
# Add collection to context
context['marks'] = marks_collection
# Render view
return render(request, 'marks/dashboard/index.html', context)
@login_required
@permission_required('marks.change_mark', return_403=True)
def marks_details(request, pk):
"""
Display details for a given Mark
"""
# Check permission
if not has_access(request):
raise PermissionDenied
# Get context
context = get_base_context(request)
# Get object
mark = get_object_or_404(Mark, pk=pk)
mark.category_clean = mark.get_category_display()
context['mark'] = mark
# Get users connected to the mark
context['mark_users'] = mark.given_to.all()
# AJAX
if request.method == 'POST':
        if request.is_ajax() and 'action' in request.POST:
resp = {'status': 200}
context, resp = _handle_mark_detail(request, context, resp)
# Set mark
resp['mark'] = {'last_changed_date': context['mark'].last_changed_date.strftime("%Y-%m-%d"),
'last_changed_by': context['mark'].last_changed_by.get_full_name()}
# Return ajax
return HttpResponse(json.dumps(resp), status=resp['status'])
# Render view
return render(request, 'marks/dashboard/marks_details.html', context)
@login_required
@permission_required('marks.add_mark', return_403=True)
def marks_new(request):
if not has_access(request):
raise PermissionDenied
context = get_base_context(request)
if request.method == 'POST':
mark_form = MarkForm(request.POST)
if not mark_form.is_valid():
messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
else:
# Save the form data
new_mark = mark_form.save()
# Save the additional mark data
new_mark.given_by = request.user
new_mark.last_changed_by = request.user
new_mark.save()
# Add news
messages.success(request, 'Prikken ble lagret.')
return redirect(marks_details, pk=new_mark.id)
else:
context['form'] = MarkForm()
return render(request, 'marks/dashboard/marks_new.html', context)
@login_required
@permission_required('marks.change_mark', return_403=True)
def marks_edit(request, pk):
if not has_access(request):
raise PermissionDenied
context = get_base_context(request)
if request.method == 'POST':
mark = get_object_or_404(Mark, pk=pk)
mark_form = MarkForm(request.POST, instance=mark)
if not mark_form.is_valid():
messages.error(request, 'Noen av de påkrevde feltene inneholder feil.')
else:
# Save the form data
new_mark = mark_form.save()
# Save the additional mark data
new_mark.last_changed_by = request.user
new_mark.last_changed_date = django.utils.timezone.now()
new_mark.save()
# Add news
messages.success(request, 'Prikken ble endret.')
return redirect(marks_details, pk=new_mark.id)
else:
mark = get_object_or_404(Mark, pk=pk)
context['form'] = MarkForm(instance=mark)
return render(request, 'marks/dashboard/marks_edit.html', context)
@login_required
@permission_required('marks.delete_mark', return_403=True)
def marks_delete(request, pk):
"""
Display details for a given Mark
"""
# Check permission
if not has_access(request):
raise PermissionDenied
# Get object
mark = get_object_or_404(Mark, pk=pk)
# Save message
messages.success(request, '%s er ble slettet.' % mark.title)
# Delete the mark
mark.delete()
# Redirect user
return redirect(index)
def _handle_mark_detail(request, context, resp):
if request.POST['action'] == 'remove_user':
# Get the correct user
user = get_object_or_404(User, pk=int(request.POST['user_id']))
# Remove from the set
mark_users_filtered = []
for mark_user in context['mark_users']:
if mark_user.user == user:
# Delete the object in the database
mark_user.delete()
else:
mark_users_filtered.append(mark_user)
# Update mark
context['mark'].last_changed_date = django.utils.timezone.now()
context['mark'].last_changed_by = request.user
context['mark'].save()
# Set information to resp
resp['message'] = '%s ble fjernet fra %s' % (user.get_full_name(), context['mark'].title)
resp['mark_users'] = [{'user': mu.user.get_full_name(), 'id': mu.user.id} for mu in mark_users_filtered]
elif request.POST['action'] == 'add_user':
user = get_object_or_404(User, pk=int(request.POST['user_id']))
# Check if user already is the lucky owner of this prikk
for context_mark_user in context['mark_users']:
if context_mark_user.user == user:
resp = {
'status': 500,
'message': '%s har allerede prikken %s.' % (user.get_full_name(), context['mark'].title)
}
# Return ajax
return HttpResponse(json.dumps(resp), status=500)
# Update mark
context['mark'].last_changed_date = django.utils.timezone.now()
context['mark'].last_changed_by = request.user
context['mark'].save()
# New MarkUser
mark_user = MarkUser()
mark_user.mark = context['mark']
mark_user.user = user
mark_user.save()
# Build new list of users
mark_users_list = []
for context_mark_user in context['mark_users']:
mark_users_list.append(context_mark_user)
mark_users_list.append(mark_user)
# Sort the list of mark users
resp['mark_users'] = [{'user': mu.user.get_full_name(), 'id': mu.user.id} for mu in mark_users_list]
resp['mark_users'].sort(key=lambda x: x['user'])
# Set information to resp
resp['message'] = '%s ble tildelt prikken %s.' % (user.get_full_name(), context['mark'].title)
return context, resp
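# Editorial note (derived from the views above, not from project documentation): the
# AJAX response serialised in marks_details() has the shape
#   {"status": 200, "message": "...",
#    "mark_users": [{"user": "<full name>", "id": <user id>}, ...],
#    "mark": {"last_changed_date": "YYYY-MM-DD", "last_changed_by": "<full name>"}}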
| [
"django.shortcuts.render",
"apps.marks.models.MarkUser",
"guardian.decorators.permission_required",
"django.contrib.messages.error",
"django.shortcuts.get_object_or_404",
"json.dumps",
"apps.dashboard.tools.has_access",
"apps.dashboard.tools.get_base_context",
"django.shortcuts.redirect",
"apps.marks.models.Mark.objects.all",
"django.contrib.messages.success",
"apps.marks.dashboard.forms.MarkForm"
] | [((602, 657), 'guardian.decorators.permission_required', 'permission_required', (['"""marks.view_mark"""'], {'return_403': '(True)'}), "('marks.view_mark', return_403=True)\n", (621, 657), False, 'from guardian.decorators import permission_required\n'), ((1373, 1430), 'guardian.decorators.permission_required', 'permission_required', (['"""marks.change_mark"""'], {'return_403': '(True)'}), "('marks.change_mark', return_403=True)\n", (1392, 1430), False, 'from guardian.decorators import permission_required\n'), ((2537, 2591), 'guardian.decorators.permission_required', 'permission_required', (['"""marks.add_mark"""'], {'return_403': '(True)'}), "('marks.add_mark', return_403=True)\n", (2556, 2591), False, 'from guardian.decorators import permission_required\n'), ((3457, 3514), 'guardian.decorators.permission_required', 'permission_required', (['"""marks.change_mark"""'], {'return_403': '(True)'}), "('marks.change_mark', return_403=True)\n", (3476, 3514), False, 'from guardian.decorators import permission_required\n'), ((4530, 4587), 'guardian.decorators.permission_required', 'permission_required', (['"""marks.delete_mark"""'], {'return_403': '(True)'}), "('marks.delete_mark', return_403=True)\n", (4549, 4587), False, 'from guardian.decorators import permission_required\n'), ((829, 854), 'apps.dashboard.tools.get_base_context', 'get_base_context', (['request'], {}), '(request)\n', (845, 854), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((1299, 1353), 'django.shortcuts.render', 'render', (['request', '"""marks/dashboard/index.html"""', 'context'], {}), "(request, 'marks/dashboard/index.html', context)\n", (1305, 1353), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1636, 1661), 'apps.dashboard.tools.get_base_context', 'get_base_context', (['request'], {}), '(request)\n', (1652, 1661), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((1691, 1721), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Mark'], {'pk': 'pk'}), '(Mark, pk=pk)\n', (1708, 1721), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2455, 2517), 'django.shortcuts.render', 'render', (['request', '"""marks/dashboard/marks_details.html"""', 'context'], {}), "(request, 'marks/dashboard/marks_details.html', context)\n", (2461, 2517), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2694, 2719), 'apps.dashboard.tools.get_base_context', 'get_base_context', (['request'], {}), '(request)\n', (2710, 2719), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((3379, 3437), 'django.shortcuts.render', 'render', (['request', '"""marks/dashboard/marks_new.html"""', 'context'], {}), "(request, 'marks/dashboard/marks_new.html', context)\n", (3385, 3437), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3622, 3647), 'apps.dashboard.tools.get_base_context', 'get_base_context', (['request'], {}), '(request)\n', (3638, 3647), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((4451, 4510), 'django.shortcuts.render', 'render', (['request', '"""marks/dashboard/marks_edit.html"""', 'context'], {}), "(request, 'marks/dashboard/marks_edit.html', context)\n", (4457, 4510), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4788, 4818), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Mark'], {'pk': 'pk'}), '(Mark, pk=pk)\n', (4805, 4818), False, 'from django.shortcuts 
import get_object_or_404, redirect, render\n'), ((4843, 4903), 'django.contrib.messages.success', 'messages.success', (['request', "('%s er ble slettet.' % mark.title)"], {}), "(request, '%s er ble slettet.' % mark.title)\n", (4859, 4903), False, 'from django.contrib import messages\n'), ((4977, 4992), 'django.shortcuts.redirect', 'redirect', (['index'], {}), '(index)\n', (4985, 4992), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((744, 763), 'apps.dashboard.tools.has_access', 'has_access', (['request'], {}), '(request)\n', (754, 763), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((1551, 1570), 'apps.dashboard.tools.has_access', 'has_access', (['request'], {}), '(request)\n', (1561, 1570), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((2627, 2646), 'apps.dashboard.tools.has_access', 'has_access', (['request'], {}), '(request)\n', (2637, 2646), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((2774, 2796), 'apps.marks.dashboard.forms.MarkForm', 'MarkForm', (['request.POST'], {}), '(request.POST)\n', (2782, 2796), False, 'from apps.marks.dashboard.forms import MarkForm\n'), ((3356, 3366), 'apps.marks.dashboard.forms.MarkForm', 'MarkForm', ([], {}), '()\n', (3364, 3366), False, 'from apps.marks.dashboard.forms import MarkForm\n'), ((3555, 3574), 'apps.dashboard.tools.has_access', 'has_access', (['request'], {}), '(request)\n', (3565, 3574), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((3697, 3727), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Mark'], {'pk': 'pk'}), '(Mark, pk=pk)\n', (3714, 3727), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3748, 3785), 'apps.marks.dashboard.forms.MarkForm', 'MarkForm', (['request.POST'], {'instance': 'mark'}), '(request.POST, instance=mark)\n', (3756, 3785), False, 'from apps.marks.dashboard.forms import MarkForm\n'), ((4358, 4388), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Mark'], {'pk': 'pk'}), '(Mark, pk=pk)\n', (4375, 4388), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4415, 4438), 'apps.marks.dashboard.forms.MarkForm', 'MarkForm', ([], {'instance': 'mark'}), '(instance=mark)\n', (4423, 4438), False, 'from apps.marks.dashboard.forms import MarkForm\n'), ((4707, 4726), 'apps.dashboard.tools.has_access', 'has_access', (['request'], {}), '(request)\n', (4717, 4726), False, 'from apps.dashboard.tools import get_base_context, has_access\n'), ((939, 957), 'apps.marks.models.Mark.objects.all', 'Mark.objects.all', ([], {}), '()\n', (955, 957), False, 'from apps.marks.models import Mark, MarkUser\n'), ((2846, 2917), 'django.contrib.messages.error', 'messages.error', (['request', '"""Noen av de påkrevde feltene inneholder feil."""'], {}), "(request, 'Noen av de påkrevde feltene inneholder feil.')\n", (2860, 2917), False, 'from django.contrib import messages\n'), ((3211, 3259), 'django.contrib.messages.success', 'messages.success', (['request', '"""Prikken ble lagret."""'], {}), "(request, 'Prikken ble lagret.')\n", (3227, 3259), False, 'from django.contrib import messages\n'), ((3280, 3319), 'django.shortcuts.redirect', 'redirect', (['marks_details'], {'pk': 'new_mark.id'}), '(marks_details, pk=new_mark.id)\n', (3288, 3319), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3835, 3906), 'django.contrib.messages.error', 'messages.error', (['request', '"""Noen av de påkrevde feltene 
inneholder feil."""'], {}), "(request, 'Noen av de påkrevde feltene inneholder feil.')\n", (3849, 3906), False, 'from django.contrib import messages\n'), ((4224, 4272), 'django.contrib.messages.success', 'messages.success', (['request', '"""Prikken ble endret."""'], {}), "(request, 'Prikken ble endret.')\n", (4240, 4272), False, 'from django.contrib import messages\n'), ((4293, 4332), 'django.shortcuts.redirect', 'redirect', (['marks_details'], {'pk': 'new_mark.id'}), '(marks_details, pk=new_mark.id)\n', (4301, 4332), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6730, 6740), 'apps.marks.models.MarkUser', 'MarkUser', ([], {}), '()\n', (6738, 6740), False, 'from apps.marks.models import Mark, MarkUser\n'), ((2384, 2400), 'json.dumps', 'json.dumps', (['resp'], {}), '(resp)\n', (2394, 2400), False, 'import json\n'), ((6475, 6491), 'json.dumps', 'json.dumps', (['resp'], {}), '(resp)\n', (6485, 6491), False, 'import json\n')] |
#!/usr/bin/python3.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility functions for file objects and file descriptors.
"""
import os
import fcntl
class MergedIO(object):
"""MergedIO(outfile, infile)
Combines a write stream and a read stream into one read/write object."""
def __init__(self, outf, inf):
self._outf = outf
self._inf = inf
self.mode = "rw"
self.closed = 0
self.softspace = 0
# reading methods
self.read = inf.read
self.readline = inf.readline
self.readlines = inf.readlines
# writing methods
self.write = outf.write
self.flush = outf.flush
self.writelines = outf.writelines
def close(self):
self._outf.close()
self._inf.close()
self._outf = None
self._inf = None
self.closed = 1
def fileno(self):
# ??? punt, since reads are most common, return reader fd
return self._inf.fileno()
def filenos(self):
return self._inf.fileno(), self._outf.fileno()
def isatty(self):
return self._inf.isatty() and self._outf.isatty()
def mode2flags(mode):
"""mode2flags(modestring)
Converts a file mode in string form (e.g. "w+") to an integer flag value
suitable for os.open(). """
flags = getattr("O_LARGEFILE", os, 0)
if mode == "a":
flags = flags | os.O_APPEND | os.O_WRONLY
elif mode == "a+":
flags = flags | os.O_APPEND | os.O_RDWR | os.O_CREAT
elif mode == "w":
flags = flags | os.O_WRONLY | os.O_CREAT
elif mode == "w+":
flags = flags | os.O_RDWR | os.O_CREAT
elif mode == "r":
pass # O_RDONLY is zero already
elif mode == "r+":
flags = flags | os.O_RDWR
return flags
# cache of O_ flags
_OLIST = [n for n in dir(os) if n.startswith("O_")]
def flag_string(fd):
"""flag_string(fd)
    where fd is an integer file descriptor of an open file. Returns the file's
open flags as a vertical bar (|) delimited string.
"""
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
strlist = [_f for _f in
[(flags & getattr(os, n)) and n for n in _OLIST] if _f]
# hack to accomodate the fact that O_RDONLY is not really a flag...
if not (flags & os.ACCMODE):
strlist.insert(0, "O_RDONLY")
return "|".join(strlist)
# TODO still need to verify this or add more.
_MODEMAP = {
os.O_RDONLY: "r",
os.O_RDWR: "r+",
os.O_WRONLY | os.O_TRUNC: "w",
os.O_RDWR | os.O_CREAT: "w+",
os.O_APPEND | os.O_WRONLY: "a",
os.O_APPEND | os.O_RDWR | os.O_CREAT: "a+",
}
def mode_string(fd):
"""Get a suitalbe mode string for an fd."""
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
mode = _MODEMAP.get(flags)
if mode is None:
return flag_string(fd)
else:
return mode
def close_on_exec(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags |= fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def set_nonblocking(fd, flag=1):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
if flag:
flags |= os.O_NONBLOCK # set non-blocking
else:
flags &= ~os.O_NONBLOCK # set blocking
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
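# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the original module; POSIX only): round-trip a
# mode string through mode2flags() and inspect the flags of a real descriptor.
def _example_flags():
    import tempfile
    with tempfile.TemporaryFile("w+") as fo:
        print(mode2flags("w+"))          # integer O_* flags suitable for os.open()
        print(flag_string(fo.fileno()))  # e.g. "O_RDWR" plus any platform-specific flags
        print(mode_string(fo.fileno()))  # mode string, or the flag string if no exact match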
| [
"fcntl.fcntl"
] | [((2602, 2632), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_GETFL'], {}), '(fd, fcntl.F_GETFL)\n', (2613, 2632), False, 'import fcntl\n'), ((3246, 3276), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_GETFL'], {}), '(fd, fcntl.F_GETFL)\n', (3257, 3276), False, 'import fcntl\n'), ((3427, 3457), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_GETFD'], {}), '(fd, fcntl.F_GETFD)\n', (3438, 3457), False, 'import fcntl\n'), ((3492, 3529), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_SETFD', 'flags'], {}), '(fd, fcntl.F_SETFD, flags)\n', (3503, 3529), False, 'import fcntl\n'), ((3577, 3607), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_GETFL'], {}), '(fd, fcntl.F_GETFL)\n', (3588, 3607), False, 'import fcntl\n'), ((3734, 3771), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_SETFL', 'flags'], {}), '(fd, fcntl.F_SETFL, flags)\n', (3745, 3771), False, 'import fcntl\n')] |
import os
import sys
import math
import random
import multiprocessing
def fib(n):
if n in {0, 1}:
return n
return fib(n - 1) + fib(n - 2)
def bingonacci(fib_range, addends_size: int):
fibs = {}
fib_range = range(fib_range)
for i in fib_range:
fibs[i] = fib(i)
sum_total = 0
for _ in range(addends_size):
sum_total += fibs[random.choice(fib_range)]
return sum_total
def main():
argv = sys.argv[1:]
if len(argv) > 1:
sys.exit("ArgvError: bingonacci [-t]")
if argv and argv[0] != "-t":
sys.exit("ArgvError: bingonacci only accepts -t flag as single argument")
fib_range = 20
addends_size = 1000000
if not argv:
print(bingonacci(fib_range, addends_size))
else:
# Python threading does not take more than 1 CPU, use multiprocessing instead.
proc_num = max((os.cpu_count() or 0) - 1, 1)
split_size = math.ceil(addends_size / proc_num)
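        # Editorial note: because of the ceil(), proc_num * split_size can slightly
        # exceed addends_size, e.g. 1,000,000 addends over 3 processes -> 3 * 333,334 = 1,000,002.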
pool = multiprocessing.Pool(proc_num)
print(sum(pool.starmap(bingonacci, [(fib_range, split_size) for _ in range(proc_num)])))
pool.close()
sys.exit()
if __name__ == '__main__':
main()
| [
"random.choice",
"math.ceil",
"multiprocessing.Pool",
"os.cpu_count",
"sys.exit"
] | [((1135, 1145), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1143, 1145), False, 'import sys\n'), ((493, 531), 'sys.exit', 'sys.exit', (['"""ArgvError: bingonacci [-t]"""'], {}), "('ArgvError: bingonacci [-t]')\n", (501, 531), False, 'import sys\n'), ((573, 646), 'sys.exit', 'sys.exit', (['"""ArgvError: bingonacci only accepts -t flag as single argument"""'], {}), "('ArgvError: bingonacci only accepts -t flag as single argument')\n", (581, 646), False, 'import sys\n'), ((932, 966), 'math.ceil', 'math.ceil', (['(addends_size / proc_num)'], {}), '(addends_size / proc_num)\n', (941, 966), False, 'import math\n'), ((982, 1012), 'multiprocessing.Pool', 'multiprocessing.Pool', (['proc_num'], {}), '(proc_num)\n', (1002, 1012), False, 'import multiprocessing\n'), ((378, 402), 'random.choice', 'random.choice', (['fib_range'], {}), '(fib_range)\n', (391, 402), False, 'import random\n'), ((882, 896), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (894, 896), False, 'import os\n')] |
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Upload a file attachment to MDC
Usage: python MDC-attach.py <file> <parent page name> <MIME type> <description>
Please set MDC_USER and MDC_PASSWORD in the environment
"""
import os, sys, deki
wikiuser = os.environ['MDC_USER']
wikipw = os.environ['MDC_PASSWORD']
file, pageid, mimetype, description = sys.argv[1:]
wiki = deki.Deki("http://developer.mozilla.org/@api/deki/", wikiuser, wikipw)
wiki.create_file(pageid, os.path.basename(file), open(file).read(), mimetype,
description)
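# Editorial example invocation (hypothetical page, file and credential values):
#   MDC_USER=me MDC_PASSWORD=secret python MDC-attach.py diagram.png Project/Page image/png "Overview diagram"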
| [
"os.path.basename",
"deki.Deki"
] | [((553, 623), 'deki.Deki', 'deki.Deki', (['"""http://developer.mozilla.org/@api/deki/"""', 'wikiuser', 'wikipw'], {}), "('http://developer.mozilla.org/@api/deki/', wikiuser, wikipw)\n", (562, 623), False, 'import os, sys, deki\n'), ((649, 671), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (665, 671), False, 'import os, sys, deki\n')] |
# -*- coding: utf-8 -*-
# @Author: jpch89
# Packaging command:
"""
pyinstaller --add-data="img\电影.png;img" --add-data="ffprobe.exe;." -i="img\电影.ico" -Fw movielen.py
"""
# Use "man ffprobe" to view ffprobe's documentation
import subprocess
import re
import os
import sys
from PyQt5 import sip
from PyQt5.QtWidgets import QApplication, QWidget, QLabel
from PyQt5.QtGui import QIcon, QFont
exts = ['.avi', '.mp4', '.mkv', '.flv']
exts += [i.upper() for i in exts]
version = 'v0.0.4'
# def resource_path(relative_path):
# try:
# base_path = sys._MEIPASS
# except Exception:
# base_path = os.path.abspath('.')
# print(base_path)
# print(relative_path)
# abs_path = os.path.join(base_path, relative_path)
# print(abs_path)
# return abs_path
# An alternative way to write it
def resource_path(relative_path):
    '''Define a function that returns an absolute path'''
if hasattr(sys, '_MEIPASS'):
base_path = sys._MEIPASS
else:
base_path = os.path.abspath('.')
return os.path.join(base_path, relative_path)
def get_length(filepath):
    # Fix for PyInstaller not being able to find bundled resources
ffprobe_path = resource_path('ffprobe')
cmd = '%s "%s"' % (ffprobe_path, filepath)
    # Without the resource path redirection it would be:
# cmd = 'ffprobe "%s"' % filepath
    # Modification needed when using Popen under a PyInstaller windowed build:
# si = subprocess.STARTUPINFO()
# si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    # Guess:
    # the output seen in the REPL is stderr (standard error output);
    # if standard error is merged into standard output,
    # the invoked command no longer shows any output in the REPL,
    # because standard output has to be read out explicitly
# sp = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, shell = True, startupinfo = si)
sp = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
# print(sp.stdout.read().decode('gbk'))
results = sp.stdout.readlines()
for result in results:
# print(result.decode('utf-8'))
if b"Duration" in result:
result = result.decode('utf-8')
break
pattern = r'Duration: (\d*:\d*:\d*\.\d*), start'
durations = re.search(pattern, result).group(1)
durations = durations.split(':')
durations = list(map(float, durations))
return durations
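# Editorial worked example (illustrative ffprobe output line): for
#   "  Duration: 00:03:25.45, start: 0.000000, bitrate: 1205 kb/s"
# the regex captures "00:03:25.45" and get_length() returns [0.0, 3.0, 25.45].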
# Note: int(a) does not round; it simply truncates
def get_hms(total_len):
quotient, remainder = divmod(total_len[2], 60)
total_len[1] += quotient
total_len[2] = remainder
quotient, remainder = divmod(total_len[1], 60)
total_len[0] += quotient
total_len[1] = remainder
return total_len
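# Editorial worked example: get_hms([1.0, 130.0, 75.5]) carries 75.5 s into
# 1 min 15.5 s, then 131 min into 2 h 11 min, and returns [3.0, 11.0, 15.5].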
def is_movie(file):
filename, ext = os.path.splitext(file)
if ext in exts:
return True
else:
return False
def get_total_len():
total_len = [0.0, 0.0, 0.0]
count = 0
for root, dirs, files in os.walk('.'):
for file in files:
if is_movie(file):
count += 1
filepath = os.path.join(root, file)
durations = get_length(filepath)
for i in range(3):
total_len[i] += durations[i]
    # Test code: passed!
# file = 'test.mp4'
# total_len = get_length(file)
# total_len = get_hms(total_len)
# return(total_len)
    # End of test code
total_len = get_hms(total_len)
total_len[0] = int(total_len[0])
total_len[1] = int(total_len[1])
total_len[2] = round(total_len[2], 2)
return count, total_len
class MovieLen(QWidget):
def __init__(self):
super().__init__()
self.initGUI()
def initGUI(self):
self.setWindowTitle(' '.join(['MovieLen', version]))
self.setWindowIcon(QIcon(resource_path(r'img\电影.png')))
self.resize(400, 160)
self.label = QLabel(self)
self.label.setText(msg)
self.label.setFont(QFont('Microsoft YaHei', 16, QFont.Bold))
self.label.move(20, 50)
self.show()
if __name__ == '__main__':
count, total_len = get_total_len()
msg1 = '共有%d个视频文件' % count
print(msg1)
msg1 += '\n'
msg2 = '总时长:%s小时%s分%s秒' % (total_len[0], total_len[1], total_len[2])
print(msg2)
msg = msg1 + msg2
# GUI
app = QApplication(sys.argv)
    # Note:
    # write movielen = MovieLen() instead of just MovieLen();
    # otherwise there is no reference to the window after it is created,
    # so it flashes and disappears immediately
movielen = MovieLen()
sys.exit(app.exec_())
# Changelog
# v0.0.1
# 20180722016
# Single-file build no longer errors after adding the resource_path function!
# v0.0.2
# 201807222221
# Fixed the error when using pyinstaller's -w flag!
# v0.0.3
# 201810302214
# Added support for .flv files
# v0.0.4
# 201812080947
# Added support for uppercase extensions
| [
"PyQt5.QtGui.QFont",
"subprocess.Popen",
"os.path.splitext",
"os.path.join",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QApplication",
"os.path.abspath",
"os.walk",
"re.search"
] | [((946, 984), 'os.path.join', 'os.path.join', (['base_path', 'relative_path'], {}), '(base_path, relative_path)\n', (958, 984), False, 'import os\n'), ((1564, 1675), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'shell': '(True)'}), '(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT, shell=True)\n', (1580, 1675), False, 'import subprocess\n'), ((2453, 2475), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (2469, 2475), False, 'import os\n'), ((2645, 2657), 'os.walk', 'os.walk', (['"""."""'], {}), "('.')\n", (2652, 2657), False, 'import os\n'), ((3992, 4014), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (4004, 4014), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel\n'), ((914, 934), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (929, 934), False, 'import os\n'), ((3559, 3571), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['self'], {}), '(self)\n', (3565, 3571), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QLabel\n'), ((1984, 2010), 're.search', 're.search', (['pattern', 'result'], {}), '(pattern, result)\n', (1993, 2010), False, 'import re\n'), ((3631, 3671), 'PyQt5.QtGui.QFont', 'QFont', (['"""Microsoft YaHei"""', '(16)', 'QFont.Bold'], {}), "('Microsoft YaHei', 16, QFont.Bold)\n", (3636, 3671), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((2771, 2795), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (2783, 2795), False, 'import os\n')] |
import os
import discord
import re
client = discord.Client()
@client.event
async def on_ready():
# show status of client & send initial message to bot-echo channel
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
a_message = "ゴーヤちゃん準備完了でち"
channel = discord.utils.get(
client.get_all_channels(), name='bot-echo')
await client.send_message(channel, a_message)
@client.event
async def on_message(message):
# send a response when goya-oji appears
regex_oji = u"(ゴーヤ|ごーや|ゴーヤ)ちゃんいるかな(\?|?)?(\^|^){2}"
is_oji = re.search(regex_oji, message.content)
if is_oji:
# except client sender
if client.user != message.author:
react = "はいでち"
await client.send_message(message.channel, react)
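# Editorial note: the pattern above matches messages such as "ゴーヤちゃんいるかな?^^"
# (an optional half- or full-width question mark followed by exactly two carets,
# each either half- or full-width).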
def get_key():
    # get the Discord API key from the environment (.env)
key = os.getenv("APIKEY")
return key
def main():
key = get_key()
client.run(key)
main()
| [
"discord.Client",
"os.getenv",
"re.search"
] | [((45, 61), 'discord.Client', 'discord.Client', ([], {}), '()\n', (59, 61), False, 'import discord\n'), ((598, 635), 're.search', 're.search', (['regex_oji', 'message.content'], {}), '(regex_oji, message.content)\n', (607, 635), False, 'import re\n'), ((880, 899), 'os.getenv', 'os.getenv', (['"""APIKEY"""'], {}), "('APIKEY')\n", (889, 899), False, 'import os\n')] |
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, os.path.dirname(BASE_DIR))
SECRET_KEY = '<KEY>'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = (
'testapp',
)
MIDDLEWARE = (
'qinspect.middleware.QueryInspectMiddleware',
)
ROOT_URLCONF = 'testproject.urls'
WSGI_APPLICATION = 'testproject.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
QUERY_INSPECT_CONFIG = {
'enabled': True,
'absolute_limit': -1,
'header_stats': True,
'log_duplicates': True,
'log_stats': True,
'log_tracebacks': True,
'standard_deviation_limit': 1,
'traceback_roots': [BASE_DIR],
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'memory': {
'level': 'DEBUG',
'class': 'testapp.memorylog.MemoryHandler',
},
},
'loggers': {
'qinspect': {
'handlers': ['memory'],
'level': 'DEBUG',
'propagate': True,
},
},
}
| [
"os.path.dirname"
] | [((50, 75), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (65, 75), False, 'import os\n'), ((96, 121), 'os.path.dirname', 'os.path.dirname', (['BASE_DIR'], {}), '(BASE_DIR)\n', (111, 121), False, 'import os\n')] |
import time
import logging
import os
import fnmatch
class StringFileUtils:
""" helper class for file and string handling """
logger = None
filepath = None
filext = None
    # initialize with file path and file extension
def __init__(self,log,filep,filee):
self.logger = log
self.logger.info("initializing StringFileUtils instance")
self.filepath = filep
self.filext = filee
#returns a timestamped filename
def gettimestampedfilename(self):
        self.logger.info('creating filename in gettimestampedfilename() of StringFileUtils')
return time.strftime("report%Y%m%d-%H%M%S")
# count the number files in filepath + fileext
def countfiles(self):
return len(fnmatch.filter(os.listdir(self.filepath), "*" + self.filext))
    # remove existing matching files when adding one more would exceed max_files
def removeoldestfiles(self, max_files):
if (self.countfiles() + 1) > max_files:
files = fnmatch.filter(os.listdir(self.filepath), "*" + self.filext)
for mp4file in files:
self.logger.debug('removing: ' + mp4file + ' removeoldestfiles()')
os.remove(self.filepath+"/"+mp4file) | [
"os.listdir",
"time.strftime",
"os.remove"
] | [((625, 661), 'time.strftime', 'time.strftime', (['"""report%Y%m%d-%H%M%S"""'], {}), "('report%Y%m%d-%H%M%S')\n", (638, 661), False, 'import time\n'), ((778, 803), 'os.listdir', 'os.listdir', (['self.filepath'], {}), '(self.filepath)\n', (788, 803), False, 'import os\n'), ((982, 1007), 'os.listdir', 'os.listdir', (['self.filepath'], {}), '(self.filepath)\n', (992, 1007), False, 'import os\n'), ((1161, 1201), 'os.remove', 'os.remove', (["(self.filepath + '/' + mp4file)"], {}), "(self.filepath + '/' + mp4file)\n", (1170, 1201), False, 'import os\n')] |
import itertools as it
def solve24(cs):
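    # Brute force the 24 game: try every triple of operators, every permutation of
    # the four cards and every bracket template, interleave them into an expression
    # string, and print the ones that evaluate to 24 (within a small tolerance).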
os = set(["".join(i) for i in it.product("+-*/",repeat=3)])
ns = set(["".join(i) for i in it.permutations(cs)])
bs = ["( ) ", \
"( ) ", \
" ( ) ", \
" ( ) ", \
"( )( ) ", \
" ( ) ", \
" "]
for oo in os:
for nn in ns:
s = "".join(it.chain(*zip(nn,"....",oo+" ")))
for b in bs:
sb = "".join(it.chain(*zip(b,s))).replace(" ","").replace("X","10").replace("J","11").replace("Q","12").replace("K","13")
try:
if abs(eval(sb)-24.) < 0.001:
print(sb.replace(".","").replace("10","X").replace("11","J").replace("12","Q").replace("13","K"))
except:
pass
solve24("44XX")
solve24("3388")
solve24("3377")
solve24("4477")
solve24("17KK")
| [
"itertools.permutations",
"itertools.product"
] | [((75, 103), 'itertools.product', 'it.product', (['"""+-*/"""'], {'repeat': '(3)'}), "('+-*/', repeat=3)\n", (85, 103), True, 'import itertools as it\n'), ((139, 158), 'itertools.permutations', 'it.permutations', (['cs'], {}), '(cs)\n', (154, 158), True, 'import itertools as it\n')] |
# Generated by Django 2.1.8 on 2019-05-27 10:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Algorithm',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=200)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('m_size', models.BigIntegerField()),
('n_size', models.BigIntegerField()),
('l_size', models.BigIntegerField()),
('time', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('average_time', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('algorithm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='algorithms.Algorithm')),
],
options={
'abstract': False,
},
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.BigIntegerField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((338, 431), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (354, 431), False, 'from django.db import migrations, models\n'), ((461, 500), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (481, 500), False, 'from django.db import migrations, models\n'), ((534, 569), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (554, 569), False, 'from django.db import migrations, models\n'), ((597, 629), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (613, 629), False, 'from django.db import migrations, models\n'), ((833, 926), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (849, 926), False, 'from django.db import migrations, models\n'), ((956, 995), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (976, 995), False, 'from django.db import migrations, models\n'), ((1029, 1064), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1049, 1064), False, 'from django.db import migrations, models\n'), ((1094, 1118), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (1116, 1118), False, 'from django.db import migrations, models\n'), ((1148, 1172), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (1170, 1172), False, 'from django.db import migrations, models\n'), ((1202, 1226), 'django.db.models.BigIntegerField', 'models.BigIntegerField', ([], {}), '()\n', (1224, 1226), False, 'from django.db import migrations, models\n'), ((1254, 1328), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(2)', 'max_digits': '(6)', 'null': '(True)'}), '(blank=True, decimal_places=2, max_digits=6, null=True)\n', (1273, 1328), False, 'from django.db import migrations, models\n'), ((1364, 1438), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(2)', 'max_digits': '(6)', 'null': '(True)'}), '(blank=True, decimal_places=2, max_digits=6, null=True)\n', (1383, 1438), False, 'from django.db import migrations, models\n'), ((1471, 1565), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""algorithms.Algorithm"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'algorithms.Algorithm')\n", (1488, 1565), False, 'from django.db import migrations, models\n')] |
'''
Created on Aug 21, 2015
@author: <NAME> <<EMAIL>>
'''
from __future__ import division
import math
import random
import unittest
from .. import spherical_harmonics as sh
class TestSphericalHarmonics(unittest.TestCase):
""" unit tests for the spherical harmonics """
    _multiprocess_can_split_ = True  # let nose know that tests can run in parallel
def test_spherical_index(self):
""" test the conversion of the spherical index """
# check conversion
for k in range(20):
l, m = sh.spherical_index_lm(k)
self.assertEqual(sh.spherical_index_k(l, m), k)
# check order
k = 0
for l in range(4):
for m in range(-l, l+1):
self.assertEqual(sh.spherical_index_k(l, m), k)
k += 1
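        # Editorial note: with this ordering k = l*(l+1) + m, e.g. (l, m) = (2, -1) -> k = 5.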
def test_spherical_harmonics_real(self):
""" test spherical harmonics """
for l in range(sh.MAX_ORDER_SYM + 1):
for _ in range(5):
theta = math.pi * random.random()
phi = 2 * math.pi * random.random()
y1 = sh.spherical_harmonic_symmetric_scipy(l, theta)
y2 = sh.spherical_harmonic_real_scipy(l, 0, theta, phi)
y3 = sh.spherical_harmonic_symmetric(l, theta)
self.assertAlmostEqual(y1, y2)
self.assertAlmostEqual(y1, y3)
def test_spherical_harmonics_lm(self):
""" test spherical harmonics """
for l in range(sh.MAX_ORDER + 1):
for m in range(-l, l + 1):
for _ in range(5):
k = sh.spherical_index_k(l, m)
msg = 'l=%d, m=%d, k=%d' % (l, m, k)
theta = math.pi * random.random()
phi = 2 * math.pi * random.random()
y1 = sh.spherical_harmonic_real_scipy(l, m, theta, phi)
y2 = sh.spherical_harmonic_real_scipy_k(k, theta, phi)
y3 = sh.spherical_harmonic_real(l, m, theta, phi)
y4 = sh.spherical_harmonic_real_k(k, theta, phi)
self.assertAlmostEqual(y1, y2, msg=msg)
self.assertAlmostEqual(y1, y3, msg=msg)
self.assertAlmostEqual(y1, y4, msg=msg)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"random.random"
] | [((2370, 2385), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2383, 2385), False, 'import unittest\n'), ((1076, 1091), 'random.random', 'random.random', ([], {}), '()\n', (1089, 1091), False, 'import random\n'), ((1128, 1143), 'random.random', 'random.random', ([], {}), '()\n', (1141, 1143), False, 'import random\n'), ((1790, 1805), 'random.random', 'random.random', ([], {}), '()\n', (1803, 1805), False, 'import random\n'), ((1846, 1861), 'random.random', 'random.random', ([], {}), '()\n', (1859, 1861), False, 'import random\n')] |
import json
import logging
import os
import re
from typing import Optional, List
import pandas as pd
import requests
from lxml import html
from . import Scraper
from ..converters.zotero import ZoteroData, parse_row
class WallaceCollectionInformation(object):
def __init__(self, object_id: str, object_name: str, title: str, reference: str, reference_data: str,
place_artist: str, dates_all: str, material: str, dimensions: str, marks: str, museum_number: str,
commentary: str, image_url: Optional[str] = None):
self.__object_id = object_id
self.__object_name = object_name
self.__title = title
self.__reference = reference
self.__reference_data = reference_data
self.__place_artist = place_artist
self.__dates_all = dates_all
self.__material = material
self.__dimensions = dimensions
self.__marks = marks
self.__museum_number = museum_number
self.__commentary = commentary
self.__image_url = image_url
self.__tag = None
@property
def object_id(self):
return self.__object_id
@property
def object_name(self):
return self.__object_name
@property
def title(self):
return self.__title
@property
def reference(self):
return self.__reference
@property
def reference_data(self):
return self.__reference_data
@property
def place_artist(self):
return self.__place_artist
@property
def dates_all(self):
return self.__dates_all
@property
def material(self):
return self.__material
@property
def dimensions(self):
return self.__dimensions
@property
def marks(self):
return self.__marks
@property
def museum_number(self):
return self.__museum_number
@property
def commentary(self):
return self.__commentary
@property
def image_url(self):
return self.__image_url
@image_url.setter
def image_url(self, image_url):
self.__image_url = image_url
@property
def image_name(self):
return f"{self.object_id}.jpg"
@property
def tag(self):
return self.__tag
@tag.setter
def tag(self, tag):
self.__tag = tag
def to_dict(self):
return {
'object_id': self.object_id,
'object_name': self.object_name,
'title': self.title,
'reference': self.reference,
'reference_data': self.__reference_data,
'place_artist': self.place_artist,
'dates_all': self.dates_all,
'material': self.material,
'dimensions': self.dimensions,
'marks': self.marks,
'museum_number': self.museum_number,
'commentary': self.commentary,
'image_url': self.image_url,
'image_name': self.image_name,
'tag': self.tag
}
class WallaceCollection(Scraper):
"""
A scraper for the Wallace Collection at
http://wallacelive.wallacecollection.org/
"""
__URL_PREFIX = "http://wallacelive.wallacecollection.org"
__URL_TEMPLATE = "/eMuseumPlus?service=ExternalInterface&module=collection&viewType=detailView&objectId="
__URL_OBJECT_ID = r"objectId=(?P<objectId>[0-9]+)"
__XPATH = {
'object_name': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[1]/span[1]/text()',
'title': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[2]/span[1]/text()',
'reference': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[3]/span/span/a/span/text()',
'reference_data': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[4]/span[1]/text()',
'place_artist': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[5]/span[1]/text()',
'dates_all': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[6]/span[1]/text()',
'material': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[7]/span[1]/text()',
'dimensions': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[8]/span[1]/text()',
'marks': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[9]/span[1]/text()',
'museum_number': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dd[1]/ul[1]/li[10]/span[1]/text()',
'commentary': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[2]/dd/div/ul/li/span[1]/text()',
'image_url': '/html/body/div[1]/div[4]/div[2]/div[2]/dl[1]/dt[1]/a/@href'
}
def __init__(self):
self.__logger = logging.getLogger(__name__)
@property
def _log(self):
return self.__logger
def scrape(self, **kwargs):
self._log.debug("Called scrape with options: %s", kwargs)
objects: List[ZoteroData] = [
parse_row(row, self.__URL_OBJECT_ID)
for _, row in pd.read_csv(kwargs['input_file']).iterrows()
]
download_progress_file = os.path.join(kwargs['output'], "downloaded.txt")
if os.path.isfile(download_progress_file):
with open(download_progress_file, 'r') as fi:
download_progress = list(set([l.strip() for l in fi]))
else:
download_progress = []
annotations = []
for obj in [o for o in objects if o.object_id not in download_progress]:
annotation: Optional[WallaceCollectionInformation] = self.__extract_page(obj, kwargs['output'])
if annotation is None:
self._log.error(f"Object '{obj.object_id}' could not be downloaded")
continue
annotations.append(annotation)
download_progress.append(obj.object_id)
with open(download_progress_file, 'w') as fo:
fo.write("\n".join(download_progress))
        df = pd.DataFrame(
            [a.to_dict() for a in annotations],
            # the index must line up with the annotation rows; objects that were
            # skipped (already downloaded or failed) have no row of their own
            index=[a.object_id for a in annotations],
            columns=['object_id', 'tag', 'image_name'])
df.to_csv(os.path.join(kwargs['output'], "wallace_annotation.csv"))
def __extract_page(self, obj: ZoteroData, output) -> Optional[WallaceCollectionInformation]:
self._log.debug("Will scrape object_id '%s'", obj.object_id)
page = requests.get(
f"{self.__URL_PREFIX}{self.__URL_TEMPLATE}{obj.object_id}",
cookies={}
)
if page.ok:
html_page = html.fromstring(page.text)
values = {}
for xpath_key, xpath_string in self.__XPATH.items():
xpath = html_page.xpath(xpath_string)
values[xpath_key] = xpath[0] if len(xpath) > 0 else ""
info = WallaceCollectionInformation(object_id=obj.object_id, **values)
info.tag = obj.tag
image_popup = requests.get(self.__URL_PREFIX + re.findall(r"(/eMuseumPlus.*=F)", values['image_url'])[0],
cookies=page.cookies)
if image_popup.ok:
info.image_url = self.__URL_PREFIX + html.fromstring(image_popup.text) \
.xpath("/html/body/div/table/tr/td/img/@src")[0]
with open(os.path.join(output, f"{info.object_id}.json"), 'w') as fo:
json.dump(info.to_dict(), fo, indent=2)
target_image = os.path.join(output, f"{info.object_id}.jpg")
if not os.path.isfile(target_image):
WallaceCollection._download_image(image_url=info.image_url,
target_file=target_image,
cookies=image_popup.cookies)
return info
@staticmethod
def _extract_object_ids(input_file):
with open(input_file, 'r') as fi:
text = fi.read()
object_id_pattern = r'objectId=([0-9]+)'
return sorted(list(set(re.findall(object_id_pattern, text))))
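# --- Editor's usage sketch (not part of the original module) ---
# WallaceCollection.scrape() expects an `input_file` CSV whose rows parse_row()
# can turn into ZoteroData entries, and an `output` directory that receives the
# per-object JSON files, the images and "wallace_annotation.csv".  The file and
# directory names below are hypothetical placeholders.
if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    WallaceCollection().scrape(input_file="zotero_export.csv",
                                output="./wallace_output")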
| [
"logging.getLogger",
"pandas.read_csv",
"lxml.html.fromstring",
"os.path.join",
"requests.get",
"os.path.isfile",
"re.findall"
] | [((4656, 4683), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (4673, 4683), False, 'import logging\n'), ((5050, 5098), 'os.path.join', 'os.path.join', (["kwargs['output']", '"""downloaded.txt"""'], {}), "(kwargs['output'], 'downloaded.txt')\n", (5062, 5098), False, 'import os\n'), ((5111, 5149), 'os.path.isfile', 'os.path.isfile', (['download_progress_file'], {}), '(download_progress_file)\n', (5125, 5149), False, 'import os\n'), ((6339, 6427), 'requests.get', 'requests.get', (['f"""{self.__URL_PREFIX}{self.__URL_TEMPLATE}{obj.object_id}"""'], {'cookies': '{}'}), "(f'{self.__URL_PREFIX}{self.__URL_TEMPLATE}{obj.object_id}',\n cookies={})\n", (6351, 6427), False, 'import requests\n'), ((6099, 6155), 'os.path.join', 'os.path.join', (["kwargs['output']", '"""wallace_annotation.csv"""'], {}), "(kwargs['output'], 'wallace_annotation.csv')\n", (6111, 6155), False, 'import os\n'), ((6503, 6529), 'lxml.html.fromstring', 'html.fromstring', (['page.text'], {}), '(page.text)\n', (6518, 6529), False, 'from lxml import html\n'), ((7408, 7453), 'os.path.join', 'os.path.join', (['output', 'f"""{info.object_id}.jpg"""'], {}), "(output, f'{info.object_id}.jpg')\n", (7420, 7453), False, 'import os\n'), ((7477, 7505), 'os.path.isfile', 'os.path.isfile', (['target_image'], {}), '(target_image)\n', (7491, 7505), False, 'import os\n'), ((7991, 8026), 're.findall', 're.findall', (['object_id_pattern', 'text'], {}), '(object_id_pattern, text)\n', (8001, 8026), False, 'import re\n'), ((4961, 4994), 'pandas.read_csv', 'pd.read_csv', (["kwargs['input_file']"], {}), "(kwargs['input_file'])\n", (4972, 4994), True, 'import pandas as pd\n'), ((6920, 6973), 're.findall', 're.findall', (['"""(/eMuseumPlus.*=F)"""', "values['image_url']"], {}), "('(/eMuseumPlus.*=F)', values['image_url'])\n", (6930, 6973), False, 'import re\n'), ((7256, 7302), 'os.path.join', 'os.path.join', (['output', 'f"""{info.object_id}.json"""'], {}), "(output, f'{info.object_id}.json')\n", (7268, 7302), False, 'import os\n'), ((7124, 7157), 'lxml.html.fromstring', 'html.fromstring', (['image_popup.text'], {}), '(image_popup.text)\n', (7139, 7157), False, 'from lxml import html\n')] |
import os
import socket
class NoAvailablePortError(Exception):
pass
def get_available_from_port_range(from_port, to_port):
    """Returns available local port number.
    """
    for port in range(from_port, to_port):
        # a socket cannot be reused after close(), so open a fresh one per port
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            # connect_ex() returns a non-zero error code when nothing is
            # listening on the port, i.e. the port is free for local use
            if sock.connect_ex(('localhost', port)) != 0:
                return port
        finally:
            sock.close()
    raise NoAvailablePortError('No available port between {} and {}'.format(
        from_port, to_port))
def is_ci():
"""Returns if current execution is running on CI
Returns:
bool: `True` if current executions is on CI
"""
return os.getenv('CI', 'false') == 'true'
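# --- Editor's usage sketch (not part of the original module) ---
# Typical use: probe for a free local port before starting a test server.
# The port range below is an arbitrary illustration.
if __name__ == '__main__':
    if is_ci():
        print('Running on CI')
    port = get_available_from_port_range(4444, 4544)
    print('Using local port:', port)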
| [
"socket.socket",
"os.getenv"
] | [((229, 278), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (242, 278), False, 'import socket\n'), ((700, 724), 'os.getenv', 'os.getenv', (['"""CI"""', '"""false"""'], {}), "('CI', 'false')\n", (709, 724), False, 'import os\n')] |
from impacket import ImpactDecoder
class IcmpFilter():
attributes = None
myIpAddresses = None
logger = None
def __init__(self, attributes, logger, myIpAddresses):
self.attributes = attributes
self.logger = logger
self.myIpAddresses = myIpAddresses
def rule(self):
rule = "icmp[icmptype] == icmp-echo"
#rule = "icmp-echo"
#rule = "icmp"
return rule
def run(self, header, payload):
rip = ImpactDecoder.EthDecoder().decode(payload)
#print rip
proto = -1
try:
proto = rip.child().get_ip_p()
except AttributeError:
pass
# NOT ICMP
if proto != 1:
self.logger.warn('got packet that was not ICMP?!')
return None
icmpType = rip.child().child().get_icmp_type()
if(icmpType == rip.child().child().ICMP_ECHOREPLY):
self.logger.warn('got icmp ECHOREPLY?!')
return None
#if(icmpType == rip.child().child().ICMP_ECHO):
# status = 'echo'
dstAddr = rip.child().get_ip_dst()
srcAddr = rip.child().get_ip_src()
message = 'icmp echo request from '+srcAddr+' to '+dstAddr
self.logger.debug("msg rcvd: "+str(message))
return message
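# --- Editor's usage sketch (not part of the original module) ---
# IcmpFilter is meant to be driven by a packet-capture loop: rule() supplies
# the BPF filter string to install on the sniffer, and run(header, payload) is
# the per-packet callback that decodes the raw Ethernet frame.  The
# `attributes` and `myIpAddresses` values below are hypothetical placeholders.
if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    flt = IcmpFilter(attributes={}, logger=logging.getLogger('icmp'),
                     myIpAddresses=['192.0.2.10'])
    print('Install this BPF filter on the capture handle:', flt.rule())
    # a capture library would then call flt.run(pkt_header, raw_bytes) for
    # every frame matching the filter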
| [
"impacket.ImpactDecoder.EthDecoder"
] | [((415, 441), 'impacket.ImpactDecoder.EthDecoder', 'ImpactDecoder.EthDecoder', ([], {}), '()\n', (439, 441), False, 'from impacket import ImpactDecoder\n')] |
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeySequence
class QtHelper:
@staticmethod
def key_event_sequence(event):
val = event.key()
mod = event.modifiers()
if mod & Qt.ShiftModifier:
val += Qt.SHIFT
if mod & Qt.ControlModifier:
val += Qt.CTRL
if mod & Qt.AltModifier:
val += Qt.ALT
if mod & Qt.MetaModifier:
val += Qt.META
return QKeySequence(val) | [
"PyQt5.QtGui.QKeySequence"
] | [((456, 473), 'PyQt5.QtGui.QKeySequence', 'QKeySequence', (['val'], {}), '(val)\n', (468, 473), False, 'from PyQt5.QtGui import QKeySequence\n')] |
import logging
import json
import hypernova
import unittest
import unittest.mock as mock
from hypernova.plugins.dev_mode import DevModePlugin
import utils.mocks as mocks
import utils.plugins as plugins
class TestRenderer(unittest.TestCase):
@mock.patch("requests.post", side_effect=mocks.make_server_timeout)
def test_request_fail(self, mock_post):
renderer = hypernova.Renderer("http://localhost")
html = renderer.render({"component": {"foo": "bar"}})
self.assertIsInstance(html, str)
class TestPlugins(unittest.TestCase):
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_no_plugins(self, mock_post):
renderer = hypernova.Renderer("http://localhost")
html = renderer.render({"component": {"foo": "bar"}})
self.assertEqual(html, "<p>{}</p>".format(json.dumps({"foo": "bar"})))
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_get_view_data(self, mock_post):
expected_data = {"foo": "bar"}
renderer = hypernova.Renderer(
"http://localhost", [plugins.PluginGetViewData("component", expected_data)]
)
html = renderer.render({"component": {}})
data = mock_post.call_args[1]["json"]["component"]["data"]
self.assertEqual(data, expected_data)
self.assertEqual(html, "<p>{}</p>".format(json.dumps(expected_data)))
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_get_view_data_no_return(self, mock_post):
renderer = hypernova.Renderer(
"http://localhost", [plugins.PluginGetViewDataDoNothing()]
)
html = renderer.render({"component": {"foo": "bar"}})
self.assertEqual(html, "<p>{}</p>".format(json.dumps({"foo": "bar"})))
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_prepare_request(self, mock_post):
to_append = {"extra": 1}
renderer = hypernova.Renderer(
"http://localhost", [plugins.PluginPrepareRequest("component", to_append)]
)
renderer.render({"component": {}})
component_data = mock_post.call_args[1]["json"]["component"]
self.assertEqual(component_data.get("extra"), to_append.get("extra"))
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_should_send_request_true(self, mock_post):
renderer = hypernova.Renderer(
"http://localhost", [plugins.PluginShouldSendRequestTrue()]
)
renderer.render({"component": {}})
self.assertTrue(mock_post.called)
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_should_send_request_false(self, mock_post):
renderer = hypernova.Renderer(
"http://localhost", [plugins.PluginShouldSendRequestFalse()]
)
renderer.render({"component": {}})
self.assertFalse(mock_post.called)
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
def test_plugin_should_send_request_any_false(self, mock_post):
renderer = hypernova.Renderer(
"http://localhost",
[
plugins.PluginShouldSendRequestTrue(),
plugins.PluginShouldSendRequestFalse(),
],
)
renderer.render({"component": {}})
self.assertFalse(mock_post.called)
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
@mock.patch.object(
plugins.PluginWillSendRequest, "will_send_request", autospec=True
)
def test_plugin_will_send_request(self, mock_plugin, mock_post):
renderer = hypernova.Renderer(
"http://localhost",
[plugins.PluginShouldSendRequestTrue(), plugins.PluginWillSendRequest()],
)
renderer.render({"component": {}})
self.assertTrue(mock_plugin.called)
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
@mock.patch.object(
plugins.PluginWillSendRequest, "will_send_request", autospec=True
)
def test_plugin_will_send_request_false(self, mock_plugin, mock_post):
renderer = hypernova.Renderer(
"http://localhost",
[plugins.PluginShouldSendRequestFalse(), plugins.PluginWillSendRequest()],
)
renderer.render({"component": {}})
self.assertFalse(mock_plugin.called)
@mock.patch("requests.post", side_effect=mocks.make_response_ok)
@mock.patch.object(plugins.PluginOnSuccess, "on_success", autospec=True)
def test_plugin_on_success(self, mock_plugin, mock_post):
renderer = hypernova.Renderer("http://localhost", [plugins.PluginOnSuccess()])
renderer.render({"component": {}})
self.assertEqual(1, mock_plugin.call_count)
@mock.patch("requests.post", side_effect=mocks.make_response_component_error)
@mock.patch.object(plugins.PluginOnError, "on_error", autospec=True)
def test_plugin_on_error_component(self, mock_plugin, mock_post):
renderer = hypernova.Renderer("http://localhost", [plugins.PluginOnError()])
renderer.render({"component": {}})
self.assertEqual(1, mock_plugin.call_count)
@mock.patch("requests.post", side_effect=mocks.make_response_server_error)
@mock.patch.object(plugins.PluginOnError, "on_error", autospec=True)
def test_plugin_on_error_response(self, mock_plugin, mock_post):
renderer = hypernova.Renderer("http://localhost", [plugins.PluginOnError()])
renderer.render({"component": {}})
self.assertEqual(1, mock_plugin.call_count)
class TestDevModePlugin(unittest.TestCase):
def test_after_response_should_iterate_dict_on_error(self):
logger = logging.getLogger(__name__)
current = {'App': {'name': 'App'}}
resp = DevModePlugin(logger).after_response(current, {})
self.assertTrue('App' in resp)
| [
"logging.getLogger",
"utils.plugins.PluginGetViewData",
"utils.plugins.PluginShouldSendRequestFalse",
"utils.plugins.PluginPrepareRequest",
"json.dumps",
"utils.plugins.PluginGetViewDataDoNothing",
"utils.plugins.PluginOnError",
"utils.plugins.PluginOnSuccess",
"utils.plugins.PluginShouldSendRequestTrue",
"utils.plugins.PluginWillSendRequest",
"hypernova.plugins.dev_mode.DevModePlugin",
"hypernova.Renderer",
"unittest.mock.patch.object",
"unittest.mock.patch"
] | [((249, 315), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_server_timeout'}), "('requests.post', side_effect=mocks.make_server_timeout)\n", (259, 315), True, 'import unittest.mock as mock\n'), ((566, 629), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (576, 629), True, 'import unittest.mock as mock\n'), ((877, 940), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (887, 940), True, 'import unittest.mock as mock\n'), ((1416, 1479), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (1426, 1479), True, 'import unittest.mock as mock\n'), ((1809, 1872), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (1819, 1872), True, 'import unittest.mock as mock\n'), ((2292, 2355), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (2302, 2355), True, 'import unittest.mock as mock\n'), ((2631, 2694), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (2641, 2694), True, 'import unittest.mock as mock\n'), ((2973, 3036), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (2983, 3036), True, 'import unittest.mock as mock\n'), ((3418, 3481), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (3428, 3481), True, 'import unittest.mock as mock\n'), ((3487, 3575), 'unittest.mock.patch.object', 'mock.patch.object', (['plugins.PluginWillSendRequest', '"""will_send_request"""'], {'autospec': '(True)'}), "(plugins.PluginWillSendRequest, 'will_send_request',\n autospec=True)\n", (3504, 3575), True, 'import unittest.mock as mock\n'), ((3915, 3978), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (3925, 3978), True, 'import unittest.mock as mock\n'), ((3984, 4072), 'unittest.mock.patch.object', 'mock.patch.object', (['plugins.PluginWillSendRequest', '"""will_send_request"""'], {'autospec': '(True)'}), "(plugins.PluginWillSendRequest, 'will_send_request',\n autospec=True)\n", (4001, 4072), True, 'import unittest.mock as mock\n'), ((4420, 4483), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_ok'}), "('requests.post', side_effect=mocks.make_response_ok)\n", (4430, 4483), True, 'import unittest.mock as mock\n'), ((4489, 4560), 'unittest.mock.patch.object', 'mock.patch.object', (['plugins.PluginOnSuccess', '"""on_success"""'], {'autospec': '(True)'}), "(plugins.PluginOnSuccess, 'on_success', autospec=True)\n", (4506, 4560), True, 'import unittest.mock as mock\n'), ((4811, 4887), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_component_error'}), 
"('requests.post', side_effect=mocks.make_response_component_error)\n", (4821, 4887), True, 'import unittest.mock as mock\n'), ((4893, 4960), 'unittest.mock.patch.object', 'mock.patch.object', (['plugins.PluginOnError', '"""on_error"""'], {'autospec': '(True)'}), "(plugins.PluginOnError, 'on_error', autospec=True)\n", (4910, 4960), True, 'import unittest.mock as mock\n'), ((5217, 5290), 'unittest.mock.patch', 'mock.patch', (['"""requests.post"""'], {'side_effect': 'mocks.make_response_server_error'}), "('requests.post', side_effect=mocks.make_response_server_error)\n", (5227, 5290), True, 'import unittest.mock as mock\n'), ((5296, 5363), 'unittest.mock.patch.object', 'mock.patch.object', (['plugins.PluginOnError', '"""on_error"""'], {'autospec': '(True)'}), "(plugins.PluginOnError, 'on_error', autospec=True)\n", (5313, 5363), True, 'import unittest.mock as mock\n'), ((379, 417), 'hypernova.Renderer', 'hypernova.Renderer', (['"""http://localhost"""'], {}), "('http://localhost')\n", (397, 417), False, 'import hypernova\n'), ((691, 729), 'hypernova.Renderer', 'hypernova.Renderer', (['"""http://localhost"""'], {}), "('http://localhost')\n", (709, 729), False, 'import hypernova\n'), ((5740, 5767), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (5757, 5767), False, 'import logging\n'), ((842, 868), 'json.dumps', 'json.dumps', (["{'foo': 'bar'}"], {}), "({'foo': 'bar'})\n", (852, 868), False, 'import json\n'), ((1104, 1157), 'utils.plugins.PluginGetViewData', 'plugins.PluginGetViewData', (['"""component"""', 'expected_data'], {}), "('component', expected_data)\n", (1129, 1157), True, 'import utils.plugins as plugins\n'), ((1382, 1407), 'json.dumps', 'json.dumps', (['expected_data'], {}), '(expected_data)\n', (1392, 1407), False, 'import json\n'), ((1614, 1650), 'utils.plugins.PluginGetViewDataDoNothing', 'plugins.PluginGetViewDataDoNothing', ([], {}), '()\n', (1648, 1650), True, 'import utils.plugins as plugins\n'), ((1774, 1800), 'json.dumps', 'json.dumps', (["{'foo': 'bar'}"], {}), "({'foo': 'bar'})\n", (1784, 1800), False, 'import json\n'), ((2032, 2084), 'utils.plugins.PluginPrepareRequest', 'plugins.PluginPrepareRequest', (['"""component"""', 'to_append'], {}), "('component', to_append)\n", (2060, 2084), True, 'import utils.plugins as plugins\n'), ((2491, 2528), 'utils.plugins.PluginShouldSendRequestTrue', 'plugins.PluginShouldSendRequestTrue', ([], {}), '()\n', (2526, 2528), True, 'import utils.plugins as plugins\n'), ((2831, 2869), 'utils.plugins.PluginShouldSendRequestFalse', 'plugins.PluginShouldSendRequestFalse', ([], {}), '()\n', (2867, 2869), True, 'import utils.plugins as plugins\n'), ((3206, 3243), 'utils.plugins.PluginShouldSendRequestTrue', 'plugins.PluginShouldSendRequestTrue', ([], {}), '()\n', (3241, 3243), True, 'import utils.plugins as plugins\n'), ((3261, 3299), 'utils.plugins.PluginShouldSendRequestFalse', 'plugins.PluginShouldSendRequestFalse', ([], {}), '()\n', (3297, 3299), True, 'import utils.plugins as plugins\n'), ((3739, 3776), 'utils.plugins.PluginShouldSendRequestTrue', 'plugins.PluginShouldSendRequestTrue', ([], {}), '()\n', (3774, 3776), True, 'import utils.plugins as plugins\n'), ((3778, 3809), 'utils.plugins.PluginWillSendRequest', 'plugins.PluginWillSendRequest', ([], {}), '()\n', (3807, 3809), True, 'import utils.plugins as plugins\n'), ((4242, 4280), 'utils.plugins.PluginShouldSendRequestFalse', 'plugins.PluginShouldSendRequestFalse', ([], {}), '()\n', (4278, 4280), True, 'import utils.plugins as plugins\n'), ((4282, 4313), 
'utils.plugins.PluginWillSendRequest', 'plugins.PluginWillSendRequest', ([], {}), '()\n', (4311, 4313), True, 'import utils.plugins as plugins\n'), ((4682, 4707), 'utils.plugins.PluginOnSuccess', 'plugins.PluginOnSuccess', ([], {}), '()\n', (4705, 4707), True, 'import utils.plugins as plugins\n'), ((5090, 5113), 'utils.plugins.PluginOnError', 'plugins.PluginOnError', ([], {}), '()\n', (5111, 5113), True, 'import utils.plugins as plugins\n'), ((5492, 5515), 'utils.plugins.PluginOnError', 'plugins.PluginOnError', ([], {}), '()\n', (5513, 5515), True, 'import utils.plugins as plugins\n'), ((5827, 5848), 'hypernova.plugins.dev_mode.DevModePlugin', 'DevModePlugin', (['logger'], {}), '(logger)\n', (5840, 5848), False, 'from hypernova.plugins.dev_mode import DevModePlugin\n')] |
from torch.utils.data import Dataset
import torch
import cv2
import numpy as np
class Dataset(Dataset):
    '''
    Paired low/high resolution image dataset.
    ids = list of image filenames present in both directories
    lr  = path prefix of the low resolution (LR) image directory
    hr  = path prefix of the high resolution (HR) image directory
    '''
    def __init__(self, ids, lr, hr):
'Initialization'
self.dir = ids
self.lr = lr
self.hr = hr
def __len__(self):
return len(self.dir)
def __getitem__(self, index):
filename = self.dir[index]
lower = cv2.imread(self.lr + filename,1)
higher = cv2.imread(self.hr + filename,1)
#transpose so pytorch plays nice
lower= lower.transpose((2, 0, 1))
higher = higher.transpose((2, 0, 1))
#pass numpy arrays to torch and make float tensors.
lower = torch.from_numpy(lower).float()
higher = torch.from_numpy(higher).float()
return lower, higher | [
"cv2.imread",
"torch.from_numpy"
] | [((433, 466), 'cv2.imread', 'cv2.imread', (['(self.lr + filename)', '(1)'], {}), '(self.lr + filename, 1)\n', (443, 466), False, 'import cv2\n'), ((477, 510), 'cv2.imread', 'cv2.imread', (['(self.hr + filename)', '(1)'], {}), '(self.hr + filename, 1)\n', (487, 510), False, 'import cv2\n'), ((689, 712), 'torch.from_numpy', 'torch.from_numpy', (['lower'], {}), '(lower)\n', (705, 712), False, 'import torch\n'), ((733, 757), 'torch.from_numpy', 'torch.from_numpy', (['higher'], {}), '(higher)\n', (749, 757), False, 'import torch\n')] |
#! /usr/bin/env python3
# Suggest a name for the z3 binary based on its sha
import sys
import argparse
from . import common
class WordsCmd(object):
def __init__(self):
self._name = 'words'
self._help = 'Returns random words'
def mk_arg_parser(self, ap):
ap.add_argument('--noun', '-n', help='Noun', action='store_true')
ap.add_argument('--length',
'-l',
type=int,
metavar='LENGTH',
default=None)
ap.add_argument('--adj', '-a', help='Adjective', action='store_true')
ap.add_argument('--seed', '-s', type=str, metavar='SEED', default=None)
return ap
def run(self, args=None):
# pick a noun if adjective is not selected
if not args.adj:
get_a_word_fn = common.get_a_noun
else:
get_a_word_fn = common.get_an_adjective
word = get_a_word_fn(length=args.length, seed=args.seed, bound='atmost')
print(word)
return 0
def main(self, argv):
ap = argparse.ArgumentParser(prog=self._name, description=self._help)
ap = self.mk_arg_parser(ap)
args = ap.parse_args(argv)
return self.run(args)
def main():
cmd = WordsCmd()
return cmd.main(sys.argv[1:])
if __name__ == '__main__':
sys.exit(main())
| [
"argparse.ArgumentParser"
] | [((1090, 1154), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'self._name', 'description': 'self._help'}), '(prog=self._name, description=self._help)\n', (1113, 1154), False, 'import argparse\n')] |
# Imports here
import matplotlib.pyplot as plt
import seaborn as sns
from PIL import Image
import json
import numpy as np
import os, random
import time
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
import argparse
import functions
parser = argparse.ArgumentParser(description='Use Neural Network')
parser.add_argument("image_dir", action="store", default="./flowers/test/6/image_07181.jpg", help="image dir")
parser.add_argument("checkpoint", action="store", default="./checkpoint.pth", help="checkpoint dir")
parser.add_argument("--top_k", action="store", dest="top_k", type=int, default=5, help="top k classes")
parser.add_argument("--category_names", action="store", dest="category_names", default="cat_to_name.json", help="category names")
parser.add_argument("--gpu", action="store_true", dest="gpu", default=False, help="processor")
args = parser.parse_args()
image_dir = args.image_dir
checkpoint = args.checkpoint
top_k = args.top_k
category_names = args.category_names
gpu = args.gpu
print(f"\nimage_dir = {image_dir}"
f"\ncheckpoint = {checkpoint}"
f"\ntop_k = {top_k}"
f"\ncategory_names = {category_names}"
f"\ngpu = {gpu}")
with open(category_names, 'r') as f:
cat_to_name = json.load(f)
model, optimizer = functions.load_checkpoint(checkpoint)
print(model)
functions.sanity_check(model, image_dir, top_k, cat_to_name, gpu) | [
"json.load",
"functions.load_checkpoint",
"argparse.ArgumentParser",
"functions.sanity_check"
] | [((340, 397), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Use Neural Network"""'}), "(description='Use Neural Network')\n", (363, 397), False, 'import argparse\n'), ((1355, 1392), 'functions.load_checkpoint', 'functions.load_checkpoint', (['checkpoint'], {}), '(checkpoint)\n', (1380, 1392), False, 'import functions\n'), ((1407, 1472), 'functions.sanity_check', 'functions.sanity_check', (['model', 'image_dir', 'top_k', 'cat_to_name', 'gpu'], {}), '(model, image_dir, top_k, cat_to_name, gpu)\n', (1429, 1472), False, 'import functions\n'), ((1322, 1334), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1331, 1334), False, 'import json\n')] |
"""
==================
Errorbar Subsample
==================
Demo for the errorevery keyword to show data full accuracy data plots with
few errorbars.
"""
import numpy as np
import matplotlib.pyplot as plt
# example data
x = np.arange(0.1, 4, 0.1)
y1 = np.exp(-1.0 * x)
y2 = np.exp(-0.5 * x)
# example variable error bar values
y1err = 0.1 + 0.1 * np.sqrt(x)
y2err = 0.1 + 0.1 * np.sqrt(x/2)
# Now switch to a more OO interface to exercise more features.
fig, (ax_l, ax_c, ax_r) = plt.subplots(nrows=1, ncols=3,
sharex=True, figsize=(12, 6))
ax_l.set_title('all errorbars')
ax_l.errorbar(x, y1, yerr=y1err)
ax_l.errorbar(x, y2, yerr=y2err)
ax_c.set_title('only every 6th errorbar')
ax_c.errorbar(x, y1, yerr=y1err, errorevery=6)
ax_c.errorbar(x, y2, yerr=y2err, errorevery=6)
ax_r.set_title('second series shifted by 3')
ax_r.errorbar(x, y1, yerr=y1err, errorevery=(0, 6))
ax_r.errorbar(x, y2, yerr=y2err, errorevery=(3, 6))
fig.suptitle('Errorbar subsampling for better appearance')
plt.show()
| [
"numpy.sqrt",
"numpy.exp",
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((228, 250), 'numpy.arange', 'np.arange', (['(0.1)', '(4)', '(0.1)'], {}), '(0.1, 4, 0.1)\n', (237, 250), True, 'import numpy as np\n'), ((256, 272), 'numpy.exp', 'np.exp', (['(-1.0 * x)'], {}), '(-1.0 * x)\n', (262, 272), True, 'import numpy as np\n'), ((278, 294), 'numpy.exp', 'np.exp', (['(-0.5 * x)'], {}), '(-0.5 * x)\n', (284, 294), True, 'import numpy as np\n'), ((487, 547), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(1)', 'ncols': '(3)', 'sharex': '(True)', 'figsize': '(12, 6)'}), '(nrows=1, ncols=3, sharex=True, figsize=(12, 6))\n', (499, 547), True, 'import matplotlib.pyplot as plt\n'), ((1033, 1043), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1041, 1043), True, 'import matplotlib.pyplot as plt\n'), ((352, 362), 'numpy.sqrt', 'np.sqrt', (['x'], {}), '(x)\n', (359, 362), True, 'import numpy as np\n'), ((383, 397), 'numpy.sqrt', 'np.sqrt', (['(x / 2)'], {}), '(x / 2)\n', (390, 397), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from PIL import Image
logger = logging.getLogger(__name__)
class Canvas:
"""
Represents the final composite image.
Think of this as a kind of std::vector for images.
"""
def __init__(self, profile, upper):
"""
Initialise a new canvas.
:param profile: The profile of the device that generated the images
we'll receive.
:param upper: The header and body will be used to start the canvas.
"""
logger.debug('Initialising new canvas using profile %s with upper %s',
profile, upper)
self._profile = profile
self.image = Image.new(self._profile.mode,
(upper.width,
upper.height - self._profile.footer_height))
self.image.paste(upper.crop(
(0, 0, upper.width, upper.height - self._profile.footer_height)),
(0, 0))
def append(self, join):
"""
Add a join to this canvas.
:param join: The lower image will be added. The upper image in the join
should be either the one passed in the constructor if this
is the first join, or the lower in the last append() call.
"""
new_height = (self.image.height - join.lower_crop) + join.spacing + (
join.lower.height - join.upper_crop)
extended = Image.new(self._profile.mode,
(self.image.width, new_height))
        # add the old content (including the join's upper image), cut short if
        # necessary (PIL's paste() requires the image to match a 4-tuple box,
        # so crop explicitly before pasting)
        extended.paste(self.image.crop(
            (0, 0, self.image.width, self.image.height - join.lower_crop)),
            (0, 0))
# add the lower image from the join
extended.paste(join.lower.crop((0, join.upper_crop, join.lower.width,
join.lower.height)),
(0, self.image.height - join.lower_crop + join.spacing))
self.image = extended
def finalise(self, lower):
"""
End this canvas.
:param lower: The image containing the footer to borrow.
"""
footer = self._profile.footer(lower)
extended = Image.new(self._profile.mode,
(self.image.width,
self.image.height + footer.height))
extended.paste(self.image, (0, 0))
extended.paste(footer, (0, self.image.height))
self.image = extended
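# --- Editor's usage sketch (not part of the original module) ---
# The expected call order is Canvas(profile, first_capture), one append() per
# join, then finalise() with the last capture.  The stand-in profile and join
# below are hypothetical: they only model the attributes Canvas actually reads
# (mode, footer_height, footer(), lower, upper_crop, lower_crop, spacing).
if __name__ == '__main__':
    class _Profile:
        mode = 'RGB'
        footer_height = 10
        def footer(self, image):
            return image.crop((0, image.height - self.footer_height,
                               image.width, image.height))
    class _Join:
        def __init__(self, lower):
            self.lower = lower
            self.upper_crop = 20
            self.lower_crop = 0
            self.spacing = 0
    first = Image.new('RGB', (100, 200), 'white')
    second = Image.new('RGB', (100, 200), 'gray')
    canvas = Canvas(_Profile(), first)
    canvas.append(_Join(second))
    canvas.finalise(second)
    print(canvas.image.size)  # (100, 380) with the sizes chosen above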
| [
"logging.getLogger",
"PIL.Image.new"
] | [((112, 139), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (129, 139), False, 'import logging\n'), ((728, 821), 'PIL.Image.new', 'Image.new', (['self._profile.mode', '(upper.width, upper.height - self._profile.footer_height)'], {}), '(self._profile.mode, (upper.width, upper.height - self._profile.\n footer_height))\n', (737, 821), False, 'from PIL import Image\n'), ((1491, 1552), 'PIL.Image.new', 'Image.new', (['self._profile.mode', '(self.image.width, new_height)'], {}), '(self._profile.mode, (self.image.width, new_height))\n', (1500, 1552), False, 'from PIL import Image\n'), ((2322, 2411), 'PIL.Image.new', 'Image.new', (['self._profile.mode', '(self.image.width, self.image.height + footer.height)'], {}), '(self._profile.mode, (self.image.width, self.image.height + footer\n .height))\n', (2331, 2411), False, 'from PIL import Image\n')] |
"""[summary]
Main module.
[description]
The main module starts the web service
"""
from app import app
if __name__ == "__main__":
"""[summary]
[description]
The main module defines exception handler and runs the web service
"""
app.run(host= '0.0.0.0',port=5001)
| [
"app.app.run"
] | [((236, 270), 'app.app.run', 'app.run', ([], {'host': '"""0.0.0.0"""', 'port': '(5001)'}), "(host='0.0.0.0', port=5001)\n", (243, 270), False, 'from app import app\n')] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from math import floor
from maro.backends.frame import node, NodeBase, NodeAttribute
def gen_vessel_definition(stop_nums: tuple):
@node("vessels")
class Vessel(NodeBase):
        # The capacity of the vessel for transferring containers.
capacity = NodeAttribute("i")
# Empty container volume on the vessel.
empty = NodeAttribute("i")
# Laden container volume on the vessel.
full = NodeAttribute("i")
# Remaining space of the vessel.
remaining_space = NodeAttribute("i")
        # Number of empty containers discharged to make room for laden containers.
early_discharge = NodeAttribute("i")
        # Which route the current vessel belongs to.
route_idx = NodeAttribute("i")
        # Stop port index in the route, used to identify where the current vessel is.
        # last_loc_idx == next_loc_idx means the vessel is parked at a port.
last_loc_idx = NodeAttribute("i")
next_loc_idx = NodeAttribute("i")
past_stop_list = NodeAttribute("i", stop_nums[0])
past_stop_tick_list = NodeAttribute("i", stop_nums[0])
future_stop_list = NodeAttribute("i", stop_nums[1])
future_stop_tick_list = NodeAttribute("i", stop_nums[1])
def __init__(self):
self._name = None
self._capacity = None
self._total_space = None
self._container_volume = None
self._route_idx = None
self._empty = None
@property
def name(self) -> str:
"""
Name of vessel (from config)
"""
return self._name
@property
def idx(self) -> int:
"""
Index of vessel
"""
return self.index
def set_init_state(self, name: str, container_volume: float, capacity: int, route_idx: int, empty: int):
"""Initialize vessel info"""
self._name = name
self._container_volume = container_volume
self._total_space = floor(capacity/container_volume)
self._capacity = capacity
self._route_idx = route_idx
self._empty = empty
self.reset()
def reset(self):
"""Reset states of vessel"""
self.capacity = self._capacity
self.route_idx = self._route_idx
self.empty = self._empty
def set_stop_list(self, past_stop_list:list, future_stop_list:list):
"""
            Set the past and future stops (configured in config) when the vessel arrives at a port
Args:
                past_stop_list (list): stops already visited by the vessel
                future_stop_list (list): stops the vessel will visit next
"""
# update past and future stop info
features = [(past_stop_list, self.past_stop_list, self.past_stop_tick_list),
(future_stop_list, self.future_stop_list, self.future_stop_tick_list)]
for feature in features:
for i, stop in enumerate(feature[0]):
tick = stop.arrive_tick if stop is not None else -1
port_idx = stop.port_idx if stop is not None else -1
feature[1][i] = port_idx
feature[2][i] = tick
def _on_empty_changed(self, value):
self._update_remaining_space()
def _on_full_changed(self, value):
self._update_remaining_space()
def _update_remaining_space(self):
self.remaining_space = self._total_space - self.full - self.empty
def __str__(self):
return f"<Vessel Index={self.index}, capacity={self.capacity}, empty={self.empty}, full={self.full}>"
return Vessel | [
"maro.backends.frame.node",
"maro.backends.frame.NodeAttribute",
"math.floor"
] | [((209, 224), 'maro.backends.frame.node', 'node', (['"""vessels"""'], {}), "('vessels')\n", (213, 224), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((333, 351), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (346, 351), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((417, 435), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (430, 435), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((500, 518), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (513, 518), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((587, 605), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (600, 605), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((707, 725), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (720, 725), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((796, 814), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (809, 814), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((994, 1012), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (1007, 1012), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((1036, 1054), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""'], {}), "('i')\n", (1049, 1054), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((1081, 1113), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""', 'stop_nums[0]'], {}), "('i', stop_nums[0])\n", (1094, 1113), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((1144, 1176), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""', 'stop_nums[0]'], {}), "('i', stop_nums[0])\n", (1157, 1176), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((1204, 1236), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""', 'stop_nums[1]'], {}), "('i', stop_nums[1])\n", (1217, 1236), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((1269, 1301), 'maro.backends.frame.NodeAttribute', 'NodeAttribute', (['"""i"""', 'stop_nums[1]'], {}), "('i', stop_nums[1])\n", (1282, 1301), False, 'from maro.backends.frame import node, NodeBase, NodeAttribute\n'), ((2103, 2137), 'math.floor', 'floor', (['(capacity / container_volume)'], {}), '(capacity / container_volume)\n', (2108, 2137), False, 'from math import floor\n')] |
from itertools import product
import numpy as np
import pytest
import sympy as sym
import symopt.config as config
from symopt.problem import OptimizationProblem
tol = 1.0e-8
wrap_using_values = ['lambdify', 'autowrap']
def needs_ipopt(test_func):
def new_test_func(solver, wrap_using):
if solver == 'ipopt' and not config.HAS_IPOPT:
pytest.skip(
"Test requires optional dependency ipopt, which is not "
"installed.")
else:
return test_func(solver, wrap_using)
return new_test_func
@pytest.mark.parametrize("solver,wrap_using",
product(["ipopt", "slsqp"], wrap_using_values))
@needs_ipopt
def test_prob18(solver, wrap_using):
""" problem 18 from the Hock-Schittkowski test suite """
if solver == "ipopt" and not config.HAS_IPOPT:
pytest.skip(
"Test requires optional dependency ipopt, which is not installed.")
x = sym.MatrixSymbol('x', 2, 1)
p = sym.Symbol('p')
prob = OptimizationProblem(mode='min', wrap_using=wrap_using)
prob.add_parameter(p)
prob.add_variable(x, lb=[2, 0], ub=[p, p])
prob.add_constraints_from([x[0] * x[1] >= 25,
x[0] ** 2 + x[1] ** 2 >= 25])
prob.obj = x[0] ** 2 / 100 + x[1] ** 2
x0 = [2, 2]
res_50 = prob.solve(x0, 50, solver=solver, tol=tol)
assert res_50['success']
assert np.allclose(res_50['x'], np.array([15.8114, 1.58114]))
res_20 = prob.solve(x0, 20, solver=solver, tol=tol)
assert res_20['success']
assert np.allclose(res_20['x'], np.array([15.8114, 1.58114]))
@pytest.mark.parametrize("solver,wrap_using",
product(["ipopt", "slsqp"], wrap_using_values))
@needs_ipopt
def test_prob71(solver, wrap_using):
""" problem 71 from the Hock-Schittkowski test suite """
if solver == "ipopt" and not config.HAS_IPOPT:
pytest.skip(
"Test requires optional dependency ipopt, which is not installed.")
x = sym.MatrixSymbol('x', 4, 1)
obj = x[0] * x[3] * (x[0] + x[1] + x[2]) + x[2]
cons = [x[0] * x[1] * x[2] * x[3] >= 25,
sym.Eq(x[0] ** 2 + x[1] ** 2 + x[2] ** 2 + x[3] ** 2, 40)]
lb = np.ones(4)
ub = 5 * np.ones(4)
prob_min = OptimizationProblem(mode='min', wrap_using=wrap_using)
prob_max = OptimizationProblem(mode='max', wrap_using=wrap_using)
prob_min.add_variable(x, lb=lb, ub=ub)
prob_min.add_constraints_from(cons)
prob_min.obj = obj
prob_max.add_variable(x, lb=lb, ub=ub)
prob_max.add_constraints_from(cons)
# test maximization by negating objective
prob_max.obj = -obj
x0 = np.array([1, 5, 5, 1])
res_min = prob_min.solve(x0, solver=solver, tol=tol)
res_max = prob_max.solve(x0, solver=solver, tol=tol)
assert res_min['success']
assert np.allclose(res_min['x'],
np.array([1.0, 4.74299964, 3.82114998, 1.37940831]))
assert res_max['success']
assert np.allclose(res_max['x'], res_min['x'])
assert np.allclose(np.abs(res_max['fun']), np.abs(res_min['fun']))
@pytest.mark.parametrize("solver,wrap_using",
product(["ipopt", "slsqp"], wrap_using_values))
@needs_ipopt
def test_prob64(solver, wrap_using):
""" problem 64 from the Hock-Schittkowski test suite """
x1, x2, x3 = sym.symarray('x', 3)
p = sym.MatrixSymbol('p', 6, 1)
prob = OptimizationProblem(wrap_using=wrap_using)
prob.add_variables_from([x1, x2, x3], lb=1.0e-5)
prob.add_parameter(p)
prob.obj = p[0] * x1 + p[1] / x1 + p[2] * x2 + p[3] / x2 + p[4] * x3 +\
p[5]/x3
prob.add_constraint(4 / x1 + 32 / x2 + 120 / x3 <= 1)
x0 = np.ones(3)
p0 = np.array([5, 50000, 20, 72000, 10, 144000])
res = prob.solve(x0, p0, solver=solver, tol=tol)
assert res['success']
assert np.allclose(res['x'],
np.array([108.7347175, 85.12613942, 204.3247078]))
@pytest.mark.parametrize("solver,wrap_using",
product(["ipopt", "cobyla", "slsqp"],
wrap_using_values))
@needs_ipopt
def test_prob77(solver, wrap_using):
""" problem 77 from the Hock-Schittkowski test suite """
if solver == "ipopt" and not config.HAS_IPOPT:
pytest.skip(
"Test requires optional dependency ipopt, which is not installed.")
x1, x2, x3, x4, x5 = sym.symarray('x', 5)
prob = OptimizationProblem()
prob.add_variables_from([x1, x2, x3, x4, x5])
prob.obj = (x1 - 1) ** 2 + (x1 - x2) ** 2 + (x3 - 1) ** 2 + \
(x4 - 1) ** 4 + (x5 - 1) ** 6
# write equality constraints as two inequalities to try cobyla
cons = [x1 ** 2 * x4 + sym.sin(x4 - x5) - 2 * np.sqrt(2) <= 0,
x1 ** 2 * x4 + sym.sin(x4 - x5) - 2 * np.sqrt(2) >= 0,
x2 + x3 ** 4 * x4 ** 2 - 8 - np.sqrt(2) <= 0,
x2 + x3 ** 4 * x4 ** 2 - 8 - np.sqrt(2) >= 0]
prob.add_constraints_from(cons)
x0 = 2 * np.ones(5)
res = prob.solve(x0, solver=solver, tol=tol)
assert res['success']
assert np.allclose(res['x'],
np.array([1.166172, 1.182111, 1.380257, 1.506036,
0.6109203]))
| [
"numpy.abs",
"sympy.Symbol",
"numpy.allclose",
"sympy.Eq",
"numpy.ones",
"numpy.sqrt",
"sympy.sin",
"itertools.product",
"sympy.symarray",
"sympy.MatrixSymbol",
"numpy.array",
"pytest.skip",
"symopt.problem.OptimizationProblem"
] | [((961, 988), 'sympy.MatrixSymbol', 'sym.MatrixSymbol', (['"""x"""', '(2)', '(1)'], {}), "('x', 2, 1)\n", (977, 988), True, 'import sympy as sym\n'), ((997, 1012), 'sympy.Symbol', 'sym.Symbol', (['"""p"""'], {}), "('p')\n", (1007, 1012), True, 'import sympy as sym\n'), ((1025, 1079), 'symopt.problem.OptimizationProblem', 'OptimizationProblem', ([], {'mode': '"""min"""', 'wrap_using': 'wrap_using'}), "(mode='min', wrap_using=wrap_using)\n", (1044, 1079), False, 'from symopt.problem import OptimizationProblem\n'), ((641, 687), 'itertools.product', 'product', (["['ipopt', 'slsqp']", 'wrap_using_values'], {}), "(['ipopt', 'slsqp'], wrap_using_values)\n", (648, 687), False, 'from itertools import product\n'), ((2021, 2048), 'sympy.MatrixSymbol', 'sym.MatrixSymbol', (['"""x"""', '(4)', '(1)'], {}), "('x', 4, 1)\n", (2037, 2048), True, 'import sympy as sym\n'), ((2226, 2236), 'numpy.ones', 'np.ones', (['(4)'], {}), '(4)\n', (2233, 2236), True, 'import numpy as np\n'), ((2277, 2331), 'symopt.problem.OptimizationProblem', 'OptimizationProblem', ([], {'mode': '"""min"""', 'wrap_using': 'wrap_using'}), "(mode='min', wrap_using=wrap_using)\n", (2296, 2331), False, 'from symopt.problem import OptimizationProblem\n'), ((2347, 2401), 'symopt.problem.OptimizationProblem', 'OptimizationProblem', ([], {'mode': '"""max"""', 'wrap_using': 'wrap_using'}), "(mode='max', wrap_using=wrap_using)\n", (2366, 2401), False, 'from symopt.problem import OptimizationProblem\n'), ((2673, 2695), 'numpy.array', 'np.array', (['[1, 5, 5, 1]'], {}), '([1, 5, 5, 1])\n', (2681, 2695), True, 'import numpy as np\n'), ((2996, 3035), 'numpy.allclose', 'np.allclose', (["res_max['x']", "res_min['x']"], {}), "(res_max['x'], res_min['x'])\n", (3007, 3035), True, 'import numpy as np\n'), ((1701, 1747), 'itertools.product', 'product', (["['ipopt', 'slsqp']", 'wrap_using_values'], {}), "(['ipopt', 'slsqp'], wrap_using_values)\n", (1708, 1747), False, 'from itertools import product\n'), ((3356, 3376), 'sympy.symarray', 'sym.symarray', (['"""x"""', '(3)'], {}), "('x', 3)\n", (3368, 3376), True, 'import sympy as sym\n'), ((3385, 3412), 'sympy.MatrixSymbol', 'sym.MatrixSymbol', (['"""p"""', '(6)', '(1)'], {}), "('p', 6, 1)\n", (3401, 3412), True, 'import sympy as sym\n'), ((3425, 3467), 'symopt.problem.OptimizationProblem', 'OptimizationProblem', ([], {'wrap_using': 'wrap_using'}), '(wrap_using=wrap_using)\n', (3444, 3467), False, 'from symopt.problem import OptimizationProblem\n'), ((3707, 3717), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (3714, 3717), True, 'import numpy as np\n'), ((3727, 3770), 'numpy.array', 'np.array', (['[5, 50000, 20, 72000, 10, 144000]'], {}), '([5, 50000, 20, 72000, 10, 144000])\n', (3735, 3770), True, 'import numpy as np\n'), ((3180, 3226), 'itertools.product', 'product', (["['ipopt', 'slsqp']", 'wrap_using_values'], {}), "(['ipopt', 'slsqp'], wrap_using_values)\n", (3187, 3226), False, 'from itertools import product\n'), ((4410, 4430), 'sympy.symarray', 'sym.symarray', (['"""x"""', '(5)'], {}), "('x', 5)\n", (4422, 4430), True, 'import sympy as sym\n'), ((4442, 4463), 'symopt.problem.OptimizationProblem', 'OptimizationProblem', ([], {}), '()\n', (4461, 4463), False, 'from symopt.problem import OptimizationProblem\n'), ((4031, 4087), 'itertools.product', 'product', (["['ipopt', 'cobyla', 'slsqp']", 'wrap_using_values'], {}), "(['ipopt', 'cobyla', 'slsqp'], wrap_using_values)\n", (4038, 4087), False, 'from itertools import product\n'), ((859, 938), 'pytest.skip', 'pytest.skip', (['"""Test requires optional 
dependency ipopt, which is not installed."""'], {}), "('Test requires optional dependency ipopt, which is not installed.')\n", (870, 938), False, 'import pytest\n'), ((1446, 1474), 'numpy.array', 'np.array', (['[15.8114, 1.58114]'], {}), '([15.8114, 1.58114])\n', (1454, 1474), True, 'import numpy as np\n'), ((1598, 1626), 'numpy.array', 'np.array', (['[15.8114, 1.58114]'], {}), '([15.8114, 1.58114])\n', (1606, 1626), True, 'import numpy as np\n'), ((1919, 1998), 'pytest.skip', 'pytest.skip', (['"""Test requires optional dependency ipopt, which is not installed."""'], {}), "('Test requires optional dependency ipopt, which is not installed.')\n", (1930, 1998), False, 'import pytest\n'), ((2158, 2215), 'sympy.Eq', 'sym.Eq', (['(x[0] ** 2 + x[1] ** 2 + x[2] ** 2 + x[3] ** 2)', '(40)'], {}), '(x[0] ** 2 + x[1] ** 2 + x[2] ** 2 + x[3] ** 2, 40)\n', (2164, 2215), True, 'import sympy as sym\n'), ((2250, 2260), 'numpy.ones', 'np.ones', (['(4)'], {}), '(4)\n', (2257, 2260), True, 'import numpy as np\n'), ((2901, 2952), 'numpy.array', 'np.array', (['[1.0, 4.74299964, 3.82114998, 1.37940831]'], {}), '([1.0, 4.74299964, 3.82114998, 1.37940831])\n', (2909, 2952), True, 'import numpy as np\n'), ((3059, 3081), 'numpy.abs', 'np.abs', (["res_max['fun']"], {}), "(res_max['fun'])\n", (3065, 3081), True, 'import numpy as np\n'), ((3083, 3105), 'numpy.abs', 'np.abs', (["res_min['fun']"], {}), "(res_min['fun'])\n", (3089, 3105), True, 'import numpy as np\n'), ((3907, 3956), 'numpy.array', 'np.array', (['[108.7347175, 85.12613942, 204.3247078]'], {}), '([108.7347175, 85.12613942, 204.3247078])\n', (3915, 3956), True, 'import numpy as np\n'), ((4292, 4371), 'pytest.skip', 'pytest.skip', (['"""Test requires optional dependency ipopt, which is not installed."""'], {}), "('Test requires optional dependency ipopt, which is not installed.')\n", (4303, 4371), False, 'import pytest\n'), ((4994, 5004), 'numpy.ones', 'np.ones', (['(5)'], {}), '(5)\n', (5001, 5004), True, 'import numpy as np\n'), ((5137, 5198), 'numpy.array', 'np.array', (['[1.166172, 1.182111, 1.380257, 1.506036, 0.6109203]'], {}), '([1.166172, 1.182111, 1.380257, 1.506036, 0.6109203])\n', (5145, 5198), True, 'import numpy as np\n'), ((363, 442), 'pytest.skip', 'pytest.skip', (['"""Test requires optional dependency ipopt, which is not installed."""'], {}), "('Test requires optional dependency ipopt, which is not installed.')\n", (374, 442), False, 'import pytest\n'), ((4869, 4879), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4876, 4879), True, 'import numpy as np\n'), ((4927, 4937), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4934, 4937), True, 'import numpy as np\n'), ((4721, 4737), 'sympy.sin', 'sym.sin', (['(x4 - x5)'], {}), '(x4 - x5)\n', (4728, 4737), True, 'import sympy as sym\n'), ((4744, 4754), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4751, 4754), True, 'import numpy as np\n'), ((4788, 4804), 'sympy.sin', 'sym.sin', (['(x4 - x5)'], {}), '(x4 - x5)\n', (4795, 4804), True, 'import sympy as sym\n'), ((4811, 4821), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (4818, 4821), True, 'import numpy as np\n')] |
"""
Tests for the binaries helper module
"""
import os
import pytest
from unittest.mock import call, patch
import kustomize
@patch('builtins.print')
@patch('shutil.which', return_value=None)
@patch('kustomize.helpers.binaries.GithubReleases')
def test_binarypath_download_missing(mock_downloader, mock_which, mock_print):
"""
Does function trigger download of a non-existing binary?
"""
with pytest.raises(FileNotFoundError):
kustomize.helpers.binaries.binarypath('foo', download_if_missing=True)
assert mock_print.mock_calls == [
call('Binary for foo not found. Attempting download ...'),
]
assert mock_downloader.mock_calls == [
call('foo'),
call().download(),
]
@patch('kustomize.helpers.binaries.run')
def test_run_piped_commands(mock_run):
"""
Is list of commands properly executed as a pipe?
"""
command_list = ['foo abc', 'bar -v', 'baz']
kustomize.helpers.binaries.run_piped_commands(command_list)
assert len(mock_run.mock_calls) == 3, \
f"run() not called for each command in '{' | '.join(command_list)}'"
@patch('builtins.print')
@patch('kustomize.helpers.binaries.run_piped_commands')
def test_shell_command(mock_run_piped_commands, mock_print):
"""
Is command printed and then executed?
"""
executable = kustomize.helpers.download.DOWNLOAD_PATH / 'foo'
exec_location = str(executable.parent) + os.path.sep
shell_command = f"{executable} --bar | {executable} baz"
kustomize.helpers.binaries.shell(shell_command)
assert mock_print.called, \
"print() is never called"
assert exec_location not in str(mock_print.mock_calls[0]), \
"Output doesn't seem to be beautified"
assert mock_run_piped_commands.called, \
"run_piped_commands() is not called"
def test_shell_failing_returncode():
"""
Does an invalid command return a failing status code?
"""
result = kustomize.helpers.binaries.shell('/non/existing/command')
assert result.returncode, \
"Non-zero status code expected, zero received"
| [
"kustomize.helpers.binaries.binarypath",
"kustomize.helpers.binaries.run_piped_commands",
"unittest.mock.call",
"kustomize.helpers.binaries.shell",
"pytest.raises",
"unittest.mock.patch"
] | [((129, 152), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {}), "('builtins.print')\n", (134, 152), False, 'from unittest.mock import call, patch\n'), ((154, 194), 'unittest.mock.patch', 'patch', (['"""shutil.which"""'], {'return_value': 'None'}), "('shutil.which', return_value=None)\n", (159, 194), False, 'from unittest.mock import call, patch\n'), ((196, 246), 'unittest.mock.patch', 'patch', (['"""kustomize.helpers.binaries.GithubReleases"""'], {}), "('kustomize.helpers.binaries.GithubReleases')\n", (201, 246), False, 'from unittest.mock import call, patch\n'), ((737, 776), 'unittest.mock.patch', 'patch', (['"""kustomize.helpers.binaries.run"""'], {}), "('kustomize.helpers.binaries.run')\n", (742, 776), False, 'from unittest.mock import call, patch\n'), ((1122, 1145), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {}), "('builtins.print')\n", (1127, 1145), False, 'from unittest.mock import call, patch\n'), ((1147, 1201), 'unittest.mock.patch', 'patch', (['"""kustomize.helpers.binaries.run_piped_commands"""'], {}), "('kustomize.helpers.binaries.run_piped_commands')\n", (1152, 1201), False, 'from unittest.mock import call, patch\n'), ((937, 996), 'kustomize.helpers.binaries.run_piped_commands', 'kustomize.helpers.binaries.run_piped_commands', (['command_list'], {}), '(command_list)\n', (982, 996), False, 'import kustomize\n'), ((1509, 1556), 'kustomize.helpers.binaries.shell', 'kustomize.helpers.binaries.shell', (['shell_command'], {}), '(shell_command)\n', (1541, 1556), False, 'import kustomize\n'), ((1952, 2009), 'kustomize.helpers.binaries.shell', 'kustomize.helpers.binaries.shell', (['"""/non/existing/command"""'], {}), "('/non/existing/command')\n", (1984, 2009), False, 'import kustomize\n'), ((412, 444), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (425, 444), False, 'import pytest\n'), ((454, 524), 'kustomize.helpers.binaries.binarypath', 'kustomize.helpers.binaries.binarypath', (['"""foo"""'], {'download_if_missing': '(True)'}), "('foo', download_if_missing=True)\n", (491, 524), False, 'import kustomize\n'), ((572, 629), 'unittest.mock.call', 'call', (['"""Binary for foo not found. Attempting download ..."""'], {}), "('Binary for foo not found. Attempting download ...')\n", (576, 629), False, 'from unittest.mock import call, patch\n'), ((688, 699), 'unittest.mock.call', 'call', (['"""foo"""'], {}), "('foo')\n", (692, 699), False, 'from unittest.mock import call, patch\n'), ((709, 715), 'unittest.mock.call', 'call', ([], {}), '()\n', (713, 715), False, 'from unittest.mock import call, patch\n')] |
import pathlib
template_cmake_file = pathlib.Path('TestCompileErrors.cmake.t')
output_cmake_file = pathlib.Path('TestCompileErrors.cmake')
test_script = pathlib.Path('TestCompileErrors.py')
script_text = test_script.read_text()
cmake_text = template_cmake_file.read_text().replace("{{TEST_SCRIPT_TEXT}}",script_text)
output_cmake_file.write_text(cmake_text)
| [
"pathlib.Path"
] | [((38, 79), 'pathlib.Path', 'pathlib.Path', (['"""TestCompileErrors.cmake.t"""'], {}), "('TestCompileErrors.cmake.t')\n", (50, 79), False, 'import pathlib\n'), ((100, 139), 'pathlib.Path', 'pathlib.Path', (['"""TestCompileErrors.cmake"""'], {}), "('TestCompileErrors.cmake')\n", (112, 139), False, 'import pathlib\n'), ((154, 190), 'pathlib.Path', 'pathlib.Path', (['"""TestCompileErrors.py"""'], {}), "('TestCompileErrors.py')\n", (166, 190), False, 'import pathlib\n')] |
"""Test cron job functionalities."""
import pytest
import geniepy
@pytest.mark.slow_integration_test
def test_run_predictions():
"""Test calculating predictions."""
geniepy.run_predictions()
@pytest.mark.slow_integration_test
def test_run_job():
"""Test end-to-end run cron job function."""
geniepy.run_job()
| [
"geniepy.run_job",
"geniepy.run_predictions"
] | [((175, 200), 'geniepy.run_predictions', 'geniepy.run_predictions', ([], {}), '()\n', (198, 200), False, 'import geniepy\n'), ((311, 328), 'geniepy.run_job', 'geniepy.run_job', ([], {}), '()\n', (326, 328), False, 'import geniepy\n')] |
import logging
from pathlib import Path
from typing import List, Optional
from preql import settings
from preql.context import context
from preql.utils import SafeDict, dataclass, listgen, method, safezip
from . import exceptions as exc
from . import pql_ast as ast
from . import pql_objects as objects
from . import sql
from .compiler import cast_to_instance
from .exceptions import InsufficientAccessLevel, ReturnSignal, Signal
from .interp_common import (
assert_type,
call_builtin_func,
cast_to_python,
cast_to_python_int,
cast_to_python_string,
dsp,
exclude_fields,
is_global_scope,
pyvalue_inst,
)
from .parser import Str
from .pql_types import Id, Object, T, Type, dp_inst
from .state import (
AccessLevels,
catch_access,
get_access_level,
get_db,
get_var,
has_var,
reduce_access,
set_var,
unique_name,
use_scope,
)
from .types_impl import (
flatten_type,
kernel_type,
pql_repr,
table_flat_for_insert,
table_params,
)
MODULES_PATH = Path(__file__).parent.parent / 'modules'
@dsp
def resolve(struct_def: ast.StructDef):
members = {str(k): resolve(v) for k, v in struct_def.members}
struct = T.struct(members)
set_var(struct_def.name, struct)
return struct
def _resolve_name_and_scope(name, ast_node):
if is_global_scope(context.state):
temporary = False
else:
temporary = True
if len(name.parts) > 1:
raise Signal(
T.NameError,
ast_node,
"Local tables cannot include a schema (namespace)",
)
(name,) = name.parts
name = Id('__local_' + unique_name(name))
name = get_db().qualified_name(name)
return name, temporary
@dsp
def resolve(table_def: ast.TableDef):
name, temporary = _resolve_name_and_scope(table_def.name, table_def)
t = T.table({}, name=name, temporary=temporary)
with use_scope({table_def.name.name: t}): # For self-reference
elems = {c.name: resolve(c) for c in table_def.columns}
t = t(elems)
if table_def.methods:
methods = evaluate(table_def.methods)
t.proto_attrs.update({m.userfunc.name: m.userfunc for m in methods})
return t
@dsp
def resolve(col_def: ast.ColumnDef):
coltype = resolve(col_def.type)
query = col_def.query
assert not query
if isinstance(coltype, objects.SelectedColumnInstance):
table = coltype.parent.type
if 'name' not in table.options:
# XXX better test for persistence
raise Signal.make(
T.TypeError,
col_def.type,
"Tables provided as relations must be persistent.",
)
x = T.t_relation[coltype.type](
rel={'table': table, 'column': coltype.name, 'key': False}
)
return x.replace(
_nullable=coltype.type._nullable
) # inherit is_nullable (TODO: use sumtypes?)
elif coltype <= T.table:
if 'name' not in coltype.options:
# XXX better test for persistence
raise Signal.make(
T.TypeError,
col_def.type,
"Tables provided as relations must be persistent.",
)
x = T.t_relation[T.t_id.as_nullable()](
rel={'table': coltype, 'column': 'id', 'key': True}
)
return x.replace(
_nullable=coltype._nullable
) # inherit is_nullable (TODO: use sumtypes?)
return coltype(default=col_def.default)
@dsp
def resolve(type_: ast.Type):
t = evaluate(type_.type_obj)
if isinstance(t, objects.TableInstance):
t = t.type
if not isinstance(t, (Type, objects.SelectedColumnInstance)):
raise Signal.make(
T.TypeError, type_, f"Expected type in column definition. Instead got '{t}'"
)
if type_.nullable:
t = t.as_nullable()
return t
def db_query(sql_code, subqueries=None):
try:
return get_db().query(sql_code, subqueries)
except exc.DatabaseQueryError as e:
raise Signal.make(T.DbQueryError, None, e.args[0]) from e
def drop_table(table_type):
name = table_type.options['name']
code = sql.compile_drop_table(name)
return db_query(code, {})
@dsp
def _set_value(name: ast.Name, value):
set_var(name.name, value)
@dsp
def _set_value(attr: ast.Attr, value):
raise Signal.make(
T.NotImplementedError, attr, f"Cannot set attribute for {attr.expr.repr()}"
)
def _copy_rows(target_name: ast.Name, source: objects.TableInstance):
if source is objects.EmptyList: # Nothing to add
return objects.null
target = evaluate(target_name)
params = dict(table_params(target.type))
for p in params:
if p not in source.type.elems:
raise Signal.make(
T.TypeError, source, f"Missing column '{p}' in {source.type}"
)
read_only, columns = table_flat_for_insert(target.type)
if get_db().target == sql.bigquery and 'id' in read_only:
# XXX very hacky!
to_exclude = ['id'] if 'id' in source.type.elems else []
proj = ast.Projection(
source,
[
ast.NamedField(
'id',
objects.Instance.make(
sql.RawSql(T.string, 'GENERATE_UUID()'), T.string, []
),
),
ast.NamedField(None, ast.Ellipsis(None, to_exclude)),
],
)
source = cast_to_instance(proj)
read_only.remove('id')
columns.insert(0, 'id')
source = exclude_fields(source, set(read_only) & set(source.type.elems))
table = target.type
try:
table_name = table.options['name']
except KeyError:
raise Signal.make(
T.ValueError, target_name, "Cannot add a new row to an unnamed table"
)
code = sql.Insert(table_name, columns, source.code)
db_query(code, source.subqueries)
return objects.null
@method
def _execute(struct_def: ast.StructDef):
resolve(struct_def)
@method
def _execute(table_def: ast.TableDefFromExpr):
expr = cast_to_instance(table_def.expr)
name, temporary = _resolve_name_and_scope(table_def.name, table_def)
# name = get_db().qualified_name(name)
t = new_table_from_expr(name, expr, table_def.const, temporary)
set_var(table_def.name, t)
@method
def _execute(var_def: ast.SetValue):
res = evaluate(var_def.value)
if res.type <= T.primitive and not res.type <= (T.union[T.aggregated, T.projected]):
res = objects.pyvalue_inst(res.localize(), res.type)
_set_value(var_def.name, res)
return res
def ensure_namespace(name: Id):
path = name.parts[:-1]
if not path:
return
if len(path) > 1:
raise Signal(
T.NotImplementedError, name, "Nested namespaces not supported yet!"
)
(name,) = path
if not has_var(name):
module = objects.Module(name, {})
set_var(name, module)
@method
def _execute(table_def: ast.TableDef):
if table_def.columns and isinstance(table_def.columns[-1], ast.Ellipsis):
ellipsis = table_def.columns.pop()
else:
ellipsis = None
if any(isinstance(c, ast.Ellipsis) for c in table_def.columns):
# XXX why must it? just ensure it appears once
raise Signal.make(T.SyntaxError, table_def, "Ellipsis must appear at the end")
# Create type and a corresponding table in the database
t = resolve(table_def)
db_name = t.options['name']
assert isinstance(db_name, Id), (db_name, t)
if t.options['temporary']:
# register name for later removal
get_var('__unwind__').append(lambda: drop_table(t))
exists = get_db().table_exists(db_name)
if exists:
assert not t.options['temporary']
cur_type = get_db().import_table_type(
db_name, None if ellipsis else set(t.elems) | {'id'}
)
if ellipsis:
elems_to_add = {
Str(n, ellipsis.text_ref): v
for n, v in cur_type.elems.items()
if n not in t.elems
}
# TODO what is primary key isn't included?
t = t({**t.elems, **elems_to_add}, **cur_type.options)
# Auto-add id only if it exists already and not defined by user
if 'id' in cur_type.elems: # and 'id' not in t.elems:
# t = t(dict(id=T.t_id, **t.elems), pk=[['id']])
assert cur_type.elems['id'] <= T.primitive, cur_type.elems['id']
t.elems['id'] = T.t_id
for e_name, e1_type in t.elems.items():
if e_name not in cur_type.elems:
raise Signal.make(
T.TypeError,
table_def,
f"Column '{e_name}' defined, but doesn't exist in database.",
)
# e2_type = cur_type.elems[e_name]
# XXX use can_cast() instead of hardcoding it
# if not (e1_type <= e2_type or (e1_type <= T.t_id and e2_type <= T.int)):
# raise Signal.make(T.TypeError, table_def, f"Cannot cast column '{e_name}' from type '{e2_type}' to '{e1_type}'")
inst = objects.new_table(t, db_name, select_fields=True)
else:
# Auto-add id by default
elems = dict(t.elems)
if 'id' not in elems:
elems = {'id': T.t_id, **elems}
t = t(elems, pk=[['id']])
inst = objects.new_table(t, db_name)
ensure_namespace(table_def.name)
set_var(table_def.name, inst)
if not exists:
sql_code = sql.compile_type_def(db_name, t)
db_query(sql_code)
@method
def _execute(insert_rows: ast.InsertRows):
if not isinstance(insert_rows.name, ast.Name):
# TODO support Attr
raise Signal.make(T.SyntaxError, insert_rows, "L-value must be table name")
rval = evaluate(insert_rows.value)
assert_type(rval.type, T.table, insert_rows, '+=')
return _copy_rows(insert_rows.name, rval)
@method
def _execute(func_def: ast.FuncDef):
func = func_def.userfunc
assert isinstance(func, objects.UserFunction)
new_params = []
for p in func.params:
if p.type:
t = evaluate(p.type)
p = p.replace(type=t)
new_params.append(p)
set_var(func.name, func.replace(params=new_params))
@method
def _execute(p: ast.Print):
display = context.state.display
# TODO Can be done better. Maybe cast to ReprText?
insts = evaluate(p.value)
assert isinstance(insts, list)
for inst in insts:
if inst.type <= T.string:
repr_ = cast_to_python_string(inst)
else:
repr_ = inst.repr()
display.print(repr_, end=" ")
display.print("")
@method
def _execute(p: ast.Assert):
res = cast_to_python(p.cond)
if not res:
# TODO pretty print values
if isinstance(p.cond, ast.Compare):
s = (' %s ' % p.cond.op).join(str(evaluate(a).repr()) for a in p.cond.args)
else:
s = p.cond.repr()
raise Signal.make(T.AssertError, p.cond, f"Assertion failed: {s}")
@method
def _execute(cb: ast.CodeBlock):
for stmt in cb.statements:
execute(stmt)
return objects.null
@method
def _execute(i: ast.If):
cond = cast_to_python(i.cond)
if cond:
execute(i.then)
elif i.else_:
execute(i.else_)
@method
def _execute(w: ast.While):
while cast_to_python(w.cond):
execute(w.do)
@method
def _execute(f: ast.For):
expr = cast_to_python(f.iterable)
for i in expr:
with use_scope({f.var: objects.from_python(i)}):
execute(f.do)
@method
def _execute(t: ast.Try):
try:
execute(t.try_)
except Signal as e:
catch_type = evaluate(t.catch_expr).localize()
if not isinstance(catch_type, Type):
raise Signal.make(
T.TypeError,
t.catch_expr,
f"Catch expected type, got {t.catch_expr.type}",
)
if e.type <= catch_type:
scope = {t.catch_name: e} if t.catch_name else {}
with use_scope(scope):
execute(t.catch_block)
else:
raise
def find_module(module_name):
paths = [MODULES_PATH, Path.cwd()]
for path in paths:
module_path = (path / module_name).with_suffix(".pql")
if module_path.exists():
return module_path
    raise Signal.make(T.ImportError, None, f"Cannot find module {module_name!r}")
def import_module(state, r):
module_path = find_module(r.module_path)
# assert state is state.interp.state # Fix for threaded
i = state.interp.clone(use_core=r.use_core)
state.stacktrace.append(r.text_ref)
try:
i.include(module_path)
finally:
assert state.stacktrace[-1] is r.text_ref
state.stacktrace.pop()
# Inherit module db (in case it called global connect())
# assert state.db is i.state.db
# state.state.db = i.state.db
ns = i.state.ns
assert len(ns) == 1
return objects.Module(r.module_path, ns._ns[0])
@method
def _execute(r: ast.Import):
module = import_module(context.state, r)
set_var(r.as_name or r.module_path, module)
return module
@method
def _execute(r: ast.Return):
value = evaluate(r.value)
raise ReturnSignal(value)
@method
def _execute(t: ast.Throw):
e = evaluate(t.value)
if isinstance(e, ast.Ast):
raise exc.InsufficientAccessLevel()
assert isinstance(e, Exception), e
raise e
def execute(stmt):
if isinstance(stmt, ast.Statement):
return stmt._execute() or objects.null
return evaluate(stmt)
# Simplify performs local operations before any db-specific compilation occurs
# Technically not super useful at the moment, but makes conceptual sense.
@method
def simplify(cb: ast.CodeBlock):
# if len(cb.statements) == 1:
# s ,= cb.statements
# return simplify(s)
try:
return cb._execute()
except ReturnSignal as r:
# XXX is this correct?
return r.value
except Signal as e:
# Failed to run it, so try to cast as instance
# XXX order should be other way around!
if e.type <= T.CastError:
return cb.compile_to_inst()
raise
except InsufficientAccessLevel:
return cb
@method
def simplify(n: ast.Name):
# XXX what happens to caching if this is a global variable?
return get_var(n.name)
@method
def simplify(x: Object):
return x
# @dsp
# def simplify(ls: list):
# return [simplify(i) for i in ls]
# @dsp
# def simplify(d: objects.ParamDict):
# return d.replace(params={name: evaluate( v) for name, v in d.params.items()})
# @dsp
# def simplify(node: ast.Ast):
# # return _simplify_ast(state, node)
# return node
# def _simplify_ast(state, node):
# resolved = {k:simplify(v) for k, v in node
# if isinstance(v, types.PqlObject) or isinstance(v, list) and all(isinstance(i, types.PqlObject) for i in v)}
# return node.replace(**resolved)
# TODO isn't this needed somewhere??
# @dsp
# def simplify(if_: ast.If):
# if_ = _simplify_ast(state, if_)
# if isinstance(if_.cond, objects.ValueInstance): # XXX a more general test?
# if if_.cond.local_value:
# return if_.then
# else:
# return if_.else_
# return if_
# TODO Optimize these, right now failure to evaluate will lose all work
@method
def simplify(obj: ast.Or):
a, b = evaluate(obj.args)
ta = kernel_type(a.type)
tb = kernel_type(b.type)
if ta != tb:
raise Signal.make(
T.TypeError,
obj,
f"'or' operator requires both arguments to be of the same type, but got '{ta}' and '{tb}'.",
)
try:
if a.test_nonzero():
return a
except InsufficientAccessLevel:
return obj
return b
@method
def simplify(obj: ast.And):
a, b = evaluate(obj.args)
ta = kernel_type(a.type)
tb = kernel_type(b.type)
if ta != tb:
raise Signal.make(
T.TypeError,
obj,
f"'and' operator requires both arguments to be of the same type, but got '{ta}' and '{tb}'.",
)
try:
if not a.test_nonzero():
return a
except InsufficientAccessLevel:
return obj
return b
@method
def simplify(obj: ast.Not):
inst = evaluate(obj.expr)
try:
nz = inst.test_nonzero()
except InsufficientAccessLevel:
return obj
return objects.pyvalue_inst(not nz)
@method
def simplify(funccall: ast.FuncCall):
state = context.state
func = evaluate(funccall.func)
if isinstance(func, objects.UnknownInstance):
# evaluate( [a.value for a in funccall.args])
raise Signal.make(
T.TypeError,
funccall.func,
f"Error: Object of type '{func.type}' is not callable",
)
args = funccall.args
if isinstance(func, Type):
# Cast to type
args = args + [func]
func = get_var('cast')
if not isinstance(func, objects.Function):
raise Signal.make(
T.TypeError,
funccall.func,
f"Error: Object of type '{func.type}' is not callable",
)
state.stacktrace.append(funccall.text_ref)
try:
res = eval_func_call(func, args)
finally:
assert state.stacktrace[-1] is funccall.text_ref
state.stacktrace.pop()
assert isinstance(res, Object), (type(res), res)
return res
def eval_func_call(func, args):
state = context.state
assert isinstance(func, objects.Function)
matched_args = func.match_params(args)
if isinstance(func, objects.MethodInstance):
ordered_args = {'this': func.parent}
func = func.func
# args.update(func.parent.all_attrs())
else:
ordered_args = {}
# XXX simplify destroys text_ref, so it harms error messages.
# TODO Can I get rid of it, or make it preserve the text_ref somehow?
# Don't I need an instance to ensure I have type?
for i, (p, a) in enumerate(matched_args):
if not p.name.startswith(
'$'
): # $param means don't evaluate expression, leave it to the function
a = evaluate(a)
# TODO cast?
if p.type and not a.type <= p.type:
raise Signal.make(
T.TypeError,
func,
f"Argument #{i} of '{func.name}' is of type '{a.type}', expected '{p.type}'",
)
ordered_args[p.name] = a
if isinstance(func, objects.InternalFunction):
# TODO ensure pure function?
# TODO Ensure correct types
ordered_args = list(ordered_args.values())
return func.func(*ordered_args)
# TODO make tests to ensure caching was successful
expr = func.expr
if settings.cache:
params = {
name: ast.Parameter(name, value.type)
for name, value in ordered_args.items()
}
sig = (func.name,) + tuple(a.type for a in ordered_args.values())
try:
with use_scope(params):
if sig in state._cache:
compiled_expr = state._cache[sig]
else:
logging.info(f"Compiling.. {func}")
with context(state=reduce_access(AccessLevels.COMPILE)):
compiled_expr = _call_expr(func.expr)
logging.info("Compiled successfully")
if isinstance(compiled_expr, objects.Instance):
# XXX a little ugly
qb = sql.QueryBuilder(True)
x = compiled_expr.code.compile(qb)
x = x.optimize()
compiled_expr = compiled_expr.replace(code=x)
state._cache[sig] = compiled_expr
expr = ast.ResolveParameters(compiled_expr, ordered_args)
except exc.InsufficientAccessLevel:
# Don't cache
pass
with use_scope({**ordered_args, '__unwind__': []}):
res = _call_expr(expr)
# for to_unwind in get_var('__unwind__'):
# to_unwind()
if isinstance(res, ast.ResolveParameters): # XXX A bit of a hack
raise exc.InsufficientAccessLevel()
return res
def _call_expr(expr):
try:
return evaluate(expr)
except ReturnSignal as r:
return r.value
# TODO fix these once we have proper types
@method
def test_nonzero(table: objects.TableInstance):
count = call_builtin_func("count", [table])
return bool(cast_to_python_int(count))
@method
def test_nonzero(inst: objects.Instance):
return bool(cast_to_python(inst))
@method
def test_nonzero(inst: Type):
return True
@method
def apply_database_rw(o: ast.One):
# TODO move these to the core/base module
obj = evaluate(o.expr)
if obj.type <= T.struct:
if len(obj.attrs) != 1:
raise Signal.make(
T.ValueError,
o,
f"'one' expected a struct with a single attribute, got {len(obj.attrs)}",
)
(x,) = obj.attrs.values()
return x
slice_ast = ast.Slice(obj, ast.Range(None, ast.Const(T.int, 2))).set_text_ref(
o.text_ref
)
table = evaluate(slice_ast)
assert table.type <= T.table, table
rows = table.localize() # Must be 1 row
if len(rows) == 0:
if not o.nullable:
raise Signal.make(
T.ValueError,
o,
"'one' expected a single result, got an empty expression",
)
return objects.null
elif len(rows) > 1:
raise Signal.make(T.ValueError, o, "'one' expected a single result, got more")
(row,) = rows
rowtype = T.row[table.type]
if table.type <= T.list:
return pyvalue_inst(row)
assert table.type <= T.table
assert_type(table.type, T.table, o, 'one')
d = {k: pyvalue_inst(v, table.type.elems[k], True) for k, v in row.items()}
return objects.RowInstance(rowtype, d)
@method
def apply_database_rw(d: ast.Delete):
catch_access(AccessLevels.WRITE_DB)
# TODO Optimize: Delete on condition, not id, when possible
cond_table = ast.Selection(d.table, d.conds).set_text_ref(d.text_ref)
table = evaluate(cond_table)
if not table.type <= T.table:
raise Signal.make(T.TypeError, d.table, f"Expected a table. Got: {table.type}")
    if 'name' not in table.type.options:
raise Signal.make(
T.ValueError, d.table, "Cannot delete. Table is not persistent"
)
rows = list(table.localize())
if rows:
if 'id' not in rows[0]:
raise Signal.make(T.TypeError, d, "Delete error: Table does not contain id")
ids = [row['id'] for row in rows]
for code in sql.deletes_by_ids(table, ids):
db_query(code, table.subqueries)
return evaluate(d.table)
@method
def apply_database_rw(u: ast.Update):
catch_access(AccessLevels.WRITE_DB)
# TODO Optimize: Update on condition, not id, when possible
table = evaluate(u.table)
if not table.type <= T.table:
raise Signal.make(T.TypeError, u.table, f"Expected a table. Got: {table.type}")
    if 'name' not in table.type.options:
raise Signal.make(
T.ValueError, u.table, "Cannot update: Table is not persistent"
)
for f in u.fields:
if not f.name:
raise Signal.make(
T.SyntaxError, f, f"Update requires that all fields have a name"
)
# TODO verify table is concrete (i.e. lvalue, not a transitory expression)
update_scope = {n: c for n, c in table.all_attrs().items()}
with use_scope(update_scope):
proj = {f.name: evaluate(f.value) for f in u.fields}
rows = list(table.localize())
if rows:
if 'id' not in rows[0]:
raise Signal.make(T.TypeError, u, "Update error: Table does not contain id")
if not set(proj) < set(rows[0]):
raise Signal.make(
T.TypeError, u, "Update error: Not all keys exist in table"
)
ids = [row['id'] for row in rows]
for code in sql.updates_by_ids(table, proj, ids):
db_query(code, table.subqueries)
# TODO return by ids to maintain consistency, and skip a possibly long query
return table
@method
def apply_database_rw(new: ast.NewRows):
catch_access(AccessLevels.WRITE_DB)
obj = get_var(new.type)
if len(new.args) > 1:
raise Signal.make(
T.NotImplementedError, new, "Not yet implemented"
) # . Requires column-wise table concat (use join and enum)")
if isinstance(obj, objects.UnknownInstance):
(arg,) = new.args
table = evaluate(arg.value)
fakerows = [objects.RowInstance(T.row[table], {'id': T.t_id})]
return ast.List_(T.list[T.int], fakerows).set_text_ref(new.text_ref)
if isinstance(obj, objects.TableInstance):
# XXX Is it always TableInstance? Just sometimes? What's the transition here?
obj = obj.type
assert_type(
obj, T.table, new, "'new' expected an object of type '%s', instead got '%s'"
)
(arg,) = new.args
# TODO postgres can do it better!
table = evaluate(arg.value)
rows = table.localize()
# TODO ensure rows are the right type
cons = TableConstructor.make(obj)
# TODO very inefficient, vectorize this
ids = []
for row in rows:
matched = cons.match_params([objects.from_python(v) for v in row.values()])
ids += [
_new_row(new, obj, matched).primary_key()
] # XXX return everything, not just pk?
# XXX find a nicer way - requires a better typesystem, where id(t) < int
return ast.List_(T.list[T.int], ids).set_text_ref(new.text_ref)
@listgen
def _destructure_param_match(ast_node, param_match):
# TODO use cast rather than a ad-hoc hardwired destructure
for k, v in param_match:
if isinstance(v, objects.RowInstance):
v = v.primary_key()
v = v.localize()
if k.type <= T.struct:
names = [name for name, t in flatten_type(k.orig, [k.name])]
if not isinstance(v, list):
msg = f"Parameter {k.name} received a bad value: {v} (expecting a struct or a list)"
raise Signal.make(T.TypeError, ast_node, msg)
if len(v) != len(names):
msg = f"Parameter {k.name} received a bad value (size of {len(names)})"
raise Signal.make(T.TypeError, ast_node, msg)
yield from safezip(names, v)
else:
yield k.name, v
def _new_value(v, type_):
if isinstance(v, list):
return evaluate(objects.PythonList(v))
return objects.pyvalue_inst(v, type_=type_)
@dsp
def freeze(i: objects.Instance):
return _new_value(cast_to_python(i), type_=i.type)
@dsp
def freeze(i: objects.RowInstance):
return i.replace(attrs={k: freeze(v) for k, v in i.attrs.items()})
def _new_row(new_ast, table, matched):
matched = [(k, freeze(evaluate(v))) for k, v in matched]
destructured_pairs = _destructure_param_match(new_ast, matched)
keys = [name for (name, _) in destructured_pairs]
values = [sql.make_value(v) for (_, v) in destructured_pairs]
# XXX use regular insert?
if 'name' not in table.options:
raise Signal.make(
T.TypeError,
new_ast,
f"'new' expects a persistent table. Instead got a table expression.",
)
if get_db().target == sql.bigquery:
rowid = db_query(sql.FuncCall(T.string, 'GENERATE_UUID', []))
keys += ['id']
values += [sql.make_value(rowid)]
try:
table_name = table.options['name']
except KeyError:
raise Signal.make(
T.ValueError, new_ast, "Cannot add a new row to an unnamed table"
)
q = sql.InsertConsts(table_name, keys, [values])
db_query(q)
if get_db().target != sql.bigquery:
rowid = db_query(sql.LastRowId())
d = SafeDict({'id': objects.pyvalue_inst(rowid)})
d.update({p.name: v for p, v in matched})
return objects.RowInstance(T.row[table], d)
@method
def apply_database_rw(new: ast.New):
catch_access(AccessLevels.WRITE_DB)
obj = get_var(new.type)
# XXX Assimilate this special case
if isinstance(obj, Type) and obj <= T.Exception:
def create_exception(msg):
state = context.state
msg = cast_to_python(msg)
assert new.text_ref is state.stacktrace[-1]
return Signal(
obj, list(state.stacktrace), msg
) # TODO move this to `throw`?
f = objects.InternalFunction(
obj.typename, [objects.Param('message')], create_exception
)
res = evaluate(ast.FuncCall(f, new.args).set_text_ref(new.text_ref))
return res
if not isinstance(obj, objects.TableInstance):
raise Signal.make(
T.TypeError,
new,
f"'new' expects a table or exception, instead got {obj.repr()}",
)
table = obj
# TODO assert tabletype is a real table and not a query (not transient), otherwise new is meaningless
assert_type(
table.type,
T.table,
new,
"'new' expected an object of type '%s', instead got '%s'",
)
cons = TableConstructor.make(table.type)
matched = cons.match_params(new.args)
return _new_row(new, table.type, matched)
@method
def apply_database_rw(x: Object):
return x
@dataclass
class TableConstructor(objects.Function):
"Serves as an ad-hoc constructor function for given table, to allow matching params"
params: List[objects.Param]
param_collector: Optional[objects.Param] = None
name = 'new'
@classmethod
def make(cls, table):
return cls(
[
objects.Param(name, p, p.options.get('default'), orig=p).set_text_ref(
getattr(name, 'text_ref', None)
)
for name, p in table_params(table)
]
)
def add_as_subquery(inst: objects.Instance):
code_cls = sql.TableName if (inst.type <= T.table) else sql.Name
name = unique_name(inst)
return inst.replace(
code=code_cls(inst.code.type, name),
subqueries=inst.subqueries.update({name: inst.code}),
)
@dsp
def evaluate(obj: list):
return [evaluate(item) for item in obj]
@dsp
def evaluate(obj_):
access_level = get_access_level()
# - Generic, non-db related operations
obj = obj_.simplify()
assert obj, obj_
if access_level < AccessLevels.COMPILE:
return obj
# - Compile to instances with db-specific code (sql)
# . Compilation may fail (e.g. due to lack of DB access)
# . Resulting code generic within the same database, and can be cached
# obj = compile_to_inst(state.reduce_access(state.AccessLevels.COMPILE), obj)
obj = obj.compile_to_inst()
if access_level < AccessLevels.EVALUATE:
return obj
# - Resolve parameters to "instantiate" the cached code
# TODO necessary?
if isinstance(obj, ast.Parameter):
obj = get_var(obj.name)
if access_level < AccessLevels.READ_DB:
return obj
# - Apply operations that read or write the database (delete, insert, update, one, etc.)
obj = obj.apply_database_rw()
assert not isinstance(obj, (ast.ResolveParameters, ast.ParameterizedSqlCode)), obj
return obj
#
# localize()
# -------------
#
# Return the local value of the expression. Only requires computation if the value is an instance.
#
@method
def localize(inst: objects.AbsInstance):
raise NotImplementedError(inst)
@method
def localize(inst: objects.AbsStructInstance):
return {k: evaluate(v).localize() for k, v in inst.attrs.items()}
@method
def localize(inst: objects.Instance):
# TODO This protection doesn't work for unoptimized code
# Cancel unoptimized mode? Or leave this unprotected?
# state.require_access(state.AccessLevels.WRITE_DB)
if inst.code is sql.null:
return None
return db_query(inst.code, inst.subqueries)
@method
def localize(inst: objects.ValueInstance):
return inst.local_value
@method
def localize(inst: objects.SelectedColumnInstance):
# XXX is this right?
p = evaluate(inst.parent)
return p.get_attr(inst.name)
@method
def localize(x: Object):
return x
def new_table_from_rows(name, columns, rows, temporary):
# TODO check table doesn't exist
name = Id(name)
tuples = [[sql.make_value(i) for i in row] for row in rows]
# TODO refactor into function?
elems = {c: v.type.as_nullable() for c, v in zip(columns, tuples[0])}
elems = {'id': T.t_id, **elems}
table = T.table(elems, temporary=temporary, pk=[['id']], name=name)
db_query(sql.compile_type_def(name, table))
db_query(sql.InsertConsts(name, columns, tuples))
return objects.new_table(table)
def new_table_from_expr(name, expr, const, temporary):
assert isinstance(name, Id)
elems = expr.type.elems
if any(t <= T.unknown for t in elems.values()):
return objects.TableInstance.make(sql.null, expr.type, [])
if 'id' in elems and not const:
msg = "Field 'id' already exists. Rename it, or use 'const table' to copy it as-is."
raise Signal.make(T.NameError, None, msg)
table = T.table(
dict(elems), name=name, pk=[] if const else [['id']], temporary=temporary
)
if not const:
table.elems['id'] = T.t_id
db_query(sql.compile_type_def(name, table))
if temporary:
get_var('__unwind__').append(lambda: drop_table(table))
read_only, flat_columns = table_flat_for_insert(table)
if get_db().target == sql.bigquery and 'id' in read_only:
# XXX very hacky!
to_exclude = ['id'] if 'id' in expr.type.elems else []
proj = ast.Projection(
expr,
[
ast.NamedField(
'id',
objects.Instance.make(
sql.RawSql(T.string, 'GENERATE_UUID()'), T.string, []
),
),
ast.NamedField(None, ast.Ellipsis(None, to_exclude)),
],
)
expr = cast_to_instance(proj)
read_only.remove('id')
flat_columns.insert(0, 'id')
expr = exclude_fields(expr, set(read_only) & set(elems))
db_query(sql.Insert(name, flat_columns, expr.code), expr.subqueries)
return objects.new_table(table)
# cast_to_python - make sure the value is a native python object, not a preql instance
@dsp
def cast_to_python(obj):
raise Signal.make(T.TypeError, None, f"Unexpected value: {pql_repr(obj.type, obj)}")
@dsp
def cast_to_python(obj: ast.Ast):
inst = cast_to_instance(obj)
return cast_to_python(inst)
@dsp
def cast_to_python(obj: objects.AbsInstance):
# if state.access_level <= state.AccessLevels.QUERY:
if obj.type <= T.projected | T.aggregated:
raise exc.InsufficientAccessLevel(get_access_level())
# raise Signal.make(T.CastError, None, f"Internal error. Cannot cast projected obj: {obj}")
res = obj.localize()
if obj.type == T.float:
res = float(res)
elif obj.type == T.int:
res = int(res)
elif obj.type == T.bool:
assert res in (0, 1), res
res = bool(res)
return res
### Added functions
def function_localize_keys(self, struct):
return cast_to_python(struct)
objects.Function._localize_keys = function_localize_keys
def instance_repr(self):
return pql_repr(self.type, self.localize())
objects.Instance.repr = instance_repr
@dp_inst
def post_instance_getattr(inst, p: T.property):
return eval_func_call(objects.MethodInstance(inst, p.func), [])
| [
"pathlib.Path.cwd",
"preql.utils.safezip",
"logging.info",
"pathlib.Path"
] | [((12396, 12406), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (12404, 12406), False, 'from pathlib import Path\n'), ((1039, 1053), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1043, 1053), False, 'from pathlib import Path\n'), ((26856, 26873), 'preql.utils.safezip', 'safezip', (['names', 'v'], {}), '(names, v)\n', (26863, 26873), False, 'from preql.utils import SafeDict, dataclass, listgen, method, safezip\n'), ((19437, 19472), 'logging.info', 'logging.info', (['f"""Compiling.. {func}"""'], {}), "(f'Compiling.. {func}')\n", (19449, 19472), False, 'import logging\n'), ((19632, 19669), 'logging.info', 'logging.info', (['"""Compiled successfully"""'], {}), "('Compiled successfully')\n", (19644, 19669), False, 'import logging\n')] |
#!/usr/bin/env python3
import pytest
import pathlib
import json
TESTVECTORS_PATH = pathlib.Path(__file__).parent.joinpath("testvectors.json")
# from typing import Dict, Iterable, List, Optional, Sequence, Set, Tuple, TypeVar, Union
from typing import List, Tuple
from bip39 import (
encode_bytes,
decode_phrase,
phrase_to_seed,
EncodingError,
DecodingError,
)
########################################################################################################################
### Testcases encoding and decoding of BIP39 mnemonic phrases ##########################################################
########################################################################################################################
def load_test_vectors() -> List[Tuple[bytes, str, str]]:
"""Load the BIP39 test vectors from local storage.
Original source for test vectors: https://github.com/trezor/python-mnemonic/blob/master/vectors.json
"""
with open(TESTVECTORS_PATH) as f:
vectors = json.load(f)
return [(bytes.fromhex(entropy), phrase, seed) for entropy, phrase, seed, _ in vectors["english"]]
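# Each entry under vectors["english"] is a 4-element list
# [entropy_hex, mnemonic_phrase, seed_hex, bip32_root_key]; the last field is ignored here.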
TEST_VECTORS = load_test_vectors()
@pytest.mark.parametrize("entropy, phrase, seed", TEST_VECTORS)
def test_encode_bytes__official_vectors(entropy: bytes, phrase: str, seed: str):
assert encode_bytes(entropy) == phrase
@pytest.mark.parametrize("entropy, phrase, seed", TEST_VECTORS)
def test_decode_phrase__official_vectors(entropy: bytes, phrase: str, seed: str):
assert decode_phrase(phrase) == entropy
@pytest.mark.parametrize("entropy, phrase, seed", TEST_VECTORS)
def test_seed_phrase__official_vectors(entropy: bytes, phrase: str, seed: str):
    assert phrase_to_seed(phrase, passphrase="TREZOR").hex() == seed  # the official BIP39 vectors use the fixed passphrase "TREZOR"
### Additional tests ###
PHRASES = [
"any pitch edit post web arm gun cradle goose card aim absorb",
"key water you run rent pen hub key learn tank sunset air echo letter adapt",
"pupil year card short mean weird inch shy fun bid joy slot only use cry gap fox aisle",
"jar kite sand indoor crowd spot label aim clay mix job gas kite can bomb wink ten emerge fly car also",
"eight era guard oak fox rent day fee pool kid noble one pact bag slab april ugly job law razor blur try dose quiz",
]
PHRASES_INVALID_LENGTH = [" ".join(["any"] * i) for i in range(1, 65) if i not in {12, 15, 18, 21, 24}]
PHRASES_INVALID_CHECKSUM = [
"any pitch edit post web arm gun cradle goose card aim abuse",
"key water you run rent pen hub key learn tank sunset air echo letter add",
"pupil year card short mean weird inch shy fun bid joy slot only use cry gap fox alarm",
"jar kite sand indoor crowd spot label aim clay mix job gas kite can bomb wink ten emerge fly car alter",
"eight era guard oak fox rent day fee pool kid noble one pact bag slab april ugly job law razor blur try dose quit",
]
PHRASES_INVALID_LENGTH_VALID_CHECKSUM = [
"",
"easy swamp table",
"master auction transfer old lesson chief",
"fuel absurd regular mandate kingdom valley miss start away",
"achieve potato wine chase erupt machine quality ozone brick state attract equal "
"canvas city weather infant acid stable grocery interest cruel diagram bid guide "
"nut minimum broccoli",
"truth rely once wild toast oxygen birth ugly spawn journey finger steak "
"liberty tank fashion garbage swift bike safe rate since always attract chef "
"final film clutch talk isolate tide",
"chef recall shield table almost athlete barely absorb control oval vehicle proud "
"debate rally vapor barely pioneer lion state chest fine volume comic romance "
"cigar answer thing struggle mad abandon prefer bronze minor",
"tail obey flash palace furnace blast faculty narrow number imitate slab state "
"copper sister media ski dose achieve glad dirt want nasty giggle arrive "
"grit fame parent oil mix mean repeat wealth summer daughter strategy corn",
"honey walk trouble embark curious nation twin make dress smoke novel tank "
"manage improve math tone sea purpose ozone junk rate language play milk "
"term tray breeze skate couple barrel find void retire rapid armor brisk "
"guess pistol desk",
"accident purpose arm private excuse mesh useless depth party check drill inhale "
"pink caught clerk minor uphold crush snack glory enforce hedgehog jealous prevent "
"object ethics stage faith wage require turtle truth picture stove disease struggle "
"strike gadget stereo beauty bachelor promote",
"wisdom multiply crazy embrace connect wave toast shallow marine exchange frequent grief "
"mouse genius where play twin romance lecture lunch rather pattern deer dinner "
"romance jealous mutual food faint bamboo dignity carpet rice wisdom minimum offer "
"drastic ready arm estate bulk squirrel goose square creek",
"walnut cattle virtual admit fiction kitchen alpha fiction pudding mirror egg target "
"grace snake casino pigeon hazard afford bulk estate maze rival prosper vibrant "
"wrong begin wheel wear accident anger hip length chimney royal primary library "
"frequent member gym phone result noodle unique design slice already display match",
]
PHRASES_INVALID_WORDS = [
"any pitch edit post web arm gun cradle goose card aim abrasivo",
"key water you run rent pen hub key learn tank sunset air echo letter activo",
"pupil year card short mean weird inch shy fun bid joy slot only use cry gap fox adopter",
"jar kite sand indoor crowd spot label aim clay mix job gas kite can bomb wink ten emerge fly car allagato",
"eight era guard oak fox rent day fee pool kid noble one pact bag slab april ugly job law razor blur try dose peón",
]
@pytest.mark.parametrize("phrase", PHRASES)
def test_encode_decode(phrase: str):
entropy = decode_phrase(phrase)
assert encode_bytes(entropy) == phrase
@pytest.mark.parametrize(
"num_bytes, num_words",
((bits // 8, num_words) for bits, num_words in [(128, 12), (160, 15), (192, 18), (224, 21), (256, 24)]),
)
def test_encode_bytes__valid_length(num_bytes: int, num_words: int):
phrase = encode_bytes(b"\x00" * num_bytes)
assert len(phrase.split()) == num_words
@pytest.mark.parametrize(
"num_bytes", (num_bytes for num_bytes in range(64) if num_bytes * 8 not in {128, 160, 192, 224, 256})
)
def test_encode_bytes__invalid_length(num_bytes: int):
with pytest.raises(EncodingError):
encode_bytes(b"\x00" * num_bytes)
@pytest.mark.parametrize("phrase", PHRASES_INVALID_LENGTH)
def test_decode_phrase__invalid_length(phrase: str):
with pytest.raises(DecodingError):
decode_phrase(phrase)
@pytest.mark.parametrize("phrase", PHRASES_INVALID_CHECKSUM)
def test_decode_phrase__invalid_checksum(phrase: str):
with pytest.raises(DecodingError):
decode_phrase(phrase)
@pytest.mark.parametrize("phrase", PHRASES_INVALID_WORDS)
def test_decode_phrase__invalid_words(phrase: str):
with pytest.raises(DecodingError):
decode_phrase(phrase)
| [
"pathlib.Path",
"bip39.decode_phrase",
"bip39.encode_bytes",
"pytest.mark.parametrize",
"pytest.raises",
"bip39.phrase_to_seed",
"json.load"
] | [((1188, 1250), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""entropy, phrase, seed"""', 'TEST_VECTORS'], {}), "('entropy, phrase, seed', TEST_VECTORS)\n", (1211, 1250), False, 'import pytest\n'), ((1378, 1440), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""entropy, phrase, seed"""', 'TEST_VECTORS'], {}), "('entropy, phrase, seed', TEST_VECTORS)\n", (1401, 1440), False, 'import pytest\n'), ((1570, 1632), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""entropy, phrase, seed"""', 'TEST_VECTORS'], {}), "('entropy, phrase, seed', TEST_VECTORS)\n", (1593, 1632), False, 'import pytest\n'), ((5774, 5816), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""phrase"""', 'PHRASES'], {}), "('phrase', PHRASES)\n", (5797, 5816), False, 'import pytest\n'), ((5936, 6097), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""num_bytes, num_words"""', '((bits // 8, num_words) for bits, num_words in [(128, 12), (160, 15), (192,\n 18), (224, 21), (256, 24)])'], {}), "('num_bytes, num_words', ((bits // 8, num_words) for\n bits, num_words in [(128, 12), (160, 15), (192, 18), (224, 21), (256, 24)])\n )\n", (5959, 6097), False, 'import pytest\n'), ((6535, 6592), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""phrase"""', 'PHRASES_INVALID_LENGTH'], {}), "('phrase', PHRASES_INVALID_LENGTH)\n", (6558, 6592), False, 'import pytest\n'), ((6718, 6777), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""phrase"""', 'PHRASES_INVALID_CHECKSUM'], {}), "('phrase', PHRASES_INVALID_CHECKSUM)\n", (6741, 6777), False, 'import pytest\n'), ((6905, 6961), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""phrase"""', 'PHRASES_INVALID_WORDS'], {}), "('phrase', PHRASES_INVALID_WORDS)\n", (6928, 6961), False, 'import pytest\n'), ((5868, 5889), 'bip39.decode_phrase', 'decode_phrase', (['phrase'], {}), '(phrase)\n', (5881, 5889), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((6182, 6215), 'bip39.encode_bytes', 'encode_bytes', (["(b'\\x00' * num_bytes)"], {}), "(b'\\x00' * num_bytes)\n", (6194, 6215), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((1032, 1044), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1041, 1044), False, 'import json\n'), ((1343, 1364), 'bip39.encode_bytes', 'encode_bytes', (['entropy'], {}), '(entropy)\n', (1355, 1364), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((1534, 1555), 'bip39.decode_phrase', 'decode_phrase', (['phrase'], {}), '(phrase)\n', (1547, 1555), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((5901, 5922), 'bip39.encode_bytes', 'encode_bytes', (['entropy'], {}), '(entropy)\n', (5913, 5922), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((6460, 6488), 'pytest.raises', 'pytest.raises', (['EncodingError'], {}), '(EncodingError)\n', (6473, 6488), False, 'import pytest\n'), ((6498, 6531), 'bip39.encode_bytes', 'encode_bytes', (["(b'\\x00' * num_bytes)"], {}), "(b'\\x00' * num_bytes)\n", (6510, 6531), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((6655, 6683), 'pytest.raises', 'pytest.raises', (['DecodingError'], {}), '(DecodingError)\n', (6668, 6683), False, 'import pytest\n'), ((6693, 6714), 'bip39.decode_phrase', 'decode_phrase', (['phrase'], {}), 
'(phrase)\n', (6706, 6714), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((6842, 6870), 'pytest.raises', 'pytest.raises', (['DecodingError'], {}), '(DecodingError)\n', (6855, 6870), False, 'import pytest\n'), ((6880, 6901), 'bip39.decode_phrase', 'decode_phrase', (['phrase'], {}), '(phrase)\n', (6893, 6901), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((7023, 7051), 'pytest.raises', 'pytest.raises', (['DecodingError'], {}), '(DecodingError)\n', (7036, 7051), False, 'import pytest\n'), ((7061, 7082), 'bip39.decode_phrase', 'decode_phrase', (['phrase'], {}), '(phrase)\n', (7074, 7082), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n'), ((86, 108), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (98, 108), False, 'import pathlib\n'), ((1724, 1771), 'bip39.phrase_to_seed', 'phrase_to_seed', (['phrase'], {'passphrase': '"""<PASSWORD>"""'}), "(phrase, passphrase='<PASSWORD>')\n", (1738, 1771), False, 'from bip39 import encode_bytes, decode_phrase, phrase_to_seed, EncodingError, DecodingError\n')] |
import os
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Thymios paths
#EMOBASE_PATH = '/home/george/extracted_features/linear/SAVEE_linear_emobase2010'
#NL_FEATURE_PATH = '/home/george/extracted_features/utterance'
#PYUNICORN_PATH = '/home/thymios/Desktop/Research/pyunicorn/'
#SAVEE_PATH = '/home/thymios/Research/datasets/SAVEE/'
#BERLIN_PATH = '/home/thymios/Desktop/Research/datasets/emodb/'
#IEMOCAP_PATH = '/home/thymios/Desktop/Research/datasets/IEMOCAP/'
#EXTRACTED_FEATURES_PATH = '/home/thymios/Desktop/Research/data/'
#OPENSMILE_CONFIG_PATH = '/home/thymios/Desktop/Research/opensmile-2.3' \
# '.0/config/emobase2010.conf'
# Paths I will need
EXTRACTED_FEATURES_PATH = '/home/teo/Documents/Thesis/Features/'
IEMOCAP_PATH = '/home/teo/Documents/Thesis/IEMOCAP/'
OPENSMILE_CONFIG_PATH = '/home/teo/Documents/Thesis/opensmile-2.3.0/config/emobase2010.conf' | [
"os.path.abspath"
] | [((55, 80), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (70, 80), False, 'import os\n')] |
import argparse
import logging
import os
from typing import (
Callable,
Dict,
Final,
Iterable,
List,
NamedTuple,
Optional,
Sequence,
Tuple,
Union,
)
import matplotlib.patches as patches # type: ignore
import numpy as np
from pytorch_lightning.callbacks import ProgressBar
from rich.console import Console
from rich.progress import (
BarColumn,
Progress,
ProgressColumn,
ProgressType,
StyleType,
TextColumn,
TimeElapsedColumn,
TimeRemainingColumn,
)
logger = logging.getLogger(__name__)
# DISTRIBUTIONS COPIED FROM Hewitt and Manning's Structural Probes project
PTB_TRAIN_EMPIRICAL_POS_DISTRIBUTION: Final = [
0.00003789361998,
0.00006105083219,
0.0001021022538,
0.0001494692788,
0.0001768368932,
0.0002463085299,
0.0003894622053,
0.0004747228503,
0.0009083942789,
0.001437852358,
0.001448378364,
0.001860997781,
0.00204941328,
0.002255722989,
0.002487295111,
0.002802022677,
0.002813601283,
0.003408320597,
0.004519866783,
0.005023009848,
0.00728294324,
0.007465043136,
0.007759771291,
0.008849212865,
0.009158677428,
0.01031864324,
0.01314803353,
0.01562690784,
0.01835314328,
0.02107727351,
0.02281195923,
0.02353299061,
0.02520662549,
0.02782865347,
0.03146117799,
0.03259903919,
0.03849149709,
0.04155456471,
0.05129006724,
0.06300445882,
0.06443704817,
0.08614693462,
0.09627716236,
0.1037379951,
0.1399274548,
]
PTB_DEV_EMPIRICAL_DEPTH_DICT: Final = {
14: 0.00009970835307,
13: 0.000373906324,
12: 0.0007228855597,
11: 0.001395916943,
10: 0.003938479946,
9: 0.007702470274,
8: 0.01570406561,
7: 0.02921454745,
0: 0.04237605005,
6: 0.05309469801,
5: 0.08729466311,
4: 0.1302440362,
3: 0.183563078,
1: 0.2192088142,
2: 0.22506668,
}
PTB_DEV_EMPIRICAL_DEPTH_keys: Final = list(sorted(PTB_DEV_EMPIRICAL_DEPTH_DICT.keys()))
PTB_DEV_EMPIRICAL_DEPTH_DISTRIBUTION: Final = [
PTB_DEV_EMPIRICAL_DEPTH_DICT[x] for x in PTB_DEV_EMPIRICAL_DEPTH_keys
]
PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_dict: Final = {
-44: 7.690651244347372e-06,
-3: 0.047819370772888475,
-2: 0.1088534777124927,
-1: 0.277384211752194,
4: 0.035580248649741374,
1: 0.17205854563192982,
3: 0.06036172428795556,
2: 0.09961151224571411,
-4: 0.02238199244997781,
15: 0.003433326448369362,
6: 0.01574166443271559,
7: 0.011697480542652352,
8: 0.009206808203947281,
11: 0.00579765237377444,
-13: 0.0016556873464616411,
-11: 0.002414864490725075,
5: 0.022290803299509117,
-8: 0.004191404928169318,
19: 0.0021665663219790025,
-7: 0.005423007791728375,
-5: 0.012027079881695811,
9: 0.00793565341970301,
22: 0.0015447222356503435,
-10: 0.0029543087422928688,
-19: 0.0007163292301877837,
-6: 0.00748410232521347,
12: 0.004976950019556227,
35: 0.0003317966679704152,
13: 0.004389164531595393,
18: 0.002396187194845945,
-9: 0.0034783716913719684,
28: 0.0008723395840016876,
43: 0.00011865576205564516,
-17: 0.0009151874980773372,
-12: 0.0020545025467042263,
26: 0.0009964886683747236,
25: 0.0011404137130903674,
-23: 0.0003471779704591099,
-26: 0.00023731152411129032,
20: 0.001866630923449455,
34: 0.00038343389775389035,
10: 0.006666695964385693,
36: 0.0002955407406756347,
-22: 0.00042518314736606183,
-15: 0.0012920294090503583,
-21: 0.0005306549358599686,
16: 0.0030652738531041666,
17: 0.0026005387850528898,
-16: 0.001105256450259065,
14: 0.003947501417277158,
23: 0.001423869144667742,
-20: 0.0005767988433260529,
21: 0.0017677511217364173,
32: 0.00048780702178431896,
38: 0.0002647781356982452,
37: 0.0002450021753556377,
50: 4.834123639304062e-05,
46: 6.042654549130078e-05,
31: 0.0005910814813512694,
-14: 0.0015601035381390383,
27: 0.0009470487675182048,
45: 0.00010107713063999403,
24: 0.0012953254024407929,
42: 0.00013623439347129629,
29: 0.000745993170701695,
40: 0.00020654891913390083,
41: 0.00013953038686173087,
47: 5.49332231739098e-05,
30: 0.0006273374086460499,
-18: 0.0008174063608277777,
56: 1.7578631415651135e-05,
-35: 4.1749249612171444e-05,
-27: 0.0001658983339852076,
39: 0.00019885826788955345,
33: 0.0004647350680512769,
-31: 8.789315707825567e-05,
57: 2.1973289269563917e-05,
61: 1.867729587912933e-05,
-30: 0.00011975442651912336,
44: 8.239983476086469e-05,
-24: 0.00028455409604085275,
-29: 0.000106570452957385,
-25: 0.0002614821423078106,
65: 8.789315707825568e-06,
49: 4.834123639304062e-05,
51: 3.186126944086768e-05,
62: 1.0986644634781959e-05,
90: 1.098664463478196e-06,
-36: 3.405859836782407e-05,
-28: 0.00013953038686173087,
-38: 2.1973289269563917e-05,
-33: 6.921586119912634e-05,
52: 2.3071953733042113e-05,
55: 1.867729587912933e-05,
72: 4.394657853912784e-06,
73: 3.295993390434588e-06,
77: 2.197328926956392e-06,
85: 1.098664463478196e-06,
48: 5.603188763738799e-05,
68: 5.493322317390979e-06,
-32: 6.482120334521356e-05,
-40: 1.4282638025216547e-05,
53: 2.417061819652031e-05,
54: 2.5269282659998507e-05,
100: 1.098664463478196e-06,
-34: 6.372253888173536e-05,
-39: 2.3071953733042113e-05,
-48: 3.295993390434588e-06,
-37: 2.3071953733042113e-05,
-67: 1.098664463478196e-06,
-64: 2.197328926956392e-06,
-63: 1.098664463478196e-06,
-59: 1.098664463478196e-06,
-41: 9.887980171303763e-06,
58: 1.2085309098260154e-05,
-47: 3.295993390434588e-06,
59: 9.887980171303763e-06,
60: 9.887980171303763e-06,
63: 1.0986644634781959e-05,
67: 3.295993390434588e-06,
79: 3.295993390434588e-06,
64: 6.591986780869176e-06,
69: 2.197328926956392e-06,
-43: 5.493322317390979e-06,
80: 1.098664463478196e-06,
81: 1.098664463478196e-06,
-58: 1.098664463478196e-06,
-56: 1.098664463478196e-06,
-42: 5.493322317390979e-06,
-49: 1.098664463478196e-06,
74: 4.394657853912784e-06,
75: 3.295993390434588e-06,
117: 1.098664463478196e-06,
-62: 1.098664463478196e-06,
76: 1.098664463478196e-06,
78: 2.197328926956392e-06,
-53: 2.197328926956392e-06,
-65: 1.098664463478196e-06,
-61: 1.098664463478196e-06,
127: 1.098664463478196e-06,
-45: 4.394657853912784e-06,
-46: 1.098664463478196e-06,
-50: 1.098664463478196e-06,
-77: 1.098664463478196e-06,
-74: 1.098664463478196e-06,
70: 2.197328926956392e-06,
66: 1.098664463478196e-06,
-55: 1.098664463478196e-06,
-54: 2.197328926956392e-06,
-66: 1.098664463478196e-06,
71: 2.197328926956392e-06,
83: 1.098664463478196e-06,
87: 1.098664463478196e-06,
86: 1.098664463478196e-06,
}
PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_dists: Final = list(
sorted(PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_dict.keys())
)
PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_probs: Final = [
PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_dict[x]
for x in PTB_TRAIN_EMPIRICAL_DEP_SEQ_LEN_dists
]
class Observation(NamedTuple):
"""Observation of a single sentence, with all info from the conllx File"""
index: Tuple[str] # type: ignore
sentence: Tuple[str]
lemma_sentence: Tuple[str]
upos_sentence: Tuple[str]
xpos_sentence: Tuple[str]
morph: Tuple[str]
head_indices: Tuple[str]
governance_relations: Tuple[str]
secondary_relations: Tuple[str]
extra_info: Tuple[str]
pp_entity_id: Tuple[str] = ("",)
pp_gold_governance: Tuple[str] = ("",)
def str2bool(v):
if isinstance(v, bool):
return v
if v.lower() in ("yes", "true", "t", "y", "1"):
return True
elif v.lower() in ("no", "false", "f", "n", "0"):
return False
else:
raise argparse.ArgumentTypeError("Boolean value expected.")
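# Illustrative usage sketch (not part of the original module): str2bool is intended as an
# argparse ``type=`` converter, so string flags such as "yes"/"no"/"1"/"0" parse to booleans.
#
#     parser = argparse.ArgumentParser()
#     parser.add_argument("--tikz", type=str2bool, nargs="?", const=True, default=False)
#     parser.parse_args(["--tikz", "true"]).tikz   # -> True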
class UnionFind:
"""
Naive UnionFind implementation for (slow) Prim's MST algorithm
Used to compute minimum spanning trees for distance matrices
"""
def __init__(self, n: int):
"""
        Initialize the parent list; each item in the sequence starts out as its own parent.
Args:
n: The number of parents, equal to the length of the sequence
"""
self.parents = list(range(n))
def union(self, i: int, j: int) -> None:
"""
        Merge the sets containing i and j by pointing the root of i's set at j
Args:
i: the index of the item to update
j: the index of the new parent for item i
"""
if self.find(i) != self.find(
j
): # make sure both indices don't have the same parent
i_parent = self.find(i)
self.parents[i_parent] = j
def find(self, i: int):
"""
        Follow parent links upward from i until the root of its set is reached
        Args:
            i: the index of the item for which to find the root
        Returns: the id of the root (representative) of the set containing item i
"""
# initialize the parent of i as itself
i_parent = i
while True:
# loop over the stored parent ids sequence,
# until we find one that is actually set correct.
if i_parent != self.parents[i_parent]:
i_parent = self.parents[i_parent]
else:
break
return i_parent
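# Minimal usage sketch (illustrative, not part of the original module): dist_matrix_to_edges
# below uses UnionFind as its cycle check -- an edge (i, j) is only accepted while i and j
# still live in different components.
#
#     uf = UnionFind(3)
#     uf.find(0) == uf.find(1)   # False: separate components
#     uf.union(0, 1)
#     uf.find(0) == uf.find(1)   # True: merged, so a second (0, 1) edge would be skipped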
def dist_matrix_to_edges(matrix, poses=None):
"""
Constructs a minimum spanning tree from the pairwise weights in matrix;
returns the edges.
Never lets punctuation-tagged words be part of the tree.
"""
# map each tuple of indices to a distance
pairs_to_distances = {}
uf = UnionFind(len(matrix))
for i_index, line in enumerate(matrix):
for j_index, dist in enumerate(line):
# Skip all the punctuations, not part of dependency tree
if poses is not None and poses[i_index] in [
"''",
",",
".",
":",
"``",
"-LRB-",
"-RRB-",
]:
continue
if poses is not None and poses[j_index] in [
"''",
",",
".",
":",
"``",
"-LRB-",
"-RRB-",
]:
continue
pairs_to_distances[(i_index, j_index)] = dist
edges = []
# loop over the sorted distances, so we start at the root
for (i_index, j_index), distance in sorted(
pairs_to_distances.items(), key=lambda x: x[1]
):
if uf.find(i_index) != uf.find(j_index):
uf.union(i_index, j_index)
edges.append((i_index, j_index))
return edges
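# Illustrative example (not part of the original module): edges are taken in order of
# increasing distance and accepted only when they join two previously unconnected words,
# so the three-word distance matrix below yields two tree edges.
#
#     dist_matrix_to_edges([[0, 1, 4],
#                           [1, 0, 2],
#                           [4, 2, 0]])   # -> [(0, 1), (1, 2)]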
def find_child_edges(test_edge, edges):
check_edges = edges.copy()
child_edges = []
test_id = [test_edge[1]]
added_idxs = True
while added_idxs:
added_idxs = []
new_test_ids = []
for idx, e in enumerate(check_edges):
if e[0] in test_id:
child_edges.append(e)
added_idxs.append(idx)
new_test_ids.append(e[1])
if added_idxs:
check_edges = np.delete(check_edges, added_idxs, axis=0)
            test_id = new_test_ids
    # hand back all edges reachable from the dependent of test_edge
    return child_edges
def sentence_to_graph(
words: List[str],
words_phraseids: Optional[List[str]] = None,
tikz: bool = False,
gold_edges: Optional[List[Tuple[int, int]]] = None,
prediction_edges: Optional[List[Tuple[int, int]]] = None,
entid2color: Dict[str, str] = None,
depths=None,
ax=None,
):
"""Turns edge sets on word (nodes) into tikz dependency LaTeX."""
if entid2color is None:
entid2color = {}
# generate tikz string
if tikz:
assert (
gold_edges is not None or prediction_edges is not None
), "atleast one of the list of edges must be filled"
tikz_string = """\\begin{dependency}[hide label, edge unit distance=.5ex]\\begin{deptext}[column sep=0.05cm]"""
tikz_string += (
"\\& ".join([x.replace("$", "\$").replace("&", "+") for x in words])
+ " \\\\"
+ "\n"
)
tikz_string += "\\end{deptext}" + "\n"
if gold_edges is not None:
for i_index, j_index in gold_edges:
tikz_string += "\\depedge{{{}}}{{{}}}{{{}}}\n".format(
i_index + 1, j_index + 1, "."
)
if prediction_edges is not None:
for i_index, j_index in prediction_edges:
tikz_string += "\\depedge[edge style={{red!60!}}, edge below]{{{}}}{{{}}}{{{}}}\n".format(
i_index + 1, j_index + 1, "."
)
tikz_string += "\\end{dependency}\n"
return tikz_string
else:
assert (
words_phraseids is not None
), "when not doing tikz, the 'words_phraseids' should be set"
assert (
gold_edges is not None and ax is not None
), "the networkx method is only implemented for gold standard edges"
assert (
depths is not None
), "expecting to receive the depths, when not creating the tikz figure"
ax.axis("off")
stepsize = 4
points = np.array([[i, 0] for i in range(0, len(words) * stepsize, stepsize)])
# edges = np.array(gold_edges)
edges = gold_edges.copy()
x = points[:, 0].flatten()
y = points[:, 1].flatten()
ax.scatter(x, y, marker="o")
ax.set_ylim([-15, 9])
phrid2id_: Dict[str, int] = {
phr: i for i, phr in enumerate(set(words_phraseids))
}
phrid2id_["_"] = -100
entid2color["_"] = "black"
# FIX ALL THE AVAILABLE TEXT PRINTING
for i, point in enumerate(points):
if words_phraseids[i] in entid2color:
col = entid2color[words_phraseids[i]]
else:
col = "black"
offset = 1.5
ax.text(
point[0],
point[1] - offset,
int(depths[i]),
color="black",
fontweight="bold",
rotation=0,
verticalalignment="top",
horizontalalignment="center",
)
offset = 6
ax.text(
point[0],
point[1] - offset,
phrid2id_[words_phraseids[i]],
color=col,
rotation=0,
verticalalignment="top",
horizontalalignment="center",
)
offset = 8
ax.text(
point[0],
point[1] - offset,
words[i],
color=col,
rotation=45,
verticalalignment="top",
horizontalalignment="right",
)
def add_edges_arcs(
edges: List[Tuple[int, int]],
color="black",
linewidth=1,
linestyle=None,
) -> None:
for edge in edges:
max_e = points[np.max(edge), 0]
min_e = points[np.min(edge), 0]
diff = max_e - min_e
arc = patches.Arc(
(min_e + diff / 2, 0),
width=diff,
height=(diff) / 2,
color=color,
linestyle=linestyle,
linewidth=linewidth,
theta2=180.0,
)
ax.add_patch(arc)
# DRAW ALL THE EDGES
add_edges_arcs(edges)
visual_edges, phrase2depth, phrase_head_idx = _text_edges2visual_edges(
edges, depths, words_phraseids
)
add_edges_arcs(visual_edges, color="red", linewidth=2, linestyle=(0, (1, 2)))
for phrase_id, idx in phrase_head_idx.items():
offset = 3.5
ax.text(
points[idx][0],
points[idx][1] - offset,
int(phrase2depth[phrase_id]),
color="red",
fontweight="bold",
rotation=0,
verticalalignment="top",
horizontalalignment="center",
)
return ax
def print_tikz(self, prediction_edges, gold_edges, words, split_name):
"""Turns edge sets on word (nodes) into tikz dependency LaTeX."""
with open(os.path.join(self.reporting_root, split_name + ".tikz"), "a") as fout:
string = """\\begin{dependency}[hide label, edge unit distance=.5ex]\\begin{deptext}[column sep=0.05cm]"""
string += (
"\\& ".join([x.replace("$", "\$").replace("&", "+") for x in words])
+ " \\\\"
+ "\n"
)
string += "\\end{deptext}" + "\n"
for i_index, j_index in gold_edges:
string += "\\depedge{{{}}}{{{}}}{{{}}}\n".format(
i_index + 1, j_index + 1, "."
)
for i_index, j_index in prediction_edges:
string += "\\depedge[edge style={{red!60!}}, edge below]{{{}}}{{{}}}{{{}}}\n".format(
i_index + 1, j_index + 1, "."
)
string += "\\end{dependency}\n"
fout.write("\n\n")
fout.write(string)
def get_nopunct_argmin(prediction, words, poses):
"""
Gets the argmin of predictions, but filters out all punctuation-POS-tagged words
"""
puncts = ["''", ",", ".", ":", "``", "-LRB-", "-RRB-"]
original_argmin = np.argmin(prediction)
for i in range(len(words)):
argmin = np.argmin(prediction)
if poses[argmin] not in puncts:
return argmin
else:
prediction[argmin] = 9000
return original_argmin
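# Hedged usage sketch (not part of the original probe code; the arrays below are
# hypothetical). Illustrates how get_nopunct_argmin skips punctuation minima:
#   prediction = np.array([0.2, 0.05, 0.4])
#   words = ["good", ".", "movie"]
#   poses = ["JJ", ".", "NN"]
#   get_nopunct_argmin(prediction, words, poses)  # -> 0; index 1 is "." and gets skipped
# Note that the function mutates `prediction` in place while searching.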
class MyProgress(ProgressBar):
def __init__(self, disable_val_bar: bool = False, *args, **kwargs):
super().__init__(*args, **kwargs)
self.disable_val_bar = disable_val_bar
def init_validation_tqdm(self):
bar = super().init_validation_tqdm()
if self.disable_val_bar:
bar.disable = True
return bar
def get_all_subclasses(cls):
all_subclasses = []
for subclass in cls.__subclasses__():
all_subclasses.append(subclass)
all_subclasses.extend(get_all_subclasses(subclass))
return all_subclasses
def check_volta_layer(config, task: str, layer_idx: int, mm_bert_layer: int):
mm_vision_layer = "Visual" in task
if layer_idx in ["text_baseline", "vision_baseline", "vision_mapped_baseline"]:
pass
elif config is not None:
if mm_bert_layer:
if mm_vision_layer:
layer_idx = config.bert_layer2ff_sublayer[str(layer_idx)]
if layer_idx not in config.v_ff_sublayers:
logger.warning(
"given mapped layer_idx {} not a vision-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
raise ValueError(
"given mapped layer_idx {} not a vision-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
else:
if mm_vision_layer:
if layer_idx not in config.v_ff_sublayers:
logger.warning(
"given layer_idx {} not a vision-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
raise ValueError(
"given layer_idx {} not a vision-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
else:
if layer_idx not in config.t_ff_sublayers:
logger.warning(
"given layer_idx {} not a text-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
raise ValueError(
"given layer_idx {} not a text-feedforward sublayers, "
"skipping this setting".format(layer_idx)
)
progress_columns: List["ProgressColumn"] = [
TextColumn("[progress.description]{task.description}"),
BarColumn(),
TextColumn(
"[progress.percentage]{task.percentage:>3.0f}% "
"[{task.completed}/{task.total}]"
),
TimeElapsedColumn(),
TextColumn("<"),
TimeRemainingColumn(),
]
# AN EXACT COPY FROM THE RICH TRACK, BUT EXTENDED SO MORE PROGRESS INFORMATION IS PRINTED
def track(
sequence: Union[Sequence[ProgressType], Iterable[ProgressType]],
description: str = "Working...",
total: Optional[float] = None,
auto_refresh: bool = True,
console: Optional[Console] = None,
transient: bool = False,
get_time: Optional[Callable[[], float]] = None,
refresh_per_second: float = 10,
style: StyleType = "bar.back",
complete_style: StyleType = "bar.complete",
finished_style: StyleType = "bar.finished",
pulse_style: StyleType = "bar.pulse",
update_period: float = 0.1,
disable: bool = False,
) -> Iterable[ProgressType]:
"""Track progress by iterating over a sequence.
Args:
sequence (Iterable[ProgressType]): A sequence (must support "len")
you wish to iterate over.
description (str, optional): Description of task show next to progress bar.
Defaults to "Working".
total: (float, optional): Total number of steps. Default is len(sequence).
auto_refresh (bool, optional): Automatic refresh, disable to force a refresh
after each iteration. Default is True.
transient: (bool, optional): Clear the progress on exit. Defaults to False.
console (Console, optional): Console to write to.
Default creates internal Console instance.
refresh_per_second (float): Number of times per second to refresh
the progress information. Defaults to 10.
style (StyleType, optional): Style for the bar background.
Defaults to "bar.back".
complete_style (StyleType, optional): Style for the completed bar.
Defaults to "bar.complete".
finished_style (StyleType, optional): Style for a finished bar.
Defaults to "bar.done".
pulse_style (StyleType, optional): Style for pulsing bars.
Defaults to "bar.pulse".
update_period (float, optional): Minimum time (in seconds)
between calls to update(). Defaults to 0.1.
disable (bool, optional): Disable display of progress.
Returns:
Iterable[ProgressType]: An iterable of the values in the sequence.
"""
columns: List["ProgressColumn"] = (
[TextColumn("{task.description}")] if description else []
)
columns.extend(
(
BarColumn(
style=style,
complete_style=complete_style,
finished_style=finished_style,
pulse_style=pulse_style,
),
TextColumn("{task.percentage:>3.0f}% [{task.completed}/{task.total}]"),
TimeElapsedColumn(),
TextColumn("<"),
TimeRemainingColumn(),
)
)
progress = Progress(
*columns,
auto_refresh=auto_refresh,
console=console,
transient=transient,
get_time=get_time,
refresh_per_second=refresh_per_second or 10,
disable=disable,
)
with progress:
yield from progress.track(
sequence, total=total, description=description, update_period=update_period
)
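# Hedged usage sketch for the extended track() above (the loop body is a
# hypothetical placeholder):
#   for batch in track(range(100), description="Probing...", disable=False):
#       pass  # process one batch; the extra columns show counts and elapsed/remaining time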
def _text_edges2visual_edges(
text_edges: List[Tuple[int, int]],
text_depths: Union[List[int], np.ndarray],
words_phraseids: List[str],
) -> Tuple[List[Tuple[int, int]], Dict[str, int], Dict[str, int]]:
"""
Args:
text_edges: the edges of the dependency tree for the sentence in a np matrix.
Each item is an edge with [source, target]
text_depths: The depths for each text token in the sentence
words_phraseids: for each word in the sentence,
the id of the phrase it belongs to. '_' if none.
Returns: the edges of the dependency tree projected on visual regions
"""
visual_edges = []
    # For each phrase, record the text index of its head token
phrase2txt_index: Dict[str, int] = {}
# Also, based on the text idx we can map the phrase to an initial depth
phrase2depth: Dict[str, int] = {}
# loop over unique phrase ids in the sentence
for phr_id in set(words_phraseids):
if phr_id == "_": # word does not belong to a phrase
continue
# Collections with for current phrase id all its head_idx and all its depths
phrase_idxs: List[int] = []
phrase_depths: List[int] = []
# Find all words that belong to the same phrase
for idx, id_ in enumerate(words_phraseids):
if phr_id == id_:
phrase_idxs.append(idx)
phrase_depths.append(text_depths[idx])
# if all depths are equal, we use the final token from the phrase as head index
if all(phrase_depths[0] == x for x in phrase_depths):
phr_min_d_idx = len(phrase_depths) - 1
# not all depths are equal, we use the one with lowest depth
else:
phr_min_d_idx = int(np.argmin(phrase_depths))
phrase2txt_index[phr_id] = phrase_idxs[phr_min_d_idx]
phrase2depth[phr_id] = phrase_depths[phr_min_d_idx]
# Find the root node for the text
text_root_idx = np.where(text_depths == 0)[0][0]
phrase2depth["0"] = 0 # the full image is always the root for visual tree
if text_root_idx in phrase2txt_index.values():
# There is a phrase that is the root, thus to have not two elements
# map to same text index, we map the visual root to prepended token
# in front of the sentence
phrase2txt_index["0"] = -1
else:
# this is as expected. The text root is not a phrase
# and we map the visual root to the text root
phrase2txt_index["0"] = text_root_idx
# reverse mapping from index to phrase_id
idx2phr = {v: k for k, v in phrase2txt_index.items()}
    # loop over all the phrases based on their depth, lowest to highest
for sort_phr_id in sorted(phrase2depth, key=phrase2depth.get): # type: ignore
sort_txt_idx = phrase2txt_index[sort_phr_id]
# this is our own manual assigned root node. since it is the root,
# it has no parent, so continue
if sort_phr_id == "0":
continue
# the current node is the root node in the text tree
elif sort_txt_idx == text_root_idx:
            # there is no parent for a root node (a phrase is not normally the root);
            # in the visual tree it should become the child of the full-image root node
new_edge = (phrase2txt_index["0"], sort_txt_idx)
visual_edges.append(new_edge)
phrase2depth[sort_phr_id] = phrase2depth["0"] + 1
else:
# loop over all textual edges to find and construct the correct
# visual edge given the current phrase_id
for edge in text_edges:
# if the current phrase_idx is not the child node of the
# current text edge, we don't need this edge.
if sort_txt_idx != edge[1]:
continue
# the current node is the child of the root node, or its
# text parent node, is also a visual node
elif edge[0] == text_root_idx or edge[0] in phrase2txt_index.values():
# we append the textual edge as a valid visual edge
visual_edges.append(edge)
# we update the depth to be equal to
# "the depth of the parent node plus 1"
phrase2depth[idx2phr[edge[1]]] = phrase2depth[idx2phr[edge[0]]] + 1
# we found an edge where this node is the child, but the parent
# is not a visual node we need to move up through the textual edges
# from the current one, to find the visual parent.
else:
# start chain by finding the direct parent edge from current edge
parent_edge_chain = [_find_parent_edge(edge, text_edges)]
if parent_edge_chain[0] is None:
                    # No head could be found, so we leave this phrase dangling
logger.debug(
"no head found. Left phrase dangling "
"MAYBE IF IT IS NOT A TREE, WE CAN FURTHER CHECK THIS?"
)
continue
while True:
dangling_skip = False
# the highest node in the current parent edge chain is
# a visual node, or it is the visual root
if (
parent_edge_chain[-1][0] == text_root_idx
or parent_edge_chain[-1][0] in phrase2txt_index.values()
):
break
# no visual parent found, keep moving up
pe = _find_parent_edge(parent_edge_chain[-1], text_edges)
if pe is None:
# No head could be found higher, leave this phrase dangling
logger.debug(
"no head found higher. Left phrase dangling! "
"MAYBE IF IT IS NOT A TREE, WE CAN FURTHER CHECK THIS?"
)
dangling_skip = True
break
parent_edge_chain.append(pe)
if dangling_skip:
continue
# update the new depth, based on steps shifted up in the tree
# (length of parent edge chain)
parent_id = parent_edge_chain[-1][0]
# update depth as parent_depth + 1
phrase2depth[sort_phr_id] = phrase2depth[idx2phr[parent_id]] + 1
# add the edge from the parent to current node
new_edge = (parent_id, edge[1])
visual_edges.append(new_edge)
assert (
sum(value == 0 for value in phrase2depth.values()) == 1
), "ERROR: multiple roots in visual tree."
return visual_edges, phrase2depth, phrase2txt_index
def _find_parent_edge(test_edge, edges):
for e in edges:
if e[1] == test_edge[0]:
return e
return None
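# Hedged illustration with hypothetical edges: for edges = [(0, 1), (1, 2)],
# _find_parent_edge((1, 2), edges) returns (0, 1), i.e. the edge whose child is
# the parent node of the queried edge; it returns None once the root is reached.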
| [
"logging.getLogger",
"rich.progress.BarColumn",
"matplotlib.patches.Arc",
"numpy.where",
"numpy.delete",
"os.path.join",
"numpy.min",
"argparse.ArgumentTypeError",
"numpy.max",
"rich.progress.TimeRemainingColumn",
"rich.progress.Progress",
"rich.progress.TimeElapsedColumn",
"rich.progress.TextColumn",
"numpy.argmin"
] | [((533, 560), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (550, 560), False, 'import logging\n'), ((17682, 17703), 'numpy.argmin', 'np.argmin', (['prediction'], {}), '(prediction)\n', (17691, 17703), True, 'import numpy as np\n'), ((20423, 20477), 'rich.progress.TextColumn', 'TextColumn', (['"""[progress.description]{task.description}"""'], {}), "('[progress.description]{task.description}')\n", (20433, 20477), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((20483, 20494), 'rich.progress.BarColumn', 'BarColumn', ([], {}), '()\n', (20492, 20494), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((20500, 20601), 'rich.progress.TextColumn', 'TextColumn', (['"""[progress.percentage]{task.percentage:>3.0f}% [{task.completed}/{task.total}]"""'], {}), "(\n '[progress.percentage]{task.percentage:>3.0f}% [{task.completed}/{task.total}]'\n )\n", (20510, 20601), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((20622, 20641), 'rich.progress.TimeElapsedColumn', 'TimeElapsedColumn', ([], {}), '()\n', (20639, 20641), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((20647, 20662), 'rich.progress.TextColumn', 'TextColumn', (['"""<"""'], {}), "('<')\n", (20657, 20662), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((20668, 20689), 'rich.progress.TimeRemainingColumn', 'TimeRemainingColumn', ([], {}), '()\n', (20687, 20689), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23777, 23951), 'rich.progress.Progress', 'Progress', (['*columns'], {'auto_refresh': 'auto_refresh', 'console': 'console', 'transient': 'transient', 'get_time': 'get_time', 'refresh_per_second': '(refresh_per_second or 10)', 'disable': 'disable'}), '(*columns, auto_refresh=auto_refresh, console=console, transient=\n transient, get_time=get_time, refresh_per_second=refresh_per_second or \n 10, disable=disable)\n', (23785, 23951), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((17753, 17774), 'numpy.argmin', 'np.argmin', (['prediction'], {}), '(prediction)\n', (17762, 17774), True, 'import numpy as np\n'), ((8010, 8063), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""Boolean value expected."""'], {}), "('Boolean value expected.')\n", (8036, 8063), False, 'import argparse\n'), ((11360, 11402), 'numpy.delete', 'np.delete', (['check_edges', 'added_idxs'], {'axis': '(0)'}), '(check_edges, added_idxs, axis=0)\n', (11369, 11402), True, 'import numpy as np\n'), ((16600, 16655), 'os.path.join', 'os.path.join', (['self.reporting_root', "(split_name + '.tikz')"], {}), "(self.reporting_root, split_name + '.tikz')\n", (16612, 16655), False, 'import os\n'), ((23270, 23302), 'rich.progress.TextColumn', 'TextColumn', (['"""{task.description}"""'], {}), "('{task.description}')\n", (23280, 23302), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, 
StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23375, 23489), 'rich.progress.BarColumn', 'BarColumn', ([], {'style': 'style', 'complete_style': 'complete_style', 'finished_style': 'finished_style', 'pulse_style': 'pulse_style'}), '(style=style, complete_style=complete_style, finished_style=\n finished_style, pulse_style=pulse_style)\n', (23384, 23489), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23577, 23647), 'rich.progress.TextColumn', 'TextColumn', (['"""{task.percentage:>3.0f}% [{task.completed}/{task.total}]"""'], {}), "('{task.percentage:>3.0f}% [{task.completed}/{task.total}]')\n", (23587, 23647), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23661, 23680), 'rich.progress.TimeElapsedColumn', 'TimeElapsedColumn', ([], {}), '()\n', (23678, 23680), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23694, 23709), 'rich.progress.TextColumn', 'TextColumn', (['"""<"""'], {}), "('<')\n", (23704, 23709), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((23723, 23744), 'rich.progress.TimeRemainingColumn', 'TimeRemainingColumn', ([], {}), '()\n', (23742, 23744), False, 'from rich.progress import BarColumn, Progress, ProgressColumn, ProgressType, StyleType, TextColumn, TimeElapsedColumn, TimeRemainingColumn\n'), ((26154, 26180), 'numpy.where', 'np.where', (['(text_depths == 0)'], {}), '(text_depths == 0)\n', (26162, 26180), True, 'import numpy as np\n'), ((15404, 15540), 'matplotlib.patches.Arc', 'patches.Arc', (['(min_e + diff / 2, 0)'], {'width': 'diff', 'height': '(diff / 2)', 'color': 'color', 'linestyle': 'linestyle', 'linewidth': 'linewidth', 'theta2': '(180.0)'}), '((min_e + diff / 2, 0), width=diff, height=diff / 2, color=color,\n linestyle=linestyle, linewidth=linewidth, theta2=180.0)\n', (15415, 15540), True, 'import matplotlib.patches as patches\n'), ((25948, 25972), 'numpy.argmin', 'np.argmin', (['phrase_depths'], {}), '(phrase_depths)\n', (25957, 25972), True, 'import numpy as np\n'), ((15280, 15292), 'numpy.max', 'np.max', (['edge'], {}), '(edge)\n', (15286, 15292), True, 'import numpy as np\n'), ((15328, 15340), 'numpy.min', 'np.min', (['edge'], {}), '(edge)\n', (15334, 15340), True, 'import numpy as np\n')] |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from src.models.model import Model
from src.utils.loss_aug_AP import LossAugmentedInferenceAP
class MeanAveragePrecisionModel(Model):
"""
Our model for optimizing Mean Average Precision.
"""
def __init__(self, config, reuse=False):
super(MeanAveragePrecisionModel, self).__init__(config, reuse)
def compute_loss(self):
if self.config.optimization_framework == "SSVM":
loss = self.mAP_score_aug * self.config.alpha - self.mAP_score_GT
elif self.config.optimization_framework == "DLM": # Direct Loss minimization
loss = (1 / self.config.epsilon) * (
self.mAP_score_aug * self.config.alpha - self.mAP_score_std)
if not self.config.positive_update:
loss *= -1
else:
raise ValueError("Unknown optimization framework {}".format(
self.config.optimization_framework))
return loss
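  # Illustrative note (not part of the original model code): under DLM the value
  # returned above is a finite-difference estimate of the task-loss gradient,
  #   (1 / epsilon) * (score(y_aug) * alpha - score(y_std)),
  # with the sign flipped for the negative-update variant; under SSVM it is the
  # structured hinge surrogate score(y_aug) * alpha - score(y_gt).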
def perform_loss_augmented_inference(self, sess, batch):
batch_size = len(batch["labels"])
num_pos, num_neg, pos_inds, neg_inds = self.get_positive_negative_splits(
batch)
_feed_dict = {
self.x: batch["imgs"],
self.n_queries_to_parse: self.config.batch_size,
self.num_pos: num_pos,
self.num_neg: num_neg,
self.pos_inds: pos_inds,
self.neg_inds: neg_inds
}
phi_pos, phi_neg, skipped_queries = sess.run(
[self.phi_pos, self.phi_neg, self.skipped_queries],
feed_dict=_feed_dict)
Y_aug = np.zeros((batch_size, np.max(num_pos), np.max(num_neg)))
for qq in range(batch_size):
if skipped_queries[qq] == 1:
print("Skipped {}".format(qq))
continue
q_phi_pos = phi_pos[qq][:num_pos[qq]]
q_phi_neg = phi_neg[qq][:num_neg[qq]]
loss_aug_AP_algo = LossAugmentedInferenceAP(q_phi_pos, q_phi_neg,
self.config.epsilon,
self.config.positive_update)
q_Y_aug = -1 * loss_aug_AP_algo.direction[1:, 1:]
Y_aug[qq, :num_pos[qq], :num_neg[qq]] = q_Y_aug
return Y_aug
@property
def config(self):
return self._config
| [
"numpy.max",
"src.utils.loss_aug_AP.LossAugmentedInferenceAP"
] | [((1930, 2031), 'src.utils.loss_aug_AP.LossAugmentedInferenceAP', 'LossAugmentedInferenceAP', (['q_phi_pos', 'q_phi_neg', 'self.config.epsilon', 'self.config.positive_update'], {}), '(q_phi_pos, q_phi_neg, self.config.epsilon, self.\n config.positive_update)\n', (1954, 2031), False, 'from src.utils.loss_aug_AP import LossAugmentedInferenceAP\n'), ((1657, 1672), 'numpy.max', 'np.max', (['num_pos'], {}), '(num_pos)\n', (1663, 1672), True, 'import numpy as np\n'), ((1674, 1689), 'numpy.max', 'np.max', (['num_neg'], {}), '(num_neg)\n', (1680, 1689), True, 'import numpy as np\n')] |
"""04: Jump to Recent File
Use Recent Files to quickly jump to last-visited file.
- Ctrl-E/Cmd-E
- Very useful toggle
- Cursor keys
- Or speed search
- Variations such as recently *changed* file
Repo: https://github.com/pauleveritt/42-workshop
Playlist: https://www.jetbrains.com/pycharm/guide/playlists/42/
"""
from fortytwo import App, Greeter
def main():
site = App()
with site as container:
greeter = container.get(Greeter)
greeting = greeter('Larry')
return greeting
if __name__ == '__main__':
print(main())
| [
"fortytwo.App"
] | [((379, 384), 'fortytwo.App', 'App', ([], {}), '()\n', (382, 384), False, 'from fortytwo import App, Greeter\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'GUI.ui'
#
# Created by: PyQt5 UI code generator 5.12.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setWindowModality(QtCore.Qt.WindowModal)
MainWindow.resize(1313, 869)
MainWindow.setMinimumSize(QtCore.QSize(1313, 869))
MainWindow.setMaximumSize(QtCore.QSize(1920, 1080))
MainWindow.setFocusPolicy(QtCore.Qt.StrongFocus)
MainWindow.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
MainWindow.setDocumentMode(False)
MainWindow.setTabShape(QtWidgets.QTabWidget.Rounded)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.ComboBoxCameras = QtWidgets.QComboBox(self.centralwidget)
self.ComboBoxCameras.setMinimumSize(QtCore.QSize(200, 30))
self.ComboBoxCameras.setMaximumSize(QtCore.QSize(200, 50))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
self.ComboBoxCameras.setFont(font)
self.ComboBoxCameras.setLayoutDirection(QtCore.Qt.LeftToRight)
self.ComboBoxCameras.setMaxVisibleItems(100)
self.ComboBoxCameras.setFrame(True)
self.ComboBoxCameras.setObjectName("ComboBoxCameras")
self.gridLayout.addWidget(self.ComboBoxCameras, 0, 1, 1, 1)
self.scrollArea_2 = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea_2.setMinimumSize(QtCore.QSize(640, 279))
self.scrollArea_2.setMaximumSize(QtCore.QSize(640, 9999))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setBold(False)
font.setWeight(50)
self.scrollArea_2.setFont(font)
self.scrollArea_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea_2.setWidgetResizable(True)
self.scrollArea_2.setObjectName("scrollArea_2")
self.scrollAreaWidgetContents_3 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_3.setGeometry(QtCore.QRect(0, 0, 640, 282))
self.scrollAreaWidgetContents_3.setObjectName("scrollAreaWidgetContents_3")
self.gridLayout_4 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents_3)
self.gridLayout_4.setObjectName("gridLayout_4")
self.tableWidget_2 = QtWidgets.QTableWidget(self.scrollAreaWidgetContents_3)
self.tableWidget_2.setMinimumSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.tableWidget_2.setFont(font)
self.tableWidget_2.setFrameShape(QtWidgets.QFrame.WinPanel)
self.tableWidget_2.setObjectName("tableWidget_2")
self.tableWidget_2.setColumnCount(3)
self.tableWidget_2.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget_2.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget_2.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget_2.setHorizontalHeaderItem(2, item)
self.gridLayout_4.addWidget(self.tableWidget_2, 0, 0, 1, 1)
self.scrollArea_2.setWidget(self.scrollAreaWidgetContents_3)
self.gridLayout.addWidget(self.scrollArea_2, 3, 0, 1, 1)
self.imgLabel = QtWidgets.QLabel(self.centralwidget)
self.imgLabel.setMinimumSize(QtCore.QSize(640, 480))
self.imgLabel.setMaximumSize(QtCore.QSize(640, 480))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setBold(False)
font.setWeight(50)
self.imgLabel.setFont(font)
self.imgLabel.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.imgLabel.setAutoFillBackground(False)
self.imgLabel.setStyleSheet("background-color: rgb(50,50,50)\n"
"")
self.imgLabel.setFrameShape(QtWidgets.QFrame.WinPanel)
self.imgLabel.setFrameShadow(QtWidgets.QFrame.Plain)
self.imgLabel.setLineWidth(1)
self.imgLabel.setText("")
self.imgLabel.setObjectName("imgLabel")
self.gridLayout.addWidget(self.imgLabel, 2, 0, 1, 1)
self.scrollArea = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea.setMinimumSize(QtCore.QSize(640, 771))
self.scrollArea.setMaximumSize(QtCore.QSize(9999, 9999))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setBold(False)
font.setWeight(50)
self.scrollArea.setFont(font)
self.scrollArea.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 645, 774))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.gridLayout_3 = QtWidgets.QGridLayout(self.scrollAreaWidgetContents)
self.gridLayout_3.setObjectName("gridLayout_3")
self.tableWidget = QtWidgets.QTableWidget(self.scrollAreaWidgetContents)
self.tableWidget.setMinimumSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.tableWidget.setFont(font)
self.tableWidget.viewport().setProperty("cursor", QtGui.QCursor(QtCore.Qt.ArrowCursor))
self.tableWidget.setFrameShape(QtWidgets.QFrame.WinPanel)
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(4)
self.tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignVCenter)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
item.setFont(font)
self.tableWidget.setHorizontalHeaderItem(3, item)
self.gridLayout_3.addWidget(self.tableWidget, 0, 0, 1, 1)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.gridLayout.addWidget(self.scrollArea, 1, 1, 3, 1)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSpacing(10)
self.horizontalLayout.setObjectName("horizontalLayout")
self.startButton = QtWidgets.QPushButton(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.startButton.sizePolicy().hasHeightForWidth())
self.startButton.setSizePolicy(sizePolicy)
self.startButton.setMinimumSize(QtCore.QSize(160, 30))
self.startButton.setMaximumSize(QtCore.QSize(200, 30))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.startButton.setFont(font)
self.startButton.setStyleSheet("background-color:rgb(197, 208, 255)")
self.startButton.setObjectName("startButton")
self.horizontalLayout.addWidget(self.startButton)
self.stopButton = QtWidgets.QPushButton(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.stopButton.sizePolicy().hasHeightForWidth())
self.stopButton.setSizePolicy(sizePolicy)
self.stopButton.setMinimumSize(QtCore.QSize(150, 30))
self.stopButton.setMaximumSize(QtCore.QSize(200, 30))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.stopButton.setFont(font)
self.stopButton.setStyleSheet("background-color:rgb(197, 208, 255)")
self.stopButton.setObjectName("stopButton")
self.horizontalLayout.addWidget(self.stopButton)
self.detectButton = QtWidgets.QPushButton(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.detectButton.sizePolicy().hasHeightForWidth())
self.detectButton.setSizePolicy(sizePolicy)
self.detectButton.setMinimumSize(QtCore.QSize(150, 30))
self.detectButton.setMaximumSize(QtCore.QSize(200, 30))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.detectButton.setFont(font)
self.detectButton.setStyleSheet("background-color:rgb(197, 208, 255)")
self.detectButton.setObjectName("detectButton")
self.horizontalLayout.addWidget(self.detectButton)
self.trainButton = QtWidgets.QPushButton(self.centralwidget)
self.trainButton.setMinimumSize(QtCore.QSize(150, 30))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.trainButton.setFont(font)
self.trainButton.setStyleSheet("background-color:rgb(197, 208, 255)")
self.trainButton.setObjectName("trainButton")
self.horizontalLayout.addWidget(self.trainButton)
self.horizontalLayout_2.addLayout(self.horizontalLayout)
self.gridLayout.addLayout(self.horizontalLayout_2, 0, 0, 2, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1313, 21))
self.menubar.setObjectName("menubar")
self.menuKameralar = QtWidgets.QMenu(self.menubar)
self.menuKameralar.setObjectName("menuKameralar")
self.menuFontlar = QtWidgets.QMenu(self.menubar)
self.menuFontlar.setObjectName("menuFontlar")
MainWindow.setMenuBar(self.menubar)
self.actionDiskten_Ekle = QtWidgets.QAction(MainWindow)
self.actionDiskten_Ekle.setObjectName("actionDiskten_Ekle")
self.actionProgramHakkinda = QtWidgets.QAction(MainWindow)
self.actionProgramHakkinda.setObjectName("actionProgramHakkinda")
self.actionGelistiriciHakkinda = QtWidgets.QAction(MainWindow)
self.actionGelistiriciHakkinda.setObjectName("actionGelistiriciHakkinda")
self.actionPersonelEkle = QtWidgets.QAction(MainWindow)
self.actionPersonelEkle.setObjectName("actionPersonelEkle")
self.actionPersoneller = QtWidgets.QAction(MainWindow)
self.actionPersoneller.setObjectName("actionPersoneller")
self.actionLoglar = QtWidgets.QAction(MainWindow)
self.actionLoglar.setObjectName("actionLoglar")
self.actionKamera1 = QtWidgets.QAction(MainWindow)
self.actionKamera1.setCheckable(True)
self.actionKamera1.setObjectName("actionKamera1")
self.actionKamera2 = QtWidgets.QAction(MainWindow)
self.actionKamera2.setCheckable(True)
self.actionKamera2.setObjectName("actionKamera2")
self.actionKamera3 = QtWidgets.QAction(MainWindow)
self.actionKamera3.setCheckable(True)
self.actionKamera3.setObjectName("actionKamera3")
self.actionKamera4 = QtWidgets.QAction(MainWindow)
self.actionKamera4.setCheckable(True)
self.actionKamera4.setObjectName("actionKamera4")
self.actionKamera5 = QtWidgets.QAction(MainWindow)
self.actionKamera5.setCheckable(True)
self.actionKamera5.setObjectName("actionKamera5")
self.actionCikis = QtWidgets.QAction(MainWindow)
self.actionCikis.setObjectName("actionCikis")
self.actionYazdir = QtWidgets.QAction(MainWindow)
self.actionYazdir.setObjectName("actionYazdir")
self.actionPersonelGirisCikisBilgileri = QtWidgets.QAction(MainWindow)
self.actionPersonelGirisCikisBilgileri.setObjectName("actionPersonelGirisCikisBilgileri")
self.actionFotograf_Ekle = QtWidgets.QAction(MainWindow)
self.actionFotograf_Ekle.setObjectName("actionFotograf_Ekle")
self.actionPersonelleriExcelDosyasinaAktar = QtWidgets.QAction(MainWindow)
self.actionPersonelleriExcelDosyasinaAktar.setObjectName("actionPersonelleriExcelDosyasinaAktar")
self.actionGiris_Cikis_KayitlariniExcelDosyasinaAktar = QtWidgets.QAction(MainWindow)
self.actionGiris_Cikis_KayitlariniExcelDosyasinaAktar.setObjectName("actionGiris_Cikis_KayitlariniExcelDosyasinaAktar")
self.actionAddCamera = QtWidgets.QAction(MainWindow)
self.actionAddCamera.setObjectName("actionAddCamera")
self.actionCameras = QtWidgets.QAction(MainWindow)
self.actionCameras.setObjectName("actionCameras")
self.actionAddFont = QtWidgets.QAction(MainWindow)
self.actionAddFont.setObjectName("actionAddFont")
self.actionFonts = QtWidgets.QAction(MainWindow)
self.actionFonts.setObjectName("actionFonts")
self.actionPlate = QtWidgets.QAction(MainWindow)
self.actionPlate.setObjectName("actionPlate")
self.menuKameralar.addAction(self.actionAddCamera)
self.menuKameralar.addAction(self.actionCameras)
self.menuKameralar.addSeparator()
self.menuKameralar.addAction(self.actionPlate)
self.menuFontlar.addAction(self.actionAddFont)
self.menuFontlar.addAction(self.actionFonts)
self.menubar.addAction(self.menuKameralar.menuAction())
self.menubar.addAction(self.menuFontlar.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Vagon Plaka Takip"))
item = self.tableWidget_2.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Okunan Plaka"))
item = self.tableWidget_2.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Tarih"))
item = self.tableWidget_2.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Saat"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "Gorunen Plaka"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "Tarih"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "Saat"))
item = self.tableWidget.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "Kamera"))
self.startButton.setText(_translate("MainWindow", "Kamerayi Baslat"))
self.stopButton.setText(_translate("MainWindow", "Kamerayi Durdur"))
self.detectButton.setText(_translate("MainWindow", "Plaka Tanımayı Baslat"))
self.trainButton.setText(_translate("MainWindow", "KNN Çalıstır"))
self.menuKameralar.setTitle(_translate("MainWindow", "Kameralar"))
self.menuFontlar.setTitle(_translate("MainWindow", "Fontlar"))
self.actionDiskten_Ekle.setText(_translate("MainWindow", "Diskten Ekle"))
self.actionProgramHakkinda.setText(_translate("MainWindow", "Program Hakkinda"))
self.actionGelistiriciHakkinda.setText(_translate("MainWindow", "Gelistirici Hakkinda"))
self.actionPersonelEkle.setText(_translate("MainWindow", "Personel Ekle"))
self.actionPersoneller.setText(_translate("MainWindow", "Personeller"))
self.actionLoglar.setText(_translate("MainWindow", "Genel Loglar"))
self.actionKamera1.setText(_translate("MainWindow", "Kamera-1"))
self.actionKamera2.setText(_translate("MainWindow", "Kamera-2"))
self.actionKamera3.setText(_translate("MainWindow", "Kamera-3"))
self.actionKamera4.setText(_translate("MainWindow", "Kamera-4"))
self.actionKamera5.setText(_translate("MainWindow", "Kamera-5"))
self.actionCikis.setText(_translate("MainWindow", "Cikis"))
self.actionYazdir.setText(_translate("MainWindow", "Excel\'e Yazdir"))
self.actionPersonelGirisCikisBilgileri.setText(_translate("MainWindow", "Personel Giris/Cikis Bilgileri"))
self.actionFotograf_Ekle.setText(_translate("MainWindow", "Fotograf Ekle"))
self.actionPersonelleriExcelDosyasinaAktar.setText(_translate("MainWindow", "Personelleri Excel Dosyasina Aktar"))
self.actionGiris_Cikis_KayitlariniExcelDosyasinaAktar.setText(_translate("MainWindow", "Giris-Cikis Kayitlarini Excel Dosyasina Aktar"))
self.actionAddCamera.setText(_translate("MainWindow", "Kamera Ekle"))
self.actionCameras.setText(_translate("MainWindow", "Kameralar"))
self.actionAddFont.setText(_translate("MainWindow", "Font Ekle"))
self.actionFonts.setText(_translate("MainWindow", "Fontlar"))
self.actionPlate.setText(_translate("MainWindow", "Plaka"))
| [
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QMenu",
"PyQt5.QtGui.QFont",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QScrollArea",
"PyQt5.QtWidgets.QMenuBar",
"PyQt5.QtWidgets.QAction",
"PyQt5.QtCore.QRect",
"PyQt5.QtGui.QCursor",
"PyQt5.QtWidgets.QTableWidgetItem",
"PyQt5.QtCore.QSize"
] | [((821, 850), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (838, 850), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((935, 976), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (956, 976), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1060, 1099), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1079, 1099), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1249, 1262), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1260, 1262), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1750, 1791), 'PyQt5.QtWidgets.QScrollArea', 'QtWidgets.QScrollArea', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1771, 1791), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1938, 1951), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1949, 1951), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2296, 2315), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (2313, 2315), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2510, 2564), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.scrollAreaWidgetContents_3'], {}), '(self.scrollAreaWidgetContents_3)\n', (2531, 2564), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2650, 2705), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['self.scrollAreaWidgetContents_3'], {}), '(self.scrollAreaWidgetContents_3)\n', (2672, 2705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2783, 2796), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2794, 2796), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3185, 3213), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3211, 3213), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3306, 3319), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3317, 3319), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3476, 3504), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3502, 3504), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3520, 3533), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3531, 3533), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3690, 3718), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3716, 3718), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3734, 3747), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (3745, 3747), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4115, 4151), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4131, 4151), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4289, 4302), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (4300, 4302), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4956, 4997), 'PyQt5.QtWidgets.QScrollArea', 'QtWidgets.QScrollArea', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4977, 4997), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5141, 5154), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5152, 5154), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5487, 5506), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', ([], {}), '()\n', (5504, 5506), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5695, 5747), 
'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.scrollAreaWidgetContents'], {}), '(self.scrollAreaWidgetContents)\n', (5716, 5747), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5831, 5884), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['self.scrollAreaWidgetContents'], {}), '(self.scrollAreaWidgetContents)\n', (5853, 5884), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5960, 5973), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5971, 5973), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6446, 6474), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (6472, 6474), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6567, 6580), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6578, 6580), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6735, 6763), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (6761, 6763), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6779, 6792), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6790, 6792), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6947, 6975), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (6973, 6975), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6991, 7004), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (7002, 7004), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7159, 7187), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7185, 7187), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7203, 7216), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (7214, 7216), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7584, 7607), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (7605, 7607), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7708, 7731), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (7729, 7731), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7868, 7909), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (7889, 7909), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7931, 8021), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (7952, 8021), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8381, 8394), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (8392, 8394), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8768, 8809), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (8789, 8809), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8831, 8923), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Preferred)\n', (8852, 8923), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9279, 9292), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (9290, 9292), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9663, 9704), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (9684, 9704), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((9726, 9816), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (9747, 9816), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10180, 10193), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (10191, 10193), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10573, 10614), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (10594, 10614), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10693, 10706), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (10704, 10706), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11271, 11303), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (11291, 11303), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11425, 11455), 'PyQt5.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (11443, 11455), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11594, 11623), 'PyQt5.QtWidgets.QMenu', 'QtWidgets.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (11609, 11623), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11709, 11738), 'PyQt5.QtWidgets.QMenu', 'QtWidgets.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (11724, 11738), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11871, 11900), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (11888, 11900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12006, 12035), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12023, 12035), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12151, 12180), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12168, 12180), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12297, 12326), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12314, 12326), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12428, 12457), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12445, 12457), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12552, 12581), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12569, 12581), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12667, 12696), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12684, 12696), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12830, 12859), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (12847, 12859), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12993, 13022), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13010, 13022), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13156, 13185), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13173, 13185), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13319, 13348), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13336, 13348), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13480, 13509), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), 
'(MainWindow)\n', (13497, 13509), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13592, 13621), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13609, 13621), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13727, 13756), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13744, 13756), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13890, 13919), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (13907, 13919), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14043, 14072), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14060, 14072), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14243, 14272), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14260, 14272), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14432, 14461), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14449, 14461), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14553, 14582), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14570, 14582), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14670, 14699), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14687, 14699), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14785, 14814), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14802, 14814), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14896, 14925), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (14913, 14925), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15475, 15524), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (15512, 15524), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((479, 502), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1313)', '(869)'], {}), '(1313, 869)\n', (491, 502), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((538, 562), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(1920)', '(1080)'], {}), '(1920, 1080)\n', (550, 562), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1144, 1165), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(200)', '(30)'], {}), '(200, 30)\n', (1156, 1165), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1211, 1232), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(200)', '(50)'], {}), '(200, 50)\n', (1223, 1232), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1833, 1855), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(640)', '(279)'], {}), '(640, 279)\n', (1845, 1855), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1898, 1921), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(640)', '(9999)'], {}), '(640, 9999)\n', (1910, 1921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2368, 2396), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(640)', '(282)'], {}), '(0, 0, 640, 282)\n', (2380, 2396), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2748, 2766), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (2760, 2766), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4189, 4211), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(640)', '(480)'], {}), '(640, 480)\n', (4201, 4211), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((4250, 4272), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(640)', '(480)'], {}), '(640, 480)\n', (4262, 4272), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4460, 4496), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.ArrowCursor'], {}), '(QtCore.Qt.ArrowCursor)\n', (4473, 4496), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5037, 5059), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(640)', '(771)'], {}), '(640, 771)\n', (5049, 5059), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5100, 5124), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(9999)', '(9999)'], {}), '(9999, 9999)\n', (5112, 5124), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5557, 5585), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(645)', '(774)'], {}), '(0, 0, 645, 774)\n', (5569, 5585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5925, 5943), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (5937, 5943), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6190, 6226), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.ArrowCursor'], {}), '(QtCore.Qt.ArrowCursor)\n', (6203, 6226), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8280, 8301), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(160)', '(30)'], {}), '(160, 30)\n', (8292, 8301), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8343, 8364), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(200)', '(30)'], {}), '(200, 30)\n', (8355, 8364), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9179, 9200), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(30)'], {}), '(150, 30)\n', (9191, 9200), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9241, 9262), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(200)', '(30)'], {}), '(200, 30)\n', (9253, 9262), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10078, 10099), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(30)'], {}), '(150, 30)\n', (10090, 10099), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10142, 10163), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(200)', '(30)'], {}), '(200, 30)\n', (10154, 10163), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10655, 10676), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(30)'], {}), '(150, 30)\n', (10667, 10676), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11489, 11517), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(1313)', '(21)'], {}), '(0, 0, 1313, 21)\n', (11501, 11517), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 24 12:17:33 2016
@author: mcgibbon
"""
import xarray
from six import string_types
from datetime import timedelta
from .time import TimeRange, get_netcdf_time, datetime
from . import export
from .util import zlcl_from_T_RH
from .magic import leg_times
import numpy as np
import re
import atmos
try:
from numba import jit
except ImportError:
def jit(function):
return function
def align(*objects, **kwargs):
"""Given any number of Dataset objects, returns new
objects with aligned indexes.
Array from the aligned objects are suitable as input to mathematical
operators, because along each dimension they have the same indexes.
Missing values (if ``join != 'inner'``) are filled with NaN.
Parameters
----------
*objects : Dataset
Objects to align.
join : {'outer', 'inner', 'left', 'right'}, optional
Method for joining the indexes of the passed objects along each
dimension:
- 'outer': use the union of object indexes
- 'inner': use the intersection of object indexes
- 'left': use indexes from the first object with each dimension
- 'right': use indexes from the last object with each dimension
copy : bool, optional
If ``copy=True``, the returned objects contain all new variables. If
``copy=False`` and no reindexing is required then the aligned objects
will include original variables.
Returns
-------
aligned : same as *objects
Tuple of objects with aligned coordinates.
"""
xarray_datasets = [obj.xarray for obj in objects]
    aligned_datasets = xarray.align(*xarray_datasets, **kwargs)
return [Dataset(ds) for ds in aligned_datasets]
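# Hedged usage sketch (hypothetical wrapper objects): aligning two Dataset
# instances on their shared coordinates before arithmetic.
#   ds_a_aligned, ds_b_aligned = align(ds_a, ds_b, join='inner')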
class EmptyDataset(object):
pass
@export
class Dataset(object):
def __init__(self, xarray_dataset, variable_aliases=None):
"""
variable_aliases should be a dictionary whose keys are aliases, and values
are the variable names they refer to.
"""
# if 'time_offset' in xarray_dataset:
# xarray_dataset['time'] = xarray_dataset['time_offset']
self._dataset = xarray_dataset
self._time = get_netcdf_time(self._dataset)
if not hasattr(self, 'variable_aliases'): # subclass might initialize this
self.variable_aliases = {}
if variable_aliases is not None:
for alias, variable_name in variable_aliases.items():
self.define_alias(alias, variable_name)
def __repr__(self):
return self._dataset.__repr__()
def __str__(self):
return self._dataset.__str__()
def __unicode__(self):
return self._dataset.__unicode__()
def resample(self, window='3H', label='center'):
if label == 'center':
xarray_dataset = self._dataset.resample(window, 'time', how='mean', label='left')
time_offset_to_middle = 0.5*(xarray_dataset['time'][1] - xarray_dataset['time'][0])
xarray_dataset['time'] += time_offset_to_middle
else:
xarray_dataset = self._dataset.resample(window, 'time', how='mean', label=label)
return Dataset(xarray_dataset)
def __getitem__(self, key):
if key in self.variable_aliases:
return self[self.variable_aliases[key]]
elif key in self._dataset:
return self._dataset[key]
else:
            raise KeyError(key)
def __setitem__(self, key, item):
self._dataset[key] = item
def define_alias(self, alias, variable_name):
if not self._dataset_has_variable(variable_name):
raise ValueError(
'Cannot create alias for non-existent variable {}'.format(variable_name))
else:
self.variable_aliases[alias] = variable_name
def _dataset_has_variable(self, variable_name):
return variable_name in self._dataset.data_vars.keys()
@property
def time(self):
return self._time.values
@property
def time_range(self):
return TimeRange(self.time[0], self.time[-1])
@property
def xarray(self):
return self._dataset
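# --- Usage sketch (added for illustration; not part of the original module).
# Shows how the Dataset wrapper above is typically driven. The file name
# "sounding.nc", the "temperature" variable, and the "T" alias are assumptions,
# and the resample call relies on the older xarray API this module targets.
def _example_dataset_usage():
    import xarray as xr
    raw = xr.open_dataset("sounding.nc")  # hypothetical file with a time coordinate
    ds = Dataset(raw, variable_aliases={"T": "temperature"})
    print(ds.time_range)                # TimeRange spanning the file
    resampled = ds.resample(window="3H", label="center")
    return ds["T"], resampled           # alias "T" resolves to "temperature"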
| [
"xarray.align"
] | [((1669, 1708), 'xarray.align', 'xarray.align', (['xarray_datasets'], {}), '(xarray_datasets, **kwargs)\n', (1681, 1708), False, 'import xarray\n')] |
import requests, csv, sys
from bs4 import BeautifulSoup as bs
# argument handling
if len(sys.argv) != 2:
sys.exit('Please enter a school name to scrape! (Options: Penn, Brown, or Harvard)')
school = sys.argv[1].lower()
if school != 'penn' and school != 'harvard' and school != 'brown':
sys.exit('Not a valid school. Try Penn, Brown, or Harvard!')
# assign url based on input
url = 'https://secure.www.upenn.edu/secretary/hondegchron.html'
if school == 'harvard':
url = 'https://www.harvard.edu/on-campus/commencement/honorary-degrees'
if school == 'brown':
url = 'https://www.brown.edu/about/administration/corporation/2000s'
r = requests.get(url)
data = r.text
soup = bs(data, 'html.parser')
honorees_by_year = {}
# gets data from Penn's website
def penn():
years = {}
names = []
for year in soup.findAll('h2'):
honorees = year.next_sibling.next_sibling.get_text().splitlines()
names = []
name = ''
for s in honorees:
if not s.startswith('(') and not s.startswith('January'):
name += s.replace(u'\xa0', u' ').strip().encode('utf8')
degrees = 'D.' in name or 'M.' in name or 'B.' in name
mistakes = 'Award' in name or '.D' in name or 'L.' in name
if degrees or mistakes:
names.append(name)
name = ''
years[int(year.text)] = names
fixed_years = {}
for year in years:
fixed_names = []
for name in years[year]:
n = name.split(',')
name = ''
for i in range(1, len(n) - 1):
name += n[i].strip() + ' '
name += n[0].title().strip()
fixed_names.append(name)
fixed_years[year] = fixed_names
return fixed_years
# gets data from Harvard's website
def harvard():
years = {}
header = True
for row in soup.findAll('tr'):
if header:
header = False
continue
# map all names in this list to their year
cells = row.findAll('td')
if len(cells) < 1:
continue
names = cells[1].text.split(',')
year = int(cells[0].text)
if year in years:
years[year] += names
else:
years[year] = names
# strip names and fix Jr. cases
for year in years:
fixed_names = []
names = years[year]
for i, name in enumerate(names):
if name.strip() == 'Jr.':
fixed_names[len(fixed_names) - 1] += ' Jr.'
else:
fixed_names.append(name.strip().encode('utf8'))
years[year] = fixed_names
return years
# gets data from Brown's website
def brown():
years = {}
c20 = 'https://www.brown.edu/about/administration/corporation/2000s'
c19 = 'https://www.brown.edu/about/administration/corporation/1900s'
c18 = 'https://www.brown.edu/about/administration/corporation/1800s'
c17 = 'https://www.brown.edu/about/administration/corporation/1700s'
years20 = brown_get_century(c20, years)
years19 = brown_get_century(c19, years20)
years18 = brown_get_century(c18, years19)
years17 = brown_get_century(c17, years18)
# strip names and fix Jr. cases
years = years17
for year in years:
fixed_names = []
names = years[year]
for name in names:
name = name.strip().encode('utf8')
end = name.find('(')
            if end != -1:
name = name[0:end]
comma = name.find(', ')
jr = name.find('Jr.')
            if comma != -1 and jr == -1:
names = name.split(', ')
name = names[1].strip() + ' ' + names[0].strip()
            elif comma != -1 and jr != -1:
names = name.split(', ')
name = names[1].strip() + ' ' + names[0].strip() + ' Jr.'
fixed_names.append(name)
years[year] = fixed_names
return years
# gets data from each individual Brown page
def brown_get_century(url, years):
r = requests.get(url)
data = r.text
soup = bs(data, 'html.parser')
header = True
for row in soup.findAll('tr'):
if header:
header = False
continue
# map all the name to its year
cells = row.findAll('td')
if len(cells) < 1:
continue
name = cells[2].text
year = int(cells[0].text)
if year in years:
years[year].append(name)
else:
years[year] = [name]
return years
if school == 'penn':
honorees_by_year = penn()
elif school == 'harvard':
honorees_by_year = harvard()
elif school == 'brown':
honorees_by_year = brown()
# write out all data to a csv
with open('data/honorees_' + school + '.csv', 'wb') as output:
honorees = csv.writer(output)
for year in sorted(honorees_by_year):
for name in honorees_by_year[year]:
honorees.writerow([name, year])
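# --- Illustration (added): the CSV written above holds one "name,year" row per
# honoree, so it can be read back as below. The reader mirrors the writer's path.
def read_honorees(school_name):
    rows = []
    with open('data/honorees_' + school_name + '.csv') as f:
        for name, year in csv.reader(f):
            rows.append((name, int(year)))
    return rows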
| [
"bs4.BeautifulSoup",
"csv.writer",
"requests.get",
"sys.exit"
] | [((655, 672), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (667, 672), False, 'import requests, csv, sys\n'), ((694, 717), 'bs4.BeautifulSoup', 'bs', (['data', '"""html.parser"""'], {}), "(data, 'html.parser')\n", (696, 717), True, 'from bs4 import BeautifulSoup as bs\n'), ((114, 203), 'sys.exit', 'sys.exit', (['"""Please enter a school name to scrape! (Options: Penn, Brown, or Harvard)"""'], {}), "(\n 'Please enter a school name to scrape! (Options: Penn, Brown, or Harvard)')\n", (122, 203), False, 'import requests, csv, sys\n'), ((301, 361), 'sys.exit', 'sys.exit', (['"""Not a valid school. Try Penn, Brown, or Harvard!"""'], {}), "('Not a valid school. Try Penn, Brown, or Harvard!')\n", (309, 361), False, 'import requests, csv, sys\n'), ((4080, 4097), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4092, 4097), False, 'import requests, csv, sys\n'), ((4127, 4150), 'bs4.BeautifulSoup', 'bs', (['data', '"""html.parser"""'], {}), "(data, 'html.parser')\n", (4129, 4150), True, 'from bs4 import BeautifulSoup as bs\n'), ((4860, 4878), 'csv.writer', 'csv.writer', (['output'], {}), '(output)\n', (4870, 4878), False, 'import requests, csv, sys\n')] |
# -*- coding: utf-8 -*-
# Description: Document
# Documentation: data_structures.txt
import os.path
import string
import six
from Remark.FileSystem import unixDirectoryName, unixRelativePath, fileExtension
from Remark.DocumentType_Registry import documentType
class Document(object):
def __init__(self, relativeName):
'''
Constructs a document-object for the given file.
relativeName:
The path to the file, relative to the document-tree's
root-directory.
'''
# The relative-path to the document, with respect to
# the document-tree's root-directory. It is important
# that the relative-name is in the unix-form. This is
# being relied upon elsewhere (e.g. Gallery_Macro
# creates an md5-digest from the relative-name;
# this must be portable across operating systems).
self.relativeName = unixDirectoryName(relativeName)
# The relative-directory and the filename of the document.
self.relativeDirectory, self.fileName = os.path.split(relativeName)
# The file-name extension of this document, in lower-case
# to enable case-insensitivity.
self.extension = fileExtension(self.fileName).lower()
# The document-type of this document.
self.documentType = documentType(self.extension)
# The parent-document of this document.
self.parent = None
# A map from document's relative-name (string) to a
# document-object (Document).
self.childSet = {}
# A map from a tag-name (string) to a text
# (list of strings).
self.tagSet = {}
# The predefined document-tags.
self.setTag('description')
self.setTag('detail')
self.setTag('author')
self.setTag('file_name', [self.fileName])
self.setTag('relative_name', [self.relativeName])
self.setTag('relative_directory', [self.relativeDirectory])
self.setTag('extension', [self.extension])
self.setTag('html_head')
self.setTag('document_type', [self.documentType.name()])
# This will be filled in later, after the
# description-tags have been parsed.
self.setTag('link_description')
# Whether the document should be generated.
# By default the document is not generated;
# the regeneration rules change this later.
self.regenerate_ = False
def setRegenerate(self, regenerate):
self.regenerate_ = regenerate
def regenerate(self):
return self.regenerate_
def insertChild(self, child):
'''
Inserts a new child-document for this document.
        A document can only be linked to at most one
parent-document.
'''
        assert child.parent is None
self.childSet[child.relativeName] = child
child.parent = self
def setTag(self, tagName, text = ['']):
'''
Associates text with a given tag-name.
tagName (string):
The name of the tag. It will be stripped
of surrounding whitespace.
text (list of strings):
The text to associate to the tag-name.
'''
assert isinstance(text, list)
assert isinstance(tagName, six.string_types)
self.tagSet[tagName.strip()] = text
def tag(self, tagName, defaultText = ['']):
'''
Returns the text associated with the given
tag-name. If the tag-name is not found, returns
the given default-value instead.
tagName (string):
The tag-name to find. It will be stripped of
surrounding whitespace.
'''
assert isinstance(tagName, six.string_types)
return self.tagSet.get(tagName.strip(), defaultText)
def tagString(self, tagName, default = ''):
'''
Returns the tag-text associated with the given
tag-name, such that the lines of the tag-text are
joined together into a single string.
'''
return ''.join(self.tag(tagName, [default]))
def tagInteger(self, tagName, default = 0):
'''
Returns the integer associated with the given tag-name.
The tag-text is first interpreted as a single string,
which is then converted to an integer.
'''
if not tagName in self.tagSet:
return default
return int(self.tagString(tagName))
def linkDescription(self):
'''
Returns the link-description of the document.
returns:
The link-description given by the document-type,
if the document has a document-type. Otherwise
the empty string.
'''
type = self.documentType
        if type is None:
return ''
return self.documentType.linkDescription(self)
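# --- Usage sketch (added; not from the Remark sources). Exercises the tag
# interface of Document. The relative name "docs/intro.txt" is an assumption,
# as is the existence of a registered document type for the .txt extension
# (Document.__init__ calls self.documentType.name(), so one must be registered).
def _example_document_usage():
    doc = Document("docs/intro.txt")
    doc.setTag("description", ["A short introduction."])
    assert doc.tagString("description") == "A short introduction."
    assert doc.tagString("file_name") == "intro.txt"
    return doc.linkDescription()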
| [
"Remark.DocumentType_Registry.documentType",
"Remark.FileSystem.fileExtension",
"Remark.FileSystem.unixDirectoryName"
] | [((931, 962), 'Remark.FileSystem.unixDirectoryName', 'unixDirectoryName', (['relativeName'], {}), '(relativeName)\n', (948, 962), False, 'from Remark.FileSystem import unixDirectoryName, unixRelativePath, fileExtension\n'), ((1361, 1389), 'Remark.DocumentType_Registry.documentType', 'documentType', (['self.extension'], {}), '(self.extension)\n', (1373, 1389), False, 'from Remark.DocumentType_Registry import documentType\n'), ((1246, 1274), 'Remark.FileSystem.fileExtension', 'fileExtension', (['self.fileName'], {}), '(self.fileName)\n', (1259, 1274), False, 'from Remark.FileSystem import unixDirectoryName, unixRelativePath, fileExtension\n')] |
# Generated by Django 2.0 on 2018-08-16 11:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='ChatRecord',
fields=[
('msg_id', models.AutoField(primary_key=True, serialize=False)),
('content', models.CharField(max_length=256)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '聊天记录',
'verbose_name_plural': '聊天记录',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='Collection',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '收藏记录',
'verbose_name_plural': '收藏记录',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='Follow',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '关注者',
'verbose_name_plural': '关注者',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='Goods',
fields=[
('goods_id', models.AutoField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=30)),
('description', models.CharField(max_length=3000)),
('picture', models.TextField()),
('sale_way', models.PositiveSmallIntegerField(choices=[(0, '一口价'), (1, '拍卖')])),
('is_new', models.BooleanField(choices=[(0, '非全新'), (1, '全新')])),
('fixed_price', models.DecimalField(decimal_places=2, max_digits=8)),
('purchase_price', models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True)),
('status', models.PositiveSmallIntegerField(choices=[(0, '未出售'), (1, '交易中'), (2, '已出售')])),
('visits', models.PositiveIntegerField(default=0)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '商品标题',
'verbose_name_plural': '商品标题',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='GoodsMessage',
fields=[
('msg_id', models.AutoField(primary_key=True, serialize=False)),
('content', models.CharField(max_length=256)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
('goods_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.Goods')),
],
options={
'verbose_name': '商品留言',
'verbose_name_plural': '商品留言',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='GoodsType',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=20, unique=True)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '物品分类',
'verbose_name_plural': '物品分类',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='Permission',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('permission_no', models.CharField(max_length=255, unique=True)),
('permission_name', models.CharField(blank=True, max_length=255)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
],
options={
'verbose_name': '权限',
'verbose_name_plural': '权限',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('role_name', models.CharField(max_length=255)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
('role_permission', models.TextField(choices=[('normal_user', '普通用户')], default='普通用户')),
],
options={
'verbose_name': '角色',
'verbose_name_plural': '角色',
'ordering': ['-create_at'],
},
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('avatar', models.CharField(max_length=2083)),
('real_name', models.CharField(max_length=20)),
('student_id', models.CharField(max_length=11)),
('card_id', models.CharField(max_length=18)),
('sex', models.PositiveSmallIntegerField(choices=[(0, '男'), (1, '女')])),
('email', models.EmailField(max_length=254, unique=True)),
('name', models.CharField(max_length=20, unique=True)),
('password', models.CharField(max_length=256)),
('create_at', models.DateTimeField(auto_now_add=True)),
('update_at', models.DateTimeField(auto_now=True)),
('role_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.Role')),
],
options={
'verbose_name': '用户',
'verbose_name_plural': '用户',
'ordering': ['-create_at'],
},
),
migrations.AddField(
model_name='goodsmessage',
name='recv_user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='it_recv_this_message', to='app_db.User'),
),
migrations.AddField(
model_name='goodsmessage',
name='send_user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='it_send_this_message', to='app_db.User'),
),
migrations.AddField(
model_name='goods',
name='customer_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.User'),
),
migrations.AddField(
model_name='goods',
name='goods_type_id',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='app_db.GoodsType'),
),
migrations.AddField(
model_name='follow',
name='fans_user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='its_fans', to='app_db.User'),
),
migrations.AddField(
model_name='follow',
name='user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='it_love', to='app_db.User'),
),
migrations.AddField(
model_name='collection',
name='goods_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.Goods'),
),
migrations.AddField(
model_name='collection',
name='user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.User'),
),
migrations.AddField(
model_name='chatrecord',
name='goods_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app_db.Goods'),
),
migrations.AddField(
model_name='chatrecord',
name='recv_user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='it_recv_this_record', to='app_db.User'),
),
migrations.AddField(
model_name='chatrecord',
name='send_user_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='it_send_this_record', to='app_db.User'),
),
]
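# --- Illustration (added): a commented sketch of the Goods model that the
# CreateModel('Goods', ...) operation above implies, roughly as it might appear
# in app_db/models.py. Only a subset of fields is shown and the module path is
# an assumption; the migration operations above remain the authoritative definition.
#
# from django.db import models
#
# class Goods(models.Model):
#     goods_id = models.AutoField(primary_key=True)
#     title = models.CharField(max_length=30)
#     description = models.CharField(max_length=3000)
#     sale_way = models.PositiveSmallIntegerField(choices=[(0, '一口价'), (1, '拍卖')])
#     status = models.PositiveSmallIntegerField(choices=[(0, '未出售'), (1, '交易中'), (2, '已出售')])
#     customer_id = models.ForeignKey('User', on_delete=models.CASCADE)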
| [
"django.db.models.EmailField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.CharField"
] | [((6826, 6948), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""it_recv_this_message"""', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='it_recv_this_message', to='app_db.User')\n", (6843, 6948), False, 'from django.db import migrations, models\n'), ((7075, 7197), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""it_send_this_message"""', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='it_send_this_message', to='app_db.User')\n", (7092, 7197), False, 'from django.db import migrations, models\n'), ((7316, 7401), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='app_db.User'\n )\n", (7333, 7401), False, 'from django.db import migrations, models\n'), ((7522, 7635), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.GoodsType"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='app_db.GoodsType')\n", (7539, 7635), False, 'from django.db import migrations, models\n'), ((7756, 7866), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""its_fans"""', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='its_fans', to='app_db.User')\n", (7773, 7866), False, 'from django.db import migrations, models\n'), ((7982, 8091), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""it_love"""', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='it_love', to='app_db.User')\n", (7999, 8091), False, 'from django.db import migrations, models\n'), ((8212, 8298), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.Goods"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'app_db.Goods')\n", (8229, 8298), False, 'from django.db import migrations, models\n'), ((8418, 8503), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='app_db.User'\n )\n", (8435, 8503), False, 'from django.db import migrations, models\n'), ((8624, 8710), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.Goods"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'app_db.Goods')\n", (8641, 8710), False, 'from django.db import migrations, models\n'), ((8835, 8956), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""it_recv_this_record"""', 'to': '"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='it_recv_this_record', to='app_db.User')\n", (8852, 8956), False, 'from django.db import migrations, models\n'), ((9081, 9202), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""it_send_this_record"""', 'to': 
'"""app_db.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='it_send_this_record', to='app_db.User')\n", (9098, 9202), False, 'from django.db import migrations, models\n'), ((341, 392), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (357, 392), False, 'from django.db import migrations, models\n'), ((423, 455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (439, 455), False, 'from django.db import migrations, models\n'), ((488, 527), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (508, 527), False, 'from django.db import migrations, models\n'), ((560, 595), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (580, 595), False, 'from django.db import migrations, models\n'), ((899, 950), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (915, 950), False, 'from django.db import migrations, models\n'), ((983, 1022), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1003, 1022), False, 'from django.db import migrations, models\n'), ((1055, 1090), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1075, 1090), False, 'from django.db import migrations, models\n'), ((1390, 1441), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (1406, 1441), False, 'from django.db import migrations, models\n'), ((1474, 1513), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1494, 1513), False, 'from django.db import migrations, models\n'), ((1546, 1581), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1566, 1581), False, 'from django.db import migrations, models\n'), ((1884, 1935), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (1900, 1935), False, 'from django.db import migrations, models\n'), ((1964, 1995), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (1980, 1995), False, 'from django.db import migrations, models\n'), ((2030, 2063), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3000)'}), '(max_length=3000)\n', (2046, 2063), False, 'from django.db import migrations, models\n'), ((2094, 2112), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2110, 2112), False, 'from django.db import migrations, models\n'), ((2144, 2209), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': "[(0, '一口价'), (1, '拍卖')]"}), "(choices=[(0, '一口价'), (1, '拍卖')])\n", (2176, 2209), False, 'from django.db import migrations, models\n'), ((2239, 2291), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(0, '非全新'), (1, '全新')]"}), "(choices=[(0, '非全新'), (1, '全新')])\n", (2258, 2291), False, 'from django.db import migrations, models\n'), ((2326, 2377), 'django.db.models.DecimalField', 'models.DecimalField', ([], 
{'decimal_places': '(2)', 'max_digits': '(8)'}), '(decimal_places=2, max_digits=8)\n', (2345, 2377), False, 'from django.db import migrations, models\n'), ((2415, 2489), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(2)', 'max_digits': '(8)', 'null': '(True)'}), '(blank=True, decimal_places=2, max_digits=8, null=True)\n', (2434, 2489), False, 'from django.db import migrations, models\n'), ((2519, 2597), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': "[(0, '未出售'), (1, '交易中'), (2, '已出售')]"}), "(choices=[(0, '未出售'), (1, '交易中'), (2, '已出售')])\n", (2551, 2597), False, 'from django.db import migrations, models\n'), ((2627, 2665), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2654, 2665), False, 'from django.db import migrations, models\n'), ((2698, 2737), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2718, 2737), False, 'from django.db import migrations, models\n'), ((2770, 2805), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2790, 2805), False, 'from django.db import migrations, models\n'), ((3115, 3166), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (3131, 3166), False, 'from django.db import migrations, models\n'), ((3197, 3229), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (3213, 3229), False, 'from django.db import migrations, models\n'), ((3262, 3301), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3282, 3301), False, 'from django.db import migrations, models\n'), ((3334, 3369), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (3354, 3369), False, 'from django.db import migrations, models\n'), ((3401, 3487), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.Goods"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'app_db.Goods')\n", (3418, 3487), False, 'from django.db import migrations, models\n'), ((3785, 3836), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (3801, 3836), False, 'from django.db import migrations, models\n'), ((3864, 3908), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'unique': '(True)'}), '(max_length=20, unique=True)\n', (3880, 3908), False, 'from django.db import migrations, models\n'), ((3941, 3980), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3961, 3980), False, 'from django.db import migrations, models\n'), ((4013, 4048), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (4033, 4048), False, 'from django.db import migrations, models\n'), ((4352, 4403), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (4368, 4403), False, 'from django.db import migrations, models\n'), ((4440, 4485), 'django.db.models.CharField', 'models.CharField', 
([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (4456, 4485), False, 'from django.db import migrations, models\n'), ((4524, 4568), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(255)'}), '(blank=True, max_length=255)\n', (4540, 4568), False, 'from django.db import migrations, models\n'), ((4601, 4640), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4621, 4640), False, 'from django.db import migrations, models\n'), ((4673, 4708), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (4693, 4708), False, 'from django.db import migrations, models\n'), ((5002, 5053), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (5018, 5053), False, 'from django.db import migrations, models\n'), ((5086, 5118), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (5102, 5118), False, 'from django.db import migrations, models\n'), ((5151, 5190), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (5171, 5190), False, 'from django.db import migrations, models\n'), ((5223, 5258), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (5243, 5258), False, 'from django.db import migrations, models\n'), ((5297, 5364), 'django.db.models.TextField', 'models.TextField', ([], {'choices': "[('normal_user', '普通用户')]", 'default': '"""普通用户"""'}), "(choices=[('normal_user', '普通用户')], default='普通用户')\n", (5313, 5364), False, 'from django.db import migrations, models\n'), ((5658, 5709), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (5674, 5709), False, 'from django.db import migrations, models\n'), ((5739, 5772), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2083)'}), '(max_length=2083)\n', (5755, 5772), False, 'from django.db import migrations, models\n'), ((5805, 5836), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (5821, 5836), False, 'from django.db import migrations, models\n'), ((5870, 5901), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(11)'}), '(max_length=11)\n', (5886, 5901), False, 'from django.db import migrations, models\n'), ((5932, 5963), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(18)'}), '(max_length=18)\n', (5948, 5963), False, 'from django.db import migrations, models\n'), ((5990, 6052), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': "[(0, '男'), (1, '女')]"}), "(choices=[(0, '男'), (1, '女')])\n", (6022, 6052), False, 'from django.db import migrations, models\n'), ((6081, 6127), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'unique': '(True)'}), '(max_length=254, unique=True)\n', (6098, 6127), False, 'from django.db import migrations, models\n'), ((6155, 6199), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'unique': '(True)'}), '(max_length=20, unique=True)\n', (6171, 6199), False, 'from django.db import migrations, models\n'), ((6231, 6263), 'django.db.models.CharField', 'models.CharField', 
([], {'max_length': '(256)'}), '(max_length=256)\n', (6247, 6263), False, 'from django.db import migrations, models\n'), ((6296, 6335), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (6316, 6335), False, 'from django.db import migrations, models\n'), ((6368, 6403), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (6388, 6403), False, 'from django.db import migrations, models\n'), ((6434, 6519), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""app_db.Role"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='app_db.Role'\n )\n", (6451, 6519), False, 'from django.db import migrations, models\n')] |
# Modules used
from random import randint
from time import sleep
# indices: 0 1 2
opcao=['Pedra', 'Papel', 'Tesoura']
menu='''
<NAME>
[ 1 ] Pedra
[ 2 ] Papel
[ 3 ] Tesoura
[ 4 ] Sair
'''
jogador = 0
while jogador != 4:
print(menu)
    # Player chooses
    jogador = int(input('Escolha: '))
    # Pick a random number from 1 to 3
    computador = randint(1,3)
    # Exit the game
    if jogador == 4:
        break # break ends the loop (while)
    # Show the player's choice
    print('\n','Você escolheu: ', opcao[jogador-1])
    # -1 because indexing starts at 0 and the player chooses 1 to 3,
    # while the 'opcao' list runs from 0 to 2
    print('\t'*4+'Jo') # '\t'*4 = tab 4 times
    sleep(1) # Pause the program for 1 s
    print('\t'*4+'Ken')
    sleep(1)
    print('\t'*4+'Po')
    sleep(1)
    # Show the computer's choice
    print('Computador Escolheu: ', opcao[computador-1],'\n'*2) #'\n'*2 = newline 2 times
    # -1 because indexing starts at 0
    if jogador == computador: # Player and computer chose the same number
print('Empate!')
    elif jogador == 1 and computador == 2: # Player: Rock / PC: Paper
        print('Computador Ganhou!')
    elif jogador == 1 and computador == 3: # Player: Rock / PC: Scissors
        print('Jogador Ganhou!')
    elif jogador == 2 and computador == 1: # Player: Paper / PC: Rock
        print('Jogador Ganhou')
    elif jogador == 2 and computador == 3: # Player: Paper / PC: Scissors
        print('Computador Ganhou!')
    elif jogador == 3 and computador == 1: # Player: Scissors / PC: Rock
        print('Computador Ganhou')
    elif jogador == 3 and computador == 2: # Player: Scissors / PC: Paper
        print('Jogador Ganhou!')
print('Até a próxima..') | [
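# --- Alternative (added for illustration, not part of the original script):
# the chain of elif comparisons above can be collapsed into a lookup table of
# which option each choice beats.
def vencedor(jogador, computador):
    vence = {1: 3, 2: 1, 3: 2}  # Pedra(1) beats Tesoura(3), Papel(2) beats Pedra(1), Tesoura(3) beats Papel(2)
    if jogador == computador:
        return 'Empate!'
    if vence[jogador] == computador:
        return 'Jogador Ganhou!'
    return 'Computador Ganhou!'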
"random.randint",
"time.sleep"
] | [((398, 411), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (405, 411), False, 'from random import randint\n'), ((771, 779), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (776, 779), False, 'from time import sleep\n'), ((835, 843), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (840, 843), False, 'from time import sleep\n'), ((871, 879), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (876, 879), False, 'from time import sleep\n')] |
from testopencv2 import OpticalSense
if __name__ == '__main__':
oc = OpticalSense()
oc.main()
| [
"testopencv2.OpticalSense"
] | [((74, 88), 'testopencv2.OpticalSense', 'OpticalSense', ([], {}), '()\n', (86, 88), False, 'from testopencv2 import OpticalSense\n')] |
# Python-wrapped REST API utilities for AppResponse 11
import os
import sys
import requests
import time
import argparse
import json
import getpass
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Avoid warnings for insecure certificates
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
AR11_UTILITIES_ACTIONS = ["data_layout", \
"interface_summary", \
"packet_download", \
"pull_backup", \
"report_job_durations", \
"roles_delete", \
"roles_export", \
"roles_import", \
"saml_export", \
"saml_import", \
"saml_spmetadata_download", \
"users_delete", \
"users_export", \
"users_import", \
"web_server_settings_export", \
"web_server_settings_import"]
##### HELPER FUNCTIONS
### jkraenzle: Update to be used by each call
# Run REST APIs to appliance and return result
# Assume 'payload' is JSON formatted
def ar11_rest_api (action, path, appliance, access_token, version, payload = None):
url = "https://" + appliance + path
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
if (action == "GET"):
r = requests.get (url, headers=headers, verify=False)
elif (action == "POST"):
r = requests.post (url, headers=headers, data=json.dumps (payload), verify=False)
elif (action == "PUT"):
r = requests.put (url, headers=headers, data=json.dumps (payload), verify=False)
elif (action == "DELETE"):
r = requests.delete (url, headers=headers, verify=False)
if (r.status_code not in [200, 201, 204]):
print ("Status code was %s" % r.status_code)
print ("Error: %s" % r.content)
result = None
else:
if (("Content-Type" in r.headers.keys ()) and ("application/json" in r.headers ["Content-Type"])):
result = json.loads (r.content)
elif (("Content-Type" in r.headers.keys ()) and ("application/x-gzip" in r.headers ["Content-Type"])):
result = r.content
else:
result = r.text
return result
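# --- Usage sketch (added): a typical call through the wrapper above. The
# interfaces path is the same endpoint used by ar11_interface_summary_get
# further below; hostname and token are placeholders supplied by the caller.
def example_list_interfaces(appliance, access_token, version):
	result = ar11_rest_api("GET", "/api/npm.packet_capture/3.0/interfaces", appliance, access_token, version)
	if result is not None:
		for interface in result.get("items", []):
			print(interface["name"])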
##### ACTION - report_job_durations
# Helper function to report duration of Capture Jobs from start and end time
def ar11_capture_job_durations(jobs):
output = []
output.append(['Job Name', 'State', 'Duration'])
for j in jobs:
job_name = j['config']['name']
job_id = j['id']
status = j['state']['status']
state = status['state']
start_time = status['packet_start_time']
end_time = status['packet_end_time']
duration = int(float(end_time) - float(start_time))
days = round(duration / (60 * 60 * 24))
hours = round((duration % (60 * 60 * 24)) / (60 * 60))
minutes = round(((duration % (60 * 60 * 24)) % (60 * 60)) / 60)
duration_str = str (days) + " days, " + str(hours) + " hours, " + str(minutes) + " minutes"
output.append([job_name, state, duration_str])
return output
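# --- Illustration (added): how the duration table above is typically produced,
# composing with ar11_capture_jobs_get defined further below. Assumes the jobs
# endpoint wraps its results in an "items" list, as the other endpoints in this
# file do.
def example_print_job_durations(appliance, access_token, version):
	jobs_result = ar11_capture_jobs_get(appliance, access_token, version)
	jobs = jobs_result["items"] if isinstance(jobs_result, dict) else []
	for row in ar11_capture_job_durations(jobs):
		print("%-24s %-12s %s" % tuple(row))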
# REST API Python wrapper to request storage layout information
# URL https://<appliance>/api/npm.data_manager/2.1/layout
# Header: Authorization: Bearer <access_token>
def ar11_data_layout_get (appliance, access_token, version):
url = "https://" + appliance + "/api/npm.data_manager/2.1/layout"
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
r = requests.get (url, headers=headers, verify=False)
if (r.status_code != 200):
print ("Status code was %s" % r.status_code)
print ("Error: %s" % r.content)
result = []
else:
result = json.loads (r.content)
return result
def ar11_interface_summary_get (appliance, access_token, version):
result = ar11_rest_api ("GET", "/api/npm.packet_capture/3.0/interfaces", appliance, access_token, version)
return result
# REST API Python wrapper to create backup on appliance
def ar11_backup_create (appliance, access_token, version):
# Kick off backup and give time to process
payload = {"description": "Automated Backup"}
backup_in_process = ar11_rest_api ("POST", "/api/npm.backup/1.0/backups", appliance, access_token, version, payload)
# If backup creation failed, return upstream showing the failure
if (backup_in_process == None):
return None
# Get backup id and sleep so there's time for backup to initially create
backup_id = backup_in_process ["id"]
time.sleep (5)
# Keep checking if backup has completed
backup_complete = False
while (backup_complete == False):
backup_list = ar11_rest_api ("GET", "/api/npm.backup/1.0/backups", appliance, access_token, version)
backups = backup_list ["items"]
found = False
for backup in backups:
if (backup ["id"] == backup_id):
found = True
if (backup ["status"] == "completed"):
backup_complete = True
# If backup "id" is not found on appliance
if (found == False):
print ("Error starting backup on %s" % appliance)
return None
elif (backup_complete == False):
time.sleep (2)
return backup_id
# REST API Python wrapper to download and delete automated backup
def ar11_backup_download_and_delete (appliance, access_token, version, backup_id):
backup_file = ar11_rest_api ("GET", "/api/npm.backup/1.0/backups/items/" + backup_id + "/file", appliance, access_token, version)
if (backup_file != None):
with open (appliance + ".backup.tgz", "wb") as backup_f:
backup_f.write (backup_file)
ar11_rest_api ("DELETE", "/api/npm.backup/1.0/backups/items/" + backup_id, appliance, access_token, version)
return
# REST API Python wrapper to create and pull backup from appliance
def ar11_backup_get (appliance, access_token, version):
backup_id = ar11_backup_create (appliance, access_token, version)
if (backup_id != None):
ar11_backup_download_and_delete (appliance, access_token, version, backup_id)
return True
else:
return False
# REST API Python wrapper to request Capture Job information
# URL https://<appliance>/api/npm.packet_capture/1.0/jobs
# Header: Authorization: Bearer <access_token>
def ar11_capture_jobs_get (appliance, access_token, version):
if (version <= 11.4):
url = "https://" + appliance + "/api/npm.packet_capture/1.0/jobs"
else:
url = "https://" + appliance + "/api/npm.packet_capture/2.0/jobs"
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
r = requests.get(url, headers=headers, verify=False)
if (r.status_code != 200):
print ("Status code was %s" % r.status_code)
print ("Error: %s" % r.content)
result = []
else:
result = json.loads(r.content)
return result
def ar11_packet_download (appliance, access_token, version, settings_f):
### jkraenzle: Yet to implement
# Read settings and verify that they are valid for this appliance
# Validate source (job by job name, etc.)
# Confirm time range within Capture Job, etc.
# Call to packet download with settings
# Loop until packets have been downloaded
return
##### ACTIONS - roles_export, roles_import, roles_delete
def ar11_remote_auth_get (appliance, access_token, version):
result = None
if (version > 11.5):
result = ar11_rest_api ("GET", "/api/mgmt.aaa/2.0/remote_authentication", appliance, access_token, version)
return result
# URL: https://<appliance>/api/mgmt.aaa/2.0/roles/<id>
# PUT
def ar11_role_set (appliance, access_token, version, role):
if (version > 11.5):
role_id = role ["id"]
url = "https://" + appliance + "/api/mgmt.aaa/2.0/roles/" + str(role_id)
bearer = "Bearer " + access_token
headers = {"Content-Type":"application/json", "Authorization":bearer}
r = requests.put (url, data=json.dumps(role), headers=headers, verify=False)
return
# URL: https://<appliance>/api/mgmt.aaa/2.0/roles
# POST
def ar11_role_create (appliance, access_token, version, role):
# Check if role name exists, and if so, delete?
if (version > 11.5):
url = "https://" + appliance + "/api/mgmt.aaa/2.0/roles"
bearer = "Bearer " + access_token
headers = {"Content-Type":"application/json", "Authorization":bearer}
r = requests.post (url, data=json.dumps(role), headers=headers, verify=False)
return
# REST API Python wrapper to get current roles
# URL: https://<appliance>/api/mgmt.aaa/2.0/roles
def ar11_roles_get (appliance, access_token, version):
if (version > 11.5):
url = "https://" + appliance + "/api/mgmt.aaa/2.0/roles"
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
r = requests.get (url, headers=headers, verify=False)
result = json.loads(r.content)
return result["items"]
# Counterpart to from_file function
def ar11_roles_to_file (roles, export_f):
for role in roles:
export_f.write (str("%s, %s, " % (role["pretty_name"], role["description"])).rstrip('\n'))
permissions = role["permissions"]
i = 0
for pg in permissions:
if (pg["permission_group"] == "ALL_OBJECTS_ACCESS"):
permission = "All objects"
elif (pg["permission_group"] == "APP_CONFIG_ACCESS"):
permission = "Application configuration"
elif (pg["permission_group"] == "JOB_CONFIG_ACCESS"):
permission = "Job configuration"
elif (pg["permission_group"] == "PACKET_ACCESS"):
permission = "Network packets"
elif (pg["permission_group"] == "RBAC_CONFIG_ACCESS"):
permission = "RBAC configuration"
elif (pg["permission_group"] == "SYSTEM_MGMT_CONFIG_ACCESS"):
permission = "System configuration"
else:
permission = "!!UNKNOWN PERMISSIONS!!"
if (pg["operation"] == "read_only"):
operation = "RO"
elif (pg["operation"] == "read_write"):
operation = "RW"
else:
operation = "NR"
if (i != len (permissions) - 1):
export_f.write (str("%s:%s," % (permission, operation)).rstrip('\n'))
else:
export_f.write ("%s:%s\n" % (permission, operation))
i+=1
# Export current roles from AR11 appliance
# For now, assume file has been opened and this function uses the file at its current marker
def ar11_roles_export (appliance, access_token, version, export_f):
# Make comment on current appliance from which these roles came
export_f.write ("# Roles on appliance %s\n" % appliance)
roles = ar11_roles_get (appliance, access_token, version)
ar11_roles_to_file (roles, export_f)
# Write a newline so there's space between each appliance export
export_f.write ("\n")
return
def ar11_roles_from_file (import_f):
roles = []
for line in import_f:
comment_test = line
if (len(comment_test.lstrip()) == 0):
continue
if (comment_test.lstrip()[0] == "#"):
continue
line_split = line.strip("\n").split (",")
pretty_name = line_split[0].strip()
description = line_split[1].strip()
i = 2
permissions = []
while (i <= len(line_split) - 1):
key_value = line_split [i].split(":")
permission_key = key_value[0].strip ()
if (len(key_value) == 2):
if (permission_key == "All objects"):
permission = "ALL_OBJECTS_ACCESS"
elif (permission_key == "Application configuration"):
permission = "APP_CONFIG_ACCESS"
elif (permission_key == "Job configuration"):
permission = "JOB_CONFIG_ACCESS"
elif (permission_key == "Network packets"):
permission = "PACKET_ACCESS"
elif (permission_key == "RBAC configuration"):
permission = "RBAC_CONFIG_ACCESS"
elif (permission_key == "System configuration"):
permission = "SYSTEM_MGMT_CONFIG_ACCESS"
else:
permission = "!!UNKNOWN!!"
if (key_value[1] == "RO"):
operation = "read_only"
elif (key_value[1] == "RW"):
operation = "read_write"
elif (key_value[1] == "NR"):
operation = "no_read"
else:
operation = "!!UNKNOWN!!"
if not(permission == "!!UNKNOWN!!" or operation == "!!UNKNOWN!!"):
permissions.append({"permission_group":permission, "operation":operation})
else:
print ("Error reading permission %s: operation %s" % (permission_key, key_value[1]))
i += 1
role = {"description":description, "pretty_name":pretty_name, "permissions":permissions}
roles.append (role)
return roles
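# --- Round-trip sketch (added): writes one role in the text format produced by
# ar11_roles_to_file and parses it back with ar11_roles_from_file. The role name
# and description are made-up examples.
def example_roles_roundtrip():
	import io
	line = "Packet Analyst, Read-only packets, Network packets:RO,Job configuration:NR\n"
	roles = ar11_roles_from_file(io.StringIO(line))
	# -> [{'pretty_name': 'Packet Analyst', 'description': 'Read-only packets',
	#      'permissions': [{'permission_group': 'PACKET_ACCESS', 'operation': 'read_only'},
	#                      {'permission_group': 'JOB_CONFIG_ACCESS', 'operation': 'no_read'}]}]
	return roles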
def ar11_roles_import (appliance, access_token, version, import_f):
# Get list of roles from import file
imported_roles = ar11_roles_from_file (import_f)
# Get list of roles from appliance
existing_roles = ar11_roles_get (appliance, access_token, version)
set_list = []
create_list = []
i = 0
for role in imported_roles:
found = False
id = 0
for existing_role in existing_roles:
if (role ["pretty_name"] == existing_role ["pretty_name"]):
found = True
role.update ({"id":existing_role["id"]})
if (found):
set_list.append(i)
else:
create_list.append(i)
i += 1
for item in set_list:
role = imported_roles [item]
ar11_role_set (appliance, access_token, version, role)
for item in create_list:
role = imported_roles [item]
ar11_role_create (appliance, access_token, version, role)
return
def ar11_role_names_from_file (import_f):
roles = []
for line in import_f:
comment_test = line
if (len(comment_test.lstrip()) == 0):
continue
if (comment_test.lstrip()[0] == "#"):
continue
role = line.strip("\n")
roles.append(role)
return roles
# URL: https://<appliance>/api/mgmt.aaa/2.0/roles/<id>
def ar11_role_delete (appliance, access_token, version, role_id):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/mgmt.aaa/2.0/roles/" + str(role_id)
r = requests.delete (url, headers=headers, verify=False)
return
def ar11_roles_delete (appliance, access_token, version, delete_f):
if (version > 11.5):
# Get list of roles to delete
roles_to_delete = ar11_role_names_from_file (delete_f)
# Get list of roles from appliance
existing_roles = ar11_roles_get (appliance, access_token, version)
delete_list = []
for role in roles_to_delete:
found = False
for existing_role in existing_roles:
if (role == existing_role ["pretty_name"]):
delete_list.append(existing_role ["id"])
found = True
break
if found == False:
print ("WARNING: Role %s did not exist on %s" % (role, appliance))
# Loop through roles, deleting one at a time from appliance
j = 0
while (j < len(delete_list)):
ar11_role_delete (appliance, access_token, version, delete_list[j])
j += 1
return
##### ACTIONS - saml_export, saml_import, saml_spmetadata_download
# For consistency, return configuration in JSON format
def ar11_saml_configuration_get (appliance, access_token, version):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/npm.saml/1.0/settings"
r = requests.get (url, headers=headers, verify=False)
result = json.loads(r.content)
return result
def ar11_saml_configuration_set (appliance, access_token, version, saml_config):
bearer = "Bearer " + access_token
headers = {"Content-Type":"application/json", "Authorization":bearer}
url = "https://" + appliance + "/api/npm.saml/1.0/settings"
r = requests.put (url, headers=headers, json=saml_config, verify=False)
if (r.status_code != 200):
print ("SAML configuration returned with status code %d." % (r.status_code,))
print (r.text)
return
def ar11_saml_export (appliance, access_token, version, export_f):
saml_config = ar11_saml_configuration_get (appliance, access_token, version)
# Write file in text format
export_f.write (json.dumps(saml_config))
return
def ar11_saml_import (appliance, access_token, version, import_f):
imported_txt = import_f.read ()
#stripped_txt = imported_txt.strip("\n")
#saml_config = stripped_txt.replace ("\\r\\n", "")
#saml_config = replaced_txt.replace (" false", "\"False\"")
saml_config = json.loads (imported_txt)
ar11_saml_configuration_set (appliance, access_token, version, saml_config)
return
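# --- Illustration (added): the export/import pair above is typically used to
# copy SAML settings from one appliance to another through an intermediate
# file. Hostnames, tokens and the file name are placeholders.
def example_copy_saml_settings(src, src_token, dst, dst_token, version):
	with open(src + "_saml.json", "w") as f:
		ar11_saml_export(src, src_token, version, f)
	with open(src + "_saml.json", "r") as f:
		ar11_saml_import(dst, dst_token, version, f)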
def ar11_saml_spmetadata_download (appliance, access_token, version):
spmetadata_file = ar11_rest_api ("GET", "/saml/metadata", appliance, access_token, version)
if (spmetadata_file != None):
with open (appliance + "_spmetadata.xml", "a+") as spmetadata_f:
spmetadata_f.write (json.dumps (spmetadata_file))
return True
else:
print ("Did not return a file")
return False
##### ACTIONS - users_delete, users_export, users_import
# URL: http://<appliance>/api/mgmt.aaa/2.0/users
def ar11_users_get (appliance, access_token, version):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/mgmt.aaa/2.0/users"
r = requests.get (url, headers=headers, verify=False)
result = json.loads(r.content)
return result
def ar11_users_from_file (users_f):
users = []
for line in users_f:
comment_test = line
if (len(comment_test.lstrip()) == 0):
continue
if (comment_test.lstrip()[0] == "#"):
continue
user = line.strip("\n")
users.append(user)
return users
# URL: https://<appliance>/api/mgmt.aaa/2.0/users/<name>
def ar11_user_delete (appliance, access_token, version, username):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/mgmt.aaa/2.0/users/" + username
r = requests.delete (url, headers=headers, verify=False)
if (r.status_code != 204):
print ("Failed to delete username %s" % (username))
print ("Status code is %s" % (r.status_code))
print ("Error: %s" % (r.content))
return
def ar11_users_delete (appliance, access_token, version, users_f):
# Get current list of users; returns an array of dicts
users_dict = ar11_users_get (appliance, access_token, version)
users_list = users_dict ["items"]
# Get list of users to delete
users_to_delete_list = ar11_users_from_file (users_f)
# Confirm users exist before attempting to delete
found_users_list = []
for user in users_to_delete_list:
found = False
i = 0
while (i < len (users_list)):
existing_user = users_list [i]
if (user == existing_user ["name"]):
found = True
break
i += 1
if (found == True):
found_users_list.append (user)
else:
print ("User %s was not found on appliance %s" % (user, appliance))
for user in found_users_list:
ar11_user_delete (appliance, access_token, version, user)
def ar11_users_export (appliance, access_token, version, users_f):
# Get current list of users; returns an array of dicts
users_dict = ar11_users_get (appliance, access_token, version)
# Write file in text format
users_f.write (json.dumps (users_dict))
def ar11_user_set (appliance, access_token, version, imported_user):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/mgmt.aaa/2.0/users/" + str(imported_user["name"])
r = requests.put (url, headers=headers, json=imported_user, verify=False)
if (r.status_code != 200):
print ("Status code was %s" % r.status_code)
print ("Error: %s" % r.content)
return
def ar11_user_create (appliance, access_token, version, imported_user):
bearer = "Bearer " + access_token
headers = {"Authorization":bearer}
url = "https://" + appliance + "/api/mgmt.aaa/2.0/users"
r = requests.post (url, headers=headers, json=imported_user, verify=False)
if (r.status_code != 201):
print ("Status code was %s" % r.status_code)
print ("Error: %s" % r.content)
return
def ar11_users_import (appliance, access_token, version, users_f):
# Get list of users to update or create
imported_txt = users_f.read ()
imported_users_dict = json.loads (imported_txt)
imported_users_list = imported_users_dict["items"]
# Get list of existing users
existing_users_dict = ar11_users_get (appliance, access_token, version)
existing_users_list = existing_users_dict["items"]
# Iterate over each imported user to see if that user already exists on this appliance
for imported_user in imported_users_list:
found = False
for existing_user in existing_users_list:
if (existing_user ["name"] == imported_user ["name"]):
found = True
if (found):
ar11_user_set (appliance, access_token, version, imported_user)
else:
ar11_user_create (appliance, access_token, version, imported_user)
def ar11_web_server_settings_export (appliance, access_token, version, settings_f):
settings_dict = ar11_rest_api ("GET", "/api/npm.https/1.0/https", appliance, access_token, version)
if (settings_dict == None):
return False
else:
# Write file in text format
settings_f.write (json.dumps (settings_dict))
return True
def ar11_web_server_settings_import (appliance, access_token, version, settings_f):
imported_txt = settings_f.read ()
imported_settings_dict = json.loads (imported_txt)
result = ar11_rest_api ("PUT", "/api/npm.https/1.0/https", appliance, access_token, version, imported_settings_dict)
return True
##### GENERAL FUNCTIONS
# REST API Python wrapper to authenticate to the server (Login)
# URL: https://<appliance>/api/mgmt.aaa/1.0/token ; pre-version 11.6
# URL: https://<appliance>/api/mgmt.aaa/2.0/token ; version 11.6 or later
# Header: Content-Type:application/json
# Body: {"user_credentials":{"username":<username>, "password":<password>},"generate_refresh_token":"true"}
def ar11_authenticate (appliance, username, password, version):
if (version <= 11.5):
url = "https://" + appliance + "/api/mgmt.aaa/1.0/token"
else:
url = "https://" + appliance + "/api/mgmt.aaa/2.0/token"
credentials = {"username":username, "password":password}
payload = {"user_credentials":credentials, "generate_refresh_token":True}
headers = {"Content-Type":"application/json"}
r = requests.post(url, data=json.dumps(payload), headers=headers, verify=False)
if (r.status_code != 201):
print ("Status code was %s" % r.status_code)
print ("Error %s" % r.content)
return None, None
else:
result = json.loads(r.content)
return result["access_token"], result["refresh_token"]
# REST API Python wrapper to revoke refresh token (Logout)
# URL: https://<appliance>/api/mgmt.aaa/1.0/refresh_tokens/revoke
# Header: Authorization: Bearer <access_token>
def ar11_refresh_token_revoke (appliance, access_token, refresh_token):
url = "https://" + appliance + "/api/mgmt.aaa/1.0/refresh_tokens/revoke"
bearer = "Bearer " + access_token
headers = {"Content-Type":"application/json", "Authorization":bearer}
payload = {"refresh_token":refresh_token}
r = requests.post(url, data=json.dumps(payload), headers=headers, verify=False)
return
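# --- Session sketch (added): the authenticate / call / revoke sequence that the
# helpers above implement, mirroring what main() does per hostname further below.
# ar11_version_get only needs the hostname (its endpoint is unauthenticated), so
# the extra arguments simply follow the existing call convention.
def example_session(hostname, username, password):
	version = ar11_version_get(hostname, username, password)
	access_token, refresh_token = ar11_authenticate(hostname, username, password, version)
	if access_token in (None, ""):
		return None
	layout = ar11_data_layout_get(hostname, access_token, version)
	ar11_refresh_token_revoke(hostname, access_token, refresh_token)
	return layout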
# Helper function to get list of hostnames from input
def hostnamelist_get (hostnamelist):
hostnamelist_f = open (hostnamelist, "r")
output = []
for row in hostnamelist_f:
hostname = row.rstrip()
output.append (hostname)
hostnamelist_f.close ()
return output
# REST API Python wrapper to request version information
# URL: https://<appliance>/api/common/1.0/info
# Header: AUthorization: Bearer <access_token>
def ar11_version_get (appliance, access_token, version):
url = "https://" + appliance + "/api/common/1.0/info"
r = requests.get (url, verify=False)
result = json.loads(r.content)
version_str = result["sw_version"]
if "11.4" in version_str:
return 11.4
elif "11.5" in version_str:
return 11.5
elif "11.6" in version_str:
return 11.6
elif "11.7" in version_str:
return 11.7
elif "11.8" in version_str:
return 11.8
elif "11.9" in version_str:
return 11.9
return 11.9
def main():
# set up arguments in appropriate variables
parser = argparse.ArgumentParser (description="Python utilities to automate information collection or \
configuration tasks within AppResponse 11 environments")
parser.add_argument('--hostname', help="Hostname or IP address of the AppResponse 11 appliance")
parser.add_argument('--hostnamelist', help="File containing hostnames or IP addresses, one per line")
parser.add_argument('--username', help="Username for the appliance")
parser.add_argument('--password', help="<PASSWORD> the <PASSWORD>")
parser.add_argument('--action', help="Action to perform: %s" % AR11_UTILITIES_ACTIONS)
parser.add_argument('--actionfile', help="Settings file associated with action")
args = parser.parse_args()
# Check inputs for required data and prep variables
if (args.hostname == None or args.hostname == "") and (args.hostnamelist == None or args.hostnamelist == ""):
print ("Please specify a hostname using --hostname or a list of hostnames in a file using --hostnamelist")
return
if (args.username == None or args.username == ""):
print ("Please specify a username using --username")
return
if (args.action == None or args.action == ""):
print ("Please specify an action using --action")
return
# Use either hostname or hostname list; if both are accidentally specified, use hostname list
if not(args.hostname == None or args.hostname == ""):
hostnamelist = [args.hostname]
elif not(args.hostnamelist == None or args.hostnamelist == ""):
hostnamelist = hostnamelist_get (args.hostnamelist)
	# Check that the action exists in the set of known actions
	if not (args.action in AR11_UTILITIES_ACTIONS):
		print ("Action %s is unknown" % args.action)
		return
if (args.password == None or args.password == ""):
print ("Please provide password for account %s" % args.username)
password = getpass.getpass ()
else:
password = args.password
# Loop through hosts, applying 'action'
for hostname in hostnamelist:
version = ar11_version_get (hostname, args.username, password)
access_token, refresh_token = ar11_authenticate (hostname, args.username, password, version)
if (access_token == None or access_token == ""):
print ("Failed to login to %s" % hostname)
continue
# ACTION - data_layout
if (args.action == "data_layout"):
layout = ar11_data_layout_get (hostname, access_token, version)
data_sections = layout ["configuration"]["data_sections"]
print ("%s:" % (hostname))
if (len (data_sections) == 0):
print ("No RAID configuration storage")
for data_section in data_sections:
if (data_section ["mode"] != ""):
print ("%s\t%s\t%s" % (data_section ["id"], data_section ["model"], data_section ["mode"]))
print ("")
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - interface_summary
if (args.action == "interface_summary"):
interface_summary = ar11_interface_summary_get (hostname, access_token, version)
interfaces = interface_summary ["items"]
print ("\t%s\t\t%s\t\t%s\t\t%s" % ("Name", "Status".ljust (8), "Packets - 1 hr".rjust (16), "Drops - 1 hr".rjust (16)))
for interface in interfaces:
print ("\t%s\t\t%s\t\t%s\t\t%s" % (interface ["name"],
str (interface ["state"]["status"]).ljust (8),
str (interface ["state"]["stats"]["packets_total"]["last_hour"]).rjust (16),
str (interface ["state"]["stats"]["packets_dropped"]["last_hour"]).rjust (16)))
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - pull_backup
elif (args.action == "pull_backup"):
backup = ar11_backup_get (hostname, access_token, version)
if (backup == True):
print ("Backup for %s was successful!" % (hostname))
else:
print ("Backup for %s was unsuccessful!" % (hostname))
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - report_job_durations
elif (args.action == "report_job_durations"):
# Get capture jobs from appliance
capture_jobs = ar11_capture_jobs_get (hostname, access_token, version)
if (len(capture_jobs) > 0):
output = ar11_capture_job_durations (capture_jobs["items"])
else:
output = ["No Capture Jobs found on appliance"]
print ("Appliance %s" % hostname)
for row in output:
print (row)
print ("")
# Okay to logout since only one action processed at a time
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - roles_export
elif (args.action == "roles_export"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename for role export in --actionfile parameter")
return
else:
with open(args.actionfile, "a+") as roles_f:
ar11_roles_export (hostname, access_token, version, roles_f)
# Okay to logout since only one action processed at a time
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - roles_import
elif (args.action == "roles_import"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename for role import in --actionfile parameter")
return
else:
with open(args.actionfile, "r") as roles_f:
ar11_roles_import (hostname, access_token, version, roles_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - roles_delete
elif (args.action == "roles_delete"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename for roles to delete in --actionfile parameter, one role name per line")
return
else:
with open(args.actionfile, "r") as roles_f:
ar11_roles_delete (hostname, access_token, version, roles_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - saml_export
elif (args.action == "saml_export"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename for SAML configuration to be exported")
return
else:
with open(args.actionfile, "a+") as export_f:
ar11_saml_export (hostname, access_token, version, export_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - saml_import
elif (args.action == "saml_import"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename containing SAML configuration in JSON format")
return
else:
with open(args.actionfile, "r") as import_f:
ar11_saml_import (hostname, access_token, version, import_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
elif (args.action == "saml_spmetadata_download"):
download = ar11_saml_spmetadata_download (hostname, access_token, version)
if (download == True):
print ("Download for %s was successful!" % (hostname))
else:
print ("Download for %s was unsuccessful!" % (hostname))
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - users_delete
elif (args.action == "users_delete"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename containing the users to delete, one per line")
return
else:
with open(args.actionfile, "r") as users_f:
ar11_users_delete (hostname, access_token, version, users_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - users export
elif (args.action == "users_export"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename to be used for export of user information")
return
else:
with open(args.actionfile, "a+") as users_f:
ar11_users_export (hostname, access_token, version, users_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - users import
elif (args.action == "users_import"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify the filename containing the users to import in JSON format")
return
else:
with open(args.actionfile, "r") as users_f:
ar11_users_import (hostname, access_token, version, users_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - web server settings export
elif (args.action == "web_server_settings_export"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify a filename to be used for export of web server settings")
else:
with open (args.actionfile, "a+") as settings_f:
ar11_web_server_settings_export (hostname, access_token, version, settings_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
# ACTION - web server settings import
elif (args.action == "web_server_settings_import"):
if (args.actionfile == None or args.actionfile == ""):
print ("Please specify the filename containing the web server settings to import")
else:
with open (args.actionfile, "r") as settings_f:
ar11_web_server_settings_import (hostname, access_token, version, settings_f)
ar11_refresh_token_revoke (hostname, access_token, refresh_token)
if __name__ == "__main__":
main()
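# Example invocation (hypothetical script name, host, and credentials):
#   python ar11_utilities.py --hostname 10.1.1.100 --username admin --action report_job_durations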
| [
"json.loads",
"requests.post",
"requests.packages.urllib3.disable_warnings",
"argparse.ArgumentParser",
"json.dumps",
"time.sleep",
"requests.get",
"requests.delete",
"getpass.getpass",
"requests.put"
] | [((264, 330), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', (['InsecureRequestWarning'], {}), '(InsecureRequestWarning)\n', (306, 330), False, 'import requests\n'), ((3145, 3193), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (3157, 3193), False, 'import requests\n'), ((4140, 4153), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4150, 4153), False, 'import time\n'), ((6100, 6148), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (6112, 6148), False, 'import requests\n'), ((13149, 13200), 'requests.delete', 'requests.delete', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (13164, 13200), False, 'import requests\n'), ((14350, 14398), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (14362, 14398), False, 'import requests\n'), ((14411, 14432), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (14421, 14432), False, 'import json\n'), ((14704, 14770), 'requests.put', 'requests.put', (['url'], {'headers': 'headers', 'json': 'saml_config', 'verify': '(False)'}), '(url, headers=headers, json=saml_config, verify=False)\n', (14716, 14770), False, 'import requests\n'), ((15409, 15433), 'json.loads', 'json.loads', (['imported_txt'], {}), '(imported_txt)\n', (15419, 15433), False, 'import json\n'), ((16211, 16259), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (16223, 16259), False, 'import requests\n'), ((16272, 16293), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (16282, 16293), False, 'import json\n'), ((16843, 16894), 'requests.delete', 'requests.delete', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (16858, 16894), False, 'import requests\n'), ((18391, 18459), 'requests.put', 'requests.put', (['url'], {'headers': 'headers', 'json': 'imported_user', 'verify': '(False)'}), '(url, headers=headers, json=imported_user, verify=False)\n', (18403, 18459), False, 'import requests\n'), ((18791, 18860), 'requests.post', 'requests.post', (['url'], {'headers': 'headers', 'json': 'imported_user', 'verify': '(False)'}), '(url, headers=headers, json=imported_user, verify=False)\n', (18804, 18860), False, 'import requests\n'), ((19148, 19172), 'json.loads', 'json.loads', (['imported_txt'], {}), '(imported_txt)\n', (19158, 19172), False, 'import json\n'), ((20287, 20311), 'json.loads', 'json.loads', (['imported_txt'], {}), '(imported_txt)\n', (20297, 20311), False, 'import json\n'), ((22630, 22661), 'requests.get', 'requests.get', (['url'], {'verify': '(False)'}), '(url, verify=False)\n', (22642, 22661), False, 'import requests\n'), ((22674, 22695), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (22684, 22695), False, 'import json\n'), ((23073, 23236), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Python utilities to automate information collection or \t\t configuration tasks within AppResponse 11 environments"""'}), "(description=\n 'Python utilities to automate information collection or \\t\\t configuration tasks within AppResponse 11 environments'\n )\n", (23096, 23236), False, 'import argparse\n'), ((1150, 1198), 'requests.get', 'requests.get', 
(['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (1162, 1198), False, 'import requests\n'), ((3337, 3358), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (3347, 3358), False, 'import json\n'), ((6291, 6312), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (6301, 6312), False, 'import json\n'), ((8197, 8245), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (8209, 8245), False, 'import requests\n'), ((8259, 8280), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (8269, 8280), False, 'import json\n'), ((15101, 15124), 'json.dumps', 'json.dumps', (['saml_config'], {}), '(saml_config)\n', (15111, 15124), False, 'import json\n'), ((18129, 18151), 'json.dumps', 'json.dumps', (['users_dict'], {}), '(users_dict)\n', (18139, 18151), False, 'import json\n'), ((21449, 21470), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (21459, 21470), False, 'import json\n'), ((24858, 24875), 'getpass.getpass', 'getpass.getpass', ([], {}), '()\n', (24873, 24875), False, 'import getpass\n'), ((1767, 1788), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (1777, 1788), False, 'import json\n'), ((20098, 20123), 'json.dumps', 'json.dumps', (['settings_dict'], {}), '(settings_dict)\n', (20108, 20123), False, 'import json\n'), ((21250, 21269), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (21260, 21269), False, 'import json\n'), ((22026, 22045), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (22036, 22045), False, 'import json\n'), ((4735, 4748), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (4745, 4748), False, 'import time\n'), ((7364, 7380), 'json.dumps', 'json.dumps', (['role'], {}), '(role)\n', (7374, 7380), False, 'import json\n'), ((7821, 7837), 'json.dumps', 'json.dumps', (['role'], {}), '(role)\n', (7831, 7837), False, 'import json\n'), ((15810, 15837), 'json.dumps', 'json.dumps', (['spmetadata_file'], {}), '(spmetadata_file)\n', (15820, 15837), False, 'import json\n'), ((1274, 1293), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (1284, 1293), False, 'import json\n'), ((1452, 1503), 'requests.delete', 'requests.delete', (['url'], {'headers': 'headers', 'verify': '(False)'}), '(url, headers=headers, verify=False)\n', (1467, 1503), False, 'import requests\n'), ((1382, 1401), 'json.dumps', 'json.dumps', (['payload'], {}), '(payload)\n', (1392, 1401), False, 'import json\n')] |
import functools
class Solution:
def largestNumber(self, nums):
"""
:type nums: List[int]
:rtype: str
"""
res = ""
num_dic = {}
for item in nums:
str_item = str(item)
number = int(str_item[0])
if number in num_dic:
num_dic[number].append(str_item)
            else:
                num_dic[number] = [str_item]
for index in num_dic:
num_dic[index] = sorted(num_dic[index], reverse = True, key=functools.cmp_to_key(self.compare))
for i in reversed(range(10)):
if i in num_dic:
for j in num_dic[i]:
res += str(j)
return str(int(res))
def compare(self, str_num1, str_num2):
num1 = int(str_num1 + str_num2)
num2 = int(str_num2 + str_num1)
if num1 > num2:
return 1
elif num1 < num2:
return -1
else:
return 0
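# Sanity check with the classic example input:
#   Solution().largestNumber([3, 30, 34, 5, 9])  # -> "9534330"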
| [
"functools.cmp_to_key"
] | [((567, 601), 'functools.cmp_to_key', 'functools.cmp_to_key', (['self.compare'], {}), '(self.compare)\n', (587, 601), False, 'import functools\n')] |
# -*- coding: utf-8 -*-
"""
Tests for the progressmonitor callback factories.
"""
__author__ = "<NAME> <<EMAIL>>"
__copyright__ = "3-clause BSD License"
__version__ = '1.0'
__date__ = "15 January 2015"
from nose.tools import assert_equal
from progressmonitor.callback import (store_till_end_callback_factory,
multi_callback_factory)
def test_ste():
msgs = ["aaa", "bbb", "ccc"]
def dest(l):
for x, y in zip(l, msgs):
assert_equal(x, y)
stecb = store_till_end_callback_factory(dest)
for msg in msgs[:-1]:
stecb(msg)
stecb(msgs[-1], True)
def test_mcf():
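    # The multi-callback should fan each message out to both wrapped callbacks,
    # so l1 and l2 must each end up equal to msgs.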
msgs = ["aaa", "bbb", "ccc"]
l1 = []
l2 = []
def l1_cb_fac():
def l1_cb(string, last_com=False):
l1.append(string)
return l1_cb
def l2_cb_fac():
def l2_cb(string, last_com=False):
l2.append(string)
return l2_cb
cb = multi_callback_factory([l1_cb_fac, l2_cb_fac])
for msg in msgs:
cb(msg)
    for i in range(len(msgs)):
assert_equal(msgs[i], l1[i])
assert_equal(msgs[i], l2[i])
| [
"progressmonitor.callback.store_till_end_callback_factory",
"nose.tools.assert_equal",
"progressmonitor.callback.multi_callback_factory"
] | [((481, 518), 'progressmonitor.callback.store_till_end_callback_factory', 'store_till_end_callback_factory', (['dest'], {}), '(dest)\n', (512, 518), False, 'from progressmonitor.callback import store_till_end_callback_factory, multi_callback_factory\n'), ((911, 957), 'progressmonitor.callback.multi_callback_factory', 'multi_callback_factory', (['[l1_cb_fac, l2_cb_fac]'], {}), '([l1_cb_fac, l2_cb_fac])\n', (933, 957), False, 'from progressmonitor.callback import store_till_end_callback_factory, multi_callback_factory\n'), ((1036, 1064), 'nose.tools.assert_equal', 'assert_equal', (['msgs[i]', 'l1[i]'], {}), '(msgs[i], l1[i])\n', (1048, 1064), False, 'from nose.tools import assert_equal\n'), ((1073, 1101), 'nose.tools.assert_equal', 'assert_equal', (['msgs[i]', 'l2[i]'], {}), '(msgs[i], l2[i])\n', (1085, 1101), False, 'from nose.tools import assert_equal\n'), ((449, 467), 'nose.tools.assert_equal', 'assert_equal', (['x', 'y'], {}), '(x, y)\n', (461, 467), False, 'from nose.tools import assert_equal\n')] |
# Practice exercises: list max/min, character counting, and a simple name-card manager
import random
# Exercise: for a list whose elements are all numbers, find the maximum and minimum values
# Randomly generate a list of 8 elements (here drawn from the range [-100, 0])
# Define an empty list
my_list = []
for value in range(8):
my_list.append(random.randint(-100, 0))
print(my_list)
# Maximum value
# Define a variable that holds the current maximum
# my_max = my_list[0]
# # Iterate over the list
# for value in my_list:
#     # Use if to check whether value is larger than my_max
#     if value > my_max:
#         my_max = value
#
# print("Maximum value: %d" % my_max)
# For example: counting the characters of "hello world" should give: h:1 e:1 l:3 o:2 d:1 r:1 w:1
a = "hello world"
# count = a.count("l")
# print(count)
# Approach 01
# Define a list of characters that have already been counted
# my_list = []
# # Iterate over the string
# for c in a:
#     # For each character, count how many times it appears in the string
#     # Only count it if it is not a space and has not been counted yet
#     if c != " " and c not in my_list:
#         count = a.count(c)
#         print("%s:%d" % (c, count))
#         my_list.append(c)
# Approach 02
# Build a set of the unique characters
my_set = set(a)
# print(my_set)
# Iterate over the set
for value in my_set:
    # Skip spaces
    if value != " ":
        count = a.count(value)
        print("%s:%d" % (value, count))
# Storing student data
# 小明, age 20
# 小红, age 22
# Option A: a list of dicts, e.g. [{"name": "小明", "age": 20}, {"name": "小红", "age": 22}]
# for dict in list:
#     if "小明" in dict:
#         delete the entry
#     else:
#         print("the name entered was not found")
# Option B: a dict keyed by name:
# all_dict = {"小明": {"name": "小明", "age": 20}, "小红": {"name": "小红", "age": 22}}
#
# if "小明" in all_dict:
#     delete the entry
# else:
#     report an error
# Define the menu string
str_info = """请选择:
1.添加名片
2.删除名片
3.修改名片
4.查询名片
5.退出系统
"""
# 定义一个字典保存所有人的数据
all_dict = {}
# 死循环
while True:
index = input(str_info)
# 进行判断
if index.isdigit() and (0 < int(index) < 6):
# 1.添加名片
if index == "1":
my_name = input("请输入您的名字:")
my_age = input("请输入您的年龄:")
# 定义一个字典保存个人数据
my_dict = {"name": my_name, "age": my_age}
# 保存到大字典中
all_dict[my_name] = my_dict
print("保存数据成功...")
# 2.删除名片
elif index == "2":
my_name = input("请输入删除的名字:")
# 判断
if my_name in all_dict:
# 删除
del all_dict[my_name]
print("删除数据成功...")
else:
print("您输入的名字有误!!!")
# 3.修改名片
elif index == "3":
my_name = input("请输入删除的名字:")
# 判断
if my_name in all_dict:
my_age = input("请输入修改的年龄:")
all_dict[my_name]["age"] = my_age
print("修改数据完成...")
else:
print("您输入的名字有误!!!")
# 4.查询名片
elif index == "4":
my_name = input("请输入删除的名字:")
# 判断
if my_name in all_dict:
print(all_dict[my_name])
else:
print("您输入的名字有误!!!")
# 5.退出系统
elif index == "5":
print("欢迎下次使用!")
break
else:
print("输入有误")
| [
"random.randint"
] | [((158, 181), 'random.randint', 'random.randint', (['(-100)', '(0)'], {}), '(-100, 0)\n', (172, 181), False, 'import random\n')] |
""" Find stuff around NGC 5897 """
from __future__ import division, print_function
__author__ = "adrn <<EMAIL>>"
# Standard library
import os
import sys
# Third-party
from astropy import log as logger
import numpy as np
import filelock
import h5py
# Project
from globber.core import likelihood_worker
# HACK:
# from globber.ngc5897 import mixing_matrix as W
W = None
def initialize_dataset(dset_name, group_path, XCov_filename, lock_filename):
# only one process should modify the file to add the dataset if it doesn't exist
with h5py.File(XCov_filename, mode='r') as f:
make_group = False
make_dataset = True
try:
group = f[group_path]
logger.debug("Group already exists")
except KeyError:
make_group = True
logger.debug("Group doesn't exist...")
if not make_group and dset_name in group:
make_dataset = False
if make_group or make_dataset:
lock = filelock.FileLock(lock_filename)
try:
with lock.acquire(timeout=90):
logger.debug("File lock acquired: creating dataset for log-likelihoods")
with h5py.File(XCov_filename, mode='r+') as f:
if make_group and group_path not in f:
group = f.create_group(group_path)
else:
group = f[group_path]
if dset_name not in group: # double checking!
ll_shape = (f['search']['X'].shape[0],)
ll_dset = group.create_dataset(dset_name, ll_shape, dtype='f')
ll_dset[:] = np.nan
except filelock.Timeout:
logger.error("Timed out trying to acquire file lock to create dataset.")
sys.exit(1)
def main(XCov_filename, chunk_index, n_per_chunk, ll_name, overwrite=False,
n_compare=None, smooth=None, dm=None):
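    # Compute log-likelihoods for one chunk of the search catalog against the chosen
    # comparison sample (cluster, control, or distance-shifted isochrone) and write
    # the results back into the shared HDF5 file under a file lock.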
if not os.path.exists(XCov_filename):
raise IOError("XCov file '{}' does not exist! Run photometry-to-xcov.py first."
.format(XCov_filename))
lock_filename = "{}.lock".format(os.path.splitext(XCov_filename)[0])
# define a slice object for this chunk to process
slc = slice(chunk_index*n_per_chunk, (chunk_index+1)*n_per_chunk)
# name of the log-likelihood dataset
if ll_name == 'isochrone':
if dm is None:
raise ValueError("If isochrone, must specify distance modulus (--dm=...)")
dset_name = "{:.2f}".format(dm)
group_path = 'log_likelihood/isochrone'
else:
dset_name = ll_name
group_path = 'log_likelihood'
dset_path = os.path.join(group_path, dset_name)
initialize_dataset(dset_name, group_path, XCov_filename, lock_filename)
with h5py.File(XCov_filename, mode='r') as f:
ll = f[dset_path][slc]
if np.isfinite(ll).all() and not overwrite:
logger.debug("All log-likelihoods already computed for Chunk {} ({}:{})"
.format(chunk_index,slc.start,slc.stop))
return
if not np.isfinite(ll).all() and not overwrite:
some_unfinished = True
unfinished_idx = np.isnan(ll)
logger.debug("{} log-likelihoods already computed -- will fill unfinished values."
.format(len(ll) - unfinished_idx.sum()))
else:
some_unfinished = False
# slice out this chunk
X = f['search']['X'][slc]
Cov = f['search']['Cov'][slc]
if some_unfinished:
X = X[unfinished_idx]
Cov = Cov[unfinished_idx]
X_compare = f[ll_name]['X']
if 'Cov' not in f[ll_name]:
Cov_compare = None
else:
Cov_compare = f[ll_name]['Cov']
if n_compare is not None and n_compare < X_compare.shape[0]:
# Note: can't use randint here because non-unique lists cause an OSError,
# using np.random.choice on an int array uses a bit of memory
idx = []
iterations = 0
while len(idx) < n_compare and iterations < 1E8:
s = np.random.randint(X_compare.shape[0])
if s not in idx:
idx.append(s)
iterations += 1
idx = sorted(idx)
X_compare = X_compare[idx]
if Cov_compare is not None:
Cov_compare = Cov_compare[idx]
else:
X_compare = X_compare[:]
if Cov_compare is not None:
Cov_compare = Cov_compare[:]
if ll_name == 'isochrone':
X_compare[:,0] += dm # add distance modulus
logger.debug("{} total stars, {} comparison stars, {} chunk stars"
.format(f['search']['X'].shape[0], X_compare.shape[0], X.shape[0]))
logger.debug("Computing likelihood for Chunk {} ({}:{})..."
.format(chunk_index,slc.start,slc.stop))
ll = likelihood_worker(X, Cov, X_compare, Cov_compare, smooth=smooth, W=W)
logger.debug("...finished computing log-likelihoods (nan/inf: {})"
.format(np.logical_not(np.isfinite(ll)).sum()))
lock = filelock.FileLock(lock_filename)
try:
with lock.acquire(timeout=300):
logger.debug("File lock acquired - writing to results")
with h5py.File(XCov_filename, mode='r+') as f:
f[dset_path][slc] = ll
except filelock.Timeout:
logger.error("Timed out trying to acquire file lock to write results.")
sys.exit(1)
def status(XCov_filename, ll_name, dm=None):
if ll_name == 'isochrone':
if dm is None:
raise ValueError("If isochrone, must specify distance modulus (--dm=...)")
dset_name = "{:.2f}".format(dm)
group_path = 'log_likelihood/isochrone'
else:
dset_name = ll_name
group_path = 'log_likelihood'
dset_path = os.path.join(group_path, dset_name)
with h5py.File(XCov_filename, mode='r') as f:
if dset_path not in f:
logger.info("0 done for '{}'".format(ll_name))
return
ll = f[dset_path]
ndone = np.isfinite(ll).sum()
nnot = np.isnan(ll).sum()
logger.info("{} done, {} not done".format(ndone, nnot))
# check what blocks are unfinished
if nnot != 0:
idx, = np.where(np.isnan(ll))
diff = idx[1:]-idx[:-1]
derp, = np.where(diff > 1)
if 0 not in derp:
derp = np.concatenate(([0], derp, [len(idx)-1]))
logger.debug("Unfinished blocks:")
blocks = []
for d1,d2 in zip(derp[:-1],derp[1:]):
if d1 == 0:
blocks.append("{}-{}".format(idx[d1], idx[d2]))
else:
blocks.append("{}-{}".format(idx[d1+1], idx[d2]))
logger.debug(", ".join(blocks))
if __name__ == "__main__":
from argparse import ArgumentParser
import logging
# Define parser object
parser = ArgumentParser(description="")
parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="Be chatty! (default = False)")
parser.add_argument("-q", "--quiet", action="store_true", dest="quiet",
default=False, help="Be quiet! (default = False)")
parser.add_argument("-o", "--overwrite", action="store_true", dest="overwrite",
default=False, help="DESTROY OLD VALUES.")
parser.add_argument("--status", dest="status", action="store_true", default=False,
help="Check status of results file.")
parser.add_argument("-f", "--xcov-filename", dest="XCov_filename", required=True,
type=str, help="Full path to XCov file")
parser.add_argument("--name", dest="name", required=True,
type=str, help="name for log-likelihood calc. (cluster, control, isochrone)")
parser.add_argument("-n", "--nperchunk", dest="n_per_chunk", default=1000,
type=int, help="Number of stars per chunk.")
parser.add_argument("-i", "--chunk-index", dest="index", default=None,
type=int, help="Index of the chunk to process.")
parser.add_argument("--ncompare", dest="n_compare", default=None,
type=int, help="Number of points (stars for cluster or noncluster) "
"to compare to.")
parser.add_argument("--smooth", dest="smooth", default=None,
type=float, help="Smooth comparison by this amount (units: mag)")
parser.add_argument("--dm", dest="distance_modulus", default=None,
type=float, help="Distance modulus for isochrone.")
args = parser.parse_args()
# Set logger level based on verbose flags
if args.verbose:
logger.setLevel(logging.DEBUG)
elif args.quiet:
logger.setLevel(logging.ERROR)
else:
logger.setLevel(logging.INFO)
if args.status:
status(args.XCov_filename, args.name, dm=args.distance_modulus)
sys.exit(0)
if args.index is None:
raise ValueError("You must supply a chunk index to process! (-i or --chunk-index)")
main(args.XCov_filename, chunk_index=args.index, n_per_chunk=args.n_per_chunk,
overwrite=args.overwrite, ll_name=args.name, n_compare=args.n_compare,
smooth=args.smooth, dm=args.distance_modulus)
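# Example invocation (hypothetical script and file names), processing the fourth chunk of 1000 stars:
#   python compute_likelihoods.py -f /path/to/XCov.h5 --name cluster -i 3 -n 1000 -v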
| [
"os.path.exists",
"astropy.log.setLevel",
"globber.core.likelihood_worker",
"argparse.ArgumentParser",
"numpy.where",
"filelock.FileLock",
"os.path.join",
"os.path.splitext",
"astropy.log.error",
"h5py.File",
"numpy.random.randint",
"numpy.isnan",
"numpy.isfinite",
"sys.exit",
"astropy.log.debug"
] | [((2680, 2715), 'os.path.join', 'os.path.join', (['group_path', 'dset_name'], {}), '(group_path, dset_name)\n', (2692, 2715), False, 'import os\n'), ((5226, 5258), 'filelock.FileLock', 'filelock.FileLock', (['lock_filename'], {}), '(lock_filename)\n', (5243, 5258), False, 'import filelock\n'), ((5972, 6007), 'os.path.join', 'os.path.join', (['group_path', 'dset_name'], {}), '(group_path, dset_name)\n', (5984, 6007), False, 'import os\n'), ((7091, 7121), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (7105, 7121), False, 'from argparse import ArgumentParser\n'), ((545, 579), 'h5py.File', 'h5py.File', (['XCov_filename'], {'mode': '"""r"""'}), "(XCov_filename, mode='r')\n", (554, 579), False, 'import h5py\n'), ((979, 1011), 'filelock.FileLock', 'filelock.FileLock', (['lock_filename'], {}), '(lock_filename)\n', (996, 1011), False, 'import filelock\n'), ((1952, 1981), 'os.path.exists', 'os.path.exists', (['XCov_filename'], {}), '(XCov_filename)\n', (1966, 1981), False, 'import os\n'), ((2802, 2836), 'h5py.File', 'h5py.File', (['XCov_filename'], {'mode': '"""r"""'}), "(XCov_filename, mode='r')\n", (2811, 2836), False, 'import h5py\n'), ((5000, 5069), 'globber.core.likelihood_worker', 'likelihood_worker', (['X', 'Cov', 'X_compare', 'Cov_compare'], {'smooth': 'smooth', 'W': 'W'}), '(X, Cov, X_compare, Cov_compare, smooth=smooth, W=W)\n', (5017, 5069), False, 'from globber.core import likelihood_worker\n'), ((6018, 6052), 'h5py.File', 'h5py.File', (['XCov_filename'], {'mode': '"""r"""'}), "(XCov_filename, mode='r')\n", (6027, 6052), False, 'import h5py\n'), ((8972, 9002), 'astropy.log.setLevel', 'logger.setLevel', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (8987, 9002), True, 'from astropy import log as logger\n'), ((9212, 9223), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (9220, 9223), False, 'import sys\n'), ((701, 737), 'astropy.log.debug', 'logger.debug', (['"""Group already exists"""'], {}), "('Group already exists')\n", (713, 737), True, 'from astropy import log as logger\n'), ((2154, 2185), 'os.path.splitext', 'os.path.splitext', (['XCov_filename'], {}), '(XCov_filename)\n', (2170, 2185), False, 'import os\n'), ((3218, 3230), 'numpy.isnan', 'np.isnan', (['ll'], {}), '(ll)\n', (3226, 3230), True, 'import numpy as np\n'), ((5320, 5375), 'astropy.log.debug', 'logger.debug', (['"""File lock acquired - writing to results"""'], {}), "('File lock acquired - writing to results')\n", (5332, 5375), True, 'from astropy import log as logger\n'), ((5512, 5583), 'astropy.log.error', 'logger.error', (['"""Timed out trying to acquire file lock to write results."""'], {}), "('Timed out trying to acquire file lock to write results.')\n", (5524, 5583), True, 'from astropy import log as logger\n'), ((5592, 5603), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5600, 5603), False, 'import sys\n'), ((6495, 6513), 'numpy.where', 'np.where', (['(diff > 1)'], {}), '(diff > 1)\n', (6503, 6513), True, 'import numpy as np\n'), ((6622, 6656), 'astropy.log.debug', 'logger.debug', (['"""Unfinished blocks:"""'], {}), "('Unfinished blocks:')\n", (6634, 6656), True, 'from astropy import log as logger\n'), ((9032, 9062), 'astropy.log.setLevel', 'logger.setLevel', (['logging.ERROR'], {}), '(logging.ERROR)\n', (9047, 9062), True, 'from astropy import log as logger\n'), ((9081, 9110), 'astropy.log.setLevel', 'logger.setLevel', (['logging.INFO'], {}), '(logging.INFO)\n', (9096, 9110), True, 'from astropy import log as logger\n'), ((805, 843), 'astropy.log.debug', 
'logger.debug', (['"""Group doesn\'t exist..."""'], {}), '("Group doesn\'t exist...")\n', (817, 843), True, 'from astropy import log as logger\n'), ((1084, 1156), 'astropy.log.debug', 'logger.debug', (['"""File lock acquired: creating dataset for log-likelihoods"""'], {}), "('File lock acquired: creating dataset for log-likelihoods')\n", (1096, 1156), True, 'from astropy import log as logger\n'), ((1718, 1790), 'astropy.log.error', 'logger.error', (['"""Timed out trying to acquire file lock to create dataset."""'], {}), "('Timed out trying to acquire file lock to create dataset.')\n", (1730, 1790), True, 'from astropy import log as logger\n'), ((1803, 1814), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1811, 1814), False, 'import sys\n'), ((4169, 4206), 'numpy.random.randint', 'np.random.randint', (['X_compare.shape[0]'], {}), '(X_compare.shape[0])\n', (4186, 4206), True, 'import numpy as np\n'), ((5393, 5428), 'h5py.File', 'h5py.File', (['XCov_filename'], {'mode': '"""r+"""'}), "(XCov_filename, mode='r+')\n", (5402, 5428), False, 'import h5py\n'), ((6211, 6226), 'numpy.isfinite', 'np.isfinite', (['ll'], {}), '(ll)\n', (6222, 6226), True, 'import numpy as np\n'), ((6248, 6260), 'numpy.isnan', 'np.isnan', (['ll'], {}), '(ll)\n', (6256, 6260), True, 'import numpy as np\n'), ((6425, 6437), 'numpy.isnan', 'np.isnan', (['ll'], {}), '(ll)\n', (6433, 6437), True, 'import numpy as np\n'), ((1178, 1213), 'h5py.File', 'h5py.File', (['XCov_filename'], {'mode': '"""r+"""'}), "(XCov_filename, mode='r+')\n", (1187, 1213), False, 'import h5py\n'), ((2886, 2901), 'numpy.isfinite', 'np.isfinite', (['ll'], {}), '(ll)\n', (2897, 2901), True, 'import numpy as np\n'), ((3113, 3128), 'numpy.isfinite', 'np.isfinite', (['ll'], {}), '(ll)\n', (3124, 3128), True, 'import numpy as np\n'), ((5189, 5204), 'numpy.isfinite', 'np.isfinite', (['ll'], {}), '(ll)\n', (5200, 5204), True, 'import numpy as np\n')] |
"""
Copyright (c) 2021, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
"""
from Chatbot.training.training import Training
from Chatbot.testing.testing import Testing
import torch.nn as nn
from Chatbot.training.trainingOptimizer import experiment
from utils import directoryFinder as df
class Main:
"""
Main class.
Running this file allows you to:
1) train the network
2) train multiple networks (via trainOptimizer)
3) test via chat
4) quick test via test_questions.txt
5) quick test multiple nn models
"""
def __init__(self):
"""
-- TrainingOptimizer parameters --
network_params: dependent variable values
independent_variable: independent variable name
-- File parameters --
data_location: Path to the data folder
train_data_file: training data file path
save_file: save file path
save_model: save the trained model (True/False)
-- Training parameters --
hidden_size: hidden size of network
n_epochs: n training iterations
learning_rate: train learning rate
batch_size: train batches size
criterion: optimization function
train_size: train set size (1.00 >= train_size > 0.00)
val_size: test set size (1.00 > val_size >= 0.00)
use_val: use validation set (True/False)
use_acc: use accuracy test (True/False)
        show_plots: show plots of model training progress (True/False)
-- NLP parameters --
spellcheck_distance: maximum min-edit distance for words to be corrected
n_gram_size: size of the ngram tuples
check_synonyms: check for synonyms (True/False)
-- Output parameters --
model_file: path to trained nn model
filtered_tags: tags to not be considered in the guess response
respond_threshold: bot answers if < probability
guess_threshold: bot gives estimated guesses if < probability
guess_gap_perc_threshold: determines how quickly it groups multiple tags together for the guess response,
multiple guesses if (prob[i] - prob[i+1])/prob[i+1] < guess_gap_perc_threshold
max_guesses: max amount of guesses grouped together
"""
# trainingOptimizer parameters
self.network_params = [10, 12, 14, 16]
self.independent_variable = "n_epochs"
# file parameters
self.data_location = df.get_par_dir(__file__, folder="back-end") + "\\Chatbot\\data\\"
self.train_data_file = self.data_location + "trainingdata.csv"
self.save_file = self.data_location + "data.pth"
self.save_model = True
# training parameters
self.hidden_size = 65
self.n_epochs = 10
self.learning_rate = 0.01
self.batch_size = 40
self.criterion = nn.CrossEntropyLoss()
self.train_size = 1.00
self.val_size = 0.00
self.use_val = False
self.use_acc = False
self.show_plots = False
# nlp parameters
self.spellcheck_distance = 2
self.check_synonyms = True
self.n_gram_size = 1
# output parameters
self.model_file = "Data\\data.pth"
self.filtered_tags = ['Greeting', 'Ending']
self.respond_threshold = 0.70
self.guess_threshold = 0.35
self.guess_gap_perc_threshold = 0.60
self.max_guesses = 3
def run(self):
"""
Runs the main console.
Allows you to run one or more sections of the program
1) train the network
2) train multiple networks (via trainOptimizer)
3) test via chat
4) quick test via test_questions.txt
5) quick test multiple models
It is possible to 'chain' multiple commands. For example:
"143" will train, quick_test and then chat.
:return: None
"""
print("What do you want to do?")
print("1) \033[34mtrain\033[0m")
print("2) \033[34mtrainOptimizer\033[0m")
print("3) \033[34mtest chat\033[0m")
print("4) \033[34mquick test\033[0m")
print("5) \033[34mquick test multiple models\033[0m")
order = input("enter number(s): ")
print()
nlp_settings = [self.spellcheck_distance, self.check_synonyms, self.n_gram_size]
output_settings = [self.respond_threshold, self.guess_threshold, self.guess_gap_perc_threshold,
self.max_guesses, self.filtered_tags]
for n in order:
if n == "1":
t = Training(output_settings, nlp_settings, self.criterion, self.hidden_size, self.n_epochs,
self.learning_rate, self.batch_size, self.train_size, self.val_size, self.save_file,
self.use_val, self.use_acc, self.show_plots, self.save_model, self.train_data_file)
t.train()
if n == "2":
experiment(self.network_params, self.independent_variable, output_settings, nlp_settings,
self.criterion, self.hidden_size, self.n_epochs, self.learning_rate, self.batch_size, self.train_size,
self.val_size, self.use_val, self.use_acc, self.show_plots, self.save_model, self.train_data_file)
if n == "3":
t = Testing(self.model_file)
t.chat()
if n == "4":
t = Testing(self.model_file)
t.quick_test()
if n == "5":
test_networks = ["Data\\test_networks\\network" + str(i) + ".pth" for i in range(len(self.network_params))]
for model in test_networks:
t = Testing(model)
t.quick_test()
main = Main()
main.run()
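# Note: Main() is instantiated and run() is called at import time; wrapping these two
# lines in an `if __name__ == "__main__":` guard would make the module importable
# without immediately starting the interactive console.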
| [
"utils.directoryFinder.get_par_dir",
"torch.nn.CrossEntropyLoss",
"Chatbot.training.training.Training",
"Chatbot.testing.testing.Testing",
"Chatbot.training.trainingOptimizer.experiment"
] | [((3168, 3189), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (3187, 3189), True, 'import torch.nn as nn\n'), ((2767, 2810), 'utils.directoryFinder.get_par_dir', 'df.get_par_dir', (['__file__'], {'folder': '"""back-end"""'}), "(__file__, folder='back-end')\n", (2781, 2810), True, 'from utils import directoryFinder as df\n'), ((4906, 5176), 'Chatbot.training.training.Training', 'Training', (['output_settings', 'nlp_settings', 'self.criterion', 'self.hidden_size', 'self.n_epochs', 'self.learning_rate', 'self.batch_size', 'self.train_size', 'self.val_size', 'self.save_file', 'self.use_val', 'self.use_acc', 'self.show_plots', 'self.save_model', 'self.train_data_file'], {}), '(output_settings, nlp_settings, self.criterion, self.hidden_size,\n self.n_epochs, self.learning_rate, self.batch_size, self.train_size,\n self.val_size, self.save_file, self.use_val, self.use_acc, self.\n show_plots, self.save_model, self.train_data_file)\n', (4914, 5176), False, 'from Chatbot.training.training import Training\n'), ((5290, 5600), 'Chatbot.training.trainingOptimizer.experiment', 'experiment', (['self.network_params', 'self.independent_variable', 'output_settings', 'nlp_settings', 'self.criterion', 'self.hidden_size', 'self.n_epochs', 'self.learning_rate', 'self.batch_size', 'self.train_size', 'self.val_size', 'self.use_val', 'self.use_acc', 'self.show_plots', 'self.save_model', 'self.train_data_file'], {}), '(self.network_params, self.independent_variable, output_settings,\n nlp_settings, self.criterion, self.hidden_size, self.n_epochs, self.\n learning_rate, self.batch_size, self.train_size, self.val_size, self.\n use_val, self.use_acc, self.show_plots, self.save_model, self.\n train_data_file)\n', (5300, 5600), False, 'from Chatbot.training.trainingOptimizer import experiment\n'), ((5682, 5706), 'Chatbot.testing.testing.Testing', 'Testing', (['self.model_file'], {}), '(self.model_file)\n', (5689, 5706), False, 'from Chatbot.testing.testing import Testing\n'), ((5778, 5802), 'Chatbot.testing.testing.Testing', 'Testing', (['self.model_file'], {}), '(self.model_file)\n', (5785, 5802), False, 'from Chatbot.testing.testing import Testing\n'), ((6052, 6066), 'Chatbot.testing.testing.Testing', 'Testing', (['model'], {}), '(model)\n', (6059, 6066), False, 'from Chatbot.testing.testing import Testing\n')] |
import pandas as pd
import time
from bs4 import BeautifulSoup
from IPython.display import display_html
from selenium import webdriver
from mysql_service import insert_day
def selenium_scraper():
url = 'http://ido.xm.com.co/ido/SitePages/ido.aspx'
sleep_time = 3
options = webdriver.ChromeOptions()
options.add_argument('--incognito')
driver = webdriver.Chrome(executable_path = '/Users/juliansantos/Documents/electrical_power_system_CO/electrical_power_system_CO/chromedriver',
options = options)
driver.get(url)
time.sleep(sleep_time)
#Selenium Date Inputbox selection in browser
try:
date_box = driver.find_elements_by_id('report-date')
date_box[0].clear()
except Exception as e:
print('Error: ')
print(e)
print('-*-'*50)
#Startdate initialization
date = pd.to_datetime('02/10/2020', format='%d/%m/%Y')
finish_date = pd.to_datetime('03/10/2020', format='%d/%m/%Y') + pd.DateOffset(days=1) #adds 1 day to the date
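    # The loop below scrapes one day at a time, from the start date through the
    # original finish date inclusive (finish_date was advanced by one day above).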
while date != finish_date:
try:
date_box[0].send_keys(date.strftime('%d/%m/%Y'))
date_button = driver.find_elements_by_xpath('//div[@id="filter-button"]/button')
date_button[0].click()
time.sleep(sleep_time)
#Tables titles scraping
scraped_table_titles = driver.find_elements_by_xpath('//div[@class="text-blue textL"]/b')
scraped_table_titles.pop(0)
table_titles_string = ''
for table_title in scraped_table_titles:
table_titles_string = table_titles_string + table_title.text + '|'
tables = driver.find_elements_by_xpath('//table[@class="report-table"]')
aportes_x = driver.find_elements_by_xpath('//table[@id="table-aportes-x"]/tbody')
reservas_x = driver.find_elements_by_xpath('//table[@id="table-reservas-x"]/tbody')
html_tables_no_open = ''
for scraped_table in tables:
if scraped_table.get_attribute('id') == 'table-aportes-x':
if (aportes_x[0].get_attribute('innerHTML') != ''):
html_tables_no_open = html_tables_no_open + scraped_table.get_attribute('outerHTML') + '^_^'
else:
continue
elif scraped_table.get_attribute('id') == 'table-reservas-x':
if (reservas_x[0].get_attribute('innerHTML') != ''):
html_tables_no_open = html_tables_no_open + scraped_table.get_attribute('outerHTML') + '^_^'
else:
continue
else:
aportes_vacio = False
reservas_vacio = False
soup = BeautifulSoup(scraped_table.get_attribute('innerHTML'), 'lxml')
td = soup.find('td')
tbody = soup.find('tbody')
#print(td)
if str(td)== '<td>Rio</td>':
if str(tbody) == '<tbody class="report-table-body"></tbody>':
aportes_vacio = True
                            print('Entering aportes_vacio branch (empty aportes table)')
if str(td)== '<td> Embalse </td>':
if str(tbody) == '<tbody class="report-table-body"></tbody>':
reservas_vacio = True
                            print('Entering reservas_vacio branch (empty reservas table)')
if aportes_vacio:
continue
elif reservas_vacio:
continue
else:
html_tables_no_open = html_tables_no_open + scraped_table.get_attribute('outerHTML') + '^_^'
print('--'*30)
            print('Scraped: ' + str(date))
print('--'*30)
print('\n')
insert_day(date, table_titles_string, html_tables_no_open)
date = date + pd.DateOffset(days=1) #adds 1 day to the date
except Exception as e:
print('Error: ')
print(e)
print('date: '+str(date))
print('-*-'*30)
continue
if __name__ == "__main__":
selenium_scraper()
| [
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"mysql_service.insert_day",
"time.sleep",
"pandas.DateOffset",
"pandas.to_datetime"
] | [((288, 313), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (311, 313), False, 'from selenium import webdriver\n'), ((368, 527), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""/Users/juliansantos/Documents/electrical_power_system_CO/electrical_power_system_CO/chromedriver"""', 'options': 'options'}), "(executable_path=\n '/Users/juliansantos/Documents/electrical_power_system_CO/electrical_power_system_CO/chromedriver'\n , options=options)\n", (384, 527), False, 'from selenium import webdriver\n'), ((575, 597), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (585, 597), False, 'import time\n'), ((900, 947), 'pandas.to_datetime', 'pd.to_datetime', (['"""02/10/2020"""'], {'format': '"""%d/%m/%Y"""'}), "('02/10/2020', format='%d/%m/%Y')\n", (914, 947), True, 'import pandas as pd\n'), ((966, 1013), 'pandas.to_datetime', 'pd.to_datetime', (['"""03/10/2020"""'], {'format': '"""%d/%m/%Y"""'}), "('03/10/2020', format='%d/%m/%Y')\n", (980, 1013), True, 'import pandas as pd\n'), ((1016, 1037), 'pandas.DateOffset', 'pd.DateOffset', ([], {'days': '(1)'}), '(days=1)\n', (1029, 1037), True, 'import pandas as pd\n'), ((1309, 1331), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (1319, 1331), False, 'import time\n'), ((3954, 4012), 'mysql_service.insert_day', 'insert_day', (['date', 'table_titles_string', 'html_tables_no_open'], {}), '(date, table_titles_string, html_tables_no_open)\n', (3964, 4012), False, 'from mysql_service import insert_day\n'), ((4039, 4060), 'pandas.DateOffset', 'pd.DateOffset', ([], {'days': '(1)'}), '(days=1)\n', (4052, 4060), True, 'import pandas as pd\n')] |
# This file is automatically loaded and run by pytest during its setup process,
# meaning it happens before any of the tests in this directory are run.
# See the pytest documentation on conftest files for more information:
# https://docs.pytest.org/en/2.7.3/plugins.html#conftest-py-plugins
import os
import subprocess
import threading
import time
from tests.cook import util
def _sudo_check(username):
"""
Check if the current user can sudo as a test user.
This is necessary to obtain Kerberos auth headers for multi-user tests.
"""
sudo_ok = (0 == subprocess.call(f'sudo -nu {username} echo CACHED SUDO', shell=True))
assert sudo_ok, "You need to pre-cache your sudo credentials. (Run a simple sudo command as a test user.)"
def _sudo_checker_task(username):
"""Periodically check sudo ability to ensure the credentials stay cached."""
while True:
_sudo_check(username)
time.sleep(60)
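# Module-level hook: when Kerberos is enabled and a pool of test users is configured,
# verify sudo access once up front and keep the cached credentials warm in a
# background daemon thread for the duration of the test run.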
if util.kerberos_enabled() and os.getenv('COOK_MAX_TEST_USERS'):
username = next(util._test_user_names())
_sudo_check(username)
threading.Thread(target=_sudo_checker_task, args=[username], daemon=True).start()
| [
"tests.cook.util.kerberos_enabled",
"os.getenv",
"time.sleep",
"subprocess.call",
"threading.Thread",
"tests.cook.util._test_user_names"
] | [((943, 966), 'tests.cook.util.kerberos_enabled', 'util.kerberos_enabled', ([], {}), '()\n', (964, 966), False, 'from tests.cook import util\n'), ((971, 1003), 'os.getenv', 'os.getenv', (['"""COOK_MAX_TEST_USERS"""'], {}), "('COOK_MAX_TEST_USERS')\n", (980, 1003), False, 'import os\n'), ((573, 641), 'subprocess.call', 'subprocess.call', (['f"""sudo -nu {username} echo CACHED SUDO"""'], {'shell': '(True)'}), "(f'sudo -nu {username} echo CACHED SUDO', shell=True)\n", (588, 641), False, 'import subprocess\n'), ((924, 938), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (934, 938), False, 'import time\n'), ((1025, 1048), 'tests.cook.util._test_user_names', 'util._test_user_names', ([], {}), '()\n', (1046, 1048), False, 'from tests.cook import util\n'), ((1080, 1153), 'threading.Thread', 'threading.Thread', ([], {'target': '_sudo_checker_task', 'args': '[username]', 'daemon': '(True)'}), '(target=_sudo_checker_task, args=[username], daemon=True)\n', (1096, 1153), False, 'import threading\n')] |
# Library dependencies
from pyresume import load_data
from pyresume import template_data
# Project dependencies
from data import all_data
def run() -> None:
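    # Load the raw resume data, render each templated output, and write it to disk.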
loaded_data = load_data(all_data)
templated_data = template_data(loaded_data)
for templated_datum in templated_data:
templated_datum.write()
if __name__ == '__main__':
run()
| [
"pyresume.template_data",
"pyresume.load_data"
] | [((178, 197), 'pyresume.load_data', 'load_data', (['all_data'], {}), '(all_data)\n', (187, 197), False, 'from pyresume import load_data\n'), ((219, 245), 'pyresume.template_data', 'template_data', (['loaded_data'], {}), '(loaded_data)\n', (232, 245), False, 'from pyresume import template_data\n')] |
# Copyright (C) 2017-2019 New York University,
# University at Buffalo,
# Illinois Institute of Technology.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from vizier.api.routes.container import ContainerApiUrlFactory
from vizier.api.webservice.container.project import SingleProjectCache
from vizier.api.webservice.container.task import VizierContainerTaskApi
from vizier.api.webservice.datastore import VizierDatastoreApi
from vizier.api.webservice.filestore import VizierFilestoreApi
from vizier.api.webservice.view import VizierDatasetViewApi
from vizier.core import VERSION_INFO
from vizier.core.io.base import DefaultObjectStore
from vizier.core.timestamp import get_current_time
from vizier.core.util import get_short_identifier, get_unique_identifier
from vizier.datastore.fs.factory import FileSystemDatastoreFactory
from vizier.datastore.mimir.factory import MimirDatastoreFactory
from vizier.engine.backend.multiprocess import MultiProcessBackend
from vizier.engine.backend.remote.celery.base import CeleryBackend
from vizier.engine.base import VizierEngine
from vizier.engine.packages.load import load_packages
from vizier.engine.project.base import ProjectHandle
from vizier.engine.task.processor import load_processors
from vizier.filestore.fs.factory import FileSystemFilestoreFactory
from vizier.viztrail.base import ViztrailHandle
import vizier.api.serialize.base as serialize
import vizier.api.serialize.labels as labels
import vizier.config.app as app
import vizier.config.base as base
class VizierContainerApi(object):
"""
"""
def __init__(self, config, init=False):
"""Initialize the API components.
Parameters
----------
config: vizier.config.app.ContainerAppConfig
Container application configuration object
init: bool, optional
Defer initialization if False
"""
self.config = config
# Set the API components to None for now. It is assumed that the .init()
# method is called before any of the components are accessed for the
# first time
self.engine = None
self.datasets = None
self.files = None
self.tasks = None
self.urls = None
self.service_descriptor = None
if init:
self.init()
def init(self):
"""Initialize the API before the first request."""
        # Initialize the API components
self.urls = ContainerApiUrlFactory(
base_url=self.config.app_base_url,
api_doc_url=self.config.webservice.doc_url
)
self.engine = get_engine(self.config)
self.projects =self.engine.projects
self.datasets = VizierDatastoreApi(
projects=self.projects,
urls=self.urls,
defaults=self.config.webservice.defaults
)
self.views = VizierDatasetViewApi(
projects=self.projects,
urls=self.urls
)
self.files = VizierFilestoreApi(
projects=self.projects,
urls=self.urls
)
self.tasks = VizierContainerTaskApi(
engine=self.engine,
controller_url=self.config.controller_url
)
# Initialize the service descriptor
self.service_descriptor = {
'name': self.config.webservice.name,
'startedAt': get_current_time().isoformat(),
'defaults': {
'maxFileSize': self.config.webservice.defaults.max_file_size
},
'environment': {
'name': self.engine.name,
'version': VERSION_INFO,
'backend': self.config.engine.backend.identifier,
'packages': list(self.engine.packages.keys())
},
labels.LINKS: serialize.HATEOAS({
'self': self.urls.service_descriptor(),
'doc': self.urls.api_doc()
})
}
# ------------------------------------------------------------------------------
# Helper Methods
# ------------------------------------------------------------------------------
def get_engine(config):
"""Create instance of vizier engine using the default datastore, filestore
and viztrails factories. The default engine may use a multi-process backend
or a celery backend.
Parameters
----------
config: vizier.config.app.AppConfig
Application configuration object
Returns
-------
vizier.engine.base.VizierEngine
"""
# Get backend identifier. Raise ValueError if value does not identify
# a valid backend.
backend_id = config.engine.backend.identifier
if not backend_id in base.BACKENDS:
raise ValueError('unknown backend \'' + str(backend_id) + '\'')
# Get the identifier factory for the viztrails repository and create
# the object store. At this point we use the default object store only.
# We could add another environment variable to use different object
# stores (once implemented).
if config.engine.use_short_ids:
id_factory = get_short_identifier
else:
id_factory = get_unique_identifier
object_store = DefaultObjectStore(
identifier_factory=id_factory
)
# By default the vizier engine uses the objectstore implementation for
# the viztrails repository. The datastore and filestore factories depend
# on the values of engine identifier (DEV or MIMIR).
base_dir = config.engine.data_dir
viztrails_dir = os.path.join(base_dir, app.DEFAULT_VIZTRAILS_DIR)
if config.engine.identifier in [base.DEV_ENGINE, base.MIMIR_ENGINE]:
filestores_dir = os.path.join(base_dir, app.DEFAULT_FILESTORES_DIR)
filestore_factory=FileSystemFilestoreFactory(filestores_dir)
datastores_dir = os.path.join(base_dir, app.DEFAULT_DATASTORES_DIR)
if config.engine.identifier == base.DEV_ENGINE:
datastore_factory = FileSystemDatastoreFactory(datastores_dir)
else:
datastore_factory = MimirDatastoreFactory(datastores_dir)
else:
raise ValueError('unknown vizier engine \'' + str(config.engine.identifier) + '\'')
# The default engine uses a common project cache.
projects = SingleProjectCache(
ProjectHandle(
viztrail=ViztrailHandle(identifier=config.project_id),
datastore=datastore_factory.get_datastore(config.project_id),
filestore=filestore_factory.get_filestore(config.project_id)
)
)
    # Load the task packages and the processors used by the workflow execution backend
packages = load_packages(config.engine.package_path)
processors = load_processors(config.engine.processor_path)
# Create the backend
if backend_id == base.BACKEND_MULTIPROCESS:
backend = MultiProcessBackend(
processors=processors,
projects=projects,
synchronous=None
)
elif backend_id == base.BACKEND_CELERY:
# Create and configure routing information (if given)
backend = CeleryBackend(
routes=config_routes(config),
synchronous=None
)
else:
        # For completeness. Validity of the backend id is checked before.
raise ValueError('unknown backend \'' + str(backend_id) + '\'')
return VizierEngine(
name=config.engine.identifier + ' (' + backend_id + ')',
projects=projects,
backend=backend,
packages=packages
)
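# Minimal usage sketch (assumed setup): given a loaded ContainerAppConfig, the container
# API is typically constructed and initialized in one step, e.g.
#   api = VizierContainerApi(config, init=True)
# after which api.service_descriptor, api.datasets, api.files and api.tasks are ready to use.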
| [
"vizier.engine.packages.load.load_packages",
"vizier.api.routes.container.ContainerApiUrlFactory",
"vizier.api.webservice.container.task.VizierContainerTaskApi",
"vizier.engine.backend.multiprocess.MultiProcessBackend",
"vizier.core.timestamp.get_current_time",
"os.path.join",
"vizier.api.webservice.filestore.VizierFilestoreApi",
"vizier.core.io.base.DefaultObjectStore",
"vizier.engine.task.processor.load_processors",
"vizier.engine.base.VizierEngine",
"vizier.filestore.fs.factory.FileSystemFilestoreFactory",
"vizier.datastore.mimir.factory.MimirDatastoreFactory",
"vizier.viztrail.base.ViztrailHandle",
"vizier.api.webservice.view.VizierDatasetViewApi",
"vizier.api.webservice.datastore.VizierDatastoreApi",
"vizier.datastore.fs.factory.FileSystemDatastoreFactory"
] | [((5725, 5774), 'vizier.core.io.base.DefaultObjectStore', 'DefaultObjectStore', ([], {'identifier_factory': 'id_factory'}), '(identifier_factory=id_factory)\n', (5743, 5774), False, 'from vizier.core.io.base import DefaultObjectStore\n'), ((6056, 6105), 'os.path.join', 'os.path.join', (['base_dir', 'app.DEFAULT_VIZTRAILS_DIR'], {}), '(base_dir, app.DEFAULT_VIZTRAILS_DIR)\n', (6068, 6105), False, 'import os\n'), ((7149, 7190), 'vizier.engine.packages.load.load_packages', 'load_packages', (['config.engine.package_path'], {}), '(config.engine.package_path)\n', (7162, 7190), False, 'from vizier.engine.packages.load import load_packages\n'), ((7208, 7253), 'vizier.engine.task.processor.load_processors', 'load_processors', (['config.engine.processor_path'], {}), '(config.engine.processor_path)\n', (7223, 7253), False, 'from vizier.engine.task.processor import load_processors\n'), ((7861, 7989), 'vizier.engine.base.VizierEngine', 'VizierEngine', ([], {'name': "(config.engine.identifier + ' (' + backend_id + ')')", 'projects': 'projects', 'backend': 'backend', 'packages': 'packages'}), "(name=config.engine.identifier + ' (' + backend_id + ')',\n projects=projects, backend=backend, packages=packages)\n", (7873, 7989), False, 'from vizier.engine.base import VizierEngine\n'), ((2994, 3100), 'vizier.api.routes.container.ContainerApiUrlFactory', 'ContainerApiUrlFactory', ([], {'base_url': 'self.config.app_base_url', 'api_doc_url': 'self.config.webservice.doc_url'}), '(base_url=self.config.app_base_url, api_doc_url=self.\n config.webservice.doc_url)\n', (3016, 3100), False, 'from vizier.api.routes.container import ContainerApiUrlFactory\n'), ((3244, 3349), 'vizier.api.webservice.datastore.VizierDatastoreApi', 'VizierDatastoreApi', ([], {'projects': 'self.projects', 'urls': 'self.urls', 'defaults': 'self.config.webservice.defaults'}), '(projects=self.projects, urls=self.urls, defaults=self.\n config.webservice.defaults)\n', (3262, 3349), False, 'from vizier.api.webservice.datastore import VizierDatastoreApi\n'), ((3412, 3472), 'vizier.api.webservice.view.VizierDatasetViewApi', 'VizierDatasetViewApi', ([], {'projects': 'self.projects', 'urls': 'self.urls'}), '(projects=self.projects, urls=self.urls)\n', (3432, 3472), False, 'from vizier.api.webservice.view import VizierDatasetViewApi\n'), ((3528, 3586), 'vizier.api.webservice.filestore.VizierFilestoreApi', 'VizierFilestoreApi', ([], {'projects': 'self.projects', 'urls': 'self.urls'}), '(projects=self.projects, urls=self.urls)\n', (3546, 3586), False, 'from vizier.api.webservice.filestore import VizierFilestoreApi\n'), ((3642, 3732), 'vizier.api.webservice.container.task.VizierContainerTaskApi', 'VizierContainerTaskApi', ([], {'engine': 'self.engine', 'controller_url': 'self.config.controller_url'}), '(engine=self.engine, controller_url=self.config.\n controller_url)\n', (3664, 3732), False, 'from vizier.api.webservice.container.task import VizierContainerTaskApi\n'), ((6204, 6254), 'os.path.join', 'os.path.join', (['base_dir', 'app.DEFAULT_FILESTORES_DIR'], {}), '(base_dir, app.DEFAULT_FILESTORES_DIR)\n', (6216, 6254), False, 'import os\n'), ((6281, 6323), 'vizier.filestore.fs.factory.FileSystemFilestoreFactory', 'FileSystemFilestoreFactory', (['filestores_dir'], {}), '(filestores_dir)\n', (6307, 6323), False, 'from vizier.filestore.fs.factory import FileSystemFilestoreFactory\n'), ((6349, 6399), 'os.path.join', 'os.path.join', (['base_dir', 'app.DEFAULT_DATASTORES_DIR'], {}), '(base_dir, app.DEFAULT_DATASTORES_DIR)\n', (6361, 6399), False, 'import 
os\n'), ((7345, 7424), 'vizier.engine.backend.multiprocess.MultiProcessBackend', 'MultiProcessBackend', ([], {'processors': 'processors', 'projects': 'projects', 'synchronous': 'None'}), '(processors=processors, projects=projects, synchronous=None)\n', (7364, 7424), False, 'from vizier.engine.backend.multiprocess import MultiProcessBackend\n'), ((6488, 6530), 'vizier.datastore.fs.factory.FileSystemDatastoreFactory', 'FileSystemDatastoreFactory', (['datastores_dir'], {}), '(datastores_dir)\n', (6514, 6530), False, 'from vizier.datastore.fs.factory import FileSystemDatastoreFactory\n'), ((6577, 6614), 'vizier.datastore.mimir.factory.MimirDatastoreFactory', 'MimirDatastoreFactory', (['datastores_dir'], {}), '(datastores_dir)\n', (6598, 6614), False, 'from vizier.datastore.mimir.factory import MimirDatastoreFactory\n'), ((6850, 6894), 'vizier.viztrail.base.ViztrailHandle', 'ViztrailHandle', ([], {'identifier': 'config.project_id'}), '(identifier=config.project_id)\n', (6864, 6894), False, 'from vizier.viztrail.base import ViztrailHandle\n'), ((3916, 3934), 'vizier.core.timestamp.get_current_time', 'get_current_time', ([], {}), '()\n', (3932, 3934), False, 'from vizier.core.timestamp import get_current_time\n')] |
#########################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# SPDX-License-Identifier: MIT-0 #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy of this #
# software and associated documentation files (the "Software"), to deal in the Software #
# without restriction, including without limitation the rights to use, copy, modify, #
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to #
# permit persons to whom the Software is furnished to do so. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, #
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A #
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT #
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION #
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE #
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. #
#########################################################################################
# Version: 13APR2021.01
from __future__ import print_function
import sys
import argparse
import json
import boto3
import botocore.exceptions
import csv
import mfcommon
serverendpoint = mfcommon.serverendpoint
appendpoint = mfcommon.appendpoint
with open('FactoryEndpoints.json') as json_file:
endpoints = json.load(json_file)
def assume_role(account_id, region):
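    """Assume the Factory-Automation role in the target account and return a boto3 session for that region."""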
sts_client = boto3.client('sts')
role_arn = 'arn:aws:iam::' + account_id + ':role/Factory-Automation'
# Call the assume_role method of the STSConnection object and pass the role
# ARN and a role session name.
try:
user = sts_client.get_caller_identity()['Arn']
sessionname = user.split('/')[1]
response = sts_client.assume_role(RoleArn=role_arn, RoleSessionName=sessionname)
credentials = response['Credentials']
session = boto3.Session(
region_name = region,
aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken']
)
return session
except botocore.exceptions.ClientError as e:
print(str(e))
def GetInstanceId(serverlist):
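    """Look up the launched EC2 instance id in Application Migration Service for each server and store it as 'target_ec2InstanceID'."""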
for account in serverlist:
target_account_session = assume_role(str(account['aws_accountid']), account['aws_region'])
print("")
print("Account: " + account['aws_accountid'] + ", Region: " + account['aws_region'])
mgn_client = target_account_session.client("mgn", account['aws_region'])
mgn_sourceservers = mgn_client.describe_source_servers(filters={})
for factoryserver in account['servers']:
if 'server_fqdn' not in factoryserver:
print("ERROR: server_fqdn does not exist for server: " + factoryserver['server_name'])
sys.exit()
else:
sourceserver = mfcommon.get_MGN_Source_Server(factoryserver, mgn_sourceservers['items'])
if sourceserver is not None:
# Get target instance Id for the source server in Application Migration Service
if sourceserver['isArchived'] == False:
if 'launchedInstance' in sourceserver:
if 'ec2InstanceID' in sourceserver['launchedInstance']:
factoryserver['target_ec2InstanceID'] = sourceserver['launchedInstance']['ec2InstanceID']
print(factoryserver['server_name'] + " : " + factoryserver['target_ec2InstanceID'])
else:
factoryserver['target_ec2InstanceID'] = ''
print("ERROR: target instance Id does not exist for server: " + factoryserver['server_name'] + ", please wait for a few minutes")
else:
factoryserver['target_ec2InstanceID'] = ''
print("ERROR: target instance does not exist for server: " + factoryserver['server_name'] + ", please wait for a few minutes")
else:
print("ERROR: Server: " + factoryserver['server_name'] + " is archived in Application Migration Service (Account: " + account['aws_accountid'] + ", Region: " + account['aws_region'] + "), Please install the agent")
sys.exit()
return serverlist
def get_instance_ips(InstanceList, waveid):
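    """Collect the private IPs of all target instances and export them to Wave<waveid>-IPs.csv."""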
all_instance_ips = []
for account in InstanceList:
target_account_session = assume_role(str(account['aws_accountid']), account['aws_region'])
print("")
print("######################################################")
print("#### In Account: " + account['aws_accountid'], ", region: " + account['aws_region'] + " ####")
print("######################################################")
        # TODO: make the endpoint_url configurable instead of hardcoding it
ec2_client = target_account_session.client("ec2", region_name=account['aws_region'])
instanceIds = []
for server in account['servers']:
if 'target_ec2InstanceID' in server:
if server['target_ec2InstanceID'] != '':
instanceIds.append(server['target_ec2InstanceID'])
if len(instanceIds) != 0:
#resp = ec2_client.describe_instances(InstanceIds=instanceIds, Filters=[{'Name': 'instance-state-name', 'Values': ['available', 'terminated']}])
resp = ec2_client.describe_instances(InstanceIds=instanceIds)
else:
print("")
print("*** No target instances available for this wave ***")
return
for r in resp['Reservations']:
for instance in r['Instances']:
instance_ips = {}
instance_name = ""
ips = ""
name_exist = False
for tag in instance['Tags']:
if tag['Key'] == "Name":
if tag['Value'] != "":
instance_name = tag['Value']
name_exist = True
if name_exist == False:
print("ERROR: Name Tag does not exist for instance " + instance['InstanceId'])
sys.exit()
for nic in instance['NetworkInterfaces']:
for ip in nic['PrivateIpAddresses']:
ips = ips + ip['PrivateIpAddress'] + ","
instance_ips['instance_name'] = instance_name
instance_ips['instance_ips'] = ips[:-1]
print(instance_name + " , " + ips[:-1])
all_instance_ips.append(instance_ips)
filename = "Wave" + waveid + "-IPs.csv"
if len(all_instance_ips) != 0:
with open(filename, "w", newline='') as csvfile:
writer = csv.DictWriter(csvfile, all_instance_ips[0].keys())
writer.writeheader()
writer.writerows(all_instance_ips)
print("")
print("*** Exported Instance IPs to " + filename + " ***")
else:
print("")
print("*** No target instances available for this wave ***")
def main(arguments):
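    """Read the wave id, log in to the Migration Factory, resolve the target instances and export their IPs."""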
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--Waveid', required=True)
args = parser.parse_args(arguments)
UserHOST = ""
# Get MF endpoints from FactoryEndpoints.json file
if 'UserApiUrl' in endpoints:
UserHOST = endpoints['UserApiUrl']
else:
print("ERROR: Invalid FactoryEndpoints.json file, please update UserApiUrl")
sys.exit()
print("")
print("****************************")
print("*Login to Migration factory*")
print("****************************")
token = mfcommon.Factorylogin()
print("****************************")
print("*** Getting Server List ****")
print("****************************")
get_servers = mfcommon.get_factory_servers(args.Waveid, token, UserHOST, False)
print("******************************")
print("* Getting Target Instance Id *")
print("******************************")
InstanceList = GetInstanceId(get_servers)
print("")
print("*****************************")
print("* Get target Instance IPs *")
print("*****************************")
get_instance_ips(InstanceList, args.Waveid)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| [
"boto3.client",
"argparse.ArgumentParser",
"mfcommon.get_MGN_Source_Server",
"boto3.Session",
"mfcommon.Factorylogin",
"mfcommon.get_factory_servers",
"sys.exit",
"json.load"
] | [((1843, 1863), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (1852, 1863), False, 'import json\n'), ((1920, 1939), 'boto3.client', 'boto3.client', (['"""sts"""'], {}), "('sts')\n", (1932, 1939), False, 'import boto3\n'), ((8114, 8217), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(description=__doc__, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n', (8137, 8217), False, 'import argparse\n'), ((8739, 8762), 'mfcommon.Factorylogin', 'mfcommon.Factorylogin', ([], {}), '()\n', (8760, 8762), False, 'import mfcommon\n'), ((8908, 8973), 'mfcommon.get_factory_servers', 'mfcommon.get_factory_servers', (['args.Waveid', 'token', 'UserHOST', '(False)'], {}), '(args.Waveid, token, UserHOST, False)\n', (8936, 8973), False, 'import mfcommon\n'), ((2386, 2575), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region', 'aws_access_key_id': "credentials['AccessKeyId']", 'aws_secret_access_key': "credentials['SecretAccessKey']", 'aws_session_token': "credentials['SessionToken']"}), "(region_name=region, aws_access_key_id=credentials[\n 'AccessKeyId'], aws_secret_access_key=credentials['SecretAccessKey'],\n aws_session_token=credentials['SessionToken'])\n", (2399, 2575), False, 'import boto3\n'), ((8575, 8585), 'sys.exit', 'sys.exit', ([], {}), '()\n', (8583, 8585), False, 'import sys\n'), ((3409, 3419), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3417, 3419), False, 'import sys\n'), ((3477, 3550), 'mfcommon.get_MGN_Source_Server', 'mfcommon.get_MGN_Source_Server', (['factoryserver', "mgn_sourceservers['items']"], {}), "(factoryserver, mgn_sourceservers['items'])\n", (3507, 3550), False, 'import mfcommon\n'), ((7117, 7127), 'sys.exit', 'sys.exit', ([], {}), '()\n', (7125, 7127), False, 'import sys\n'), ((5061, 5071), 'sys.exit', 'sys.exit', ([], {}), '()\n', (5069, 5071), False, 'import sys\n')] |
from conftest import QL_URL, do_clean_crate
import pytest
import requests
import time
import urllib
from .utils import send_notifications
entity_type = 'TestDevice'
attr1 = 'AtTr1'
attr2 = 'aTtr_2'
attr1_value = '1'
attr2_value = 2
entity1_id = 'd1'
entity2_id = 'd2'
def mk_entity(eid):
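    """Build a test entity of type TestDevice with two mixed-case attributes."""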
return {
'id': eid,
'type': entity_type,
attr1: {
'type': 'Text',
'value': attr1_value
},
attr2: {
'type': 'Number',
'value': attr2_value
}
}
def mk_entities():
return [
mk_entity(entity1_id), mk_entity(entity1_id),
mk_entity(entity2_id), mk_entity(entity2_id)
]
def insert_entities():
notification_data = [{'data': mk_entities()}]
send_notifications(notification_data)
@pytest.fixture(scope='module')
def manage_db_entities():
insert_entities()
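    # allow some time for the notifications to be persisted before the tests query them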
time.sleep(2)
yield
do_clean_crate()
def query_1t1e1a(entity_id, attr_name):
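    """GET /entities/{entity_id}/attrs/{attr_name} and return the parsed JSON response."""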
escaped_attr_name = urllib.parse.quote(attr_name)
url = "{}/entities/{}/attrs/{}".format(QL_URL, entity_id, escaped_attr_name)
response = requests.get(url)
assert response.status_code == 200
return response.json()
def query_1tne1a(attr_name):
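    """GET /types/{entity_type}/attrs/{attr_name} and return the parsed JSON response."""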
escaped_attr_name = urllib.parse.quote(attr_name)
url = "{}/types/{}/attrs/{}".format(QL_URL, entity_type, escaped_attr_name)
response = requests.get(url)
assert response.status_code == 200
return response.json()
def query_1t1ena(entity_id, attr1_name, attr2_name):
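    """GET /entities/{entity_id} with the two attribute names as the attrs query parameter and return the parsed JSON response."""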
url = "{}/entities/{}".format(QL_URL, entity_id)
query_params = {
'attrs': attr1_name + ',' + attr2_name,
}
response = requests.get(url, query_params)
assert response.status_code == 200
return response.json()
@pytest.mark.parametrize('attr_name', [
attr1, 'attr1', 'atTr1'
])
def test_1t1e1a(attr_name, manage_db_entities):
query_result = query_1t1e1a(entity1_id, attr_name)
query_result.pop('index', None)
assert query_result == {
'attrName': attr_name,
'entityId': entity1_id,
'values': [attr1_value, attr1_value]
}
@pytest.mark.parametrize('attr_name', [
attr1, 'attr1', 'atTr1'
])
def test_1tne1a(attr_name, manage_db_entities):
query_result = query_1tne1a(attr_name)
for e in query_result['entities']:
e.pop('index', None)
assert query_result == {
'entityType': entity_type,
'attrName': attr_name,
'entities': [
{
'entityId': entity1_id,
'values': [attr1_value, attr1_value]
},
{
'entityId': entity2_id,
'values': [attr1_value, attr1_value]
}
]
}
@pytest.mark.parametrize('attr1_name, attr2_name', [
(attr1, attr2), ('attr1', 'attr_2'), ('atTr1', 'ATtr_2')
])
def test_1t1ena(attr1_name, attr2_name, manage_db_entities):
query_result = query_1t1ena(entity2_id, attr1_name, attr2_name)
query_result.pop('index', None)
assert query_result == {
'entityId': entity2_id,
'attributes': [
{
'attrName': attr1,
'values': [attr1_value, attr1_value]
},
{
'attrName': attr2,
'values': [attr2_value, attr2_value]
}
]
}
| [
"urllib.parse.quote",
"time.sleep",
"requests.get",
"pytest.mark.parametrize",
"pytest.fixture",
"conftest.do_clean_crate"
] | [((808, 838), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (822, 838), False, 'import pytest\n'), ((1777, 1840), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""attr_name"""', "[attr1, 'attr1', 'atTr1']"], {}), "('attr_name', [attr1, 'attr1', 'atTr1'])\n", (1800, 1840), False, 'import pytest\n'), ((2132, 2195), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""attr_name"""', "[attr1, 'attr1', 'atTr1']"], {}), "('attr_name', [attr1, 'attr1', 'atTr1'])\n", (2155, 2195), False, 'import pytest\n'), ((2741, 2854), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""attr1_name, attr2_name"""', "[(attr1, attr2), ('attr1', 'attr_2'), ('atTr1', 'ATtr_2')]"], {}), "('attr1_name, attr2_name', [(attr1, attr2), ('attr1',\n 'attr_2'), ('atTr1', 'ATtr_2')])\n", (2764, 2854), False, 'import pytest\n'), ((891, 904), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (901, 904), False, 'import time\n'), ((921, 937), 'conftest.do_clean_crate', 'do_clean_crate', ([], {}), '()\n', (935, 937), False, 'from conftest import QL_URL, do_clean_crate\n'), ((1004, 1033), 'urllib.parse.quote', 'urllib.parse.quote', (['attr_name'], {}), '(attr_name)\n', (1022, 1033), False, 'import urllib\n'), ((1130, 1147), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1142, 1147), False, 'import requests\n'), ((1269, 1298), 'urllib.parse.quote', 'urllib.parse.quote', (['attr_name'], {}), '(attr_name)\n', (1287, 1298), False, 'import urllib\n'), ((1394, 1411), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1406, 1411), False, 'import requests\n'), ((1676, 1707), 'requests.get', 'requests.get', (['url', 'query_params'], {}), '(url, query_params)\n', (1688, 1707), False, 'import requests\n')] |
# -*- coding: utf-8 -*-
"""
.. moduleauthor:: <NAME> (<EMAIL>, <EMAIL>)
"""
import fnmatch
import os
import random
import shutil
import time
from collections import OrderedDict
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from scipy import stats
from scipy.stats import spearmanr, pearsonr
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from ..shared import apply_cyclic_transform, pickle_file
class ModelGeneratorBase(object):
def __init__(self, analysis_id, random_seed=None, **kwargs):
"""
Base class for generating ROMs
:param analysis_id: string, identifier of the model to build
:param random_seed: int, random seed to use
:param kwargs:
See below
:Keyword Arguments:
* *downsample* (``double``) -- Fraction to downsample the dataframe. If this exists
then the data will be downsampled, and the results will be stored in a directory with
this value appended.
"""
self.analysis_id = analysis_id
        # use an integer seed; np.random.seed(time.time()) would assign None and fail on a float argument
        self.random_seed = random_seed if random_seed else int(time.time())
self.model_results = []
self.model_type = self.__class__.__name__
self.dataset = None
self.downsample = kwargs.get('downsample', None)
print("Initializing %s" % self.model_type)
# Initialize the directories where results are to be stored.
if self.downsample:
self.base_dir = 'output/%s_%s/%s' % (self.analysis_id, self.downsample, self.model_type)
else:
self.base_dir = 'output/%s/%s' % (self.analysis_id, self.model_type)
self.images_dir = '%s/images' % self.base_dir
self.models_dir = '%s/models' % self.base_dir
if self.downsample:
self.validation_dir = 'output/%s_%s/ValidationData' % (
self.analysis_id, self.downsample)
else:
self.validation_dir = 'output/%s/ValidationData' % self.analysis_id
self.data_dir = '%s/data' % self.base_dir
# Remove some directories if they exist
for dir_n in ['images_dir', 'models_dir']:
if os.path.exists(getattr(self, dir_n)):
# print("removing the directory %s" % dir)
shutil.rmtree(getattr(self, dir_n))
# create directory if not exist for each of the above
for dir_n in ['base_dir', 'images_dir', 'models_dir', 'data_dir', 'validation_dir']:
if not os.path.exists(getattr(self, dir_n)):
os.makedirs(getattr(self, dir_n))
for root, dirnames, filenames in os.walk(self.base_dir):
for filename in fnmatch.filter(filenames, 'cv_results_*.csv'):
os.remove('%s/%s' % (self.base_dir, filename))
for filename in fnmatch.filter(filenames, 'model_results.csv'):
os.remove('%s/%s' % (self.base_dir, filename))
def save_dataframe(self, dataframe, path):
pickle_file(dataframe, path)
def inspect(self):
"""
Inspect the dataframe and return the statistics of the dataframe.
:return:
"""
        # look at the entire dataset and save the statistics from the file to the data_dir
out_df = self.dataset.describe()
out_df.to_csv(f'{self.data_dir}/statistics.csv')
# list out all the columns
out_df = self.dataset.columns
with open(f'{self.data_dir}/column_names.csv', 'w') as f:
for column in self.dataset.columns:
f.write(column + '\n')
def load_data(self, datafile):
"""
Load the data into a dataframe. The data needs to be a CSV file at the moment.
:param datafile: str, path to the CSV file to load
:return: None
"""
if os.path.exists(datafile):
self.dataset = pd.read_csv(datafile)
else:
raise Exception(f"Datafile does not exist: {datafile}")
print(f'Loading results data file: {datafile}')
        # massage the data as needed based on the kwargs arg
# TODO: remove these hard coded options and pass in as kwargs.
drop_columns = ['DistrictCoolingOutletTemperature']
rename_columns = {
'DistrictHeatingOutletTemperature': 'ETSInletTemperature',
'DistrictHeatingInletTemperature': 'ETSHeatingOutletTemperature',
'DistrictCoolingInletTemperature': 'ETSCoolingOutletTemperature',
}
for column in drop_columns:
if column in list(self.dataset.columns.values):
                self.dataset = self.dataset.drop(columns=column)
if rename_columns:
self.dataset = self.dataset.rename(columns=rename_columns)
def evaluate(self, model, model_name, model_moniker, x_data, y_data, downsample,
build_time, cv_time, covariates=None, scaler=None):
"""
Generic base function to evaluate the performance of the models.
:param model:
:param model_name:
:param x_data:
:param y_data:
:param downsample:
:param build_time:
:return: Ordered dict
"""
yhat = model.predict(x_data)
if scaler:
yhat = scaler[model_name].inverse_transform(yhat)
y_data = scaler[model_name].inverse_transform(y_data)
errors = abs(yhat - y_data)
spearman = spearmanr(y_data, yhat)
pearson = pearsonr(y_data, yhat)
slope, intercept, r_value, _p_value, _std_err = stats.linregress(y_data, yhat)
self.yy_plots(y_data, yhat, model_name)
return yhat, OrderedDict([
('name', model_name),
('model_type', model_moniker),
('downsample', downsample),
('slope', slope),
('intercept', intercept),
('mae', np.mean(errors)),
('r_value', r_value),
('r_squared', r_value ** 2),
('spearman', spearman[0]),
('pearson', pearson[0]),
('time_to_build', build_time),
('time_to_cv', cv_time),
])
def build(self, metamodel, **kwargs):
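        """Check that data has been loaded and cast the covariate columns to the types declared by the metamodel."""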
if self.dataset is None:
raise Exception("Need to load the datafile first by calling Metamodel.load_data(<path-to-file>)")
# Type cast the columns - this is probably not needed
data_types = metamodel.covariate_types(self.model_type)
self.dataset[data_types['float']] = self.dataset[data_types['float']].astype(float)
self.dataset[data_types['int']] = self.dataset[data_types['int']].astype(int)
def train_test_validate_split(self, dataset, metamodel, downsample=None, scale=False):
"""
Use the built in method to generate the train and test data. This adds an additional
        set of data for validation. This validation dataset corresponds to a unique ID that is
        pulled out of the dataset before the train/test split is performed.
"""
print("Initial dataset size is %s" % len(dataset))
validate_id = None
if metamodel.validation_id == 'first':
# grab the first id in the dataset. This is non-ideal, but allow for rapid testing
validate_id = dataset.iloc[0]['id']
elif metamodel.validation_id == 'median':
raise Exception('Median validation ID is not implemented')
# look at all of the covariates and try to find the median value from them all
# this method should be deterministic
# Code below only works if the space is fully filled out and if only looking at variables
# that are constant for the whole annual simulation.
# closest_medians = dataset
# for cv in metamodel.covariates(self.model_type):
# if cv.get('alogithm_option', None):
# if cv['algorithm_options'].get(self.model_type, None):
# if cv['algorithm_options'][self.model_type]['ignore']:
# continue
# median = dataset[cv['name']].median()
# print(f'my median is {median}')
# closest_medians = closest_medians[closest_medians[cv['name']] == median]
# print(f'len of dataframe is {len(closest_medians)}')
elif metamodel.validation_id == 'random':
ids = dataset['id'].unique()
validate_id = random.choice(ids)
else:
# assume that there is a validation id that has been passed
validate_id = metamodel.validation_id
if validate_id and validate_id in dataset['id'].unique():
print('Extracting validation dataset and converting to date time')
validate_xy = dataset[dataset['id'] == validate_id]
# Covert the validation dataset datetime to actual datetime objects
# validate_xy['DateTime'] = pd.to_datetime(dataset['DateTime'])
#
# Constrain to minute precision to make this method much faster
validate_xy['DateTime'] = validate_xy['DateTime'].astype('datetime64[m]')
dataset = dataset[dataset['id'] != validate_id]
else:
raise Exception(
"Validation id does not exist in dataframe. ID was %s" % validate_id)
if downsample:
num_rows = int(len(dataset.index.values) * downsample)
print("Downsampling dataframe by %s to %s rows" % (downsample, num_rows))
dataset = dataset.sample(n=num_rows)
for cv in metamodel.covariates(self.model_type):
if cv.get('algorithm_options', None):
if cv['algorithm_options'].get(self.model_type, None):
if cv['algorithm_options'][self.model_type].get('variable_type', None):
if cv['algorithm_options'][self.model_type]['variable_type'] == 'cyclical':
print("Transforming covariate to be cyclical %s" % cv['name'])
dataset[cv['name']] = dataset.apply(
apply_cyclic_transform,
column_name=cv['name'],
category_count=cv['algorithm_options'][self.model_type][
'category_count'],
axis=1
)
train_x, test_x, train_y, test_y = train_test_split(
dataset[metamodel.covariate_names(self.model_type)],
dataset[metamodel.available_response_names(self.model_type)],
train_size=0.7,
test_size=0.3,
random_state=self.random_seed
)
# If scaling, then fit the scaler on the training data, then use the trained data
# scalar to scale the test data.
if scale:
scalers = {'features': StandardScaler()}
train_x[train_x.columns] = scalers['features'].fit_transform(train_x[train_x.columns])
test_x[test_x.columns] = scalers['features'].transform(test_x[test_x.columns])
for response in metamodel.available_response_names(self.model_type):
scalers[response] = StandardScaler()
train_y[response] = scalers[response].fit_transform(
train_y[response].values.reshape(-1, 1)
)
test_y[response] = scalers[response].transform(
test_y[response].values.reshape(-1, 1)
)
else:
scalers = None
print("Dataset size is %s" % len(dataset))
print("Training dataset size is %s" % len(train_x))
print("Validation dataset size is %s" % len(validate_xy))
return train_x, test_x, train_y, test_y, validate_xy, scalers
def yy_plots(self, y_data, yhat, model_name):
"""
Plot the yy-plots
:param y_data:
:param yhat:
:param model_name:
:return:
"""
        # This needs to be updated to create the figure with an explicit size
sns.set(color_codes=True)
# Find the items that are zero / zero across y and yhat and remove to look at
# plots and other statistics
clean_data = zip(y_data, yhat)
clean_data = [x for x in clean_data if x != (0, 0)]
y_data = np.asarray([y[0] for y in clean_data])
yhat = np.asarray([y[1] for y in clean_data])
# Convert data to dataframe
data = pd.DataFrame.from_dict({'Y': y_data, 'Yhat': yhat})
with plt.rc_context(dict(sns.axes_style("whitegrid"))):
fig = plt.figure(figsize=(6, 6), dpi=100)
sns.regplot(
x='Y',
y='Yhat',
data=data,
ci=None,
scatter_kws={"s": 50, "alpha": 1}
)
# plt.title("Training Set: Y-Y Plot for %s" % model_name)
plt.tight_layout()
plt.savefig('%s/fig_yy_%s.png' % (self.images_dir, model_name))
fig.clf()
plt.clf()
# Hex plots for YY data
# Full resolution YY Plots
with plt.rc_context(dict(sns.axes_style("ticks"))):
newplt = sns.jointplot(
data['Y'], data['Yhat'], kind="hex", space=0
)
newplt.savefig('%s/fig_yy_hexplot_%s.png' % (self.images_dir, model_name))
plt.clf()
# Remove 0,0 points for higher resolution
sub_data = data[(data.Y != 0) & (data.Yhat != 0)]
# Hex plots for YY data
newplt = sns.jointplot(
sub_data['Y'], sub_data['Yhat'], kind="hex", space=0
)
newplt.savefig('%s/fig_yy_hexplot_hres_%s.png' % (self.images_dir, model_name))
plt.clf()
def anova_plots(self, y_data, yhat, model_name):
residuals = y_data - yhat
# figsize = width, height
fig = plt.figure(figsize=(8, 4), dpi=100)
ax1 = fig.add_subplot(1, 2, 1)
ax1.plot(yhat, residuals, 'o')
plt.axhline(y=0, color='grey', linestyle='dashed')
ax1.set_xlabel('Fitted values')
ax1.set_ylabel('Residuals')
ax1.set_title('Residuals vs Fitted')
# ax2 = fig.add_subplot(1, 2, 2)
# sm.qqplot(residuals, line='s', ax=ax2)
# ax2.set_title('Normal Q-Q')
# plt.tight_layout()
# fig.savefig('%s/fig_anova_%s.png' % (self.images_dir, model_name))
# fig.clf()
# plt.clf()
| [
"scipy.stats.linregress",
"pandas.read_csv",
"scipy.stats.pearsonr",
"os.walk",
"os.remove",
"os.path.exists",
"seaborn.set",
"seaborn.regplot",
"numpy.mean",
"numpy.asarray",
"pandas.DataFrame.from_dict",
"matplotlib.pyplot.axhline",
"scipy.stats.spearmanr",
"seaborn.axes_style",
"random.choice",
"matplotlib.pyplot.savefig",
"seaborn.jointplot",
"time.time",
"matplotlib.pyplot.clf",
"sklearn.preprocessing.StandardScaler",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"fnmatch.filter"
] | [((2697, 2719), 'os.walk', 'os.walk', (['self.base_dir'], {}), '(self.base_dir)\n', (2704, 2719), False, 'import os\n'), ((3881, 3905), 'os.path.exists', 'os.path.exists', (['datafile'], {}), '(datafile)\n', (3895, 3905), False, 'import os\n'), ((5480, 5503), 'scipy.stats.spearmanr', 'spearmanr', (['y_data', 'yhat'], {}), '(y_data, yhat)\n', (5489, 5503), False, 'from scipy.stats import spearmanr, pearsonr\n'), ((5522, 5544), 'scipy.stats.pearsonr', 'pearsonr', (['y_data', 'yhat'], {}), '(y_data, yhat)\n', (5530, 5544), False, 'from scipy.stats import spearmanr, pearsonr\n'), ((5602, 5632), 'scipy.stats.linregress', 'stats.linregress', (['y_data', 'yhat'], {}), '(y_data, yhat)\n', (5618, 5632), False, 'from scipy import stats\n'), ((12137, 12162), 'seaborn.set', 'sns.set', ([], {'color_codes': '(True)'}), '(color_codes=True)\n', (12144, 12162), True, 'import seaborn as sns\n'), ((12403, 12441), 'numpy.asarray', 'np.asarray', (['[y[0] for y in clean_data]'], {}), '([y[0] for y in clean_data])\n', (12413, 12441), True, 'import numpy as np\n'), ((12457, 12495), 'numpy.asarray', 'np.asarray', (['[y[1] for y in clean_data]'], {}), '([y[1] for y in clean_data])\n', (12467, 12495), True, 'import numpy as np\n'), ((12548, 12599), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (["{'Y': y_data, 'Yhat': yhat}"], {}), "({'Y': y_data, 'Yhat': yhat})\n", (12570, 12599), True, 'import pandas as pd\n'), ((14001, 14036), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 4)', 'dpi': '(100)'}), '(figsize=(8, 4), dpi=100)\n', (14011, 14036), True, 'import matplotlib.pyplot as plt\n'), ((14124, 14174), 'matplotlib.pyplot.axhline', 'plt.axhline', ([], {'y': '(0)', 'color': '"""grey"""', 'linestyle': '"""dashed"""'}), "(y=0, color='grey', linestyle='dashed')\n", (14135, 14174), True, 'import matplotlib.pyplot as plt\n'), ((2749, 2794), 'fnmatch.filter', 'fnmatch.filter', (['filenames', '"""cv_results_*.csv"""'], {}), "(filenames, 'cv_results_*.csv')\n", (2763, 2794), False, 'import fnmatch\n'), ((2888, 2934), 'fnmatch.filter', 'fnmatch.filter', (['filenames', '"""model_results.csv"""'], {}), "(filenames, 'model_results.csv')\n", (2902, 2934), False, 'import fnmatch\n'), ((3934, 3955), 'pandas.read_csv', 'pd.read_csv', (['datafile'], {}), '(datafile)\n', (3945, 3955), True, 'import pandas as pd\n'), ((12683, 12718), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 6)', 'dpi': '(100)'}), '(figsize=(6, 6), dpi=100)\n', (12693, 12718), True, 'import matplotlib.pyplot as plt\n'), ((12731, 12818), 'seaborn.regplot', 'sns.regplot', ([], {'x': '"""Y"""', 'y': '"""Yhat"""', 'data': 'data', 'ci': 'None', 'scatter_kws': "{'s': 50, 'alpha': 1}"}), "(x='Y', y='Yhat', data=data, ci=None, scatter_kws={'s': 50,\n 'alpha': 1})\n", (12742, 12818), True, 'import seaborn as sns\n'), ((12991, 13009), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (13007, 13009), True, 'import matplotlib.pyplot as plt\n'), ((13022, 13085), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s/fig_yy_%s.png' % (self.images_dir, model_name))"], {}), "('%s/fig_yy_%s.png' % (self.images_dir, model_name))\n", (13033, 13085), True, 'import matplotlib.pyplot as plt\n'), ((13120, 13129), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (13127, 13129), True, 'import matplotlib.pyplot as plt\n'), ((13280, 13339), 'seaborn.jointplot', 'sns.jointplot', (["data['Y']", "data['Yhat']"], {'kind': '"""hex"""', 'space': '(0)'}), "(data['Y'], data['Yhat'], kind='hex', space=0)\n", (13293, 13339), 
True, 'import seaborn as sns\n'), ((13469, 13478), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (13476, 13478), True, 'import matplotlib.pyplot as plt\n'), ((13653, 13720), 'seaborn.jointplot', 'sns.jointplot', (["sub_data['Y']", "sub_data['Yhat']"], {'kind': '"""hex"""', 'space': '(0)'}), "(sub_data['Y'], sub_data['Yhat'], kind='hex', space=0)\n", (13666, 13720), True, 'import seaborn as sns\n'), ((13855, 13864), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (13862, 13864), True, 'import matplotlib.pyplot as plt\n'), ((1203, 1214), 'time.time', 'time.time', ([], {}), '()\n', (1212, 1214), False, 'import time\n'), ((2812, 2858), 'os.remove', 'os.remove', (["('%s/%s' % (self.base_dir, filename))"], {}), "('%s/%s' % (self.base_dir, filename))\n", (2821, 2858), False, 'import os\n'), ((2952, 2998), 'os.remove', 'os.remove', (["('%s/%s' % (self.base_dir, filename))"], {}), "('%s/%s' % (self.base_dir, filename))\n", (2961, 2998), False, 'import os\n'), ((10945, 10961), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (10959, 10961), False, 'from sklearn.preprocessing import StandardScaler\n'), ((11271, 11287), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (11285, 11287), False, 'from sklearn.preprocessing import StandardScaler\n'), ((8486, 8504), 'random.choice', 'random.choice', (['ids'], {}), '(ids)\n', (8499, 8504), False, 'import random\n'), ((12634, 12661), 'seaborn.axes_style', 'sns.axes_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (12648, 12661), True, 'import seaborn as sns\n'), ((13232, 13255), 'seaborn.axes_style', 'sns.axes_style', (['"""ticks"""'], {}), "('ticks')\n", (13246, 13255), True, 'import seaborn as sns\n'), ((5923, 5938), 'numpy.mean', 'np.mean', (['errors'], {}), '(errors)\n', (5930, 5938), True, 'import numpy as np\n')] |
import numpy as np
import os
import sys
def plot_sample_efficiency(file_name, num_samples, pretrain_scores, scratch_scores, metric_name):
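    """Plot pretrain vs. scratch scores against the number of training images and save the figure to file_name."""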
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure
    rows, cols, size = 1, 1, 5
    font_size = 30
    label_size = 25
    line_width = 2.5
    marker_size = 10
    fig = Figure(tight_layout=True, figsize=(8, 6))
    ax = fig.subplots(rows, cols)
    ax.plot(num_samples, pretrain_scores, linewidth=line_width, marker='s', markersize=marker_size)
    ax.plot(num_samples, scratch_scores, linewidth=line_width, marker='o', markersize=marker_size)
    ax.set_ylabel(metric_name, fontsize=font_size - 2)
    ax.set_xlabel('Number of training images', fontsize=font_size)
    ax.legend(['Pretrain', 'Scratch'], fontsize=font_size)
    ax.tick_params(axis='x', labelsize=label_size)
    ax.tick_params(axis='y', labelsize=label_size)
    ax.set_xlim([100 - 10, num_samples[-1] + 10])
    canvas = FigureCanvasAgg(fig)
    canvas.print_figure(file_name, dpi=100)
pretrain_scores = np.array([0.5784, 0.6756, 0.7117, 0.7137, 0.7352])*100
scratch_scores = np.array([0.4585, 0.5193, 0.5790, 0.6218, 0.6767])*100
num_samples = [100,300,500,700,899]
metric_name = 'Average pixel f1-score(%)'
file_name = 'sample_efficiency.png'
plot_sample_efficiency(file_name, num_samples, pretrain_scores, scratch_scores, metric_name) | [
"matplotlib.figure.Figure",
"numpy.array",
"matplotlib.backends.backend_agg.FigureCanvasAgg"
] | [((376, 417), 'matplotlib.figure.Figure', 'Figure', ([], {'tight_layout': '(True)', 'figsize': '(8, 6)'}), '(tight_layout=True, figsize=(8, 6))\n', (382, 417), False, 'from matplotlib.figure import Figure\n'), ((980, 1000), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvasAgg', (['fig'], {}), '(fig)\n', (995, 1000), False, 'from matplotlib.backends.backend_agg import FigureCanvasAgg\n'), ((1062, 1112), 'numpy.array', 'np.array', (['[0.5784, 0.6756, 0.7117, 0.7137, 0.7352]'], {}), '([0.5784, 0.6756, 0.7117, 0.7137, 0.7352])\n', (1070, 1112), True, 'import numpy as np\n'), ((1135, 1184), 'numpy.array', 'np.array', (['[0.4585, 0.5193, 0.579, 0.6218, 0.6767]'], {}), '([0.4585, 0.5193, 0.579, 0.6218, 0.6767])\n', (1143, 1184), True, 'import numpy as np\n')] |
import json
from django.contrib.auth import get_user_model as user_model
User = user_model()
from django.urls import reverse
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from .serializers import OrderSerializer
from .models import Order
from rest_framework.response import Response
class OrderTestCase(APITestCase):
def setUp(self):
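        """Create a user, authenticate the test client with its access token and create one order."""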
self.user = User.objects.create_user(email = "<EMAIL>",
username = "Customer",
phone_number = "+254722122122",
password = "<PASSWORD>")
self.token = self.user.tokens()['access']
self.api_authentication()
self.order = Order.objects.create(item = "pens", amount=5, customer=self.user)
def tearDown(self):
User.objects.all().delete()
Order.objects.all().delete()
def api_authentication(self):
self.client.credentials(HTTP_AUTHORIZATION="Bearer "+ self.token)
def test_post_order(self):
data = {"item": "books",
"amount": 50}
response = self.client.post(reverse("order-list"), data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# self.assertEqual(response.data["item"], "books")
# self.assertEqual(response.data["amount"], 50)
def test_get_customer_orders_by_owner(self):
this_customer_id = self.user.id
response = self.client.get(reverse("order-detail", kwargs={"pk": this_customer_id}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_single_order_by_owner(self):
this_order_id = self.order.id
response = self.client.get(reverse("order-detail", kwargs={"pk": this_order_id}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["item"], "pens")
def test_update_single_order_by_owner(self):
this_order_id = self.order.id
        # a payload is required for the update; amount is set to 10 to match the assertion below
        response = self.client.put(reverse("order-detail", kwargs={"pk": this_order_id}),
                                   {"item": "pens", "amount": 10})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["amount"], 10)
def test_delete_single_order(self):
this_order_id = self.order.id
# import pdb; pdb.set_trace()
response = self.client.delete(reverse("order-detail", kwargs={"pk": this_order_id}))
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| [
"django.contrib.auth.get_user_model",
"django.urls.reverse"
] | [((80, 92), 'django.contrib.auth.get_user_model', 'user_model', ([], {}), '()\n', (90, 92), True, 'from django.contrib.auth import get_user_model as user_model\n'), ((1260, 1281), 'django.urls.reverse', 'reverse', (['"""order-list"""'], {}), "('order-list')\n", (1267, 1281), False, 'from django.urls import reverse\n'), ((1617, 1673), 'django.urls.reverse', 'reverse', (['"""order-detail"""'], {'kwargs': "{'pk': this_customer_id}"}), "('order-detail', kwargs={'pk': this_customer_id})\n", (1624, 1673), False, 'from django.urls import reverse\n'), ((1878, 1931), 'django.urls.reverse', 'reverse', (['"""order-detail"""'], {'kwargs': "{'pk': this_order_id}"}), "('order-detail', kwargs={'pk': this_order_id})\n", (1885, 1931), False, 'from django.urls import reverse\n'), ((2187, 2240), 'django.urls.reverse', 'reverse', (['"""order-detail"""'], {'kwargs': "{'pk': this_order_id}"}), "('order-detail', kwargs={'pk': this_order_id})\n", (2194, 2240), False, 'from django.urls import reverse\n'), ((2573, 2626), 'django.urls.reverse', 'reverse', (['"""order-detail"""'], {'kwargs': "{'pk': this_order_id}"}), "('order-detail', kwargs={'pk': this_order_id})\n", (2580, 2626), False, 'from django.urls import reverse\n')] |
from tkinter import * # base of our project; tkinter is Python's native GUI toolkit.
from tkinter import colorchooser # for choosing any colour via a colour picker dialog.
import pyscreenshot # library for a screenshot
class PaintTk:
    """This class contains the entire application; comments are added step by step."""
def __init__(self):
        # Start tkinter in the variable "window"; define the title and
        # set the window size.
self.window = Tk()
self.window.title('PaintTk by Rafa')
self.window.geometry("1200x600")
self.window.minsize(width=680, height=480)
self.window.resizable(0, 0) # This blocks screen scaling.
        # The application starts with the oval brush selected.
self.oval_brush = True
self.line_brush = False
self.eraser_brush = False
# Set icons
self.img_line = PhotoImage(file='assets/icons/line.png')
self.img_oval = PhotoImage(file='assets/icons/oval.png')
self.img_eraser = PhotoImage(file='assets/icons/eraser.png')
self.img_save = PhotoImage(file='assets/icons/save.png')
self.img_square = PhotoImage(file='assets/icons/rainbow.png')
self.img_new = PhotoImage(file='assets/icons/new.png')
self.list_colors = ('black', 'gray', 'red', 'green', 'blue',
'yellow', 'magenta', 'cyan')
self.pick_colors = 'black'
self.bar_menu = Frame(self.window, bg='#3b3b3b', padx=10, pady=10)
self.bar_menu.pack(fill='x') # "fill='x' fills the entire x-axis
self.text_color = Label(self.bar_menu, text=' Colors: ', fg='white', bg='#3b3b3b')
self.text_color.pack(side='left')
for cor in self.list_colors:
self.button_color = Button(self.bar_menu, bg=cor, width=3, height=2, bd=1,
command=lambda col=cor: self.select_colors(col)).pack(side='left')
self.label_colors_choose = Label(self.bar_menu, text=' Color Choose: ', fg='white', bg='#3b3b3b')
self.label_colors_choose.pack(side='left')
self.color_choose = Button(self.bar_menu, image=self.img_square, bd=1, command=self.selected_color)
self.color_choose.pack(side='left')
self.text_pen_size = Label(self.bar_menu, text=' Size: ', fg='white', bg='#3b3b3b')
self.text_pen_size.pack(side='left')
self.pen_size = Spinbox(self.bar_menu, justify='center', from_=1, to=50, width=4)
self.pen_size.pack(side='left')
self.text_brushs = Label(self.bar_menu, text=' Brushs: ', fg='white', bg='#3b3b3b').pack(side='left')
self.button_line = Button(self.bar_menu, image=self.img_line, bd=1, command=self.brush_line)
self.button_line.pack(side='left')
self.button_oval = Button(self.bar_menu, image=self.img_oval, bd=1, command=self.brush_oval)
self.button_oval.pack(side='left')
self.button_eraser = Button(self.bar_menu, image=self.img_eraser, bd=1, command=self.brush_eraser)
self.button_eraser.pack(side='left')
self.text_options = Label(self.bar_menu, text=' Options: ', fg='white', bg='#3b3b3b').pack(side='left')
self.button_save = Button(self.bar_menu, image=self.img_save, bd=3, command=self.save).pack(side='left')
self.button_new = Button(self.bar_menu, image=self.img_new, bd=3, command=self.clean).pack(side='left')
self.area_draw = Canvas(self.window, height=720, bg='gainsboro') # Canvas widget to display graphical
# elements like lines or text.
self.area_draw.pack(fill='both') # "fill = both" in x-axis and y-axis
self.area_draw.bind('<B1-Motion>', self.draw)
# The mouse is moved, with mouse button 1 being held down (use
# B2 for the middle button, B3 for the right button).
# Utilizing bind for define shortcut key
self.window.bind("<F1>", self.save)
self.window.bind("<F2>", self.clean)
self.window.mainloop()
    # Draw at the x and y coordinates reported by the mouse event
def draw(self, event):
x1, y1 = event.x, event.y
x2, y2 = event.x, event.y
if self.oval_brush:
self.area_draw.create_oval(x1, y1, x2, y2, fill=self.pick_colors,
outline=self.pick_colors, width=self.pen_size.get())
elif self.line_brush:
self.area_draw.create_line(x1 - 10, y1 - 10, x2, y2, fill=self.pick_colors,
width=self.pen_size.get())
else:
self.area_draw.create_oval(x1, y1, x2, y2, fill='gainsboro',
outline='gainsboro', width=self.pen_size.get())
def select_colors(self, col):
self.pick_colors = col
def brush_oval(self):
self.oval_brush = True
self.line_brush = False
self.eraser_brush = False
def brush_line(self):
self.oval_brush = False
self.line_brush = True
self.eraser_brush = False
def brush_eraser(self):
self.oval_brush = False
self.line_brush = False
self.eraser_brush = True
    def clean(self, event=None):
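        """Clear everything drawn on the canvas."""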
self.area_draw.delete("all")
    def save(self, event=None):
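        """Take a screenshot of the canvas area and save it as image.png."""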
x = self.window.winfo_rootx() + self.area_draw.winfo_x()
y = self.window.winfo_rooty() + self.area_draw.winfo_y()
x1 = self.window.winfo_rootx() + self.area_draw.winfo_width()
y1 = self.window.winfo_rooty() + self.area_draw.winfo_height()
img = pyscreenshot.grab(bbox=(x, y, x1, y1))
img.save('image.png', 'png')
def selected_color(self):
        color = colorchooser.askcolor()
        if color[1]:  # askcolor returns (None, None) when the dialog is cancelled
            self.pick_colors = color[1]
if __name__ == '__main__':
PaintTk()
| [
"pyscreenshot.grab",
"tkinter.colorchooser.askcolor"
] | [((5501, 5539), 'pyscreenshot.grab', 'pyscreenshot.grab', ([], {'bbox': '(x, y, x1, y1)'}), '(bbox=(x, y, x1, y1))\n', (5518, 5539), False, 'import pyscreenshot\n'), ((5624, 5647), 'tkinter.colorchooser.askcolor', 'colorchooser.askcolor', ([], {}), '()\n', (5645, 5647), False, 'from tkinter import colorchooser\n')] |
from functools import partial
import os
import numpy as np
import cv2
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.functional as F
from .dataset.transform import SegmentationTransform
import envs.GTAV.models as models
from .modules.bn import ABN
from .modules.deeplab import DeeplabV3
from utils import color_text
class_map = {
0: 2, # "animal--bird"
1: 2, # "animal--ground-animal"
2: 0, # "construction--barrier--curb"
3: 0, # "construction--barrier--fence"
4: 0, # "construction--barrier--guard-rail"
5: 0, # "construction--barrier--other-barrier"
6: 0, # "construction--barrier--wall"
7: 1, # "construction--flat--bike-lane"
8: 1, # "construction--flat--crosswalk-plain"
9: 1, # "construction--flat--curb-cut"
10: 1, # "construction--flat--parking"
11: 0, # "construction--flat--pedestrian-area"
12: 1, # "construction--flat--rail-track"
13: 1, # "construction--flat--road"
14: 1, # "construction--flat--service-lane"
15: 0, # "construction--flat--sidewalk"
16: 0, # "construction--structure--bridge"
17: 0, # "construction--structure--building"
18: 0, # "construction--structure--tunnel"
19: 2, # "human--person"
20: 2, # "human--rider--bicyclist"
21: 2, # "human--rider--motorcyclist"
22: 2, # "human--rider--other-rider"
23: 1, # "marking--crosswalk-zebra"
24: 1, # "marking--general"
25: 0, # "nature--mountain"
    26: 0, # "nature--sand" ignored because it is rarely seen
27: 3, # "nature--sky"
28: 0, # "nature--snow" Not sure whether snow mountain or snow on road
29: 0, # "nature--terrain" Ignored due to rare appearance
30: 0, # "nature--vegetation"
31: 0, # "nature--water"
32: 0, # "object--banner"
33: 0, # "object--bench"
34: 0, # "object--bike-rack"
35: 0, # "object--billboard"
36: 0, # "object--catch-basin" Ignored since not frequent
37: 0, # "object--cctv-camera" Ignored since not frequent
38: 0, # "object--fire-hydrant"
39: 0, # "object--junction-box"
40: 0, # "object--mailbox"
41: 0, # "object--manhole"
42: 0, # "object--phone-booth"
43: 0, # "object--pothole" Ignored, since not frequent
44: 0, # "object--street-light"
45: 0, # "object--support--pole"
46: 0, # "object--support--traffic-sign-frame"
47: 0, # "object--support--utility-pole"
48: 0, # "object--traffic-light"
49: 0, # "object--traffic-sign--back"
50: 0, # "object--traffic-sign--front"
51: 0, # "object--trash-can"
52: 2, # "object--vehicle--bicycle"
53: 0, # "object--vehicle--boat" Ignoring boat
54: 2, # "object--vehicle--bus"
55: 2, # "object--vehicle--car"
56: 2, # "object--vehicle--caravan"
57: 2, # "object--vehicle--motorcycle"
58: 2, # "object--vehicle--on-rails"
59: 2, # "object--vehicle--other-vehicle"
60: 2, # "object--vehicle--trailer"
61: 2, # "object--vehicle--truck"
62: 2, # "object--vehicle--wheeled-slow"
63: 2, # "void--car-mount"
64: 2 # "void--ego-vehicle"
}
def vis(array):
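    """Convert an array of class ids into an RGB image using a fixed colour palette."""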
classes = {
0: [0, 0, 0], # None
1: [70, 70, 70], # Buildings
2: [190, 153, 153], # Fences
3: [72, 0, 90], # Other
4: [220, 20, 60], # Pedestrians
5: [153, 153, 153], # Poles
6: [157, 234, 50], # RoadLines
7: [128, 64, 128], # Roads
8: [244, 35, 232], # Sidewalks
9: [107, 142, 35], # Vegetation
10: [0, 0, 255], # Vehicles
11: [102, 102, 156], # Walls
12: [220, 220, 0] # TrafficSigns
}
result = np.zeros((array.shape[0], array.shape[1], 3))
for key, value in classes.items():
result[np.where(array == key)] = value
return result
class SegmentationModule(nn.Module):
def __init__(self, head_channels, classes, snapshot_file='model.pth.tar'):
super(SegmentationModule, self).__init__()
norm_act = partial(ABN, activation="leaky_relu", slope=.01)
self.body = models.__dict__["net_wider_resnet38_a2"](norm_act=norm_act, dilation=(1, 2, 4, 4))
self.head = DeeplabV3(4096, 256, 256, norm_act=norm_act, pooling_size=(84, 84))
self.cls = nn.Conv2d(head_channels, classes, 1)
self.transform = SegmentationTransform(
2048,
(0.41738699, 0.45732192, 0.46886091),
(0.25685097, 0.26509955, 0.29067996),
)
dir_path = os.path.dirname(os.path.realpath(__file__))
snapshot_file = os.path.join(dir_path, snapshot_file)
if snapshot_file is not None:
if not os.path.exists(snapshot_file):
print(color_text('No local model found at {}'.format(snapshot_file), 'red'))
print(color_text('Please download pretrained model from https://drive.google.com/file/d/1SJJx5-LFG3J3M99TrPMU-z6ZmgWynxo-/view', 'red'))
data = torch.load(snapshot_file)
self.body.load_state_dict(data["state_dict"]["body"])
self.head.load_state_dict(data["state_dict"]["head"])
self.cls.load_state_dict(data["state_dict"]["cls"])
print('Loading segmentation model from %s' % snapshot_file)
def forward(self, x):
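        """Run segmentation on one image and return per-pixel class ids remapped through class_map."""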
x = self.transform(x).unsqueeze(0).cuda()
img_shape = x.shape[-2:]
x = self.body(x)
x = self.head(x)
x = self.cls(x)
x = F.interpolate(x, size=img_shape, mode='bilinear', align_corners=True)
x = torch.argmax(x, dim=1).data.cpu().numpy()[0]
result = np.zeros_like(x, dtype=np.int32)
for key, value in class_map.items():
result[np.where(x == key)] = value
return result
def main():
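    """Smoke test: segment the first image found in the local 'a' directory and write a colourised PNG."""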
cudnn.benchmark = True
model = SegmentationModule(256, 65)
model = model.cuda().eval()
transformation = SegmentationTransform(
2048,
(0.41738699, 0.45732192, 0.46886091),
(0.25685097, 0.26509955, 0.29067996),
)
# Run testing
for fname in os.listdir('a'):
print(fname)
x = cv2.imread(os.path.join('a', fname))
x = cv2.cvtColor(x, cv2.COLOR_RGB2BGR)
with torch.no_grad():
y = model(x)
y = vis(y)
cv2.imwrite(fname.replace('.jpg', '.png'), y)
break
if __name__ == '__main__':
main()
| [
"os.path.exists",
"os.listdir",
"numpy.where",
"torch.load",
"os.path.join",
"torch.nn.Conv2d",
"os.path.realpath",
"numpy.zeros",
"utils.color_text",
"functools.partial",
"torch.nn.functional.interpolate",
"cv2.cvtColor",
"torch.no_grad",
"numpy.zeros_like",
"torch.argmax"
] | [((3669, 3714), 'numpy.zeros', 'np.zeros', (['(array.shape[0], array.shape[1], 3)'], {}), '((array.shape[0], array.shape[1], 3))\n', (3677, 3714), True, 'import numpy as np\n'), ((6047, 6062), 'os.listdir', 'os.listdir', (['"""a"""'], {}), "('a')\n", (6057, 6062), False, 'import os\n'), ((4008, 4057), 'functools.partial', 'partial', (['ABN'], {'activation': '"""leaky_relu"""', 'slope': '(0.01)'}), "(ABN, activation='leaky_relu', slope=0.01)\n", (4015, 4057), False, 'from functools import partial\n'), ((4267, 4303), 'torch.nn.Conv2d', 'nn.Conv2d', (['head_channels', 'classes', '(1)'], {}), '(head_channels, classes, 1)\n', (4276, 4303), True, 'import torch.nn as nn\n'), ((4568, 4605), 'os.path.join', 'os.path.join', (['dir_path', 'snapshot_file'], {}), '(dir_path, snapshot_file)\n', (4580, 4605), False, 'import os\n'), ((5449, 5518), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'size': 'img_shape', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(x, size=img_shape, mode='bilinear', align_corners=True)\n", (5462, 5518), True, 'import torch.nn.functional as F\n'), ((5594, 5626), 'numpy.zeros_like', 'np.zeros_like', (['x'], {'dtype': 'np.int32'}), '(x, dtype=np.int32)\n', (5607, 5626), True, 'import numpy as np\n'), ((6146, 6180), 'cv2.cvtColor', 'cv2.cvtColor', (['x', 'cv2.COLOR_RGB2BGR'], {}), '(x, cv2.COLOR_RGB2BGR)\n', (6158, 6180), False, 'import cv2\n'), ((3769, 3791), 'numpy.where', 'np.where', (['(array == key)'], {}), '(array == key)\n', (3777, 3791), True, 'import numpy as np\n'), ((4516, 4542), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (4532, 4542), False, 'import os\n'), ((4959, 4984), 'torch.load', 'torch.load', (['snapshot_file'], {}), '(snapshot_file)\n', (4969, 4984), False, 'import torch\n'), ((6108, 6132), 'os.path.join', 'os.path.join', (['"""a"""', 'fname'], {}), "('a', fname)\n", (6120, 6132), False, 'import os\n'), ((6194, 6209), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6207, 6209), False, 'import torch\n'), ((4663, 4692), 'os.path.exists', 'os.path.exists', (['snapshot_file'], {}), '(snapshot_file)\n', (4677, 4692), False, 'import os\n'), ((5691, 5709), 'numpy.where', 'np.where', (['(x == key)'], {}), '(x == key)\n', (5699, 5709), True, 'import numpy as np\n'), ((4809, 4948), 'utils.color_text', 'color_text', (['"""Please download pretrained model from https://drive.google.com/file/d/1SJJx5-LFG3J3M99TrPMU-z6ZmgWynxo-/view"""', '"""red"""'], {}), "(\n 'Please download pretrained model from https://drive.google.com/file/d/1SJJx5-LFG3J3M99TrPMU-z6ZmgWynxo-/view'\n , 'red')\n", (4819, 4948), False, 'from utils import color_text\n'), ((5531, 5553), 'torch.argmax', 'torch.argmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (5543, 5553), False, 'import torch\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from numpy import array, savetxt
if __name__ == "__main__":
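    # Parse the GATE split file, find completed ROOT outputs, and record the ones
    # that are missing the corresponding GOJA result files.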
missing_goja_results = []
with open(".Gate/main/main.split") as f:
lines = f.readlines()
for line in lines:
            if line.startswith("Root filename:"):
output = line.split(' ')[2]
path = output.replace('\n','') + '.root'
if os.path.isfile(path):
gate_result = int(output.split('output/output')[-1])
goja_result = "./goja/output" + str(gate_result) + "_"
if not os.path.isfile(goja_result + "coincidences") or \
not os.path.isfile(goja_result + "realtime") or \
not os.path.isfile(goja_result + "statistics"):
missing_goja_result = gate_result
missing_goja_results.append(missing_goja_result)
savetxt("./missing_goja_results.txt", array(missing_goja_results).T, fmt="%d")
| [
"os.path.isfile",
"numpy.array"
] | [((866, 893), 'numpy.array', 'array', (['missing_goja_results'], {}), '(missing_goja_results)\n', (871, 893), False, 'from numpy import array, savetxt\n'), ((377, 397), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (391, 397), False, 'import os\n'), ((544, 588), 'os.path.isfile', 'os.path.isfile', (["(goja_result + 'coincidences')"], {}), "(goja_result + 'coincidences')\n", (558, 588), False, 'import os\n'), ((611, 651), 'os.path.isfile', 'os.path.isfile', (["(goja_result + 'realtime')"], {}), "(goja_result + 'realtime')\n", (625, 651), False, 'import os\n'), ((674, 716), 'os.path.isfile', 'os.path.isfile', (["(goja_result + 'statistics')"], {}), "(goja_result + 'statistics')\n", (688, 716), False, 'import os\n')] |
#################################################################################
# Author: <NAME>
# Username:mualcinp
#
# Assignment:Drawing a house
# Purpose:
# Google Doc Link: https://docs.google.com/document/d/18hX6TBTxMAM3jqEVwUzHyhHAaNCBpwCo1KeekIJJCHw/edit#
#
#################################################################################
#
#
#
#################################################################################
import turtle
def create_box():
"""
this function draws the bottom half of a house
"""
square = turtle.Turtle()
square.color("black")
square.pensize(6)
for i in range(2):
square.forward(200)
square.right(90)
square.forward(200)
square.right(90)
square.forward(90)
def create_roof():
"""
creates roof of house
"""
tri = turtle.Turtle()
tri.forward(200)
tri.left(120)
tri.forward(290)
tri.left(120)
tri.forward(300)
def create_window():
"""creates windows"""
fry = turtle.Turtle()
fry.pencolor(100, 40, 69)
fry.penup()
fry.right(90)
fry.forward(30)
fry.pendown()
fry.right(90)
fry.forward(50)
fry.left(90)
fry.forward(50)
fry.left(90)
fry.forward(50)
fry.left(90)
fry.forward(50)
fry.penup()
fry.right(90)
fry.forward(110)
for i in range(2):
fry.pendown()
fry.forward(50)
fry.right(90)
fry.forward(50)
fry.right(90)
fry.penup()
def create_door():
dor = turtle.Turtle()
dor.penup()
dor.forward(30)
dor.right(90)
dor.forward(200)
dor.pendown()
dor.left(180)
dor.forward(90)
dor.right(90)
dor.forward(60)
dor.right(90)
dor.forward(90)
def main():
wn = turtle.Screen()
wn.colormode(255)
wn.bgcolor("pink")
create_box()
create_roof()
create_window()
create_door()
wn.exitonclick()
main()
| [
"turtle.Screen",
"turtle.Turtle"
] | [((554, 569), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (567, 569), False, 'import turtle\n'), ((848, 863), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (861, 863), False, 'import turtle\n'), ((1022, 1037), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (1035, 1037), False, 'import turtle\n'), ((1530, 1545), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (1543, 1545), False, 'import turtle\n'), ((1776, 1791), 'turtle.Screen', 'turtle.Screen', ([], {}), '()\n', (1789, 1791), False, 'import turtle\n')] |
import re
from werkzeug import datastructures
from werkzeug.wrappers import Request, Response, ResponseStream
from datetime import date, datetime, timedelta
unhealth = False
unready_until = datetime.now()
def set_unhealth():
global unhealth
unhealth = True
def set_unready_for_seconds(seconds):
global unready_until
unready_until = datetime.now() + timedelta(0,seconds)
class middleware():
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
global unhealth
global unready_until
request = Request(environ)
print(request.path)
if unhealth:
res = Response(u'Out', mimetype= 'text/plain', status=500)
elif unready_until > datetime.now() and '/ready' in request.path:
res = Response(u'UnReady', mimetype= 'text/plain', status=500)
else:
            return self.app(environ, start_response)
        # serve the intercepted health/readiness response instead of falling through
        return res(environ, start_response)
| [
"werkzeug.wrappers.Response",
"datetime.datetime.now",
"datetime.timedelta",
"werkzeug.wrappers.Request"
] | [((193, 207), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (205, 207), False, 'from datetime import date, datetime, timedelta\n'), ((353, 367), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (365, 367), False, 'from datetime import date, datetime, timedelta\n'), ((370, 391), 'datetime.timedelta', 'timedelta', (['(0)', 'seconds'], {}), '(0, seconds)\n', (379, 391), False, 'from datetime import date, datetime, timedelta\n'), ((587, 603), 'werkzeug.wrappers.Request', 'Request', (['environ'], {}), '(environ)\n', (594, 603), False, 'from werkzeug.wrappers import Request, Response, ResponseStream\n'), ((673, 724), 'werkzeug.wrappers.Response', 'Response', (['u"""Out"""'], {'mimetype': '"""text/plain"""', 'status': '(500)'}), "(u'Out', mimetype='text/plain', status=500)\n", (681, 724), False, 'from werkzeug.wrappers import Request, Response, ResponseStream\n'), ((818, 873), 'werkzeug.wrappers.Response', 'Response', (['u"""UnReady"""'], {'mimetype': '"""text/plain"""', 'status': '(500)'}), "(u'UnReady', mimetype='text/plain', status=500)\n", (826, 873), False, 'from werkzeug.wrappers import Request, Response, ResponseStream\n'), ((755, 769), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (767, 769), False, 'from datetime import date, datetime, timedelta\n')] |
import logging
from pathlib import Path
import re
import typing as ty
from bs4 import BeautifulSoup # type: ignore
import pycountry # type: ignore
from termcolor import colored
from .utils.load_chapters import load_chapter
from .utils.misc import get_package_folders
from .utils.misc import load_common_words
from .utils.misc import load_word_pairs
from .utils.Sentence import Sentence
from .utils.SentenceList import SentenceList
def color_in_dict(
color_sent: Sentence,
match_sent: Sentence,
word_pairs: ty.Dict[str, ty.List[str]],
common_words_color: ty.Set[str],
color_pair: str = "green",
color_frac: str = "yellow",
) -> str:
r"""Use match_sent to color interesting words in color_sent.
Args:
color_sent: Sentence to color.
match_sent: Sentence to match from.
word_pairs: Dict from match to color language.
common_words_color: Common words in the color language.
color_pair: Color to use for translation match.
color_frac: Color to use for fraction match.
"""
logg = logging.getLogger(f"c.{__name__}.color_in_dict")
# logg.setLevel("DEBUG")
logg.debug("Start color_in_dict")
# build set of words in match_sent
words_seen_match = set()
words_seen_color = set()
for word_match in match_sent.norm_tra.split(" "):
# keep only alphabetic chars [^\W\d_]
word_match = re.sub(r"[\W\d_]", "", word_match)
word_match = word_match.lower()
# keep vaguely interesting match words
if len(word_match) < 3:
continue
words_seen_match.add(word_match)
# add the color words if a translation exists
if word_match in word_pairs:
for word_color in word_pairs[word_match]:
# if len(word_color) < 2:
# continue
word_color_clean = re.sub(r"[\W\d_]", "", word_color)
word_color_clean = word_color_clean.lower()
if word_color_clean not in common_words_color:
words_seen_color.add(word_color_clean)
logg.debug(f"words_seen_match: {words_seen_match}")
color_str = ""
for word_color_orig in color_sent.norm_tra.split(" "):
# only keep alpha and compare lowercase
word_color_clean = re.sub(r"[\W\d_]", "", word_color_orig)
word_color_clean = word_color_clean.lower()
# match if the color word is a translation of one of the match words
if word_color_clean in words_seen_color:
matched_pair = True
else:
matched_pair = False
# match using fractions of the l0 and l1 word
# if the beginning of a seen word_color is in the current word_color, color it
matched_frac = False
for word_match in words_seen_match:
frac_len = int(len(word_match) * 0.6)
# keep vaguely interesting matches
if frac_len < 3:
continue
if word_color_clean.startswith(word_match[:frac_len]):
logg.debug(f"word_color_clean: {word_color_clean} matches {word_match}")
matched_frac = True
if matched_pair:
color_str += colored(f"{word_color_orig} ", color_pair)
elif matched_frac:
color_str += colored(f"{word_color_orig} ", color_frac)
else:
color_str += f"{word_color_orig} "
return color_str
def align_chapter_basic(
sent0: SentenceList,
sent1: SentenceList,
inc_len0: ty.List[int],
inc_sca_len1: ty.List[float],
) -> ty.Tuple[SentenceList, ty.List[ty.Tuple[int, int]]]:
r"""MAKEDOC: what is align_chapter_basic doing?
l1 paragraphs have to be after l0
we track the current len for l0 and the scaled len for l1
if the old tot_len0 is smaller than scaled_len1
add the l0 paragraph
(we want to read the l0 paragraph before the l1)
else
add the l1 paragraph
finally
add the remaining paragraphs
p0 p1 link
len0 len1 sc_len1
tot0 tot1 t_sc_1
----------------------------
> 0 a A 0
46 | 47 | 43.08
46 | 47 | 43.08
> 1 - B 0
1 | 306 | 280.47
47 | 353 | 323.55
> 2 bc C 1
857 | 551 | 505.03
904 | 904 | 828.57
> 3 def D 3
1289 | 147 | 134.73
2193 | 1051 | 963.31
> 4 g E 6
723 | 774 | 709.42
2916 | 1825 | 1672.72
> 5 h F 7
95 | 582 | 533.44
3011 | 2407 | 2206.16
> 6 i G 8
406 | 664 | 608.60
3417 | 3071 | 2814.76
> 7 jkl H 9
768 | 76 | 69.66
4185 | 3147 | 2884.42
> 8 - I -
916 | 388 | 355.63
5101 | 3535 | 3240.04
> 9 - J -
1520 | 305 | 279.55
6621 | 3840 | 3519.60
> 10 - K -
820 | 337 | 308.88
7441 | 4177 | 3828.48
> 11 - L -
532 | 152 | 139.32
7973 | 4329 | 3967.79
"""
logg = logging.getLogger(f"c.{__name__}.align_chapter_basic")
# logg.setLevel("DEBUG")
logg.debug("Start align_chapter_basic")
# the first paragraph is always from l0: if in l1 the first paragraph is split in
# smaller sentences, those are not added before the long single paragraph from l0
# the index of the NEXT sentence to add
next0: int = 1
next1: int = 0
# the aligned list of sentences
composed = SentenceList()
composed.append(sent0[0])
# the aligned list of indexes
composed_indexes: ty.List[ty.Tuple[int, int]] = [(0, 0)]
# while there are sentences left on either list
while next0 < len(sent0) and next1 < len(sent1):
recap = f"> next {next0: 3d} {next1: 3d}"
recap += f" curr {next0-1: 3d} {next1-1: 3d}"
logg.debug(recap)
# we know that there are more sentences on BOTH
# the total length if the next was added
curr_tot0 = inc_len0[next0 - 1]
next_tot0 = inc_len0[next0]
next_tot_sca1 = inc_sca_len1[next1]
recap = f" curr_tot0: {curr_tot0}"
recap += f" next_tot0: {next_tot0}"
recap += f" next_tot_sca1: {next_tot_sca1:.2f}"
logg.debug(recap)
# different approach
# if the end of the next l0 is lower than the next l1
# if next_tot0 <= next_tot_sca1 :
# if the current 0 is still lower than the next 1
# you need to add p0
# add the sentence 0
if curr_tot0 <= next_tot_sca1:
composed.append(sent0[next0])
logg.debug(f" add 0: {next0}")
composed_indexes.append((0, next0))
next0 += 1
# add the sentence 1
else:
composed.append(sent1[next1])
logg.debug(f" add 1: {next1}")
composed_indexes.append((1, next1))
next1 += 1
# we check if there are more sentences on l0
if next0 < len(sent0):
logg.debug(" finish 0")
while next0 < len(sent0):
composed.append(sent0[next0])
logg.debug(f" add 0: {next0}")
composed_indexes.append((0, next0))
next0 += 1
# we check if there are more sentences on l1
if next1 < len(sent1):
logg.debug(" finish 1")
while next1 < len(sent1):
composed.append(sent1[next1])
logg.debug(f" add 1: {next1}")
composed_indexes.append((1, next1))
next1 += 1
return composed, composed_indexes
def compute_incremental_len(
sent0: SentenceList, sent1: SentenceList
) -> ty.Tuple[float, ty.List[int], ty.List[float]]:
r"""MAKEDOC: what is compute_incremental_len doing?"""
logg = logging.getLogger(f"c.{__name__}.compute_incremental_len")
# logg.setLevel("DEBUG")
logg.debug("Start compute_incremental_len")
tot_len0: int = 0
tot_len1: int = 0
tot_scaled_len1: float = 0
scaling_factor = sent0.tot_chars / sent1.tot_chars
logg.debug(f"scaling_factor: {scaling_factor}")
logg.debug(f"len(sent0): {len(sent0)}")
logg.debug(f"len(sent1): {len(sent1)}")
inc_len0: ty.List[int] = []
inc_sca_len1: ty.List[float] = []
# for i in range(len(sent0), len(sent1)):
for i in range(len(sent0)):
len0 = sent0[i].len_norm_tra
tot_len0 += len0
inc_len0.append(tot_len0)
for i in range(len(sent1)):
len1 = sent1[i].len_norm_tra
tot_len1 += len1
scaled_len1 = len1 * scaling_factor
tot_scaled_len1 += scaled_len1
inc_sca_len1.append(tot_scaled_len1)
# this fancy log is done by iterating over both lists at once
# recap = f"\n>>>>>> {i}"
# recap += f"\n0 ({len0}): >{sent0[i]}<"
# recap += f"\n1 ({len1}): >{sent1[i]}<"
# recap += f"\nlen0 {len0: 6d}"
# recap += f" | len1 {len1: 6d}"
# recap += f" | sc_len1 {scaled_len1: 10.2f}"
# recap += f"\ntot0 {tot_len0: 6d}"
# recap += f" | tot1 {tot_len1: 6d}"
# recap += f" | t_sc_1 {tot_scaled_len1: 10.2f}"
# logg.debug(recap)
return scaling_factor, inc_len0, inc_sca_len1
def interactive_hints( # noqa: C901 very COMPLEX sorry
sent0: SentenceList,
sent1: SentenceList,
composed_indexes: ty.List[ty.Tuple[int, int]],
hint_dist: int,
word_pairs: ty.Dict[str, ty.List[str]],
common_words1: ty.Set[str],
) -> ty.List[ty.Tuple[int, int]]:
r"""MAKEDOC: what is interactive_hints doing?"""
logg = logging.getLogger(f"c.{__name__}.interactive_hints")
logg.setLevel("DEBUG")
logg.debug("Start interactive_hints")
# for ci in composed_indexes:
# logg.debug(f"{' ' if ci[0] == 1 else ''}{ci[1]: 3d}")
# build the links from s0 to s1, to provide a better prompt
curr_i1: int = len(sent1) - 1
link0to1: ty.List[int] = []
for ci in composed_indexes[::-1]:
if ci[0] == 0:
link0to1.insert(0, curr_i1)
else:
curr_i1 = ci[1]
# for i in range(len(link0to1)):
# logg.debug(f"i: {i} -> {link0to1[i]}")
hint_indexes0 = list(range(3, len(sent0), hint_dist))
logg.debug(f"hint_indexes0: {hint_indexes0}")
logg.debug(f"len(sent0): {len(sent0)}")
logg.debug(f"len(sent1): {len(sent1)}")
# a visible tag
vt = ">>>>>> "
tv = " <<<<<<"
# the hint will link a sentence from l0 to l1
hints: ty.List[ty.Tuple[int, int]] = []
window_size0 = 1
window_size1 = 4
for hi0 in hint_indexes0:
done = False
curr_hi0 = hi0
curr_window_size0 = window_size0
curr_window_size1 = window_size1
while not done:
# extract the central index for l1
i1 = link0to1[curr_hi0]
# line to split the flow
recap = colored(f"{vt*3}", "grey", "on_cyan")
recap += colored(f"curr_hi0: {curr_hi0} -> {i1}", "white", "on_cyan")
recap += colored(f"{tv*3}", "grey", "on_cyan")
logg.debug(recap)
# the l0 sentence to align
# logg.debug(f"sent0[{curr_hi0}]:\n{sent0[curr_hi0]}")
i0_min = max(curr_hi0 - curr_window_size0, 0)
i0_max = min(curr_hi0 + curr_window_size0, len(sent0) - 1)
for hi0_show in range(i0_min, i0_max + 1):
recap = f"\n{vt}"
if hi0_show == curr_hi0:
recap += colored(f"sent0[{hi0_show}]:", "green", attrs=["bold"])
recap += f"{tv}"
else:
recap += f"sent0[{hi0_show}]:"
recap += f"\n{sent0[hi0_show]}"
logg.debug(recap)
# line to split the flow
recap = f"\n{vt*3}"
recap += colored(f"{vt}{tv}", "cyan")
recap += f"{tv*3}"
logg.debug(recap)
# the range of l1 sentences to pick from
i1_min = max(i1 - curr_window_size1, 0)
i1_max = min(i1 + curr_window_size1, len(sent1) - 1)
for hi1 in range(i1_min, i1_max + 1):
recap = f"\n{vt}"
if hi1 == i1:
recap += colored(
f"sent1[{hi1-i1}] ({hi1}):", "green", attrs=["bold"]
)
recap += f"{tv}"
else:
recap += f"sent1[{hi1-i1}] ({hi1}):"
# recap += f"\n{sent1[hi1]}"
color_str = color_in_dict(
sent1[hi1], sent0[curr_hi0], word_pairs, common_words1
)
recap += f"\n{color_str}"
logg.debug(recap)
prompt = "Change the l0 sentence: s0[NUM]."
prompt += "\tChange the window size: w{0|1}[NUM]."
prompt += "\tInsert the correct l1 sentence: [NUM]: "
ri = input(prompt)
# change the l0 sentence
if ri.startswith("s0"):
ri_cmd = ri[2:]
recap = f"Change l0: parsing {ri}"
recap += f" ri_cmd {ri_cmd}"
logg.debug(recap)
try:
delta_hi0 = int(ri_cmd)
except ValueError:
logg.warning(f"{ri_cmd} is not a valid integer.")
delta_hi0 = 0
curr_hi0 += delta_hi0
# validate the value for hi0
curr_hi0 = max(curr_hi0, 0)
curr_hi0 = min(curr_hi0, len(sent0) - 1)
logg.debug(f"Using delta_hi0: {delta_hi0} curr_hi0: {curr_hi0}")
# change the window size
elif ri.startswith("w0") or ri.startswith("w1"):
ri_type = ri[:2]
ri_cmd = ri[2:]
recap = f"Window size: parsing {ri}"
recap += f" ri_type {ri_type} ri_cmd {ri_cmd}"
logg.debug(recap)
try:
delta_winsize = int(ri_cmd)
except ValueError:
logg.warning(f"{ri_cmd} is not a valid integer.")
delta_winsize = 0
if ri_type == "w0":
curr_window_size0 += delta_winsize
# validate win size, must be at least 0 for l0
curr_window_size0 = max(curr_window_size0, 0)
elif ri_type == "w1":
curr_window_size1 += delta_winsize
# validate win size, must be at least 2 for l1
curr_window_size1 = max(curr_window_size1, 2)
# select which sentence to align
else:
ri_cmd = ri
try:
delta_hi1 = int(ri_cmd)
good_hi1 = i1 + delta_hi1
# validate the result, within the list
good_hi1 = max(good_hi1, 0)
good_hi1 = min(good_hi1, len(sent1) - 1)
done = True
except ValueError:
logg.warning(f"{ri_cmd} is not a valid integer.")
logg.debug(f"\n{vt}Adding curr_hi0: {curr_hi0} good_hi1: {good_hi1}")
hints.append((curr_hi0, good_hi1))
# sanity check
for hint in hints:
logg.debug(f"\n{vt*3}{hint}")
logg.debug(f"{vt}sent0[{hint[0]}]:\n{sent0[hint[0]]}")
logg.debug(f"{vt}sent1[{hint[1]}]:\n{sent1[hint[1]]}")
return hints
def align_with_hints(
sent0: SentenceList,
sent1: SentenceList,
inc_len0: ty.List[int],
inc_sca_len1: ty.List[float],
hints: ty.List[ty.Tuple[int, int]],
) -> SentenceList:
r"""MAKEDOC: what is align_with_hints doing?
We basically copy align_chapter_basic but fancier.
"""
logg = logging.getLogger(f"c.{__name__}.align_with_hints")
logg.setLevel("DEBUG")
logg.debug("Start align_with_hints")
# the aligned list of sentences
composed = SentenceList()
# the aligned list of indexes
composed_indexes: ty.List[ty.Tuple[int, int]] = []
# a visible tag
vt = ">>>>>> "
logg.debug(f"len(sent0): {len(sent0)}")
logg.debug(f"len(sent1): {len(sent1)}")
# add artificial first and last hint
hints.insert(0, (0, 0))
hints.append((len(sent0), len(sent1)))
for ih, hint in enumerate(hints[:-1]):
next_hint = hints[ih + 1]
logg.debug(f"\n{vt*3}{hint} - {next_hint}{vt*3}\n")
# how many sentences in each list to use
chunk_len0 = next_hint[0] - hint[0]
chunk_len1 = next_hint[1] - hint[1]
# the index of the NEXT sentence to add, relative to the hint
rel_next0: int = 1
rel_next1: int = 0
# add a Sentence to debug things
# sentence_soup = BeautifulSoup("<p>New hint.</p>", "html.parser")
# sentence_tag = sentence_soup.find_all("p")[0]
# composed.append(Sentence(sentence_tag))
# add the first sentence from l0
composed.append(sent0[hint[0]])
composed_indexes.append((0, hint[0]))
logg.debug(f" add 0 manual: {hint[0]}")
# track the amount of chars in each side relative to the start of the hint
start_curr_tot0 = inc_len0[hint[0]]
start_next_tot_sca1 = inc_sca_len1[hint[1]]
while rel_next0 < chunk_len0 and rel_next1 < chunk_len1:
# the absolute position in the SentenceList
next0 = hint[0] + rel_next0
next1 = hint[1] + rel_next1
recap = f"> rel_next {rel_next0: 2d} {rel_next1: 2d}"
recap += f" - curr {rel_next0-1: 2d} {rel_next1-1: 2d}"
recap += f" - next {next0: 2d} {next1: 2d}"
logg.debug(recap)
# the relative amount of chars in each side
curr_tot0 = inc_len0[next0 - 0] - start_curr_tot0
next_tot_sca1 = inc_sca_len1[next1] - start_next_tot_sca1
recap = f" curr_tot0: {curr_tot0}"
recap += f" next_tot_sca1: {next_tot_sca1:.2f}"
logg.debug(recap)
# if the current 0 is still lower than the next 1
# you need to add p0
# add the sentence 0
if curr_tot0 <= next_tot_sca1:
composed.append(sent0[next0])
logg.debug(f" add 0: {next0}")
composed_indexes.append((0, next0))
rel_next0 += 1
# add the sentence 1
else:
composed.append(sent1[next1])
logg.debug(f" add 1: {next1}")
composed_indexes.append((1, next1))
rel_next1 += 1
recap = f"> rel_next0 {rel_next0: 2d} chunk_len0 {chunk_len0: 2d}"
logg.debug(recap)
if rel_next0 < chunk_len0:
logg.debug(" finish 0")
while rel_next0 < chunk_len0:
next0 = hint[0] + rel_next0
composed.append(sent0[next0])
logg.debug(f" add 0: {next0}")
composed_indexes.append((0, next0))
rel_next0 += 1
recap = f"> rel_next1 {rel_next1: 2d} chunk_len1 {chunk_len1: 2d}"
logg.debug(recap)
if rel_next1 < chunk_len1:
logg.debug(" finish 1")
while rel_next1 < chunk_len1:
next1 = hint[1] + rel_next1
composed.append(sent1[next1])
logg.debug(f" add 1: {next1}")
composed_indexes.append((1, next1))
rel_next1 += 1
# sanity check
for ci in composed_indexes:
logg.debug(f"{' ' if ci[0] == 1 else ''}{ci[1]: 3d}")
return composed
def align_chapter(
sent0: SentenceList,
sent1: SentenceList,
do_interactive: bool,
word_pairs: ty.Dict[str, ty.List[str]],
common_words1: ty.Set[str],
) -> SentenceList:
r"""MAKEDOC: what is align_chapter doing?"""
logg = logging.getLogger(f"c.{__name__}.align_chapter")
logg.setLevel("DEBUG")
logg.debug("Start align_chapter")
#############################################################
# compute the incremental lengths
#############################################################
scaling_factor, inc_len0, inc_sca_len1 = compute_incremental_len(sent0, sent1)
#############################################################
# align the paragraphs with basic method
#############################################################
composed, composed_indexes = align_chapter_basic(
sent0, sent1, inc_len0, inc_sca_len1
)
#############################################################
# use the basic alignment to prompt for hints
#############################################################
hint_dist = 5
if do_interactive:
hints = interactive_hints(
sent0, sent1, composed_indexes, hint_dist, word_pairs, common_words1
)
composed = align_with_hints(sent0, sent1, inc_len0, inc_sca_len1, hints)
return composed
def align_book(
book_folder: Path,
languages: ty.Tuple[str, str],
chapter_templates: ty.Tuple[str, str],
chapter_start_indexes: ty.Tuple[int, int],
tot_chapter_num: int,
author_name_full: str,
book_name_full: str,
do_interactive: bool,
) -> None:
r"""Align every chapter in a book"""
logg = logging.getLogger(f"c.{__name__}.align_book")
logg.setLevel("DEBUG")
logg.info("\nStart align_book")
#############################################################
# build the input/output paths
#############################################################
# get the chapter output folder
composed_folder = book_folder / "composed"
if not composed_folder.exists(): # pragma: nocover
composed_folder.mkdir(parents=True, exist_ok=True)
# get the template path
template_epub_folder = get_package_folders("template_epub")
tmpl_ch_path = template_epub_folder / "tmpl_ch.xhtml"
# load the template
tmpl_ch = tmpl_ch_path.read_text()
# build the composed tag
composed_tag = f"{languages[0]}/{languages[1]}"
# load the word pairs
word_pairs = load_word_pairs(languages)
# load the common words for l1
common_words1 = load_common_words(languages[1], 300)
# for chapter_index in list(range(tot_chapter_num))[10:11]:
for chapter_index in list(range(tot_chapter_num))[:]:
chapter_index0 = chapter_index + chapter_start_indexes[0]
chapter_index1 = chapter_index + chapter_start_indexes[1]
recap = f"\nchapter_index0: {chapter_index0}"
recap += f" chapter_index1: {chapter_index1}"
logg.info(recap)
# get the chapter output path
composed_chapter_name = f"ch_{chapter_index+1:04d}.xhtml"
composed_chapter_path = composed_folder / composed_chapter_name
logg.debug(f"composed_chapter_path: {composed_chapter_path}")
if composed_chapter_path.exists():
logg.info(f"Skipping: {composed_chapter_path}, already processed.")
continue
#############################################################
# load the chapter for l0
#############################################################
lang_folder0 = book_folder / languages[0]
chapter_name0 = chapter_templates[0].format(chapter_index0)
chapter_path0 = lang_folder0 / chapter_name0
lang0 = pycountry.languages.get(name=languages[0])
logg.debug(f"lang0: {lang0}")
lang_alpha2_tag0 = lang0.alpha_2
sent0 = load_chapter(chapter_path0, lang_alpha2_tag0)
recap = f"{chapter_path0=}"
recap += f" len(sent0): {len(sent0)}"
recap += f" sent0.tot_chars: {sent0.tot_chars}"
logg.info(recap)
#############################################################
# load the chapter for l1
#############################################################
lang_folder1 = book_folder / languages[1]
chapter_name1 = chapter_templates[1].format(chapter_index1)
chapter_path1 = lang_folder1 / chapter_name1
lang1 = pycountry.languages.get(name=languages[1])
logg.debug(f"lang1: {lang1}")
lang_alpha2_tag1 = lang1.alpha_2
sent1 = load_chapter(chapter_path1, lang_alpha2_tag1)
recap = f"{chapter_path1=}"
recap += f" len(sent1): {len(sent1)}"
recap += f" sent1.tot_chars: {sent1.tot_chars}"
logg.info(recap)
#############################################################
# align the two SentenceList
#############################################################
composed = align_chapter(
sent0, sent1, do_interactive, word_pairs, common_words1
)
#############################################################
# save the composed chapter
#############################################################
# build the chapter content
composed_chapter_text = ""
for i, sent in enumerate(composed):
composed_chapter_text += f"{sent.orig_str}\n"
recap = f"\n>>>>>> {i}"
recap += f"\n>{sent}<"
logg.debug(recap)
# build a vague chapter title
chapter_title = f"Chapter {chapter_index+1}"
# create the full chapter text
full_ch_text = tmpl_ch.format(
book_title=book_name_full,
book_author=author_name_full,
composed_tag=composed_tag,
chapter_title=chapter_title,
chapter_content=composed_chapter_text,
)
# composed_chapter_path.write_text(full_ch_text)
# build a soup for the chapter
parsed_text = BeautifulSoup(full_ch_text, features="html.parser")
# write the prettified text
composed_chapter_path.write_text(parsed_text.prettify())
| [
"logging.getLogger",
"termcolor.colored",
"bs4.BeautifulSoup",
"pycountry.languages.get",
"re.sub"
] | [((1068, 1116), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.color_in_dict"""'], {}), "(f'c.{__name__}.color_in_dict')\n", (1085, 1116), False, 'import logging\n'), ((5509, 5563), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.align_chapter_basic"""'], {}), "(f'c.{__name__}.align_chapter_basic')\n", (5526, 5563), False, 'import logging\n'), ((8217, 8275), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.compute_incremental_len"""'], {}), "(f'c.{__name__}.compute_incremental_len')\n", (8234, 8275), False, 'import logging\n'), ((9978, 10030), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.interactive_hints"""'], {}), "(f'c.{__name__}.interactive_hints')\n", (9995, 10030), False, 'import logging\n'), ((16188, 16239), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.align_with_hints"""'], {}), "(f'c.{__name__}.align_with_hints')\n", (16205, 16239), False, 'import logging\n'), ((20296, 20344), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.align_chapter"""'], {}), "(f'c.{__name__}.align_chapter')\n", (20313, 20344), False, 'import logging\n'), ((21736, 21781), 'logging.getLogger', 'logging.getLogger', (['f"""c.{__name__}.align_book"""'], {}), "(f'c.{__name__}.align_book')\n", (21753, 21781), False, 'import logging\n'), ((1404, 1439), 're.sub', 're.sub', (['"""[\\\\W\\\\d_]"""', '""""""', 'word_match'], {}), "('[\\\\W\\\\d_]', '', word_match)\n", (1410, 1439), False, 'import re\n'), ((2304, 2344), 're.sub', 're.sub', (['"""[\\\\W\\\\d_]"""', '""""""', 'word_color_orig'], {}), "('[\\\\W\\\\d_]', '', word_color_orig)\n", (2310, 2344), False, 'import re\n'), ((23815, 23857), 'pycountry.languages.get', 'pycountry.languages.get', ([], {'name': 'languages[0]'}), '(name=languages[0])\n', (23838, 23857), False, 'import pycountry\n'), ((24526, 24568), 'pycountry.languages.get', 'pycountry.languages.get', ([], {'name': 'languages[1]'}), '(name=languages[1])\n', (24549, 24568), False, 'import pycountry\n'), ((26130, 26181), 'bs4.BeautifulSoup', 'BeautifulSoup', (['full_ch_text'], {'features': '"""html.parser"""'}), "(full_ch_text, features='html.parser')\n", (26143, 26181), False, 'from bs4 import BeautifulSoup\n'), ((3211, 3253), 'termcolor.colored', 'colored', (['f"""{word_color_orig} """', 'color_pair'], {}), "(f'{word_color_orig} ', color_pair)\n", (3218, 3253), False, 'from termcolor import colored\n'), ((11283, 11322), 'termcolor.colored', 'colored', (['f"""{vt * 3}"""', '"""grey"""', '"""on_cyan"""'], {}), "(f'{vt * 3}', 'grey', 'on_cyan')\n", (11290, 11322), False, 'from termcolor import colored\n'), ((11342, 11402), 'termcolor.colored', 'colored', (['f"""curr_hi0: {curr_hi0} -> {i1}"""', '"""white"""', '"""on_cyan"""'], {}), "(f'curr_hi0: {curr_hi0} -> {i1}', 'white', 'on_cyan')\n", (11349, 11402), False, 'from termcolor import colored\n'), ((11424, 11463), 'termcolor.colored', 'colored', (['f"""{tv * 3}"""', '"""grey"""', '"""on_cyan"""'], {}), "(f'{tv * 3}', 'grey', 'on_cyan')\n", (11431, 11463), False, 'from termcolor import colored\n'), ((12226, 12254), 'termcolor.colored', 'colored', (['f"""{vt}{tv}"""', '"""cyan"""'], {}), "(f'{vt}{tv}', 'cyan')\n", (12233, 12254), False, 'from termcolor import colored\n'), ((1875, 1910), 're.sub', 're.sub', (['"""[\\\\W\\\\d_]"""', '""""""', 'word_color'], {}), "('[\\\\W\\\\d_]', '', word_color)\n", (1881, 1910), False, 'import re\n'), ((3306, 3348), 'termcolor.colored', 'colored', (['f"""{word_color_orig} """', 'color_frac'], {}), "(f'{word_color_orig} ', 
color_frac)\n", (3313, 3348), False, 'from termcolor import colored\n'), ((11887, 11942), 'termcolor.colored', 'colored', (['f"""sent0[{hi0_show}]:"""', '"""green"""'], {'attrs': "['bold']"}), "(f'sent0[{hi0_show}]:', 'green', attrs=['bold'])\n", (11894, 11942), False, 'from termcolor import colored\n'), ((12630, 12693), 'termcolor.colored', 'colored', (['f"""sent1[{hi1 - i1}] ({hi1}):"""', '"""green"""'], {'attrs': "['bold']"}), "(f'sent1[{hi1 - i1}] ({hi1}):', 'green', attrs=['bold'])\n", (12637, 12693), False, 'from termcolor import colored\n')] |
from comparator.comparator import Comparator
from gate.and_gate import And
from gate.or_gate import Or
class HazardDetectionUnit:
def __init__(self, rm_id_ex, rt_id_ex, rt_if_id, rs_if_id, name="HazardDetectionUnit"):
self.memread_id_ex = rm_id_ex
self.rt_id_ex = rt_id_ex
self.rt_if_id = rt_if_id
self.rs_if_id = rs_if_id
self.name = name
self.output = None
self.build()
def build(self):
self.output = And((
self.memread_id_ex,
Or((
Comparator((self.rt_id_ex, self.rs_if_id), 5),
Comparator((self.rt_id_ex, self.rt_if_id), 5)
))
))
def logic(self, depend=[]):
if self in depend:
return self.output
self.output.logic(depend + [self])
return self.output.output
def get_output(self):
return self.output.output
| [
"comparator.comparator.Comparator"
] | [((551, 596), 'comparator.comparator.Comparator', 'Comparator', (['(self.rt_id_ex, self.rs_if_id)', '(5)'], {}), '((self.rt_id_ex, self.rs_if_id), 5)\n', (561, 596), False, 'from comparator.comparator import Comparator\n'), ((614, 659), 'comparator.comparator.Comparator', 'Comparator', (['(self.rt_id_ex, self.rt_if_id)', '(5)'], {}), '((self.rt_id_ex, self.rt_if_id), 5)\n', (624, 659), False, 'from comparator.comparator import Comparator\n')] |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Join source and target text files generated for MCD splits to TSV."""
from absl import app
from absl import flags
from language.nqg.tasks import tsv_utils
from tensorflow.io import gfile
FLAGS = flags.FLAGS
flags.DEFINE_string("source", "", "Source txt file.")
flags.DEFINE_string("target", "", "Target txt file.")
flags.DEFINE_string("output", "", "Joined tsv file.")
def read_examples(source_file, target_file):
"""Return list of (source, target) tuples."""
sources = []
targets = []
with gfile.GFile(source_file, "r") as txt_file:
for line in txt_file:
sources.append(line.rstrip("\n"))
with gfile.GFile(target_file, "r") as txt_file:
for line in txt_file:
targets.append(line.rstrip("\n"))
examples = list(zip(sources, targets))
return examples
def main(unused_argv):
examples = read_examples(FLAGS.source, FLAGS.target)
tsv_utils.write_tsv(examples, FLAGS.output)
if __name__ == "__main__":
app.run(main)
| [
"absl.flags.DEFINE_string",
"tensorflow.io.gfile.GFile",
"absl.app.run",
"language.nqg.tasks.tsv_utils.write_tsv"
] | [((830, 883), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""source"""', '""""""', '"""Source txt file."""'], {}), "('source', '', 'Source txt file.')\n", (849, 883), False, 'from absl import flags\n'), ((885, 938), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""target"""', '""""""', '"""Target txt file."""'], {}), "('target', '', 'Target txt file.')\n", (904, 938), False, 'from absl import flags\n'), ((940, 993), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""output"""', '""""""', '"""Joined tsv file."""'], {}), "('output', '', 'Joined tsv file.')\n", (959, 993), False, 'from absl import flags\n'), ((1495, 1538), 'language.nqg.tasks.tsv_utils.write_tsv', 'tsv_utils.write_tsv', (['examples', 'FLAGS.output'], {}), '(examples, FLAGS.output)\n', (1514, 1538), False, 'from language.nqg.tasks import tsv_utils\n'), ((1570, 1583), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (1577, 1583), False, 'from absl import app\n'), ((1127, 1156), 'tensorflow.io.gfile.GFile', 'gfile.GFile', (['source_file', '"""r"""'], {}), "(source_file, 'r')\n", (1138, 1156), False, 'from tensorflow.io import gfile\n'), ((1244, 1273), 'tensorflow.io.gfile.GFile', 'gfile.GFile', (['target_file', '"""r"""'], {}), "(target_file, 'r')\n", (1255, 1273), False, 'from tensorflow.io import gfile\n')] |
import pytest
from gemd.entity.source.performed_source import PerformedSource
def test_construction():
"""Make sure we can and cannot construct correct and incorrect performed sources."""
PerformedSource()
PerformedSource(performed_by="<NAME>")
PerformedSource(performed_date="1898-07-01")
PerformedSource(performed_date="1898-07-01", performed_by="<NAME>")
with pytest.raises(TypeError):
PerformedSource(performed_date=1234)
with pytest.raises(TypeError):
PerformedSource(performed_by={"first_name": "Marie", "last_name": "Curie"})
| [
"gemd.entity.source.performed_source.PerformedSource",
"pytest.raises"
] | [((199, 216), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {}), '()\n', (214, 216), False, 'from gemd.entity.source.performed_source import PerformedSource\n'), ((221, 259), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {'performed_by': '"""<NAME>"""'}), "(performed_by='<NAME>')\n", (236, 259), False, 'from gemd.entity.source.performed_source import PerformedSource\n'), ((264, 308), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {'performed_date': '"""1898-07-01"""'}), "(performed_date='1898-07-01')\n", (279, 308), False, 'from gemd.entity.source.performed_source import PerformedSource\n'), ((313, 380), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {'performed_date': '"""1898-07-01"""', 'performed_by': '"""<NAME>"""'}), "(performed_date='1898-07-01', performed_by='<NAME>')\n", (328, 380), False, 'from gemd.entity.source.performed_source import PerformedSource\n'), ((391, 415), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (404, 415), False, 'import pytest\n'), ((425, 461), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {'performed_date': '(1234)'}), '(performed_date=1234)\n', (440, 461), False, 'from gemd.entity.source.performed_source import PerformedSource\n'), ((472, 496), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (485, 496), False, 'import pytest\n'), ((506, 581), 'gemd.entity.source.performed_source.PerformedSource', 'PerformedSource', ([], {'performed_by': "{'first_name': 'Marie', 'last_name': 'Curie'}"}), "(performed_by={'first_name': 'Marie', 'last_name': 'Curie'})\n", (521, 581), False, 'from gemd.entity.source.performed_source import PerformedSource\n')] |
import pandas as pd
import re
def read_data(file_path):
data=pd.read_excel(file_path)
new_data=data.iloc[:, 0:2]
new_data.columns=[["Question", "Answers"]]
new_data=new_data[:-1]
return new_data
origin = read_data("../DATA/all.xlsx")
year = "[0-9][0-9][0-9][0-9]"
y = [re.search(year, i[0]) != None for i in origin["Question"].values]
other = [not i for i in y]
df_y = origin[y]
df_other = origin[other]
df_y.to_csv ('../DATA/year_related.csv',index=False)
df_other.to_csv ('../DATA/other.csv',index=False) | [
"re.search",
"pandas.read_excel"
] | [((69, 93), 'pandas.read_excel', 'pd.read_excel', (['file_path'], {}), '(file_path)\n', (82, 93), True, 'import pandas as pd\n'), ((304, 325), 're.search', 're.search', (['year', 'i[0]'], {}), '(year, i[0])\n', (313, 325), False, 'import re\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2018-05-16 13:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('library', '0023_auto_20180516_2054'),
]
operations = [
migrations.AddField(
model_name='ordering',
name='date_due_to_returned',
field=models.DateField(default='2018-5-16'),
),
migrations.AddField(
model_name='ordering',
name='date_issued',
field=models.DateField(default='2018-5-16'),
),
migrations.AddField(
model_name='ordering',
name='date_returned',
field=models.DateField(null=True),
),
]
| [
"django.db.models.DateField"
] | [((415, 452), 'django.db.models.DateField', 'models.DateField', ([], {'default': '"""2018-5-16"""'}), "(default='2018-5-16')\n", (431, 452), False, 'from django.db import migrations, models\n'), ((579, 616), 'django.db.models.DateField', 'models.DateField', ([], {'default': '"""2018-5-16"""'}), "(default='2018-5-16')\n", (595, 616), False, 'from django.db import migrations, models\n'), ((745, 772), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)'}), '(null=True)\n', (761, 772), False, 'from django.db import migrations, models\n')] |
import codecs
import json
import util
TG_BASE_URL = 'http://tangorin.com'
TG_KANJI_PATH = '/kanji'
class Tangorin:
# for each kanji on the list, loads word examples from tangorin website
# and returns a map {kanji : {reading : [words sorted by appearance]}}
@staticmethod
def get_kanji_to_words(cache_file, kanjis, log):
log.info('loading tangorin words')
kanji_to_words = {}
try:
with codecs.open(cache_file, 'rb', 'utf-8') as f:
kanji_to_words = json.load(f)
log.info('loaded from cache file %s', cache_file)
except (FileNotFoundError, IOError):
pass
i = 1
for kanji in kanjis:
if kanji not in kanji_to_words:
kanji_to_words[kanji] = Tangorin._get_words_for_kanji(kanji, log)
log.debug('[%d/%d] %s: %s', i, len(kanjis), kanji, str(kanji_to_words[kanji]))
i += 1
try:
with codecs.open(cache_file, 'wb', 'utf-8') as f:
json.dump(kanji_to_words, f, ensure_ascii=False)
log.info('saved cache file %s', cache_file)
except IOError:
pass
return kanji_to_words
# given one kanji, uses tangorin to find example words
# and returns a map {kanji : {reading : [words sorted by appearance]}}
# a word is {'word': <in kanji>, 'furigana': <kana>, 'meaning': <meaning>}
@staticmethod
def _get_words_for_kanji(kanji, log):
try:
doc = util.get_html(TG_BASE_URL + TG_KANJI_PATH + '/' + kanji, log)
if doc is not None:
kanji_words = {}
for tr in doc.xpath('//table[@class="k-compounds-table"]//tr'):
reading, reading_words = Tangorin._process_reading_row(tr)
if not reading:
log.debug('invalid kanji: %s', kanji)
return None
kanji_words[reading] = reading_words
return kanji_words
        except Exception:
log.exception('failed to load words for kanji %s', kanji)
return None
# processes one row from the tangorin page main table
@staticmethod
def _process_reading_row(tr):
words = []
reading = str(tr.xpath('.//td[1]/span[@class="kana"]/b')[0].text).strip()
for a in tr.xpath('.//td[2]/a'):
word = ''.join(a.xpath('.//text()')).strip()
furigana = a.xpath('(./following-sibling::span[@class="kana"])[1]')[0].text.strip()
meaning = a.xpath('(./following-sibling::span[@class="romaji"])[1]')[0].tail.replace(u'】', '').strip()
words.append({'word': word, 'furigana': furigana, 'meaning': meaning})
return reading, words
| [
"json.load",
"util.get_html",
"codecs.open",
"json.dump"
] | [((1513, 1574), 'util.get_html', 'util.get_html', (["(TG_BASE_URL + TG_KANJI_PATH + '/' + kanji)", 'log'], {}), "(TG_BASE_URL + TG_KANJI_PATH + '/' + kanji, log)\n", (1526, 1574), False, 'import util\n'), ((442, 480), 'codecs.open', 'codecs.open', (['cache_file', '"""rb"""', '"""utf-8"""'], {}), "(cache_file, 'rb', 'utf-8')\n", (453, 480), False, 'import codecs\n'), ((520, 532), 'json.load', 'json.load', (['f'], {}), '(f)\n', (529, 532), False, 'import json\n'), ((971, 1009), 'codecs.open', 'codecs.open', (['cache_file', '"""wb"""', '"""utf-8"""'], {}), "(cache_file, 'wb', 'utf-8')\n", (982, 1009), False, 'import codecs\n'), ((1032, 1080), 'json.dump', 'json.dump', (['kanji_to_words', 'f'], {'ensure_ascii': '(False)'}), '(kanji_to_words, f, ensure_ascii=False)\n', (1041, 1080), False, 'import json\n')] |
import warnings
import pytest
from nipype.external.version import LooseVersion as Vendored
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
from distutils.version import LooseVersion as Original
except ImportError:
        pytest.skip("distutils not available", allow_module_level=True)
@pytest.mark.parametrize("v1, v2", [("0.0.0", "0.0.0"), ("0.0.0", "1.0.0")])
def test_LooseVersion_compat(v1, v2):
vend1, vend2 = Vendored(v1), Vendored(v2)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
orig1, orig2 = Original(v1), Original(v2)
assert vend1 == orig1
assert orig1 == vend1
assert vend2 == orig2
assert orig2 == vend2
assert (vend1 == orig2) == (v1 == v2)
assert (vend1 < orig2) == (v1 < v2)
assert (vend1 > orig2) == (v1 > v2)
assert (vend1 <= orig2) == (v1 <= v2)
assert (vend1 >= orig2) == (v1 >= v2)
assert (orig1 == vend2) == (v1 == v2)
assert (orig1 < vend2) == (v1 < v2)
assert (orig1 > vend2) == (v1 > v2)
assert (orig1 <= vend2) == (v1 <= v2)
assert (orig1 >= vend2) == (v1 >= v2)
| [
"warnings.catch_warnings",
"nipype.external.version.LooseVersion",
"pytest.mark.parametrize",
"warnings.simplefilter",
"distutils.version.LooseVersion",
"pytest.skip"
] | [((283, 358), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""v1, v2"""', "[('0.0.0', '0.0.0'), ('0.0.0', '1.0.0')]"], {}), "('v1, v2', [('0.0.0', '0.0.0'), ('0.0.0', '1.0.0')])\n", (306, 358), False, 'import pytest\n'), ((99, 124), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (122, 124), False, 'import warnings\n'), ((130, 161), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (151, 161), False, 'import warnings\n'), ((416, 428), 'nipype.external.version.LooseVersion', 'Vendored', (['v1'], {}), '(v1)\n', (424, 428), True, 'from nipype.external.version import LooseVersion as Vendored\n'), ((430, 442), 'nipype.external.version.LooseVersion', 'Vendored', (['v2'], {}), '(v2)\n', (438, 442), True, 'from nipype.external.version import LooseVersion as Vendored\n'), ((452, 477), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (475, 477), False, 'import warnings\n'), ((487, 518), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (508, 518), False, 'import warnings\n'), ((266, 279), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (277, 279), False, 'import pytest\n'), ((542, 554), 'distutils.version.LooseVersion', 'Original', (['v1'], {}), '(v1)\n', (550, 554), True, 'from distutils.version import LooseVersion as Original\n'), ((556, 568), 'distutils.version.LooseVersion', 'Original', (['v2'], {}), '(v2)\n', (564, 568), True, 'from distutils.version import LooseVersion as Original\n')] |
#!/bin/python
import sys
import json
import os.path
import argparse
from google.cloud import storage
# Synopsis:
# Copies workflow outputs needed for downstream processing to a destination bucket.
#
# Author: <NAME> (<EMAIL>)
# Output file definitions: FILENAME_MAP[workflow_name][output_variable] = destination_file_suffix
FILENAME_MAP = {
'GATKSVPipelinePhase1': {
'filtered_depth_vcf': 'filtered_depth_vcf.vcf.gz',
'filtered_pesr_vcf': 'filtered_pesr_vcf.vcf.gz',
'cutoffs': 'rf_cutoffs.tsv',
'outlier_samples_excluded_file': 'outliers.list',
'batch_samples_postOutlierExclusion_file': 'filtered_samples.list',
'ped_file_postOutlierExclusion': 'filtered.ped',
'merged_SR': 'SR.txt.gz',
'merged_SR_index': 'SR.txt.gz.tbi',
'merged_PE': 'PE.txt.gz',
'merged_PE_index': 'PE.txt.gz.tbi',
'merged_bincov': 'RD.txt.gz',
'merged_bincov_index': 'RD.txt.gz.tbi',
'median_cov': 'median_cov.bed'
},
'MergeCohortVcfs': {
'cohort_pesr_vcf': 'cohort_pesr.vcf.gz',
'cohort_depth_vcf': 'cohort_depth.vcf.gz',
'cohort_combined': 'cohort.combined.bed',
'lookup': 'master_cluster_dups.bed',
'cohort_sort': 'cohort.sort.bed',
'cluster_combined': 'cluster.combined.bed'
},
'Module04': {
'sr_bothside_pass': 'sr_bothside_pass.txt',
'sr_background_fail': 'sr_background_fail.txt',
'trained_PE_metrics': 'trained_PE_metrics.txt',
'trained_SR_metrics': 'trained_SR_metrics.txt',
'trained_genotype_pesr_pesr_sepcutoff': 'trained_genotype_pesr_pesr_sepcutoff.txt',
'trained_genotype_pesr_depth_sepcutoff': 'trained_genotype_pesr_depth_sepcutoff.txt',
'trained_genotype_depth_pesr_sepcutoff': 'trained_genotype_depth_pesr_sepcutoff.txt',
'trained_genotype_depth_depth_sepcutoff': 'trained_genotype_depth_depth_sepcutoff.txt',
'genotyped_depth_vcf': 'genotyped.depth.vcf.gz',
'genotyped_depth_vcf_index': 'genotyped.depth.vcf.gz.tbi',
'genotyped_pesr_vcf': 'genotyped.pesr.vcf.gz',
'genotyped_pesr_vcf_index': 'genotyped.pesr.vcf.gz.tbi',
'regeno_depth': 'regeno_depth.bed'
}
}
def get_uris(metadata, output_name, dest_prefix):
if 'workflowName' not in metadata:
raise ValueError("Workflow name not found. Check metadata file.")
workflow = metadata['workflowName']
if workflow not in FILENAME_MAP:
raise ValueError(f"Unknown workflow {workflow}")
outputs = metadata['outputs']
for var in FILENAME_MAP[workflow]:
key = f"{workflow}.{var}"
if outputs[key] is not None:
source_uri = outputs[key]
dest_filename = f"{output_name}.{FILENAME_MAP[workflow][var]}"
dest_uri = os.path.join(dest_prefix, dest_filename)
yield (source_uri, dest_uri)
def copy_blob(storage_client, bucket_name, blob_name, destination_bucket_name, destination_blob_name):
source_bucket = storage_client.bucket(bucket_name)
source_blob = source_bucket.blob(blob_name)
destination_bucket = storage_client.bucket(destination_bucket_name)
destination_blob = destination_bucket.blob(destination_blob_name)
source_uri = f"gs://{source_bucket.name}/{source_blob.name}"
destination_uri = f"gs://{destination_bucket.name}/{destination_blob_name}"
if destination_blob.exists():
sys.stderr.write(
f"Target {destination_uri} exists, cautiously refusing to overwrite. Aborting...\n")
sys.exit(1)
sys.stderr.write(f"Copying {source_uri}...")
(token, bytes_rewritten, total_bytes) = destination_blob.rewrite(source=source_blob)
while token is not None:
(token, bytes_rewritten, total_bytes) = destination_blob.rewrite(
source=source_blob, token=token)
size_kb = int(bytes_rewritten / 1024)
sys.stderr.write(f"done ({size_kb} KB)\n")
def copy_uri(source_uri, dest_uri, storage_client):
def _parse_uri(uri):
tokens = uri.split('/')
bucket_name = tokens[2]
bucket_object = '/'.join(tokens[3:])
return bucket_name, bucket_object
source_bucket_name, source_blob_name = _parse_uri(source_uri)
dest_bucket_name, dest_blob_name = _parse_uri(dest_uri)
copy_blob(storage_client, source_bucket_name,
source_blob_name, dest_bucket_name, dest_blob_name)
# Main function
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--name", help="Batch or cohort name", required=True)
parser.add_argument(
"--metadata", help="Workflow metadata JSON file", required=True)
parser.add_argument(
"--dest", help="Destination GCS URI (e.g. \"gs://my-bucket/output\")", required=True)
args = parser.parse_args()
metadata = json.load(open(args.metadata, 'r'))
output_uris = get_uris(metadata, args.name, args.dest)
client = storage.Client()
for source_uri, dest_uri in output_uris:
copy_uri(source_uri, dest_uri, client)
if __name__ == "__main__":
main()
| [
"sys.stderr.write",
"google.cloud.storage.Client",
"argparse.ArgumentParser",
"sys.exit"
] | [((3573, 3617), 'sys.stderr.write', 'sys.stderr.write', (['f"""Copying {source_uri}..."""'], {}), "(f'Copying {source_uri}...')\n", (3589, 3617), False, 'import sys\n'), ((3901, 3943), 'sys.stderr.write', 'sys.stderr.write', (['f"""done ({size_kb} KB)\n"""'], {}), "(f'done ({size_kb} KB)\\n')\n", (3917, 3943), False, 'import sys\n'), ((4459, 4484), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4482, 4484), False, 'import argparse\n'), ((4934, 4950), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (4948, 4950), False, 'from google.cloud import storage\n'), ((3434, 3545), 'sys.stderr.write', 'sys.stderr.write', (['f"""Target {destination_uri} exists, cautiously refusing to overwrite. Aborting...\n"""'], {}), "(\n f'Target {destination_uri} exists, cautiously refusing to overwrite. Aborting...\\n'\n )\n", (3450, 3545), False, 'import sys\n'), ((3557, 3568), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3565, 3568), False, 'import sys\n')] |
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
def generate_vgg16():
"""
搭建VGG16网络结构
:return: VGG16网络
"""
input_shape = (224, 224, 3)
model = Sequential([
Conv2D(64, (3, 3), input_shape=input_shape, padding='same', activation='relu'),
Conv2D(64, (3, 3), padding='same', activation='relu'),
MaxPooling2D(pool_size=(2,2), strides=(2,2)),
Conv2D(128, (3, 3), padding='same', activation='relu'),
Conv2D(128, (3, 3), padding='same', activation='relu'),
MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
Conv2D(256, (3, 3), padding='same', activation='relu'),
Conv2D(256, (3, 3), padding='same', activation='relu'),
Conv2D(256, (3, 3), padding='same', activation='relu'),
MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
Conv2D(512, (3, 3), padding='same', activation='relu'),
Conv2D(512, (3, 3), padding='same', activation='relu'),
Conv2D(512, (3, 3), padding='same', activation='relu'),
MaxPooling2D(pool_size=(2, 2), strides=(2, 2)),
Conv2D(512, (3, 3), padding='same', activation='relu'),
Conv2D(512, (3, 3), padding='same', activation='relu'),
Conv2D(512, (3, 3), padding='same', activation='relu'),
MaxPooling2D(pool_size=(2,2), strides=(2,2)),
Flatten(),
Dense(4096, activation='relu'),
Dense(4096, activation='relu'),
Dense(1000, activation='softmax')
])
return model
if __name__ == '__main__':
model = generate_vgg16()
model.summary() | [
"keras.layers.Dense",
"keras.layers.Conv2D",
"keras.layers.Flatten",
"keras.layers.MaxPooling2D"
] | [((288, 366), 'keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'input_shape': 'input_shape', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (3, 3), input_shape=input_shape, padding='same', activation='relu')\n", (294, 366), False, 'from keras.layers import Conv2D\n'), ((376, 429), 'keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, (3, 3), padding='same', activation='relu')\n", (382, 429), False, 'from keras.layers import Conv2D\n'), ((439, 485), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (451, 485), False, 'from keras.layers import MaxPooling2D\n'), ((493, 547), 'keras.layers.Conv2D', 'Conv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(128, (3, 3), padding='same', activation='relu')\n", (499, 547), False, 'from keras.layers import Conv2D\n'), ((557, 611), 'keras.layers.Conv2D', 'Conv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(128, (3, 3), padding='same', activation='relu')\n", (563, 611), False, 'from keras.layers import Conv2D\n'), ((621, 667), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (633, 667), False, 'from keras.layers import MaxPooling2D\n'), ((677, 731), 'keras.layers.Conv2D', 'Conv2D', (['(256)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (3, 3), padding='same', activation='relu')\n", (683, 731), False, 'from keras.layers import Conv2D\n'), ((741, 795), 'keras.layers.Conv2D', 'Conv2D', (['(256)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (3, 3), padding='same', activation='relu')\n", (747, 795), False, 'from keras.layers import Conv2D\n'), ((805, 859), 'keras.layers.Conv2D', 'Conv2D', (['(256)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(256, (3, 3), padding='same', activation='relu')\n", (811, 859), False, 'from keras.layers import Conv2D\n'), ((869, 915), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (881, 915), False, 'from keras.layers import MaxPooling2D\n'), ((925, 979), 'keras.layers.Conv2D', 'Conv2D', (['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (931, 979), False, 'from keras.layers import Conv2D\n'), ((989, 1043), 'keras.layers.Conv2D', 'Conv2D', (['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (995, 1043), False, 'from keras.layers import Conv2D\n'), ((1053, 1107), 'keras.layers.Conv2D', 'Conv2D', (['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (1059, 1107), False, 'from keras.layers import Conv2D\n'), ((1117, 1163), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (1129, 1163), False, 'from keras.layers import MaxPooling2D\n'), ((1173, 1227), 'keras.layers.Conv2D', 'Conv2D', (['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (1179, 1227), False, 'from keras.layers import Conv2D\n'), ((1237, 1291), 'keras.layers.Conv2D', 'Conv2D', 
(['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (1243, 1291), False, 'from keras.layers import Conv2D\n'), ((1301, 1355), 'keras.layers.Conv2D', 'Conv2D', (['(512)', '(3, 3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(512, (3, 3), padding='same', activation='relu')\n", (1307, 1355), False, 'from keras.layers import Conv2D\n'), ((1365, 1411), 'keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'strides': '(2, 2)'}), '(pool_size=(2, 2), strides=(2, 2))\n', (1377, 1411), False, 'from keras.layers import MaxPooling2D\n'), ((1419, 1428), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1426, 1428), False, 'from keras.layers import Dense, Flatten\n'), ((1438, 1468), 'keras.layers.Dense', 'Dense', (['(4096)'], {'activation': '"""relu"""'}), "(4096, activation='relu')\n", (1443, 1468), False, 'from keras.layers import Dense, Flatten\n'), ((1478, 1508), 'keras.layers.Dense', 'Dense', (['(4096)'], {'activation': '"""relu"""'}), "(4096, activation='relu')\n", (1483, 1508), False, 'from keras.layers import Dense, Flatten\n'), ((1518, 1551), 'keras.layers.Dense', 'Dense', (['(1000)'], {'activation': '"""softmax"""'}), "(1000, activation='softmax')\n", (1523, 1551), False, 'from keras.layers import Dense, Flatten\n')] |
from datetime import datetime
from dateutil.parser import parse as date_parser
from easytrakt.models import Episode
from easytrakt.models import Movie
from easytrakt.models import MovieWatchlist
from easytrakt.models import Season
from easytrakt.models import Settings
from easytrakt.models import Show
from easytrakt.models import ShowWatchlist
def test_search_show(client):
result = client.search("Dexter")
assert result
show = result[0]
assert isinstance(show, Show)
assert show.title == "Dexter"
assert show.ids.trakt == 1396
assert show.id == 1396
assert show.images.poster.full.startswith("http")
assert show.images.banner.full.startswith("http")
def test_search_movie(client):
result = client.movies("The Big Lebowski")
assert result
movie = result[0]
assert isinstance(movie, Movie)
assert movie.title == "The Big Lebowski"
assert movie.year == 1998
assert movie.id == 84
assert isinstance(movie.released, datetime)
assert movie.released == date_parser("1998-03-06")
assert movie.images.poster.full.startswith("http")
assert movie.images.fanart.full.startswith("http")
def test_season(client):
show = Show(client, 1396)
seasons = show.seasons
assert seasons
season = seasons[1]
assert isinstance(season, Season)
assert season.number == 1
assert season.trakt == 3999
assert season.aired_episodes == 12
assert season.images.poster.full.startswith("http")
def test_episodes(client):
show = Show(client, 1396)
episodes = show.seasons[1].episodes
episode = episodes[1]
assert isinstance(episode, Episode)
assert episode.season == 1
assert episode.number == 2
assert episode.trakt == 74162
assert episode.title == "Crocodile"
assert isinstance(episode.first_aired, datetime)
assert episode.first_aired == date_parser("2006-10-08T04:00:00.000Z")
assert episode.images.screenshot.full.startswith("http")
def test_settings(client):
settings = Settings(client)
assert settings.user.username == "lad1337"
def test_movie_watchlist(client):
watchlist = MovieWatchlist(client)
assert watchlist.items
assert isinstance(watchlist.items[0], Movie)
def test_show_watchlist(client):
watchlist = ShowWatchlist(client)
assert watchlist.items
assert isinstance(watchlist.items[0], Show)
| [
"dateutil.parser.parse",
"easytrakt.models.ShowWatchlist",
"easytrakt.models.Settings",
"easytrakt.models.MovieWatchlist",
"easytrakt.models.Show"
] | [((1202, 1220), 'easytrakt.models.Show', 'Show', (['client', '(1396)'], {}), '(client, 1396)\n', (1206, 1220), False, 'from easytrakt.models import Show\n'), ((1526, 1544), 'easytrakt.models.Show', 'Show', (['client', '(1396)'], {}), '(client, 1396)\n', (1530, 1544), False, 'from easytrakt.models import Show\n'), ((2019, 2035), 'easytrakt.models.Settings', 'Settings', (['client'], {}), '(client)\n', (2027, 2035), False, 'from easytrakt.models import Settings\n'), ((2135, 2157), 'easytrakt.models.MovieWatchlist', 'MovieWatchlist', (['client'], {}), '(client)\n', (2149, 2157), False, 'from easytrakt.models import MovieWatchlist\n'), ((2285, 2306), 'easytrakt.models.ShowWatchlist', 'ShowWatchlist', (['client'], {}), '(client)\n', (2298, 2306), False, 'from easytrakt.models import ShowWatchlist\n'), ((1028, 1053), 'dateutil.parser.parse', 'date_parser', (['"""1998-03-06"""'], {}), "('1998-03-06')\n", (1039, 1053), True, 'from dateutil.parser import parse as date_parser\n'), ((1874, 1913), 'dateutil.parser.parse', 'date_parser', (['"""2006-10-08T04:00:00.000Z"""'], {}), "('2006-10-08T04:00:00.000Z')\n", (1885, 1913), True, 'from dateutil.parser import parse as date_parser\n')] |
from pydatastructs.miscellaneous_data_structures.sparse_table import SparseTable
from pydatastructs.miscellaneous_data_structures.segment_tree import ArraySegmentTree
from pydatastructs.utils.misc_util import (
_check_range_query_inputs, Backend,
raise_if_backend_is_not_python)
__all__ = [
'RangeQueryStatic',
'RangeQueryDynamic'
]
class RangeQueryStatic:
"""
Produces results for range queries of different kinds
by using specified data structure.
Parameters
==========
array: OneDimensionalArray
The array for which we need to answer queries.
All the elements should be of type `int`.
func: callable
The function to be used for generating results
of a query. It should accept only one tuple as an
argument. The size of the tuple will be either 1 or 2
and any one of the elements can be `None`. You can treat
`None` in whatever way you want according to the query
you are performing. For example, in case of range minimum
queries, `None` can be treated as infinity. We provide
the following which can be used as an argument value for this
parameter,
`minimum` - For range minimum queries.
`greatest_common_divisor` - For queries finding greatest
common divisor of a range.
`summation` - For range sum queries.
data_structure: str
The data structure to be used for performing
range queries.
Currently the following data structures are supported,
'array' -> Array data structure.
Each query takes O(end - start) time asymptotically.
'sparse_table' -> Sparse table data structure.
Each query takes O(log(end - start)) time
asymptotically.
By default, 'sparse_table'.
backend: pydatastructs.Backend
The backend to be used.
Optional, by default, the best available
backend is used.
Examples
========
>>> from pydatastructs import OneDimensionalArray, RangeQueryStatic
>>> from pydatastructs import minimum
>>> arr = OneDimensionalArray(int, [4, 6, 1, 5, 7, 3])
>>> RMQ = RangeQueryStatic(arr, minimum)
>>> RMQ.query(3, 4)
5
>>> RMQ.query(0, 4)
1
>>> RMQ.query(0, 2)
1
Note
====
The array once passed as an input should not be modified
once the `RangeQueryStatic` constructor is called. If you
have updated the array, then you need to create a new
`RangeQueryStatic` object with this updated array.
"""
def __new__(cls, array, func, data_structure='sparse_table', **kwargs):
raise_if_backend_is_not_python(
cls, kwargs.get('backend', Backend.PYTHON))
if len(array) == 0:
raise ValueError("Input %s array is empty."%(array))
if data_structure == 'array':
return RangeQueryStaticArray(array, func)
elif data_structure == 'sparse_table':
return RangeQueryStaticSparseTable(array, func)
else:
raise NotImplementedError(
"Currently %s data structure for range "
"query without updates isn't implemented yet."
% (data_structure))
@classmethod
def methods(cls):
return ['query']
    def query(self, start, end):
        """
        Method to perform a query over the range [start, end], with `end` inclusive.
Parameters
==========
start: int
The starting index of the range.
end: int
The ending index of the range.
"""
raise NotImplementedError(
"This is an abstract method.")
class RangeQueryStaticSparseTable(RangeQueryStatic):
__slots__ = ["sparse_table", "bounds"]
def __new__(cls, array, func, **kwargs):
raise_if_backend_is_not_python(
cls, kwargs.get('backend', Backend.PYTHON))
obj = object.__new__(cls)
sparse_table = SparseTable(array, func)
obj.bounds = (0, len(array))
obj.sparse_table = sparse_table
return obj
@classmethod
def methods(cls):
return ['query']
def query(self, start, end):
_check_range_query_inputs((start, end + 1), self.bounds)
return self.sparse_table.query(start, end)
class RangeQueryStaticArray(RangeQueryStatic):
__slots__ = ["array", "func"]
def __new__(cls, array, func):
obj = object.__new__(cls)
obj.array = array
obj.func = func
return obj
@classmethod
def methods(cls):
return ['query']
def query(self, start, end):
_check_range_query_inputs((start, end + 1), (0, len(self.array)))
rsize = end - start + 1
if rsize == 1:
return self.func((self.array[start],))
query_ans = self.func((self.array[start], self.array[start + 1]))
for i in range(start + 2, end + 1):
query_ans = self.func((query_ans, self.array[i]))
return query_ans
class RangeQueryDynamic:
"""
Produces results for range queries of different kinds
while allowing point updates by using specified
data structure.
Parameters
==========
array: OneDimensionalArray
The array for which we need to answer queries.
All the elements should be of type `int`.
func: callable
The function to be used for generating results
of a query. It should accept only one tuple as an
argument. The size of the tuple will be either 1 or 2
and any one of the elements can be `None`. You can treat
`None` in whatever way you want according to the query
you are performing. For example, in case of range minimum
queries, `None` can be treated as infinity. We provide
the following which can be used as an argument value for this
parameter,
`minimum` - For range minimum queries.
`greatest_common_divisor` - For queries finding greatest
common divisor of a range.
`summation` - For range sum queries.
data_structure: str
The data structure to be used for performing
range queries.
Currently the following data structures are supported,
'array' -> Array data structure.
Each query takes O(end - start) time asymptotically.
Each point update takes O(1) time asymptotically.
'segment_tree' -> Segment tree data structure.
Each query takes O(log(end - start)) time
asymptotically.
Each point update takes O(log(len(array))) time
asymptotically.
By default, 'segment_tree'.
backend: pydatastructs.Backend
The backend to be used.
Optional, by default, the best available
backend is used.
Examples
========
>>> from pydatastructs import OneDimensionalArray, RangeQueryDynamic
>>> from pydatastructs import minimum
>>> arr = OneDimensionalArray(int, [4, 6, 1, 5, 7, 3])
>>> RMQ = RangeQueryDynamic(arr, minimum)
>>> RMQ.query(3, 4)
5
>>> RMQ.query(0, 4)
1
>>> RMQ.query(0, 2)
1
>>> RMQ.update(2, 0)
>>> RMQ.query(0, 2)
0
Note
====
The array once passed as an input should be modified
    only with the `RangeQueryDynamic.update` method.
"""
def __new__(cls, array, func, data_structure='segment_tree', **kwargs):
raise_if_backend_is_not_python(
cls, kwargs.get('backend', Backend.PYTHON))
if len(array) == 0:
raise ValueError("Input %s array is empty."%(array))
if data_structure == 'array':
return RangeQueryDynamicArray(array, func, **kwargs)
elif data_structure == 'segment_tree':
return RangeQueryDynamicSegmentTree(array, func, **kwargs)
else:
raise NotImplementedError(
"Currently %s data structure for range "
"query with point updates isn't implemented yet."
% (data_structure))
@classmethod
def methods(cls):
return ['query', 'update']
    def query(self, start, end):
        """
        Method to perform a query over the range [start, end], with `end` inclusive.
Parameters
==========
start: int
The starting index of the range.
end: int
The ending index of the range.
"""
raise NotImplementedError(
"This is an abstract method.")
def update(self, index, value):
"""
Method to update index with a new value.
Parameters
==========
index: int
            The index to be updated.
value: int
The new value.
"""
raise NotImplementedError(
"This is an abstract method.")
class RangeQueryDynamicArray(RangeQueryDynamic):
__slots__ = ["range_query_static"]
def __new__(cls, array, func, **kwargs):
raise_if_backend_is_not_python(
cls, kwargs.get('backend', Backend.PYTHON))
obj = object.__new__(cls)
obj.range_query_static = RangeQueryStaticArray(array, func)
return obj
@classmethod
def methods(cls):
return ['query', 'update']
def query(self, start, end):
return self.range_query_static.query(start, end)
def update(self, index, value):
self.range_query_static.array[index] = value
class RangeQueryDynamicSegmentTree(RangeQueryDynamic):
__slots__ = ["segment_tree", "bounds"]
def __new__(cls, array, func, **kwargs):
raise_if_backend_is_not_python(
cls, kwargs.pop('backend', Backend.PYTHON))
obj = object.__new__(cls)
obj.segment_tree = ArraySegmentTree(array, func, dimensions=1)
obj.segment_tree.build()
obj.bounds = (0, len(array))
return obj
@classmethod
def methods(cls):
return ['query', 'update']
def query(self, start, end):
_check_range_query_inputs((start, end + 1), self.bounds)
return self.segment_tree.query(start, end)
def update(self, index, value):
self.segment_tree.update(index, value)
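# Illustrative sketch (comments only, mirroring the class docstrings above;
# the top-level import of `summation` is an assumption): range sums with
# point updates through the default segment-tree backend.
#     >>> from pydatastructs import OneDimensionalArray, RangeQueryDynamic, summation
#     >>> arr = OneDimensionalArray(int, [1, 2, 3, 4])
#     >>> rq = RangeQueryDynamic(arr, summation)
#     >>> rq.query(0, 3)   # 1 + 2 + 3 + 4, `end` is inclusive
#     10
#     >>> rq.update(1, 5)
#     >>> rq.query(0, 3)
#     13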
| [
"pydatastructs.utils.misc_util._check_range_query_inputs",
"pydatastructs.miscellaneous_data_structures.segment_tree.ArraySegmentTree",
"pydatastructs.miscellaneous_data_structures.sparse_table.SparseTable"
] | [((4022, 4046), 'pydatastructs.miscellaneous_data_structures.sparse_table.SparseTable', 'SparseTable', (['array', 'func'], {}), '(array, func)\n', (4033, 4046), False, 'from pydatastructs.miscellaneous_data_structures.sparse_table import SparseTable\n'), ((4250, 4306), 'pydatastructs.utils.misc_util._check_range_query_inputs', '_check_range_query_inputs', (['(start, end + 1)', 'self.bounds'], {}), '((start, end + 1), self.bounds)\n', (4275, 4306), False, 'from pydatastructs.utils.misc_util import _check_range_query_inputs, Backend, raise_if_backend_is_not_python\n'), ((9864, 9907), 'pydatastructs.miscellaneous_data_structures.segment_tree.ArraySegmentTree', 'ArraySegmentTree', (['array', 'func'], {'dimensions': '(1)'}), '(array, func, dimensions=1)\n', (9880, 9907), False, 'from pydatastructs.miscellaneous_data_structures.segment_tree import ArraySegmentTree\n'), ((10114, 10170), 'pydatastructs.utils.misc_util._check_range_query_inputs', '_check_range_query_inputs', (['(start, end + 1)', 'self.bounds'], {}), '((start, end + 1), self.bounds)\n', (10139, 10170), False, 'from pydatastructs.utils.misc_util import _check_range_query_inputs, Backend, raise_if_backend_is_not_python\n')] |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm, animation
def animate3d(C,dt,phi,frames=100,interval=50,cmin=0,cmax=1):
"""Creates an animation of a time series of scalar values on a 2D grid.
The scalar field is plotted as a surface mesh."""
    step = max(len(C)//frames, 1)  # guard against a zero step when len(C) < frames
C = np.vstack([C[0],C[1::step]])
n = int(np.sqrt(len(C[0])))
(x,y) = np.meshgrid(range(n),range(n))
def init():
surf = ax.plot_surface(x,y,C[0].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text.set_text('t={0:.4f}'.format(0))
return (surf,time_text)
def animate(i):
#ax.collections.clear()
surf = ax.plot_surface(x,y,C[i].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text.set_text('t={0:.4f}'.format(i*dt*step))
return (surf,time_text)
fig, ax = plt.subplots(subplot_kw={'projection': '3d'})
surf = ax.plot_surface(x,y,C[0].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text = ax.text2D(0.75, 0.9, 't=0', transform=ax.transAxes, fontsize=12)
phi_text = ax.text2D(0.1, 0.9, r'$\phi=$%.2f'%phi, transform=ax.transAxes, fontsize=12)
fig.colorbar(surf, label=r'$C/C_0$')
ax.axes.set_zlim3d(bottom=cmin, top=1)
ax.set_title('Concentration Profile')
ax.set_xlabel('x')
ax.set_ylabel('y')
ani = animation.FuncAnimation(fig, animate, init_func=init,
frames=len(C), interval=interval, blit=True)
plt.close()
return ani
def animate2d(C,dt,phi,cmin=0,cmax=1,frames=100,interval=50):
"""Creates an animation of a time series of scalar values on a 2D grid.
The scalar field is plotted as a heatmap."""
    step = max(len(C)//frames, 1)  # guard against a zero step when len(C) < frames
C = np.vstack([C[0],C[1::step]])
n = int(np.sqrt(len(C[0])))
def init():
surf = ax.imshow(C[0].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text.set_text('t={0:.4f}'.format(0))
return (surf,time_text)
def animate(i):
surf = ax.imshow(C[i].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text.set_text('t={0:.4f}'.format(i*dt*step))
return (surf,time_text)
fig, ax = plt.subplots()
surf = ax.imshow(C[0].reshape(n,n),cmap=cm.coolwarm,vmin=cmin,vmax=cmax)
time_text = ax.text(0.65, 0.9, 't=0', transform=ax.transAxes, color='w', fontsize=14)
phi_text = ax.text(0.05, 0.9, r'$\phi=$%.2f'%phi, transform=ax.transAxes, color='w', fontsize=14)
fig.colorbar(surf, label=r'$C/C_0$')
ax.set_title('Concentration Profile')
ax.set_xlabel('x')
ax.set_ylabel('y')
ani = animation.FuncAnimation(fig, animate, init_func=init,
frames=len(C), interval=interval, blit=True)
plt.close()
return ani
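# Illustrative usage (not part of the module; the data layout is an
# assumption inferred from the reshape calls above): C is a sequence of
# flattened n*n fields, one row per time step of length dt, and phi is the
# value shown in the plot annotation.
#     ani = animate2d(C, dt=1e-3, phi=0.4, cmin=0.0, cmax=1.0)
#     ani.save('concentration.gif', writer='pillow')
# In a notebook, `from IPython.display import HTML; HTML(ani.to_jshtml())`
# renders either animation inline.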
| [
"matplotlib.pyplot.subplots",
"numpy.vstack",
"matplotlib.pyplot.close"
] | [((310, 339), 'numpy.vstack', 'np.vstack', (['[C[0], C[1::step]]'], {}), '([C[0], C[1::step]])\n', (319, 339), True, 'import numpy as np\n'), ((816, 861), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'subplot_kw': "{'projection': '3d'}"}), "(subplot_kw={'projection': '3d'})\n", (828, 861), True, 'import matplotlib.pyplot as plt\n'), ((1420, 1431), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1429, 1431), True, 'import matplotlib.pyplot as plt\n'), ((1670, 1699), 'numpy.vstack', 'np.vstack', (['[C[0], C[1::step]]'], {}), '([C[0], C[1::step]])\n', (1679, 1699), True, 'import numpy as np\n'), ((2111, 2125), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2123, 2125), True, 'import matplotlib.pyplot as plt\n'), ((2672, 2683), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2681, 2683), True, 'import matplotlib.pyplot as plt\n')] |
# Copyright (c) 2021. Universidad de Pinar del Rio
# This file is part of SCEIBA (sceiba.cu).
# SCEIBA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
#
from flask_login import current_user
from invenio_access import Permission, action_factory
from invenio_access.utils import get_identity
# create the actions
notification_admin_actions = action_factory('notification_admin_actions')
ObjectNotificationViewed = action_factory('notification_viewed_actions', parameter=True)
notification_viewed_actions = ObjectNotificationViewed(None)
def notification_viewed_permission_factory(obj):
try:
permission = Permission(notification_admin_actions)
current_identity = get_identity(current_user)
if permission.allows(current_identity):
return permission
    except Exception:
        # Fall back to the per-object permission check below.
        pass
return Permission(ObjectNotificationViewed(obj['id']))
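# Illustrative usage (the 'id' key comes from the factory above; the rest is
# a plain flask-principal style check, not an invenio-specific API):
#     perm = notification_viewed_permission_factory({'id': 42})
#     if perm.can():
#         ...  # current identity may view notification 42 or holds the admin action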
| [
"invenio_access.action_factory",
"invenio_access.Permission",
"invenio_access.utils.get_identity"
] | [((426, 470), 'invenio_access.action_factory', 'action_factory', (['"""notification_admin_actions"""'], {}), "('notification_admin_actions')\n", (440, 470), False, 'from invenio_access import Permission, action_factory\n'), ((499, 560), 'invenio_access.action_factory', 'action_factory', (['"""notification_viewed_actions"""'], {'parameter': '(True)'}), "('notification_viewed_actions', parameter=True)\n", (513, 560), False, 'from invenio_access import Permission, action_factory\n'), ((703, 741), 'invenio_access.Permission', 'Permission', (['notification_admin_actions'], {}), '(notification_admin_actions)\n', (713, 741), False, 'from invenio_access import Permission, action_factory\n'), ((769, 795), 'invenio_access.utils.get_identity', 'get_identity', (['current_user'], {}), '(current_user)\n', (781, 795), False, 'from invenio_access.utils import get_identity\n')] |
#!/usr/bin/env python
"""
Function eee for Efficient/Sequential Elementary Effects, an extension of
Morris' method of Elementary Effects by Cuntz, Mai et al. (Water Res Research,
2015).
This function was written by <NAME> while at Institut National de
Recherche pour l'Agriculture, l'Alimentation et l'Environnement (INRAE), Nancy,
France.
Copyright (c) 2017-2021 <NAME> - mc (at) macu (dot) de
Released under the MIT License; see LICENSE file for details.
.. moduleauthor:: <NAME>
The following functions are provided
.. autosummary::
see
eee
History
* Written Nov 2017 by <NAME> (mc (at) macu (dot) de)
* Added `weight` option, Jan 2018, <NAME>
* Added `plotfile` and made docstring sphinx compatible option,
Jan 2018, <NAME>
* x0 optional; added verbose keyword; distinguish iterable and array_like
parameter types, Jan 2020, <NAME>
* Rename ntsteps to nsteps to be consistent with screening/ee; and check if
logfile is string rather thean checking for file handle, Feb 2020,
<NAME>
* Sample not only from uniform distribution but allow all distributions of
scipy.stats, Mar 2020, <NAME>
* Use pyjams package, Oct 2021, <NAME>
* Make flake8 compatible, Oct 2021, <NAME>
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import scipy.optimize as opt
from pyjams import screening, tee
from pyjams.functions import cost_square, curvature, logistic_offset_p
from pyjams.functions import dlogistic, d2logistic
__all__ = ['eee', 'see']
def _cleanup(lfile, pool, ipool):
''' Helper function closing logfile and pool if open. '''
try:
lfile.close()
except:
pass
if pool is None:
ipool.close()
# Python 3
# def eee(func, lb, ub,
# x0=None, mask=None, ntfirst=5, ntlast=5, nsteps=6, weight=False,
# seed=None, processes=1, pool=None,
# verbose=0, logfile=None, plotfile=None):
def eee(func, *args, **kwargs):
"""
Parameter screening using Efficient/Sequential Elementary Effects of
Cuntz, Mai et al. (Water Res Research, 2015).
Note, the input function must be callable as `func(x)`.
Parameters
----------
func : callable
Python function callable as `func(x)` with `x` the function parameters.
lb : array_like
Lower bounds of parameters or lower fraction of percent point function
`ppf` if distribution given.
Be aware that the percent point function `ppf` of most continuous
distributions is infinite at 0.
ub : array_like
Upper bounds of parameters or upper fraction of percent point function
`ppf` if distribution given.
Be aware that the percent point function `ppf` of most continuous
distributions is infinite at 1.
x0 : array_like, optional
Parameter values used with `mask==0`.
mask : array_like, optional
Include (1,True) or exclude (0,False) parameters in screening
(default: include all parameters).
ntfirst : int, optional
Number of trajectories in first step of sequential elementary effects
(default: 5).
ntlast : int, optional
Number of trajectories in last step of sequential elementary effects
(default: 5).
nsteps : int, optional
Number of intervals for each trajectory (default: 6)
dist : list, optional
List of None or scipy.stats distribution objects for each factor
having the method `ppf`, Percent Point Function (Inverse of CDF)
(default: None)
If None, the uniform distribution will be sampled from lower bound `lb`
to upper bound `ub`.
If `dist` is scipy.stats.uniform, the `ppf` will be sampled from the
lower fraction given in `lb` and the upper fraction in `ub`. The
sampling interval is then given by the parameters `loc=lower` and
`scale=interval=upper-lower` in distparam. This means
`dist=None`, `lb=a`, `ub=b`
corresponds to
`lb=0`, `ub=1`, `dist=scipy.stats.uniform`, `distparam=[a,b-a]`
distparam : list, optional
List with tuples with parameters as required for `dist`
(default: (0,1)).
All distributions of scipy.stats have location and scale parameters, at
least. `loc` and `scale` are implemented as keyword arguments in
scipy.stats. Other parameters such as the shape parameter of the gamma
distribution must hence be given first, e.g. `(shape,loc,scale)` for
the gamma distribution.
`distparam` is ignored if `dist` is None.
The percent point function `ppf` is called like this:
`dist(*distparam).ppf(x)`
weight : boolean, optional
If False, use the arithmetic mean mu* for each parameter if function
has multiple outputs, such as the mean mu* of each time step of a time
series (default).
If True, return weighted mean `mu*`, weighted by `sd`.
seed : int or array_like
Seed for numpy's random number generator (default: None).
    processes : int, optional
The number of processes to use to evaluate objective function and
constraints (default: 1).
    pool : `schwimmbad` pool object, optional
        Generic map function used from module `schwimmbad
        <https://schwimmbad.readthedocs.io/en/latest/>`_, which provides
        serial, multiprocessor, and MPI mapping functions (default: None).
The pool is chosen with:
schwimmbad.choose_pool(mpi=True/False, processes=processes).
The pool will be chosen automatically if `pool` is None.
MPI pools can only be opened and closed once. If you want to use
screening several times in one program, then you have to choose the
`pool`, pass it to `eee`, and later close the `pool` in the calling
program.
verbose : int, optional
Print progress report during execution if `verbose>0` (default: 0).
logfile : File handle or logfile name
File name of possible log file
(default: None = no logfile will be written).
plotfile : Plot file name
File name of possible plot file with fit of logistic function to `mu*`
of first trajectories (default: None = no plot produced).
Returns
-------
mask : ndarray
(len(lb),) mask with 1=informative and 0=uninformative model
parameters, to be used with '&' on input mask.
See Also
--------
:func:`~pyeee.screening.screening` : Elementary Effects, same as
:func:`~pyeee.screening.ee` : Elementary Effects
Examples
--------
>>> from functools import partial
>>> import numpy as np
>>> import scipy.stats as stats
>>> from pyjams.functions import G
>>> from partialwrap import function_wrapper
>>> seed = 1234
>>> np.random.seed(seed=seed)
>>> func = G
>>> npars = 6
>>> params = [78., 12., 0.5, 2., 97., 33.] # G
>>> arg = [params]
>>> kwarg = {}
>>> obj = partial(function_wrapper, func, arg, kwarg)
>>> lb = np.zeros(npars)
>>> ub = np.ones(npars)
>>> ntfirst = 10
>>> ntlast = 5
>>> nsteps = 6
>>> out = eee(obj, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
... nsteps=nsteps, processes=4)
>>> print(np.where(out)[0] + 1)
[2 3 4 6]
"""
    # Get keyword arguments. This allows mixing keyword arguments of eee and
# keyword arguments to be passed to optimiser. The mixed syntax eee(func,
# *args, logfile=None, **kwargs) is only working in Python 3 so need a
# workaround in Python 2, i.e. read all as keyword args and take out the
# keywords for eee.
x0 = kwargs.pop('x0', None)
mask = kwargs.pop('mask', None)
ntfirst = kwargs.pop('ntfirst', 5)
ntlast = kwargs.pop('ntlast', 5)
nsteps = kwargs.pop('nsteps', 6)
dist = kwargs.pop('dist', None)
distparam = kwargs.pop('distparam', None)
weight = kwargs.pop('weight', False)
seed = kwargs.pop('seed', None)
processes = kwargs.pop('processes', 1)
pool = kwargs.pop('pool', None)
verbose = kwargs.pop('verbose', 0)
logfile = kwargs.pop('logfile', None)
plotfile = kwargs.pop('plotfile', None)
# Set up MPI if available
try:
from mpi4py import MPI
comm = MPI.COMM_WORLD
csize = comm.Get_size()
crank = comm.Get_rank()
except ImportError:
comm = None
csize = 1
crank = 0
# Logfile
if crank == 0:
if logfile is None:
lfile = None
else:
# haswrite = getattr(logfile, "write", None)
# if haswrite is None:
# lfile = open(logfile, "w")
# else:
# if not callable(haswrite):
# lfile = logfile
# else:
# raise InputError('x0 must be given if mask is set')
if isinstance(logfile, str):
lfile = open(logfile, "w")
else:
lfile = logfile
else:
lfile = None
# Start
if crank == 0:
if (verbose > 0):
tee('Start screening in eee.', file=lfile)
else:
if lfile is not None:
print('Start screening in eee.', file=lfile)
# Check
assert len(args) == 2, 'lb and ub must be given as arguments.'
lb, ub = args[:2]
npara = len(lb)
if crank == 0:
assert len(lb) == len(ub), (
'Lower and upper bounds have not the same size.')
lb = np.array(lb)
ub = np.array(ub)
# mask
if mask is None:
ix0 = np.ones(npara)
imask = np.ones(npara, dtype=bool)
iimask = np.arange(npara, dtype=int)
nmask = npara
else:
if x0 is None:
raise IOError('x0 must be given if mask is set')
ix0 = np.copy(x0)
imask = np.copy(mask)
iimask = np.where(imask)[0]
nmask = iimask.size
if nmask == 0:
if crank == 0:
if (verbose > 0):
tee('\nAll parameters masked, nothing to do.', file=lfile)
tee('Finished screening in eee.', file=lfile)
else:
if lfile is not None:
print('\nAll parameters masked, nothing to do.',
file=lfile)
print('Finished screening in eee.', file=lfile)
if logfile is not None:
lfile.close()
# Return all true
if mask is None:
return np.ones(len(lb), dtype=bool)
else:
return mask
if crank == 0:
if (verbose > 0):
tee('\nScreen unmasked parameters: ', nmask, iimask+1, file=lfile)
else:
if lfile is not None:
print('\nScreen unmasked parameters: ', nmask, iimask+1,
file=lfile)
# Seed random number generator
if seed is not None:
# same on all ranks because trajectories are sampled on all ranks
np.random.seed(seed=seed)
# Choose the right mapping function: single, multiprocessor or mpi
if pool is None:
import schwimmbad
ipool = schwimmbad.choose_pool(mpi=False if csize == 1 else True,
processes=processes)
else:
ipool = pool
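    # Reuse pattern from the docstring (variable names are illustrative): when
    # calling eee several times, create the pool once in the caller and close
    # it there, because MPI pools can only be opened and closed once:
    #     pool = schwimmbad.choose_pool(mpi=False, processes=4)
    #     out1 = eee(func1, lb, ub, pool=pool)
    #     out2 = eee(func2, lb, ub, pool=pool)
    #     pool.close()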
# Step 1 of Cuntz et al. (2015) - first screening with ntfirst
# trajectories, calc mu*
res = screening( # returns (npara,3) with mu*, mu, std if nt>1
func, lb, ub, ntfirst,
x0=ix0, mask=imask,
nsteps=nsteps, ntotal=10*ntfirst,
dist=dist, distparam=distparam,
processes=processes, pool=ipool,
verbose=0)
if res.ndim > 2:
if weight:
mustar = (np.sum(res[:, iimask, 2] * res[:, iimask, 0], axis=0) /
np.sum(res[:, iimask, 2], axis=0))
else:
mustar = np.mean(res[:, iimask, 0], axis=0)
else:
mustar = res[iimask, 0]
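    # For multi-output models, res has shape (nout, npara, 3) and mu* is
    # reduced over the outputs: a plain mean, or, if weight=True, the
    # sd-weighted mean  mu* = sum_k(sd_k * mu*_k) / sum_k(sd_k).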
# Step 2 of Cuntz et al. (2015) - calc eta*
mumax = np.amax(mustar)
xx = np.arange(nmask) / float(nmask-1)
iisort = np.argsort(mustar)
yy = mustar[iisort] / mumax
if crank == 0:
if (verbose > 0):
tee('\nSorted means of absolute elementary effects (mu*): ',
mustar[iisort], file=lfile)
tee('Normalised mu* = eta*: ', yy, file=lfile)
tee('Corresponding to parameters: ', iimask[iisort] + 1,
file=lfile)
else:
if lfile is not None:
print('\nSorted means of absolute elementary effects (mu*): ',
mustar[iisort], file=lfile)
print('Normalised mu* = eta*: ', yy, file=lfile)
print('Corresponding to parameters: ', iimask[iisort] + 1,
file=lfile)
# Step 3.1 of Cuntz et al. (2015) - fit logistic function
# [y-max, steepness, inflection point, offset]
    pini = np.array([yy.max(), (yy.max() - yy.min()) / xx.max(),
0.5 * xx.max(), yy.min()])
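    # Note (assumption about pyjams' parameterisation, consistent with the
    # comment above): logistic_offset_p(x, p) evaluates
    #     L(x) = p[0] / (1 + exp(-p[1] * (x - p[2]))) + p[3]
    # so pini = [plateau, steepness, inflection point, offset].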
plogistic, f, d = opt.fmin_l_bfgs_b(cost_square,
pini,
args=(logistic_offset_p, xx, yy),
approx_grad=1,
bounds=[(None, None), (None, None),
(None, None), (None, None)],
iprint=0,
disp=0)
# Step 3.2 of Cuntz et al. (2015) - determine point of steepest curvature
# -> eta*_thresh
def mcurvature(*args, **kwargs):
return -curvature(*args, **kwargs)
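    # opt.brent minimises, so minimising the negated curvature locates the
    # point of steepest curvature x_K of the fitted logistic within [xx[0], xx[-1]].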
x_K = opt.brent(mcurvature, # x_K
args=(dlogistic, d2logistic, plogistic[0], plogistic[1],
plogistic[2]),
brack=(xx[0], xx[-1]))
curvmax = logistic_offset_p(x_K, plogistic) # L(x_K)
# eta*_thresh = L(x_K) # in range 0-1
eta_thresh = curvmax
if (curvmax > 0.2) or (x_K < xx[0]):
        x_K = xx[0]  # x_K = min(x)
eta_thresh = np.min(mustar) / mumax # eta*_thresh = min(mu*)/max(mu*)
mu_thresh = eta_thresh * mumax # mu*_thresh = eta*_thresh*max(mu*)
if crank == 0:
if (verbose > 0):
tee('\nThreshold eta*_thresh, mu*_tresh: ', eta_thresh, mu_thresh,
file=lfile)
tee('L(x_K): ', logistic_offset_p(x_K, plogistic), file=lfile)
tee('p_opt of L: ', plogistic, file=lfile)
else:
if lfile is not None:
print('\nThreshold eta*_thresh, mu*_tresh: ', eta_thresh,
mu_thresh, file=lfile)
print('L(x_K): ', logistic_offset_p(x_K, plogistic),
file=lfile)
print('p_opt of L: ', plogistic, file=lfile)
# Plot first mu* of elementary effects with logistic function and threshold
if crank == 0:
if plotfile is not None:
try:
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
mpl.rcParams['font.family'] = 'sans-serif'
mpl.rcParams['font.sans-serif'] = 'Arial' # Arial, Verdana
mpl.rc('savefig', dpi=300, format='png')
if npara > 99:
mpl.rc('font', size=8)
else:
mpl.rc('font', size=11)
fig = plt.figure()
sub = plt.subplot(111)
xx = xx
yy = mustar[iisort]
line1 = sub.plot(xx, yy, 'ro')
nn = 1000
xx2 = (xx.min() + np.arange(nn) / float(nn - 1) *
(xx.max() - xx.min()))
yy2 = logistic_offset_p(xx2, plogistic) * mumax
line2 = sub.plot(xx2, yy2, 'b-')
xmin, xmax = sub.get_xlim()
line3 = sub.plot([xmin, xmax], [mu_thresh, mu_thresh], 'k-')
if npara > 99:
xnames = ['{:03d}'.format(i) for i in iimask[iisort] + 1]
else:
xnames = ['{:02d}'.format(i) for i in iimask[iisort] + 1]
plt.setp(sub, xticks=xx, xticklabels=xnames)
plt.setp(sub, xlabel='Parameter')
plt.setp(sub, ylabel=r'$\mu*$')
fig.savefig(plotfile, transparent=False, bbox_inches='tight',
pad_inches=0.035)
plt.close(fig)
except ImportError:
pass
# Step 4 of Cuntz et al. (2015) - Discard from next steps all parameters
# with eta* >= eta*_tresh, i.e. mu* >= mu*_tresh
imask[iimask] = imask[iimask] & (mustar < mu_thresh)
if np.all(~imask):
if crank == 0:
if (verbose > 0):
tee('\nNo more parameters to screen, i.e. all '
'(unmasked) parameters are informative.', file=lfile)
tee('Finished screening in eee.', file=lfile)
else:
if lfile is not None:
print('\nNo more parameters to screen, i.e. all '
'(unmasked) parameters are informative.', file=lfile)
print('Finished screening in eee.', file=lfile)
_cleanup(lfile, pool, ipool)
# Return all true
if mask is None:
return np.ones(len(lb), dtype=bool)
else:
return mask
# Step 5 and 6 of Cuntz et al. (2015)
# - Next trajectory with remaining parameters.
# Discard all parameters with |EE| >= mu*_tresh
# - Repeat until no |EE| >= mu*_tresh
niter = 1
donext = True
while donext:
if crank == 0:
if (verbose > 0):
tee('\nParameters remaining for iteration ', niter, ':',
np.where(imask)[0] + 1, file=lfile)
else:
if lfile is not None:
print('\nParameters remaining for iteration ', niter, ':',
np.where(imask)[0] + 1, file=lfile)
iimask = np.where(imask)[0]
res = screening( # returns EE(parameters) if nt=1
func, lb, ub, 1,
x0=ix0, mask=imask,
nsteps=nsteps, ntotal=10,
dist=dist, distparam=distparam,
processes=processes, pool=ipool,
verbose=0)
# absolute EE
if res.ndim > 2:
if weight:
mustar = (np.sum(res[:, iimask, 2] * res[:, iimask, 0], axis=0)
/ np.sum(res[:, iimask, 2], axis=0))
else:
mustar = np.mean(res[:, iimask, 0], axis=0)
else:
mustar = res[iimask, 0]
if crank == 0:
if (verbose > 0):
tee('Absolute elementary effects |EE|: ', mustar, file=lfile)
else:
if lfile is not None:
print('Absolute elementary effects |EE|: ', mustar,
file=lfile)
imask[iimask] = imask[iimask] & (mustar < mu_thresh)
if np.all(~imask):
if crank == 0:
if (verbose > 0):
tee('\nNo more parameters to screen, i.e. all '
'(unmasked) parameters are informative.', file=lfile)
tee('Finished screening in eee.', file=lfile)
else:
if lfile is not None:
print('\nNo more parameters to screen, i.e. all '
'(unmasked) parameters are informative.',
file=lfile)
print('Finished screening in eee.', file=lfile)
_cleanup(lfile, pool, ipool)
# Return all true
if mask is None:
return np.ones(len(lb), dtype=bool)
else:
return mask
# Step 6 of Cuntz et al. (2015) - Repeat until no |EE| >= mu*_tresh
if np.all(mustar < mu_thresh):
donext = False
niter += 1
# Step 7 of Cuntz et al. (2015)
# - last screening with ntlast trajectories all parameters with
# mu* < mu*_thresh are final noninformative parameters
if crank == 0:
if (verbose > 0):
tee('\nParameters remaining for last screening:',
np.where(imask)[0] + 1, file=lfile)
else:
if lfile is not None:
print('\nParameters remaining for last screening:',
np.where(imask)[0] + 1, file=lfile)
iimask = np.where(imask)[0]
res = screening( # (npara,3) with mu*, mu, std if nt>1
func, lb, ub, ntlast,
x0=ix0, mask=imask,
nsteps=nsteps, ntotal=10 * ntlast,
dist=dist, distparam=distparam,
processes=processes, pool=ipool,
verbose=0)
if res.ndim > 2:
if weight:
mustar = (np.sum(res[:, iimask, 2] * res[:, iimask, 0], axis=0) /
np.sum(res[:, iimask, 2], axis=0))
else:
mustar = np.mean(res[:, iimask, 0], axis=0)
else:
mustar = res[iimask, 0]
if crank == 0:
if ntlast > 1:
if (verbose > 0):
tee('Final mu*: ', mustar, file=lfile)
else:
if lfile is not None:
print('Final mu*: ', mustar, file=lfile)
else:
if (verbose > 0):
tee('Final absolute elementary effects |EE|: ', mustar,
file=lfile)
else:
if lfile is not None:
print('Final absolute elementary effects |EE|: ', mustar,
file=lfile)
imask[iimask] = imask[iimask] & (mustar < mu_thresh)
if np.all(~imask):
if crank == 0:
if (verbose > 0):
tee('\nNo more parameters left after screening, i.e. all '
'(unmasked) parameters are informative.',
file=lfile)
tee('Finished screening in eee.', file=lfile)
else:
if lfile is not None:
print('\nNo more parameters left after screening, '
'i.e. all (unmasked) parameters are informative.',
file=lfile)
print('Finished screening in eee.', file=lfile)
_cleanup(lfile, pool, ipool)
# Return all true
if mask is None:
return np.ones(len(lb), dtype=bool)
else:
return mask
# Return mask with unmasked informative model parameters (to be used with
# 'and' on initial mask)
if mask is None:
out = ~imask
else:
# (true where now zero, i.e. were masked or informative) and (initial
# mask)
out = (~imask) & mask
if crank == 0:
if (verbose > 0):
tee('\nFinal informative parameters:', np.sum(out),
np.where(out)[0] + 1, file=lfile)
tee('Final noninformative parameters:', np.sum(imask),
np.where(imask)[0] + 1, file=lfile)
tee('\nFinished screening in eee.', file=lfile)
else:
if lfile is not None:
print('\nFinal informative parameters:', np.sum(out),
np.where(out)[0] + 1, file=lfile)
print('Final noninformative parameters:', np.sum(imask),
np.where(imask)[0] + 1, file=lfile)
print('\nFinished screening in eee.', file=lfile)
# Close logfile and pool
_cleanup(lfile, pool, ipool)
return out
def see(func, *args, **kwargs):
"""
Wrapper function for :func:`~pyeee.eee.eee`.
"""
return eee(func, *args, **kwargs)
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
# # from pyjams import tee
# # from pyjams.functions import fmorris
# # # Morris with MPI
# # import time as ptime
# # t1 = ptime.time()
# # try:
# # from mpi4py import MPI
# # comm = MPI.COMM_WORLD
# # csize = comm.Get_size()
# # crank = comm.Get_rank()
# # except ImportError:
# # comm = None
# # csize = 1
# # crank = 0
# # seed = None # 1025
# # if seed is not None:
# # np.random.seed(seed=seed)
# # func = fmorris
# # npars = 20
# # lb = np.zeros(npars)
# # ub = np.ones(npars)
# # beta0 = 0.
# # beta1 = np.random.standard_normal(npars)
# # beta1[:10] = 20.
# # beta2 = np.random.standard_normal((npars,npars))
# # beta2[:6,:6] = -15.
# # beta3 = np.zeros((npars,npars,npars))
# # beta3[:5,:5,:5] = -10.
# # beta4 = np.zeros((npars,npars,npars,npars))
# # beta4[:4,:4,:4,:4] = 5.
# # args = [beta0, beta1, beta2, beta3, beta4] # Morris
# # ntfirst = 10
# # ntlast = 5
# # nsteps = 6
# # verbose = 1
# # out = eee(func, lb, ub, *args, x0=None, mask=None, ntfirst=ntfirst,
# # ntlast=ntlast, nsteps=nsteps, processes=4)
# # t2 = ptime.time()
# # if crank == 0:
# # strin = ('[m]: {:.1f}'.format((t2-t1)/60.)
# # if (t2-t1)>60. else '[s]: {:d}'.format(int(t2-t1)))
# # tee('Time elapsed: ', strin)
# # tee('mask (1: informative, 0: noninformative): ', out)
# # PYEEE
# from functools import partial
# import numpy as np
# import scipy.stats as stats
# from pyjams.functions import G, Gstar, K, fmorris
# from partialwrap import function_wrapper
# #
# # G function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = G
# npars = 6
# params = [78., 12., 0.5, 2., 97., 33.] # G
# # Partialise function with fixed parameters
# arg = [params]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# out = eee(obj, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, processes=4, plotfile='g.png')
# print('G')
# print(np.where(out)[0] + 1)
# #
# # Gstar function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = Gstar
# npars = 10
# params = [[np.ones(npars), np.random.random(npars),
# [0., 0., 9., 9., 9., 9., 9., 9., 9., 9.]], # G*
# [np.ones(npars), np.random.random(npars),
# [0., 0.1, 0.2, 0.3, 0.4, 0.8, 1., 2., 3., 4.]],
# [np.ones(npars)*0.5, np.random.random(npars),
# [0., 0., 9., 9., 9., 9., 9., 9., 9., 9.]],
# [np.ones(npars)*0.5, np.random.random(npars),
# [0., 0.1, 0.2, 0.3, 0.4, 0.8, 1., 2., 3., 4.]],
# [np.ones(npars)*2.0, np.random.random(npars),
# [0., 0., 9., 9., 9., 9., 9., 9., 9., 9.]],
# [np.ones(npars)*2.0, np.random.random(npars),
# [0., 0.1, 0.2, 0.3, 0.4, 0.8, 1., 2., 3., 4.]]
# ]
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# for ii in range(len(params)):
# # Partialise function with fixed parameters
# arg = params[ii]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# out = eee(obj, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, processes=4,
# plotfile='gstar'+str(ii)+'.png',logfile='log'+str(ii)+'.txt')
# print('G* ', ii)
# print(np.where(out)[0] + 1)
# #
# # Bratley / K function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = K
# npars = 10
# params = [] # K
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# out = eee(func, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, processes=4, plotfile='k.png')
# print('K')
# print(np.where(out)[0] + 1)
# #
# # Morris function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = fmorris
# npars = 20
# beta0 = 0.
# beta1 = np.random.standard_normal(npars)
# beta1[:10] = 20.
# beta2 = np.random.standard_normal((npars,npars))
# beta2[:6,:6] = -15.
# beta3 = np.zeros((npars,npars,npars))
# beta3[:5,:5,:5] = -10.
# beta4 = np.zeros((npars,npars,npars,npars))
# beta4[:4,:4,:4,:4] = 5.
# # Partialise Morris function with fixed parameters beta0-4
# arg = [beta0, beta1, beta2, beta3, beta4]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# out = eee(obj, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, processes=4, plotfile='morris.png', verbose=1)
# print('Morris')
# print(np.where(out)[0] + 1)
# #
# # Morris function with distributions
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = fmorris
# npars = 20
# beta0 = 0.
# beta1 = np.random.standard_normal(npars)
# beta1[:10] = 20.
# beta2 = np.random.standard_normal((npars,npars))
# beta2[:6,:6] = -15.
# beta3 = np.zeros((npars,npars,npars))
# beta3[:5,:5,:5] = -10.
# beta4 = np.zeros((npars,npars,npars,npars))
# beta4[:4,:4,:4,:4] = 5.
# # Partialise Morris function with fixed parameters beta0-4
# arg = [beta0, beta1, beta2, beta3, beta4]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# dist = [ stats.uniform for i in range(npars) ]
# distparam = [ (lb[i],ub[i]-lb[i]) for i in range(npars) ]
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# out = eee(obj, lb, ub, mask=None, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, dist=dist, distparam=distparam, processes=4)
# print('Morris dist')
# print(np.where(out)[0] + 1)
# #
# # Morris function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = fmorris
# npars = 20
# beta0 = 0.
# beta1 = np.random.standard_normal(npars)
# beta1[:10] = 20.
# beta2 = np.random.standard_normal((npars,npars))
# beta2[:6,:6] = -15.
# beta3 = np.zeros((npars,npars,npars))
# beta3[:5,:5,:5] = -10.
# beta4 = np.zeros((npars,npars,npars,npars))
# beta4[:4,:4,:4,:4] = 5.
# # Partialise Morris function with fixed parameters beta0-4
# arg = [beta0, beta1, beta2, beta3, beta4]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# x0 = np.ones(npars)*0.5
# mask = np.ones(npars, dtype=bool)
# mask[1] = False
# out = eee(obj, lb, ub, x0=x0, mask=mask, ntfirst=ntfirst,
# ntlast=ntlast, nsteps=nsteps, processes=4)
# print('Morris mask')
# print(np.where(out)[0] + 1)
# #
# # Morris function
# # seed for reproducible results
# seed = 1234
# np.random.seed(seed=seed)
# func = fmorris
# npars = 20
# beta0 = 0.
# beta1 = np.random.standard_normal(npars)
# beta1[:10] = 20.
# beta2 = np.random.standard_normal((npars,npars))
# beta2[:6,:6] = -15.
# beta3 = np.zeros((npars,npars,npars))
# beta3[:5,:5,:5] = -10.
# beta4 = np.zeros((npars,npars,npars,npars))
# beta4[:4,:4,:4,:4] = 5.
# # Partialise Morris function with fixed parameters beta0-4
# arg = [beta0, beta1, beta2, beta3, beta4]
# kwarg = {}
# obj = partial(function_wrapper, func, arg, kwarg)
# # eee parameters
# lb = np.zeros(npars)
# ub = np.ones(npars)
# dist = [ stats.uniform for i in range(npars) ]
# distparam = [ (lb[i],ub[i]-lb[i]) for i in range(npars) ]
# lb = np.zeros(npars)
# ub = np.ones(npars)
# ntfirst = 10
# ntlast = 5
# nsteps = 6
# verbose = 1
# x0 = np.ones(npars)*0.5
# mask = np.ones(npars, dtype=bool)
# mask[1] = False
# out = eee(obj, lb, ub, x0=x0, mask=mask, ntfirst=ntfirst, ntlast=ntlast,
# nsteps=nsteps, dist=dist, distparam=distparam, processes=4)
# print('Morris dist mask')
# print(np.where(out)[0] + 1)
| [
"numpy.argsort",
"numpy.array",
"pyjams.functions.logistic_offset_p",
"matplotlib.rc",
"schwimmbad.choose_pool",
"pyjams.functions.curvature",
"numpy.arange",
"numpy.mean",
"numpy.where",
"matplotlib.pyplot.close",
"doctest.testmod",
"numpy.random.seed",
"numpy.min",
"numpy.amax",
"numpy.ones",
"matplotlib.use",
"matplotlib.pyplot.subplot",
"scipy.optimize.brent",
"scipy.optimize.fmin_l_bfgs_b",
"numpy.copy",
"matplotlib.pyplot.setp",
"pyjams.tee",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.all",
"pyjams.screening"
] | [((9616, 9628), 'numpy.array', 'np.array', (['lb'], {}), '(lb)\n', (9624, 9628), True, 'import numpy as np\n'), ((9638, 9650), 'numpy.array', 'np.array', (['ub'], {}), '(ub)\n', (9646, 9650), True, 'import numpy as np\n'), ((11597, 11771), 'pyjams.screening', 'screening', (['func', 'lb', 'ub', 'ntfirst'], {'x0': 'ix0', 'mask': 'imask', 'nsteps': 'nsteps', 'ntotal': '(10 * ntfirst)', 'dist': 'dist', 'distparam': 'distparam', 'processes': 'processes', 'pool': 'ipool', 'verbose': '(0)'}), '(func, lb, ub, ntfirst, x0=ix0, mask=imask, nsteps=nsteps, ntotal=\n 10 * ntfirst, dist=dist, distparam=distparam, processes=processes, pool\n =ipool, verbose=0)\n', (11606, 11771), False, 'from pyjams import screening, tee\n'), ((12205, 12220), 'numpy.amax', 'np.amax', (['mustar'], {}), '(mustar)\n', (12212, 12220), True, 'import numpy as np\n'), ((12281, 12299), 'numpy.argsort', 'np.argsort', (['mustar'], {}), '(mustar)\n', (12291, 12299), True, 'import numpy as np\n'), ((13269, 13445), 'scipy.optimize.fmin_l_bfgs_b', 'opt.fmin_l_bfgs_b', (['cost_square', 'pini'], {'args': '(logistic_offset_p, xx, yy)', 'approx_grad': '(1)', 'bounds': '[(None, None), (None, None), (None, None), (None, None)]', 'iprint': '(0)', 'disp': '(0)'}), '(cost_square, pini, args=(logistic_offset_p, xx, yy),\n approx_grad=1, bounds=[(None, None), (None, None), (None, None), (None,\n None)], iprint=0, disp=0)\n', (13286, 13445), True, 'import scipy.optimize as opt\n'), ((13917, 14038), 'scipy.optimize.brent', 'opt.brent', (['mcurvature'], {'args': '(dlogistic, d2logistic, plogistic[0], plogistic[1], plogistic[2])', 'brack': '(xx[0], xx[-1])'}), '(mcurvature, args=(dlogistic, d2logistic, plogistic[0], plogistic[\n 1], plogistic[2]), brack=(xx[0], xx[-1]))\n', (13926, 14038), True, 'import scipy.optimize as opt\n'), ((14143, 14176), 'pyjams.functions.logistic_offset_p', 'logistic_offset_p', (['x_K', 'plogistic'], {}), '(x_K, plogistic)\n', (14160, 14176), False, 'from pyjams.functions import cost_square, curvature, logistic_offset_p\n'), ((17048, 17062), 'numpy.all', 'np.all', (['(~imask)'], {}), '(~imask)\n', (17054, 17062), True, 'import numpy as np\n'), ((20949, 21121), 'pyjams.screening', 'screening', (['func', 'lb', 'ub', 'ntlast'], {'x0': 'ix0', 'mask': 'imask', 'nsteps': 'nsteps', 'ntotal': '(10 * ntlast)', 'dist': 'dist', 'distparam': 'distparam', 'processes': 'processes', 'pool': 'ipool', 'verbose': '(0)'}), '(func, lb, ub, ntlast, x0=ix0, mask=imask, nsteps=nsteps, ntotal=\n 10 * ntlast, dist=dist, distparam=distparam, processes=processes, pool=\n ipool, verbose=0)\n', (20958, 21121), False, 'from pyjams import screening, tee\n'), ((22118, 22132), 'numpy.all', 'np.all', (['(~imask)'], {}), '(~imask)\n', (22124, 22132), True, 'import numpy as np\n'), ((24176, 24233), 'doctest.testmod', 'doctest.testmod', ([], {'optionflags': 'doctest.NORMALIZE_WHITESPACE'}), '(optionflags=doctest.NORMALIZE_WHITESPACE)\n', (24191, 24233), False, 'import doctest\n'), ((9701, 9715), 'numpy.ones', 'np.ones', (['npara'], {}), '(npara)\n', (9708, 9715), True, 'import numpy as np\n'), ((9733, 9759), 'numpy.ones', 'np.ones', (['npara'], {'dtype': 'bool'}), '(npara, dtype=bool)\n', (9740, 9759), True, 'import numpy as np\n'), ((9777, 9804), 'numpy.arange', 'np.arange', (['npara'], {'dtype': 'int'}), '(npara, dtype=int)\n', (9786, 9804), True, 'import numpy as np\n'), ((9939, 9950), 'numpy.copy', 'np.copy', (['x0'], {}), '(x0)\n', (9946, 9950), True, 'import numpy as np\n'), ((9968, 9981), 'numpy.copy', 'np.copy', (['mask'], {}), '(mask)\n', (9975, 
9981), True, 'import numpy as np\n'), ((11180, 11205), 'numpy.random.seed', 'np.random.seed', ([], {'seed': 'seed'}), '(seed=seed)\n', (11194, 11205), True, 'import numpy as np\n'), ((11341, 11419), 'schwimmbad.choose_pool', 'schwimmbad.choose_pool', ([], {'mpi': '(False if csize == 1 else True)', 'processes': 'processes'}), '(mpi=False if csize == 1 else True, processes=processes)\n', (11363, 11419), False, 'import schwimmbad\n'), ((12234, 12250), 'numpy.arange', 'np.arange', (['nmask'], {}), '(nmask)\n', (12243, 12250), True, 'import numpy as np\n'), ((18450, 18602), 'pyjams.screening', 'screening', (['func', 'lb', 'ub', '(1)'], {'x0': 'ix0', 'mask': 'imask', 'nsteps': 'nsteps', 'ntotal': '(10)', 'dist': 'dist', 'distparam': 'distparam', 'processes': 'processes', 'pool': 'ipool', 'verbose': '(0)'}), '(func, lb, ub, 1, x0=ix0, mask=imask, nsteps=nsteps, ntotal=10,\n dist=dist, distparam=distparam, processes=processes, pool=ipool, verbose=0)\n', (18459, 18602), False, 'from pyjams import screening, tee\n'), ((19420, 19434), 'numpy.all', 'np.all', (['(~imask)'], {}), '(~imask)\n', (19426, 19434), True, 'import numpy as np\n'), ((20323, 20349), 'numpy.all', 'np.all', (['(mustar < mu_thresh)'], {}), '(mustar < mu_thresh)\n', (20329, 20349), True, 'import numpy as np\n'), ((20919, 20934), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (20927, 20934), True, 'import numpy as np\n'), ((9215, 9257), 'pyjams.tee', 'tee', (['"""Start screening in eee."""'], {'file': 'lfile'}), "('Start screening in eee.', file=lfile)\n", (9218, 9257), False, 'from pyjams import screening, tee\n'), ((9999, 10014), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (10007, 10014), True, 'import numpy as np\n'), ((10815, 10886), 'pyjams.tee', 'tee', (['"""\nScreen unmasked parameters: """', 'nmask', '(iimask + 1)'], {'file': 'lfile'}), '("""\nScreen unmasked parameters: """, nmask, iimask + 1, file=lfile)\n', (10818, 10886), False, 'from pyjams import screening, tee\n'), ((12066, 12100), 'numpy.mean', 'np.mean', (['res[:, iimask, 0]'], {'axis': '(0)'}), '(res[:, iimask, 0], axis=0)\n', (12073, 12100), True, 'import numpy as np\n'), ((12394, 12490), 'pyjams.tee', 'tee', (['"""\nSorted means of absolute elementary effects (mu*): """', 'mustar[iisort]'], {'file': 'lfile'}), '("""\nSorted means of absolute elementary effects (mu*): """, mustar[\n iisort], file=lfile)\n', (12397, 12490), False, 'from pyjams import screening, tee\n'), ((12511, 12557), 'pyjams.tee', 'tee', (['"""Normalised mu* = eta*: """', 'yy'], {'file': 'lfile'}), "('Normalised mu* = eta*: ', yy, file=lfile)\n", (12514, 12557), False, 'from pyjams import screening, tee\n'), ((12570, 12638), 'pyjams.tee', 'tee', (['"""Corresponding to parameters: """', '(iimask[iisort] + 1)'], {'file': 'lfile'}), "('Corresponding to parameters: ', iimask[iisort] + 1, file=lfile)\n", (12573, 12638), False, 'from pyjams import screening, tee\n'), ((13879, 13905), 'pyjams.functions.curvature', 'curvature', (['*args'], {}), '(*args, **kwargs)\n', (13888, 13905), False, 'from pyjams.functions import cost_square, curvature, logistic_offset_p\n'), ((14376, 14390), 'numpy.min', 'np.min', (['mustar'], {}), '(mustar)\n', (14382, 14390), True, 'import numpy as np\n'), ((14564, 14650), 'pyjams.tee', 'tee', (['"""\nThreshold eta*_thresh, mu*_tresh: """', 'eta_thresh', 'mu_thresh'], {'file': 'lfile'}), '("""\nThreshold eta*_thresh, mu*_tresh: """, eta_thresh, mu_thresh, file=\n lfile)\n', (14567, 14650), False, 'from pyjams import screening, tee\n'), ((14746, 14788), 
'pyjams.tee', 'tee', (['"""p_opt of L: """', 'plogistic'], {'file': 'lfile'}), "('p_opt of L: ', plogistic, file=lfile)\n", (14749, 14788), False, 'from pyjams import screening, tee\n'), ((18417, 18432), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (18425, 18432), True, 'import numpy as np\n'), ((21410, 21444), 'numpy.mean', 'np.mean', (['res[:, iimask, 0]'], {'axis': '(0)'}), '(res[:, iimask, 0], axis=0)\n', (21417, 21444), True, 'import numpy as np\n'), ((23482, 23532), 'pyjams.tee', 'tee', (['"""\nFinished screening in eee."""'], {'file': 'lfile'}), '("""\nFinished screening in eee.""", file=lfile)\n', (23485, 23532), False, 'from pyjams import screening, tee\n'), ((11918, 11971), 'numpy.sum', 'np.sum', (['(res[:, iimask, 2] * res[:, iimask, 0])'], {'axis': '(0)'}), '(res[:, iimask, 2] * res[:, iimask, 0], axis=0)\n', (11924, 11971), True, 'import numpy as np\n'), ((11996, 12029), 'numpy.sum', 'np.sum', (['res[:, iimask, 2]'], {'axis': '(0)'}), '(res[:, iimask, 2], axis=0)\n', (12002, 12029), True, 'import numpy as np\n'), ((14687, 14720), 'pyjams.functions.logistic_offset_p', 'logistic_offset_p', (['x_K', 'plogistic'], {}), '(x_K, plogistic)\n', (14704, 14720), False, 'from pyjams.functions import cost_square, curvature, logistic_offset_p\n'), ((15327, 15341), 'matplotlib.use', 'mpl.use', (['"""Agg"""'], {}), "('Agg')\n", (15334, 15341), True, 'import matplotlib as mpl\n'), ((15541, 15581), 'matplotlib.rc', 'mpl.rc', (['"""savefig"""'], {'dpi': '(300)', 'format': '"""png"""'}), "('savefig', dpi=300, format='png')\n", (15547, 15581), True, 'import matplotlib as mpl\n'), ((15744, 15756), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15754, 15756), True, 'import matplotlib.pyplot as plt\n'), ((15779, 15795), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (15790, 15795), True, 'import matplotlib.pyplot as plt\n'), ((16500, 16544), 'matplotlib.pyplot.setp', 'plt.setp', (['sub'], {'xticks': 'xx', 'xticklabels': 'xnames'}), '(sub, xticks=xx, xticklabels=xnames)\n', (16508, 16544), True, 'import matplotlib.pyplot as plt\n'), ((16561, 16594), 'matplotlib.pyplot.setp', 'plt.setp', (['sub'], {'xlabel': '"""Parameter"""'}), "(sub, xlabel='Parameter')\n", (16569, 16594), True, 'import matplotlib.pyplot as plt\n'), ((16611, 16642), 'matplotlib.pyplot.setp', 'plt.setp', (['sub'], {'ylabel': '"""$\\\\mu*$"""'}), "(sub, ylabel='$\\\\mu*$')\n", (16619, 16642), True, 'import matplotlib.pyplot as plt\n'), ((16783, 16797), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (16792, 16797), True, 'import matplotlib.pyplot as plt\n'), ((17133, 17239), 'pyjams.tee', 'tee', (['"""\nNo more parameters to screen, i.e. all (unmasked) parameters are informative."""'], {'file': 'lfile'}), '("""\nNo more parameters to screen, i.e. 
all (unmasked) parameters are informative."""\n , file=lfile)\n', (17136, 17239), False, 'from pyjams import screening, tee\n'), ((17271, 17316), 'pyjams.tee', 'tee', (['"""Finished screening in eee."""'], {'file': 'lfile'}), "('Finished screening in eee.', file=lfile)\n", (17274, 17316), False, 'from pyjams import screening, tee\n'), ((18963, 18997), 'numpy.mean', 'np.mean', (['res[:, iimask, 0]'], {'axis': '(0)'}), '(res[:, iimask, 0], axis=0)\n', (18970, 18997), True, 'import numpy as np\n'), ((19118, 19179), 'pyjams.tee', 'tee', (['"""Absolute elementary effects |EE|: """', 'mustar'], {'file': 'lfile'}), "('Absolute elementary effects |EE|: ', mustar, file=lfile)\n", (19121, 19179), False, 'from pyjams import screening, tee\n'), ((21262, 21315), 'numpy.sum', 'np.sum', (['(res[:, iimask, 2] * res[:, iimask, 0])'], {'axis': '(0)'}), '(res[:, iimask, 2] * res[:, iimask, 0], axis=0)\n', (21268, 21315), True, 'import numpy as np\n'), ((21340, 21373), 'numpy.sum', 'np.sum', (['res[:, iimask, 2]'], {'axis': '(0)'}), '(res[:, iimask, 2], axis=0)\n', (21346, 21373), True, 'import numpy as np\n'), ((21576, 21614), 'pyjams.tee', 'tee', (['"""Final mu*: """', 'mustar'], {'file': 'lfile'}), "('Final mu*: ', mustar, file=lfile)\n", (21579, 21614), False, 'from pyjams import screening, tee\n'), ((21792, 21859), 'pyjams.tee', 'tee', (['"""Final absolute elementary effects |EE|: """', 'mustar'], {'file': 'lfile'}), "('Final absolute elementary effects |EE|: ', mustar, file=lfile)\n", (21795, 21859), False, 'from pyjams import screening, tee\n'), ((22203, 22320), 'pyjams.tee', 'tee', (['"""\nNo more parameters left after screening, i.e. all (unmasked) parameters are informative."""'], {'file': 'lfile'}), '("""\nNo more parameters left after screening, i.e. all (unmasked) parameters are informative."""\n , file=lfile)\n', (22206, 22320), False, 'from pyjams import screening, tee\n'), ((22372, 22417), 'pyjams.tee', 'tee', (['"""Finished screening in eee."""'], {'file': 'lfile'}), "('Finished screening in eee.', file=lfile)\n", (22375, 22417), False, 'from pyjams import screening, tee\n'), ((23288, 23299), 'numpy.sum', 'np.sum', (['out'], {}), '(out)\n', (23294, 23299), True, 'import numpy as np\n'), ((23403, 23416), 'numpy.sum', 'np.sum', (['imask'], {}), '(imask)\n', (23409, 23416), True, 'import numpy as np\n'), ((10151, 10212), 'pyjams.tee', 'tee', (['"""\nAll parameters masked, nothing to do."""'], {'file': 'lfile'}), '("""\nAll parameters masked, nothing to do.""", file=lfile)\n', (10154, 10212), False, 'from pyjams import screening, tee\n'), ((10230, 10275), 'pyjams.tee', 'tee', (['"""Finished screening in eee."""'], {'file': 'lfile'}), "('Finished screening in eee.', file=lfile)\n", (10233, 10275), False, 'from pyjams import screening, tee\n'), ((14990, 15023), 'pyjams.functions.logistic_offset_p', 'logistic_offset_p', (['x_K', 'plogistic'], {}), '(x_K, plogistic)\n', (15007, 15023), False, 'from pyjams.functions import cost_square, curvature, logistic_offset_p\n'), ((15633, 15655), 'matplotlib.rc', 'mpl.rc', (['"""font"""'], {'size': '(8)'}), "('font', size=8)\n", (15639, 15655), True, 'import matplotlib as mpl\n'), ((15698, 15721), 'matplotlib.rc', 'mpl.rc', (['"""font"""'], {'size': '(11)'}), "('font', size=11)\n", (15704, 15721), True, 'import matplotlib as mpl\n'), ((16063, 16096), 'pyjams.functions.logistic_offset_p', 'logistic_offset_p', (['xx2', 'plogistic'], {}), '(xx2, plogistic)\n', (16080, 16096), False, 'from pyjams.functions import cost_square, curvature, logistic_offset_p\n'), 
((18803, 18856), 'numpy.sum', 'np.sum', (['(res[:, iimask, 2] * res[:, iimask, 0])'], {'axis': '(0)'}), '(res[:, iimask, 2] * res[:, iimask, 0], axis=0)\n', (18809, 18856), True, 'import numpy as np\n'), ((18885, 18918), 'numpy.sum', 'np.sum', (['res[:, iimask, 2]'], {'axis': '(0)'}), '(res[:, iimask, 2], axis=0)\n', (18891, 18918), True, 'import numpy as np\n'), ((19517, 19623), 'pyjams.tee', 'tee', (['"""\nNo more parameters to screen, i.e. all (unmasked) parameters are informative."""'], {'file': 'lfile'}), '("""\nNo more parameters to screen, i.e. all (unmasked) parameters are informative."""\n , file=lfile)\n', (19520, 19623), False, 'from pyjams import screening, tee\n'), ((19663, 19708), 'pyjams.tee', 'tee', (['"""Finished screening in eee."""'], {'file': 'lfile'}), "('Finished screening in eee.', file=lfile)\n", (19666, 19708), False, 'from pyjams import screening, tee\n'), ((23635, 23646), 'numpy.sum', 'np.sum', (['out'], {}), '(out)\n', (23641, 23646), True, 'import numpy as np\n'), ((23762, 23775), 'numpy.sum', 'np.sum', (['imask'], {}), '(imask)\n', (23768, 23775), True, 'import numpy as np\n'), ((20695, 20710), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (20703, 20710), True, 'import numpy as np\n'), ((23317, 23330), 'numpy.where', 'np.where', (['out'], {}), '(out)\n', (23325, 23330), True, 'import numpy as np\n'), ((23434, 23449), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (23442, 23449), True, 'import numpy as np\n'), ((15963, 15976), 'numpy.arange', 'np.arange', (['nn'], {}), '(nn)\n', (15972, 15976), True, 'import numpy as np\n'), ((18167, 18182), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (18175, 18182), True, 'import numpy as np\n'), ((20869, 20884), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (20877, 20884), True, 'import numpy as np\n'), ((23670, 23683), 'numpy.where', 'np.where', (['out'], {}), '(out)\n', (23678, 23683), True, 'import numpy as np\n'), ((23799, 23814), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (23807, 23814), True, 'import numpy as np\n'), ((18364, 18379), 'numpy.where', 'np.where', (['imask'], {}), '(imask)\n', (18372, 18379), True, 'import numpy as np\n')] |
import functools
import itertools
import matplotlib.pyplot as plt
import networkx as nx
from . import utils
# Colorbrewer2: qualitative
_colors = [
'#a6cee3', '#1f78b4', '#b2df8a', '#33a02c', '#fb9a99', '#e31a1c',
'#fdbf6f', '#ff7f00', '#cab2d6', '#6a3d9a', '#ffff99',
]
class Plot(object):
def __init__(self, g):
self.g = g
self.vias = []
self.path = []
def plot(self):
plt.hold(True)
colors = itertools.cycle(_colors)
for n1, n2 in self.plottable_edges():
lon1, lat1 = self.g.node[n1]['coordinates']
lon2, lat2 = self.g.node[n2]['coordinates']
plt.plot([lon1, lon2], [lat1, lat2], color=next(colors))
coordinates = [self.g.node[n]['coordinates']
for n in self.g.nodes_iter()]
lon, lat = zip(*coordinates)
plt.scatter(lon, lat, s=0.5, picker=True)
plt.gcf().canvas.mpl_connect('pick_event', self.on_click)
def plottable_edges(self):
"""
If the forward and backward edge are in the graph, only use one
Use the edge where first node_id is less than the second.
"""
edges = set(self.g.edges())
for n1, n2 in edges:
if n1 < n2 or (n2, n1) not in edges:
yield n1, n2
def on_click(self, event):
ind = event.ind[0]
node = self.g.nodes()[ind]
self.add_remove_via(node)
self.update_path()
return node
def add_remove_via(self, node):
if node in self.vias:
self.vias.remove(node)
self.remove_via(node)
else:
self.vias.append(node)
self.add_via(node)
self.compute_path()
def remove_via(self, node):
# Delete
pass
def add_via(self, node):
lon, lat = self.g.node[node]['coordinates']
plt.scatter(lon, lat, c='k', s=25.0)
def compute_path(self):
self.path = []
if len(self.vias) <= 1:
return
for i, (n1, n2) in enumerate(utils.pairwise(self.vias)):
nodes = nx.shortest_path(self.g, n1, n2, weight='length')
if i > 0:
nodes = nodes[1:]
self.path.extend(nodes)
def update_path(self):
coordinates = [self.g.node[n]['coordinates'] for n in self.path]
if not coordinates:
return
lon, lat = zip(*coordinates)
plt.plot(lon, lat, linewidth=5, color='k')
| [
"itertools.cycle",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.plot",
"networkx.shortest_path",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.hold"
] | [((426, 440), 'matplotlib.pyplot.hold', 'plt.hold', (['(True)'], {}), '(True)\n', (434, 440), True, 'import matplotlib.pyplot as plt\n'), ((458, 482), 'itertools.cycle', 'itertools.cycle', (['_colors'], {}), '(_colors)\n', (473, 482), False, 'import itertools\n'), ((862, 903), 'matplotlib.pyplot.scatter', 'plt.scatter', (['lon', 'lat'], {'s': '(0.5)', 'picker': '(True)'}), '(lon, lat, s=0.5, picker=True)\n', (873, 903), True, 'import matplotlib.pyplot as plt\n'), ((1884, 1920), 'matplotlib.pyplot.scatter', 'plt.scatter', (['lon', 'lat'], {'c': '"""k"""', 's': '(25.0)'}), "(lon, lat, c='k', s=25.0)\n", (1895, 1920), True, 'import matplotlib.pyplot as plt\n'), ((2446, 2488), 'matplotlib.pyplot.plot', 'plt.plot', (['lon', 'lat'], {'linewidth': '(5)', 'color': '"""k"""'}), "(lon, lat, linewidth=5, color='k')\n", (2454, 2488), True, 'import matplotlib.pyplot as plt\n'), ((2110, 2159), 'networkx.shortest_path', 'nx.shortest_path', (['self.g', 'n1', 'n2'], {'weight': '"""length"""'}), "(self.g, n1, n2, weight='length')\n", (2126, 2159), True, 'import networkx as nx\n'), ((913, 922), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (920, 922), True, 'import matplotlib.pyplot as plt\n')] |
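A minimal usage sketch for the Plot class above. The toy graph, its node 'coordinates' values, and the edge 'length' weights are illustrative assumptions; the sketch also assumes the class is in scope and that the networkx 1.x / pre-2.0 matplotlib APIs it relies on (graph.node, nodes_iter, plt.hold) are installed.

import matplotlib.pyplot as plt
import networkx as nx

# Build a small graph in the shape the Plot class expects: every node carries a
# 'coordinates' (lon, lat) attribute and every edge carries a 'length' weight.
g = nx.Graph()
g.add_node('a', coordinates=(4.89, 52.37))
g.add_node('b', coordinates=(4.90, 52.38))
g.add_node('c', coordinates=(4.91, 52.36))
g.add_edge('a', 'b', length=1.2)
g.add_edge('b', 'c', length=0.8)

p = Plot(g)
p.plot()    # draws the edges and the pickable node scatter
plt.show()  # clicking two or more nodes adds vias and overlays the shortest path
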
"""Script to compare different versions (mutative vs. constructive) versions of DyNA-PPO. For reference, the original DyNA-PPO paper is constructive."""
from typing import Callable
import flexs
from flexs import baselines
from flexs.utils import sequence_utils as s_utils
alphabet = s_utils.RNAA
sequences_batch_size = 100
model_queries_per_batch = 2000
def run_dynappo_constructive(landscape, wt, problem_name, start_num):
def make_explorer(model, ss):
return baselines.explorers.DynaPPO(
model=model,
landscape=landscape,
rounds=10,
starting_sequence=wt,
sequences_batch_size=sequences_batch_size,
model_queries_per_batch=model_queries_per_batch,
num_experiment_rounds=10,
num_model_rounds=8,
alphabet=alphabet,
log_file=f"runs/dynappo_constructive/{problem_name}_start{start_num}_ss{ss}",
)
results = flexs.evaluate.robustness(landscape, make_explorer, verbose=False)
return results
def run_dynappo_mutative(landscape, wt, problem_name, start_num):
def make_explorer(model, ss):
return baselines.explorers.DynaPPOMutative(
model=model,
landscape=landscape,
rounds=10,
starting_sequence=wt,
sequences_batch_size=sequences_batch_size,
model_queries_per_batch=model_queries_per_batch,
num_experiment_rounds=10,
num_model_rounds=8,
alphabet=alphabet,
log_file=f"runs/dynappo_mutative/{problem_name}_start{start_num}_ss{ss}",
)
results = flexs.evaluate.robustness(landscape, make_explorer, verbose=False)
return results
if __name__ == "__main__":
for p in ["L14_RNA1"]:
problem = flexs.landscapes.rna.registry()[p]
landscape = flexs.landscapes.RNABinding(**problem["params"])
for s in range(5):
wt = problem["starts"][s]
results = run_dynappo_constructive(landscape, wt, p, s)
results = run_dynappo_mutative(landscape, wt, p, s)
| [
"flexs.evaluate.robustness",
"flexs.landscapes.rna.registry",
"flexs.landscapes.RNABinding",
"flexs.baselines.explorers.DynaPPOMutative",
"flexs.baselines.explorers.DynaPPO"
] | [((953, 1019), 'flexs.evaluate.robustness', 'flexs.evaluate.robustness', (['landscape', 'make_explorer'], {'verbose': '(False)'}), '(landscape, make_explorer, verbose=False)\n', (978, 1019), False, 'import flexs\n'), ((1636, 1702), 'flexs.evaluate.robustness', 'flexs.evaluate.robustness', (['landscape', 'make_explorer'], {'verbose': '(False)'}), '(landscape, make_explorer, verbose=False)\n', (1661, 1702), False, 'import flexs\n'), ((477, 824), 'flexs.baselines.explorers.DynaPPO', 'baselines.explorers.DynaPPO', ([], {'model': 'model', 'landscape': 'landscape', 'rounds': '(10)', 'starting_sequence': 'wt', 'sequences_batch_size': 'sequences_batch_size', 'model_queries_per_batch': 'model_queries_per_batch', 'num_experiment_rounds': '(10)', 'num_model_rounds': '(8)', 'alphabet': 'alphabet', 'log_file': 'f"""runs/dynappo_constructive/{problem_name}_start{start_num}_ss{ss}"""'}), "(model=model, landscape=landscape, rounds=10,\n starting_sequence=wt, sequences_batch_size=sequences_batch_size,\n model_queries_per_batch=model_queries_per_batch, num_experiment_rounds=\n 10, num_model_rounds=8, alphabet=alphabet, log_file=\n f'runs/dynappo_constructive/{problem_name}_start{start_num}_ss{ss}')\n", (504, 824), False, 'from flexs import baselines\n'), ((1156, 1506), 'flexs.baselines.explorers.DynaPPOMutative', 'baselines.explorers.DynaPPOMutative', ([], {'model': 'model', 'landscape': 'landscape', 'rounds': '(10)', 'starting_sequence': 'wt', 'sequences_batch_size': 'sequences_batch_size', 'model_queries_per_batch': 'model_queries_per_batch', 'num_experiment_rounds': '(10)', 'num_model_rounds': '(8)', 'alphabet': 'alphabet', 'log_file': 'f"""runs/dynappo_mutative/{problem_name}_start{start_num}_ss{ss}"""'}), "(model=model, landscape=landscape,\n rounds=10, starting_sequence=wt, sequences_batch_size=\n sequences_batch_size, model_queries_per_batch=model_queries_per_batch,\n num_experiment_rounds=10, num_model_rounds=8, alphabet=alphabet,\n log_file=f'runs/dynappo_mutative/{problem_name}_start{start_num}_ss{ss}')\n", (1191, 1506), False, 'from flexs import baselines\n'), ((1851, 1899), 'flexs.landscapes.RNABinding', 'flexs.landscapes.RNABinding', ([], {}), "(**problem['params'])\n", (1878, 1899), False, 'import flexs\n'), ((1796, 1827), 'flexs.landscapes.rna.registry', 'flexs.landscapes.rna.registry', ([], {}), '()\n', (1825, 1827), False, 'import flexs\n')] |
import argparse
import time
import subprocess
import paramiko
from scp import SCPClient
from pathlib import Path
from google.cloud import storage
import os
parser = argparse.ArgumentParser(description='Download and uncompress the data on all the nodes.')
parser.add_argument('data', type=int, choices=[100, 300, 1000, 3000, 10000], help='data scale factor.')
parser.add_argument('ip', type=str, help='starting ip address')
parser.add_argument('nodes', type=int, help='the number of nodes')
parser.add_argument('--key','-k', type=str, default=None, help='location of the service key json file')
parser.add_argument('--thread','-t', type=int, default=1, help='number of threads for each node')
parser.add_argument('--parts','-p', type=int, default=0, help='number of parts to split the data (0 means the same as node number)')
parser.add_argument('--start','-s', type=int, default=0, help='the start index of the data partition (default 0)')
args = parser.parse_args()
user = "tigergraph"
pin = "tigergraph" # please change the pin here
workdir = '/home/tigergraph'
bucket_name = 'ldbc_bi'
root = f'sf{args.data}-bi/'
def createSSHClient(server, port, user, password):
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(server, port, user, password)
return client
def main():
key = ''
if args.key:
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = args.key
key = f'-k {args.key}'
print("check data accessibility")
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
stats = storage.Blob(bucket=bucket, name=root).exists(storage_client)
print("The bucket can be accessed")
start_ip = args.ip.split('.')
for i in range(args.nodes):
ip4 = int(start_ip[-1]) + i
ip = start_ip[:-1] + [str(ip4)]
ip = '.'.join(ip)
ssh = createSSHClient(ip, 22, user, pin)
scp = SCPClient(ssh.get_transport())
print(f'logging to {ip}')
scp.put('../k8s/download_one_partition.py', workdir)
scp.put('../k8s/download_decompress.sh', workdir)
if args.key:
scp.put(args.key, workdir)
stdin, stdout, stderr = ssh.exec_command(f'''
cd {workdir}
. .profile
pip3 install google-cloud-storage
export SF={args.data}
export i={i + args.start}
export NUM_NODES={args.parts if args.parts else args.nodes}
export target=~/sf{args.data}
export DOWNLOAD_THREAD={args.thread}
export SERVICE_KEY="{key}"
nohup sh download_decompress.sh $i > log.download 2>&1 < /dev/null &
''')
time.sleep(4)
stdin, stdout, stderr = ssh.exec_command(f'tail {workdir}/log.download')
for line in stdout.read().splitlines():
print(line.decode('utf-8'))
ssh.close()
scp.close()
if __name__ == '__main__':
main()
| [
"google.cloud.storage.Client",
"argparse.ArgumentParser",
"paramiko.AutoAddPolicy",
"time.sleep",
"google.cloud.storage.Blob",
"paramiko.SSHClient"
] | [((167, 261), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Download and uncompress the data on all the nodes."""'}), "(description=\n 'Download and uncompress the data on all the nodes.')\n", (190, 261), False, 'import argparse\n'), ((1185, 1205), 'paramiko.SSHClient', 'paramiko.SSHClient', ([], {}), '()\n', (1203, 1205), False, 'import paramiko\n'), ((1554, 1570), 'google.cloud.storage.Client', 'storage.Client', ([], {}), '()\n', (1568, 1570), False, 'from google.cloud import storage\n'), ((1280, 1304), 'paramiko.AutoAddPolicy', 'paramiko.AutoAddPolicy', ([], {}), '()\n', (1302, 1304), False, 'import paramiko\n'), ((2625, 2638), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (2635, 2638), False, 'import time\n'), ((1629, 1667), 'google.cloud.storage.Blob', 'storage.Blob', ([], {'bucket': 'bucket', 'name': 'root'}), '(bucket=bucket, name=root)\n', (1641, 1667), False, 'from google.cloud import storage\n')] |
from jinja2 import Environment, FileSystemLoader
import sys
# Import config.py
with open('/etc/footloose-vpc/footloose-vpc.conf') as f:
config_path = f.read().split('\n')[0]
sys.path.insert(0, config_path)
import config as cfg
# Import modules
import htansw
def print_j2_template(template, j2_vars):
htansw.print_html()
env = Environment(loader=FileSystemLoader('%s/' % cfg.DIR_PATH['templates']))
j2_html = env.get_template(template)
rendered_html = j2_html.render(j2_vars=j2_vars)
print(rendered_html)
| [
"jinja2.FileSystemLoader",
"htansw.print_html",
"sys.path.insert"
] | [((179, 210), 'sys.path.insert', 'sys.path.insert', (['(0)', 'config_path'], {}), '(0, config_path)\n', (194, 210), False, 'import sys\n'), ((311, 330), 'htansw.print_html', 'htansw.print_html', ([], {}), '()\n', (328, 330), False, 'import htansw\n'), ((360, 411), 'jinja2.FileSystemLoader', 'FileSystemLoader', (["('%s/' % cfg.DIR_PATH['templates'])"], {}), "('%s/' % cfg.DIR_PATH['templates'])\n", (376, 411), False, 'from jinja2 import Environment, FileSystemLoader\n')] |
import optuna
import json
import numpy as np
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--study-name", help="Study name used during hyperparameter optimization", type=str, default=None)
parser.add_argument("--storage", help="Database storage path used during hyperparameter optimization", type=str, default=None)
parser.add_argument("--print-n-best-trials", help="Show final return values for n best trials", type=int, default=0)
parser.add_argument("--save-n-best-hyperparameters", help="Save the hyperparameters for the n best trials that resulted in the best returns", type=int, default=0)
args = parser.parse_args()
study = optuna.create_study(study_name=args.study_name, storage=args.storage, load_if_exists=True, direction="maximize")
values = []
for i in study.trials:
if i.number < args.print_n_best_trials:
print(i.value)
values.append(i.value)
scratch_values = [-np.inf if i is None else i for i in values]
ordered_indices = np.argsort(scratch_values)[::-1]
for i in range(args.save_n_best_hyperparameters):
params = study.trials[ordered_indices[i]].params
text = json.dumps(params)
jsonFile = open('hyperparameter_jsons/' + 'hyperparameters_' + str(i) + ".json", "w+")
jsonFile.write(text)
jsonFile.close()
| [
"numpy.argsort",
"json.dumps",
"optuna.create_study",
"argparse.ArgumentParser"
] | [((72, 97), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (95, 97), False, 'import argparse\n'), ((660, 776), 'optuna.create_study', 'optuna.create_study', ([], {'study_name': 'args.study_name', 'storage': 'args.storage', 'load_if_exists': '(True)', 'direction': '"""maximize"""'}), "(study_name=args.study_name, storage=args.storage,\n load_if_exists=True, direction='maximize')\n", (679, 776), False, 'import optuna\n'), ((985, 1011), 'numpy.argsort', 'np.argsort', (['scratch_values'], {}), '(scratch_values)\n', (995, 1011), True, 'import numpy as np\n'), ((1133, 1151), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1143, 1151), False, 'import json\n')] |
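A short sketch of reading back one of the JSON files the loop above writes (the hyperparameter_jsons/ directory must already exist when the script runs; index 0 corresponds to the best-scoring trial).

import json

with open('hyperparameter_jsons/hyperparameters_0.json') as f:
    best_params = json.load(f)
print(best_params)
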
import time
import requests
from absl import app, flags
flags.DEFINE_string("database_id", None, "The id of the todo database.", required=True)
flags.DEFINE_list(
"complete_selects",
"Completed,Archived",
"The status of the objects to mark as completed.",
)
flags.DEFINE_string("datetime_field", "Completed On", "The datetime to use to mark the todo.")
flags.DEFINE_string("token", None, "The API token of the notion integration.", required=True)
flags.DEFINE_string("status_column", "Status", "The column to use for completion status selection")
flags.DEFINE_integer("sleep_for", 1, "The number of seconds to sleep between requests.")
FLAGS = flags.FLAGS
def main(*unused_argv):
# Setup the authorization headers
headers = {
"Authorization": f"Bearer {FLAGS.token}",
"Notion-Version": "2021-05-13",
}
# Setup the database
# Get the database, and see if the edit time is in the future.
res = requests.get(f"https://api.notion.com/v1/databases/{FLAGS.database_id}", headers=headers)
if res.status_code == 404:
print(f"Error finding database: {FLAGS.database_id}. Are you sure you shared the DB with the integration?")
return
db = res.json()
if FLAGS.datetime_field not in db["properties"] or db["properties"][FLAGS.datetime_field]["type"] != "date":
print(f"Database does not have the required field {FLAGS.datetime_field}, or it is not a datetime")
if (
FLAGS.status_column not in db["properties"]
or db["properties"][FLAGS.status_column]["type"] != "select"
or any(
f not in [q["name"] for q in db["properties"][FLAGS.status_column]["select"]["options"]]
for f in FLAGS.complete_selects
)
):
print(db["properties"][FLAGS.status_column]["select"]["options"])
print(f"Database does not have a {FLAGS.status_column} column with fields {FLAGS.complete_selects}")
return
while True:
# Query the database for all todos with the status in the complete_selects list
# and does not have a completion time.
filter_data = {
"and": [
{"property": FLAGS.datetime_field, "date": {"is_empty": True}},
{"or": [{"property": FLAGS.status_column, "select": {"equals": q}} for q in FLAGS.complete_selects]},
]
}
objects_needing_updates = []
        start_cursor = None
        while True:
            if start_cursor:
page = requests.post(
f"https://api.notion.com/v1/databases/{FLAGS.database_id}/query",
json={"filter": filter_data, "start_cursor": start_cursor},
headers=headers,
)
else:
page = requests.post(
f"https://api.notion.com/v1/databases/{FLAGS.database_id}/query",
json={"filter": filter_data},
headers=headers,
)
            if page.status_code == 429:
                # Rate limited by the API: back off and retry the same request
                time.sleep(FLAGS.sleep_for)
                continue
            if page.status_code != 200:
                print(f"Error querying database: {FLAGS.database_id}")
                return
page = page.json()
for q in page["results"]:
objects_needing_updates.append(q)
if page["has_more"]:
start_cursor = page["next_cursor"]
else:
break
for page in objects_needing_updates:
page_id = page["id"]
patch = {
"properties": {
FLAGS.datetime_field: {
"date": {"start": page["last_edited_time"]},
}
}
}
print(f"Updating TODO, id: {page_id}, patch: {patch}")
res = requests.patch(f"https://api.notion.com/v1/pages/{page_id}", json=patch, headers=headers)
time.sleep(0.2)
if __name__ == "__main__":
app.run(main)
| [
"requests.post",
"absl.flags.DEFINE_integer",
"requests.patch",
"absl.app.run",
"requests.get",
"time.sleep",
"absl.flags.DEFINE_string",
"absl.flags.DEFINE_list"
] | [((58, 149), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""database_id"""', 'None', '"""The id of the todo database."""'], {'required': '(True)'}), "('database_id', None, 'The id of the todo database.',\n required=True)\n", (77, 149), False, 'from absl import app, flags\n'), ((146, 260), 'absl.flags.DEFINE_list', 'flags.DEFINE_list', (['"""complete_selects"""', '"""Completed,Archived"""', '"""The status of the objects to mark as completed."""'], {}), "('complete_selects', 'Completed,Archived',\n 'The status of the objects to mark as completed.')\n", (163, 260), False, 'from absl import app, flags\n'), ((272, 370), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""datetime_field"""', '"""Completed On"""', '"""The datetime to use to mark the todo."""'], {}), "('datetime_field', 'Completed On',\n 'The datetime to use to mark the todo.')\n", (291, 370), False, 'from absl import app, flags\n'), ((367, 464), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""token"""', 'None', '"""The API token of the notion integration."""'], {'required': '(True)'}), "('token', None,\n 'The API token of the notion integration.', required=True)\n", (386, 464), False, 'from absl import app, flags\n'), ((461, 564), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""status_column"""', '"""Status"""', '"""The column to use for completion status selection"""'], {}), "('status_column', 'Status',\n 'The column to use for completion status selection')\n", (480, 564), False, 'from absl import app, flags\n'), ((561, 653), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""sleep_for"""', '(1)', '"""The number of seconds to sleep between requests."""'], {}), "('sleep_for', 1,\n 'The number of seconds to sleep between requests.')\n", (581, 653), False, 'from absl import app, flags\n'), ((951, 1044), 'requests.get', 'requests.get', (['f"""https://api.notion.com/v1/databases/{FLAGS.database_id}"""'], {'headers': 'headers'}), "(f'https://api.notion.com/v1/databases/{FLAGS.database_id}',\n headers=headers)\n", (963, 1044), False, 'import requests\n'), ((4051, 4064), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (4058, 4064), False, 'from absl import app, flags\n'), ((4002, 4017), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4012, 4017), False, 'import time\n'), ((3903, 3996), 'requests.patch', 'requests.patch', (['f"""https://api.notion.com/v1/pages/{page_id}"""'], {'json': 'patch', 'headers': 'headers'}), "(f'https://api.notion.com/v1/pages/{page_id}', json=patch,\n headers=headers)\n", (3917, 3996), False, 'import requests\n'), ((2512, 2677), 'requests.post', 'requests.post', (['f"""https://api.notion.com/v1/databases/{FLAGS.database_id}/query"""'], {'json': "{'filter': filter_data, 'start_cursor': start_cursor}", 'headers': 'headers'}), "(f'https://api.notion.com/v1/databases/{FLAGS.database_id}/query',\n json={'filter': filter_data, 'start_cursor': start_cursor}, headers=headers\n )\n", (2525, 2677), False, 'import requests\n'), ((2789, 2919), 'requests.post', 'requests.post', (['f"""https://api.notion.com/v1/databases/{FLAGS.database_id}/query"""'], {'json': "{'filter': filter_data}", 'headers': 'headers'}), "(f'https://api.notion.com/v1/databases/{FLAGS.database_id}/query',\n json={'filter': filter_data}, headers=headers)\n", (2802, 2919), False, 'import requests\n'), ((3222, 3249), 'time.sleep', 'time.sleep', (['FLAGS.sleep_for'], {}), '(FLAGS.sleep_for)\n', (3232, 3249), False, 'import time\n')] |
'''
The main file for the URL shortener service - implemented using the Falcon REST framework for Python
'''
# External imports
import falcon
import os
# Internal imports
from models.userhandler import UserHandler
from models.urlshandler import UrlsHandler
from models.urlshistoryhandler import UrlsHistoryHandler
from routers.adduserrouter import AddUserRouter
from routers.urlsrouter import UrlsRouter
from routers.redirectrouter import RedirectRouter
from routers.urlinforouter import UrlInfoRouter
# Retrieve some OS variables
mongodb_location = os.environ.get("DBURL","mongodb://localhost:27017/")
mongodb_name = os.environ.get("DBNAME","urlsdb")
memcached_location = os.environ.get("MEMCACHEHOST","localhost")
# Create DB Handlers and connect to the database
usersHandler = UserHandler(mongodb_location,mongodb_name)
usersHandler.connectToDatabase()
urlsHandler = UrlsHandler(mongodb_location,mongodb_name,memcached_location)
urlsHandler.connectToDatabase()
urlsHistoryHandler = UrlsHistoryHandler(mongodb_location,mongodb_name)
urlsHistoryHandler.connectToDatabase()
# Create routers
addUsersRouter = AddUserRouter(usersHandler,urlsHandler,urlsHistoryHandler)
urlsRouter = UrlsRouter(usersHandler,urlsHandler,urlsHistoryHandler)
redirectRouter = RedirectRouter(usersHandler,urlsHandler,urlsHistoryHandler)
urlInfoRouter = UrlInfoRouter(usersHandler,urlsHandler,urlsHistoryHandler)
# Configure the REST framework routing
app = falcon.API()
app.add_route("/users",addUsersRouter)
app.add_route("/urls",urlsRouter)
app.add_route("/{shortform}",redirectRouter)
app.add_route("/urls/{shortform}",urlInfoRouter)
| [
"models.urlshandler.UrlsHandler",
"models.urlshistoryhandler.UrlsHistoryHandler",
"models.userhandler.UserHandler",
"falcon.API",
"os.environ.get",
"routers.redirectrouter.RedirectRouter",
"routers.urlinforouter.UrlInfoRouter",
"routers.urlsrouter.UrlsRouter",
"routers.adduserrouter.AddUserRouter"
] | [((558, 611), 'os.environ.get', 'os.environ.get', (['"""DBURL"""', '"""mongodb://localhost:27017/"""'], {}), "('DBURL', 'mongodb://localhost:27017/')\n", (572, 611), False, 'import os\n'), ((626, 660), 'os.environ.get', 'os.environ.get', (['"""DBNAME"""', '"""urlsdb"""'], {}), "('DBNAME', 'urlsdb')\n", (640, 660), False, 'import os\n'), ((681, 724), 'os.environ.get', 'os.environ.get', (['"""MEMCACHEHOST"""', '"""localhost"""'], {}), "('MEMCACHEHOST', 'localhost')\n", (695, 724), False, 'import os\n'), ((789, 832), 'models.userhandler.UserHandler', 'UserHandler', (['mongodb_location', 'mongodb_name'], {}), '(mongodb_location, mongodb_name)\n', (800, 832), False, 'from models.userhandler import UserHandler\n'), ((879, 942), 'models.urlshandler.UrlsHandler', 'UrlsHandler', (['mongodb_location', 'mongodb_name', 'memcached_location'], {}), '(mongodb_location, mongodb_name, memcached_location)\n', (890, 942), False, 'from models.urlshandler import UrlsHandler\n'), ((994, 1044), 'models.urlshistoryhandler.UrlsHistoryHandler', 'UrlsHistoryHandler', (['mongodb_location', 'mongodb_name'], {}), '(mongodb_location, mongodb_name)\n', (1012, 1044), False, 'from models.urlshistoryhandler import UrlsHistoryHandler\n'), ((1119, 1179), 'routers.adduserrouter.AddUserRouter', 'AddUserRouter', (['usersHandler', 'urlsHandler', 'urlsHistoryHandler'], {}), '(usersHandler, urlsHandler, urlsHistoryHandler)\n', (1132, 1179), False, 'from routers.adduserrouter import AddUserRouter\n'), ((1191, 1248), 'routers.urlsrouter.UrlsRouter', 'UrlsRouter', (['usersHandler', 'urlsHandler', 'urlsHistoryHandler'], {}), '(usersHandler, urlsHandler, urlsHistoryHandler)\n', (1201, 1248), False, 'from routers.urlsrouter import UrlsRouter\n'), ((1264, 1325), 'routers.redirectrouter.RedirectRouter', 'RedirectRouter', (['usersHandler', 'urlsHandler', 'urlsHistoryHandler'], {}), '(usersHandler, urlsHandler, urlsHistoryHandler)\n', (1278, 1325), False, 'from routers.redirectrouter import RedirectRouter\n'), ((1340, 1400), 'routers.urlinforouter.UrlInfoRouter', 'UrlInfoRouter', (['usersHandler', 'urlsHandler', 'urlsHistoryHandler'], {}), '(usersHandler, urlsHandler, urlsHistoryHandler)\n', (1353, 1400), False, 'from routers.urlinforouter import UrlInfoRouter\n'), ((1446, 1458), 'falcon.API', 'falcon.API', ([], {}), '()\n', (1456, 1458), False, 'import falcon\n')] |
import pytest
import climpred
@pytest.mark.xfail(
reason="not properly implemented see https://github.com/pangeo-data/climpred/issues/605"
)
@pytest.mark.parametrize(
"cross_validate", [False, pytest.param(True, marks=pytest.mark.xfail)]
)
def test_seasonality_remove_bias(hindcast_recon_1d_dm, cross_validate):
"""Test the climpred.set_option(seasonality) changes bias reduction. Currently fails for cross_validate bias reduction."""
hindcast = hindcast_recon_1d_dm
hindcast._datasets["initialized"] = (
hindcast.get_initialized().resample(init="1MS").interpolate("linear")
)
alignment = "maximize"
kw = {
"metric": "mse",
"comparison": "e2o",
"dim": "init",
"alignment": alignment,
"reference": None,
}
with climpred.set_options(seasonality="dayofyear"):
dayofyear_seasonality = hindcast.remove_bias(
alignment=alignment, cross_validate=cross_validate
)
with climpred.set_options(seasonality="weekofyear"):
weekofyear_seasonality = hindcast.remove_bias(
alignment=alignment, cross_validate=cross_validate
)
assert not weekofyear_seasonality.get_initialized().identical(
dayofyear_seasonality.get_initialized()
)
assert not weekofyear_seasonality.verify(**kw).identical(
dayofyear_seasonality.verify(**kw)
)
def test_seasonality_climatology(hindcast_recon_1d_dm):
"""Test the climpred.set_option(seasonality) changes climatology."""
hindcast = hindcast_recon_1d_dm
alignment = "maximize"
kw = {
"metric": "mse",
"comparison": "e2o",
"dim": "init",
"alignment": alignment,
"reference": "climatology",
}
with climpred.set_options(seasonality="dayofyear"):
dayofyear_seasonality = hindcast.verify(**kw).sel(skill="climatology")
with climpred.set_options(seasonality="month"):
month_seasonality = hindcast.verify(**kw).sel(skill="climatology")
assert not month_seasonality.identical(dayofyear_seasonality)
@pytest.mark.parametrize("option_bool", [False, True])
def test_option_warn_for_failed_PredictionEnsemble_xr_call(
hindcast_recon_1d_dm, option_bool
):
with climpred.set_options(warn_for_failed_PredictionEnsemble_xr_call=option_bool):
with pytest.warns(None if not option_bool else UserWarning) as record:
hindcast_recon_1d_dm.sel(lead=[1, 2])
if not option_bool:
assert len(record) == 0, print(record[0])
@pytest.mark.parametrize("option_bool", [False, True])
def test_climpred_warnings(hindcast_recon_1d_dm, option_bool):
with climpred.set_options(warn_for_failed_PredictionEnsemble_xr_call=True):
with climpred.set_options(climpred_warnings=option_bool):
print(climpred.options.OPTIONS)
with pytest.warns(UserWarning if option_bool else None) as record:
hindcast_recon_1d_dm.sel(lead=[1, 2])
if not option_bool:
assert len(record) == 0, print(record[0])
| [
"pytest.mark.xfail",
"pytest.param",
"pytest.mark.parametrize",
"climpred.set_options",
"pytest.warns"
] | [((34, 151), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""not properly implemented see https://github.com/pangeo-data/climpred/issues/605"""'}), "(reason=\n 'not properly implemented see https://github.com/pangeo-data/climpred/issues/605'\n )\n", (51, 151), False, 'import pytest\n'), ((2081, 2134), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""option_bool"""', '[False, True]'], {}), "('option_bool', [False, True])\n", (2104, 2134), False, 'import pytest\n'), ((2537, 2590), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""option_bool"""', '[False, True]'], {}), "('option_bool', [False, True])\n", (2560, 2590), False, 'import pytest\n'), ((803, 848), 'climpred.set_options', 'climpred.set_options', ([], {'seasonality': '"""dayofyear"""'}), "(seasonality='dayofyear')\n", (823, 848), False, 'import climpred\n'), ((986, 1032), 'climpred.set_options', 'climpred.set_options', ([], {'seasonality': '"""weekofyear"""'}), "(seasonality='weekofyear')\n", (1006, 1032), False, 'import climpred\n'), ((204, 247), 'pytest.param', 'pytest.param', (['(True)'], {'marks': 'pytest.mark.xfail'}), '(True, marks=pytest.mark.xfail)\n', (216, 247), False, 'import pytest\n'), ((1759, 1804), 'climpred.set_options', 'climpred.set_options', ([], {'seasonality': '"""dayofyear"""'}), "(seasonality='dayofyear')\n", (1779, 1804), False, 'import climpred\n'), ((1894, 1935), 'climpred.set_options', 'climpred.set_options', ([], {'seasonality': '"""month"""'}), "(seasonality='month')\n", (1914, 1935), False, 'import climpred\n'), ((2245, 2321), 'climpred.set_options', 'climpred.set_options', ([], {'warn_for_failed_PredictionEnsemble_xr_call': 'option_bool'}), '(warn_for_failed_PredictionEnsemble_xr_call=option_bool)\n', (2265, 2321), False, 'import climpred\n'), ((2663, 2732), 'climpred.set_options', 'climpred.set_options', ([], {'warn_for_failed_PredictionEnsemble_xr_call': '(True)'}), '(warn_for_failed_PredictionEnsemble_xr_call=True)\n', (2683, 2732), False, 'import climpred\n'), ((2336, 2390), 'pytest.warns', 'pytest.warns', (['(None if not option_bool else UserWarning)'], {}), '(None if not option_bool else UserWarning)\n', (2348, 2390), False, 'import pytest\n'), ((2747, 2798), 'climpred.set_options', 'climpred.set_options', ([], {'climpred_warnings': 'option_bool'}), '(climpred_warnings=option_bool)\n', (2767, 2798), False, 'import climpred\n'), ((2861, 2911), 'pytest.warns', 'pytest.warns', (['(UserWarning if option_bool else None)'], {}), '(UserWarning if option_bool else None)\n', (2873, 2911), False, 'import pytest\n')] |
from LJ_surrogates.LJ_surrogates.build_surrogates import collate_physical_property_data
import json
import matplotlib.pyplot as plt
import numpy as np
path = '/media/owenmadin/storage/alcohol_alkane/modified_8_runs_7_15'
smirks_types_to_change = ['[#6X4:1]', '[#1:1]-[#6X4]', '[#8X2H1+0:1]', '[#1:1]-[#8]']
properties_all = collate_physical_property_data(path, smirks_types_to_change)
y = properties_all[15][list(properties_all[15].keys())[0]][2]
x = np.linspace(0,np.shape(y)[0]-1,num=np.shape(y)[0])
plt.scatter(x,y[:,0],ls='None',marker='x',label='Simulated Values')
plt.axhline(0.717,ls='--',color='k',label='Experimental Value')
plt.title(f'Density \n {list(properties_all[15].keys())[0]}')
plt.legend()
plt.show()
y = properties_all[30][list(properties_all[30].keys())[0]][2]
x = np.linspace(0,np.shape(y)[0]-1,num=np.shape(y)[0])
plt.scatter(x,y[:,0],ls='None',marker='x',label='Simulated Values')
plt.axhline(0.5012,ls='--',color='k',label='Experimental Value')
plt.title(f'Enthalpy of Mixing \n {list(properties_all[30].keys())[0]}')
plt.legend()
plt.show() | [
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.scatter",
"numpy.shape",
"LJ_surrogates.LJ_surrogates.build_surrogates.collate_physical_property_data",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((326, 386), 'LJ_surrogates.LJ_surrogates.build_surrogates.collate_physical_property_data', 'collate_physical_property_data', (['path', 'smirks_types_to_change'], {}), '(path, smirks_types_to_change)\n', (356, 386), False, 'from LJ_surrogates.LJ_surrogates.build_surrogates import collate_physical_property_data\n'), ((506, 578), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y[:, 0]'], {'ls': '"""None"""', 'marker': '"""x"""', 'label': '"""Simulated Values"""'}), "(x, y[:, 0], ls='None', marker='x', label='Simulated Values')\n", (517, 578), True, 'import matplotlib.pyplot as plt\n'), ((574, 640), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(0.717)'], {'ls': '"""--"""', 'color': '"""k"""', 'label': '"""Experimental Value"""'}), "(0.717, ls='--', color='k', label='Experimental Value')\n", (585, 640), True, 'import matplotlib.pyplot as plt\n'), ((700, 712), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (710, 712), True, 'import matplotlib.pyplot as plt\n'), ((713, 723), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (721, 723), True, 'import matplotlib.pyplot as plt\n'), ((842, 914), 'matplotlib.pyplot.scatter', 'plt.scatter', (['x', 'y[:, 0]'], {'ls': '"""None"""', 'marker': '"""x"""', 'label': '"""Simulated Values"""'}), "(x, y[:, 0], ls='None', marker='x', label='Simulated Values')\n", (853, 914), True, 'import matplotlib.pyplot as plt\n'), ((910, 977), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(0.5012)'], {'ls': '"""--"""', 'color': '"""k"""', 'label': '"""Experimental Value"""'}), "(0.5012, ls='--', color='k', label='Experimental Value')\n", (921, 977), True, 'import matplotlib.pyplot as plt\n'), ((1048, 1060), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1058, 1060), True, 'import matplotlib.pyplot as plt\n'), ((1061, 1071), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1069, 1071), True, 'import matplotlib.pyplot as plt\n'), ((469, 480), 'numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (477, 480), True, 'import numpy as np\n'), ((490, 501), 'numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (498, 501), True, 'import numpy as np\n'), ((805, 816), 'numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (813, 816), True, 'import numpy as np\n'), ((826, 837), 'numpy.shape', 'np.shape', (['y'], {}), '(y)\n', (834, 837), True, 'import numpy as np\n')] |
import plotly
import plotly.offline as offline
import plotly.graph_objs as go
import json
import sys
def plot(input, output):
dataFile = open(input, 'r')
data = json.load(dataFile)
# set up relevant data arrays
best = []
avg = []
fitBest = []
fitAvg = []
gen = []
for x in range(0, len(data) - 1):
# analyze objectives [0] (should usually always be the ceres data)
ceresObj = []
for i in range(0, len(data[x]["objectives"])):
ceresObj.append(data[x]["objectives"][i][0])
best.append(min(ceresObj))
avg.append(sum(ceresObj) / len(ceresObj))
fitBest.append(min(data[x]["fitness"]))
fitAvg.append(sum(data[x]["fitness"]) / len(data[x]["fitness"]))
gen.append(x)
# construct plots
bestGraph = go.Bar(x=gen, y=best, name='Best Ceres Score in Population')
avgGraph = go.Bar(x=gen, y=avg, name='Average Population Score')
plots = [bestGraph, avgGraph]
offline.plot(dict(data=plots, layout=dict(title="Ceres Objective Scores")), filename=output + "_ceres.html", auto_open=False)
fitBestGraph = go.Bar(x=gen, y=fitBest, name='Best Fitness Score in Population')
fitAvgGraph = go.Bar(x=gen, y=fitAvg, name='Average Population Fitness')
plots2 = [fitBestGraph, fitAvgGraph]
offline.plot(dict(data=plots2, layout=dict(title="Fitness Scores")), filename=output + "_fitness.html", auto_open=False)
if __name__ == "__main__":
plot(sys.argv[1], sys.argv[2]) | [
"json.load",
"plotly.graph_objs.Bar"
] | [((164, 183), 'json.load', 'json.load', (['dataFile'], {}), '(dataFile)\n', (173, 183), False, 'import json\n'), ((725, 785), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': 'gen', 'y': 'best', 'name': '"""Best Ceres Score in Population"""'}), "(x=gen, y=best, name='Best Ceres Score in Population')\n", (731, 785), True, 'import plotly.graph_objs as go\n'), ((798, 851), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': 'gen', 'y': 'avg', 'name': '"""Average Population Score"""'}), "(x=gen, y=avg, name='Average Population Score')\n", (804, 851), True, 'import plotly.graph_objs as go\n'), ((1029, 1094), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': 'gen', 'y': 'fitBest', 'name': '"""Best Fitness Score in Population"""'}), "(x=gen, y=fitBest, name='Best Fitness Score in Population')\n", (1035, 1094), True, 'import plotly.graph_objs as go\n'), ((1110, 1168), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': 'gen', 'y': 'fitAvg', 'name': '"""Average Population Fitness"""'}), "(x=gen, y=fitAvg, name='Average Population Fitness')\n", (1116, 1168), True, 'import plotly.graph_objs as go\n')] |
# -*- coding: utf-8 -*-
# Part of BrowseInfo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from datetime import date,datetime
class medical_neomatal_apgar(models.Model):
_name = 'medical.neomatal.apgar'
_rec_name = 'new_born_id'
new_born_id = fields.Many2one('medical.newborn', 'Name')
    apgar_activity = fields.Selection([('0', 'None'), ('1', 'Some Flexion'), ('2', 'Flexed Arms and Legs')], 'Activity')
    apgar_appearance = fields.Selection([('0', 'Central Cyanosis'), ('1', 'Acrocyanosis'), ('2', 'No Cyanosis')], 'Appearance')
    apgar_grimace = fields.Selection([('0', 'No response to stimulation'), ('1', 'Grimace when stimulated'), ('2', 'Cry or pull away when stimulated')], 'Grimace')
apgar_minute = fields.Integer('Minute', required = True)
apgar_respiration = fields.Selection([('0', 'Absent'),('1', 'Weak / Irregular'),('2', 'Strong')], 'Respiration')
apgar_pulse = fields.Selection([('0', 'None'), ('1', '< 100'), ('2','> 100')], 'Pulse')
apgar_scores = fields.Integer('Apgar Score')
@api.onchange('apgar_activity' , 'apgar_appearance', 'apgar_grimace', 'apgar_minute', 'apgar_respiration', 'apgar_pulse',)
def on_change_selection(self):
        # The Apgar score is the sum of the five clinical signs; the minute of
        # assessment is recorded separately and is not added to the score.
        self.apgar_scores = int(self.apgar_activity) + int(self.apgar_appearance) + int(self.apgar_grimace) + int(self.apgar_respiration) + int(self.apgar_pulse)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | [
"odoo.fields.Many2one",
"odoo.fields.Integer",
"odoo.api.onchange",
"odoo.fields.Selection"
] | [((318, 360), 'odoo.fields.Many2one', 'fields.Many2one', (['"""medical.newborn"""', '"""Name"""'], {}), "('medical.newborn', 'Name')\n", (333, 360), False, 'from odoo import api, fields, models, _\n'), ((382, 483), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'None'), ('1', 'Some Flexion'), ('2', 'Fixed Arm and Legs')]", '"""Activity"""'], {}), "([('0', 'None'), ('1', 'Some Flexion'), ('2',\n 'Fixed Arm and Legs')], 'Activity')\n", (398, 483), False, 'from odoo import api, fields, models, _\n'), ((499, 605), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'Central cyanosis'), ('1', 'Acrosynosis'), ('2', 'No Cynosis')]", '"""Appearance"""'], {}), "([('0', 'Central cyanosis'), ('1', 'Acrosynosis'), ('2',\n 'No Cynosis')], 'Appearance')\n", (515, 605), False, 'from odoo import api, fields, models, _\n'), ((621, 770), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'No response to simulation'), ('1', 'Grimance when simulated'), ('2',\n 'Cry Or pull away when simulated')]", '"""Grimace"""'], {}), "([('0', 'No response to simulation'), ('1',\n 'Grimance when simulated'), ('2', 'Cry Or pull away when simulated')],\n 'Grimace')\n", (637, 770), False, 'from odoo import api, fields, models, _\n'), ((780, 819), 'odoo.fields.Integer', 'fields.Integer', (['"""Minute"""'], {'required': '(True)'}), "('Minute', required=True)\n", (794, 819), False, 'from odoo import api, fields, models, _\n'), ((846, 944), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'Absent'), ('1', 'Weak / Irregular'), ('2', 'Strong')]", '"""Respiration"""'], {}), "([('0', 'Absent'), ('1', 'Weak / Irregular'), ('2',\n 'Strong')], 'Respiration')\n", (862, 944), False, 'from odoo import api, fields, models, _\n'), ((957, 1031), 'odoo.fields.Selection', 'fields.Selection', (["[('0', 'None'), ('1', '< 100'), ('2', '> 100')]", '"""Pulse"""'], {}), "([('0', 'None'), ('1', '< 100'), ('2', '> 100')], 'Pulse')\n", (973, 1031), False, 'from odoo import api, fields, models, _\n'), ((1050, 1079), 'odoo.fields.Integer', 'fields.Integer', (['"""Apgar Score"""'], {}), "('Apgar Score')\n", (1064, 1079), False, 'from odoo import api, fields, models, _\n'), ((1090, 1213), 'odoo.api.onchange', 'api.onchange', (['"""apgar_activity"""', '"""apgar_appearance"""', '"""apgar_grimace"""', '"""apgar_minute"""', '"""apgar_respiration"""', '"""apgar_pulse"""'], {}), "('apgar_activity', 'apgar_appearance', 'apgar_grimace',\n 'apgar_minute', 'apgar_respiration', 'apgar_pulse')\n", (1102, 1213), False, 'from odoo import api, fields, models, _\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from collections import OrderedDict
import sys
sys.path.append("coco-caption")
from pycocoevalcap.bleu.bleu import Bleu
from pycocoevalcap.cider.cider import Cider
def array_to_str(arr):
out = ''
for i in range(len(arr)):
out += str(arr[i]) + ' '
if arr[i] == 0:
break
return out.strip()
def get_self_critical_reward(greedy_res, data_gts, sample_res, opt):
batch_size = len(data_gts)
sample_res = sample_res.numpy()
greedy_res = greedy_res.numpy()
scores_greedy = get_scores(greedy_res, data_gts, opt)
scores_sample = get_scores(sample_res, data_gts, opt)
s_sample = scores_sample.reshape([batch_size, -1])
b_greedy = scores_greedy.reshape([batch_size, -1])
scores = s_sample - b_greedy
scores = scores.reshape(-1)
rewards = np.repeat(scores[:, np.newaxis], sample_res.shape[1], 1)
return rewards
def get_scores(candidates, references, opt):
batch_size = len(references)
candidates_size = candidates.shape[0]
seq_per_img = candidates_size // batch_size # gen_result_size = batch_size * seq_per_img
candidates_dict = OrderedDict()
references_dict = OrderedDict()
for i in range(candidates_size):
candidates_dict[i] = [array_to_str(candidates[i])]
for i in range(candidates_size):
references_dict[i] = \
[array_to_str(references[i // seq_per_img][j]) for j in range(len(references[i // seq_per_img]))]
if opt.cider_reward_weight > 0:
_, cider_scores = Cider().compute_score(references_dict, candidates_dict)
else:
cider_scores = 0
if opt.bleu_reward_weight > 0:
_, bleu_scores = Bleu().compute_score(references_dict, candidates_dict)
bleu_scores = np.array(bleu_scores[0])
else:
bleu_scores = 0
scores = opt.cider_reward_weight * cider_scores + opt.bleu_reward_weight * bleu_scores
return scores
| [
"collections.OrderedDict",
"numpy.repeat",
"pycocoevalcap.bleu.bleu.Bleu",
"numpy.array",
"sys.path.append",
"pycocoevalcap.cider.cider.Cider"
] | [((177, 208), 'sys.path.append', 'sys.path.append', (['"""coco-caption"""'], {}), "('coco-caption')\n", (192, 208), False, 'import sys\n'), ((941, 997), 'numpy.repeat', 'np.repeat', (['scores[:, np.newaxis]', 'sample_res.shape[1]', '(1)'], {}), '(scores[:, np.newaxis], sample_res.shape[1], 1)\n', (950, 997), True, 'import numpy as np\n'), ((1257, 1270), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1268, 1270), False, 'from collections import OrderedDict\n'), ((1293, 1306), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1304, 1306), False, 'from collections import OrderedDict\n'), ((1873, 1897), 'numpy.array', 'np.array', (['bleu_scores[0]'], {}), '(bleu_scores[0])\n', (1881, 1897), True, 'import numpy as np\n'), ((1645, 1652), 'pycocoevalcap.cider.cider.Cider', 'Cider', ([], {}), '()\n', (1650, 1652), False, 'from pycocoevalcap.cider.cider import Cider\n'), ((1796, 1802), 'pycocoevalcap.bleu.bleu.Bleu', 'Bleu', ([], {}), '()\n', (1800, 1802), False, 'from pycocoevalcap.bleu.bleu import Bleu\n')] |
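The rewards returned above are typically consumed in a REINFORCE-style loss that weights the sampled tokens' log-probabilities; the sketch below is illustrative, with log_probs and mask as hypothetical arrays of the same shape as rewards rather than names taken from this file.

import numpy as np

def self_critical_loss(rewards, log_probs, mask):
    # rewards, log_probs, mask: arrays of shape (batch * seq_per_img, max_len);
    # tokens after the end-of-sequence marker are zeroed out by `mask`.
    return -np.sum(rewards * log_probs * mask) / np.maximum(np.sum(mask), 1.0)
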
#!/usr/bin/python3.8
#OpenCV 4.2, Raspberry pi 3/3b/4b - test on macOS
import cv2
import numpy as np
import time
from utils.dir import get_data_xml
from utils.settings import get_settings_camera
from core.alert import Alert
from core.record import Record
class Camera:
def __init__(self, cam_id = 0, on_guard = False):
super().__init__()
self.font = cv2.FONT_HERSHEY_SIMPLEX
self.on_guard = on_guard
#cam and dimentions
self.cam_id = cam_id
self.cam = None
self.frame1 = None
self.frame2 = None
self.RELEASE_CAM = False
self._ALERT = Alert()
self._RECORD = Record()
#initialize
def initialize(self):
self.settings_camera = get_settings_camera()
self.RELEASE_CAM = False
if self.cam and self.cam.isOpened():
return
elif self.cam:
self.clear()
self._put_cam_video()
def _put_cam_video(self):
self.cam = cv2.VideoCapture(self.cam_id)
        # VideoCapture property 3 is the frame width and 4 is the frame height.
        fwidth = int(self.cam.get(3))
        fheight = int(self.cam.get(4))
        self.cam.set(3, fwidth)
        self.cam.set(4, fheight)
        self._RECORD.set_dimentions(fwidth, fheight)
def _get_frame_gray(self, frame1, frame2):
# Difference between frame1(image) and frame2(image)
diff = cv2.absdiff(frame1, frame2)
# Converting color image to gray_scale image
gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
return gray
def _get_frame_diff(self, frame1, frame2):
# Difference between frame1(image) and frame2(image)
diff = cv2.absdiff(frame1, frame2)
# Converting color image to gray_scale image
gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
# Converting gray scale image to GaussianBlur, so that change can be find easily
blur = cv2.GaussianBlur(gray, (5, 5), 0)
# If pixel value is greater than 20, it is assigned white(255) otherwise black
_, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
return thresh
def _get_contours(self, frame1, frame2):
# Difference between frame1(image) and frame2(image)
diff = cv2.absdiff(frame1, frame2)
# Converting color image to gray_scale image
gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
# Converting gray scale image to GaussianBlur, so that change can be find easily
blur = cv2.GaussianBlur(gray, (5, 5), 0)
# If pixel value is greater than 20, it is assigned white(255) otherwise black
_, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
dilated = cv2.dilate(thresh, None, iterations=4)
# finding contours of moving object
contours, hirarchy = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
return contours, hirarchy
def get_frame_gray(self):
if self.frame1 is None:
_, self.frame1 = self.cam.read()
elif self.frame2 is None:
_, self.frame1 = self.cam.read()
else:
self.frame1 = self.frame2
ret, self.frame2 = self.cam.read()
contours, hirarchy = self._get_contours(self.frame1, self.frame2)
is_mov = False
# making rectangle around moving object
for contour in contours:
(x, y, w, h) = cv2.boundingRect(contour)
referenceArea = self._is_object(contour)
if referenceArea is None:
continue
is_mov = True
break
frame = self._get_frame_gray(self.frame1, self.frame2)
self._process_mov(4, is_mov, ret, frame)
return frame
def get_frame_diff(self):
if self.frame1 is None:
_, self.frame1 = self.cam.read()
elif self.frame2 is None:
_, self.frame1 = self.cam.read()
else:
self.frame1 = self.frame2
ret, self.frame2 = self.cam.read()
contours, hirarchy = self._get_contours(self.frame1, self.frame2)
is_mov = False
# making rectangle around moving object
for contour in contours:
(x, y, w, h) = cv2.boundingRect(contour)
referenceArea = self._is_object(contour)
if referenceArea is None:
continue
is_mov = True
break
frame = self._get_frame_diff(self.frame1, self.frame2)
self._process_mov(3, is_mov, ret, frame)
return frame
def get_frame_normal(self):
if self.frame1 is None:
_, self.frame1 = self.cam.read()
elif self.frame2 is None:
_, self.frame1 = self.cam.read()
else:
self.frame1 = self.frame2
frame = cv2.flip(self.frame1, 180)
ret, self.frame2 = self.cam.read()
contours, hirarchy = self._get_contours(self.frame1, self.frame2)
is_mov = False
# making rectangle around moving object
for contour in contours:
(x, y, w, h) = cv2.boundingRect(contour)
referenceArea = self._is_object(contour)
if referenceArea is None:
continue
is_mov = True
break
self._process_mov(1, is_mov, ret, frame)
return frame
def get_frame_mov(self):
if self.frame1 is None:
_, self.frame1 = self.cam.read()
elif self.frame2 is None:
_, self.frame1 = self.cam.read()
else:
self.frame1 = self.frame2
ret, self.frame2 = self.cam.read()
contours, hirarchy = self._get_contours(self.frame1, self.frame2)
is_mov = False
# making rectangle around moving object
for contour in contours:
(x, y, w, h) = cv2.boundingRect(contour)
referenceArea = self._is_object(contour)
if referenceArea is None:
continue
self._draw(self.frame1, x, y, w, h, "movimiento")
is_mov = True
self._process_mov(2, is_mov, ret, self.frame1)
return self.frame1
def _draw(self, frame, x, y, w, h, text):
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 255), 2)
cv2.putText(frame, text, (x+5, y-5), self.font, 1, (255, 255, 255), 2)
def _is_object(self, contour):
referenceArea = cv2.contourArea(contour)
if referenceArea < int(self.settings_camera["MIN_AREA_OBJECT"]):
return None
return referenceArea
def _process_mov(self, type_cam, is_mov, ret, frame):
item = { "type_cam": type_cam, "is_mov": is_mov, "ret": ret, "frame": frame }
self._RECORD.put_nowait(item)
if is_mov == True:
if int(self.settings_camera["ON_GUARD"]) == 1:
item = { "message" : "Se ha detectado un movimiento."}
self._ALERT.put_nowait(item=item)
def get_stream(self, type_cam = 1):
if type_cam == 3:
frame = self.get_frame_diff()
ret, frame_jpeg = cv2.imencode('.jpg', frame)
return frame_jpeg
elif type_cam == 4:
frame = self.get_frame_gray()
ret, frame_jpeg = cv2.imencode('.jpg', frame)
return frame_jpeg
elif type_cam == 2:
frame = self.get_frame_mov()
ret, frame_jpeg = cv2.imencode('.jpg', frame)
return frame_jpeg
else:
frame = self.get_frame_normal()
ret, frame_jpeg = cv2.imencode('.jpg', frame)
return frame_jpeg
def generated_stream(self, type_cam = 1):
self.initialize()
while self.cam.isOpened():
frame_jpeg = self.get_stream(type_cam=type_cam)
if self.RELEASE_CAM == True or self.RELEASE_CAM is None:
break
else:
yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' + frame_jpeg.tobytes() + b'\r\n\r\n')
def clear(self):
try:
self.frame1 = None
self.frame2 = None
if self.cam:
self.cam.release()
except Exception as e:
print(e)
def release(self, with_threading = False, window = False):
try:
self.RELEASE_CAM = True
time.sleep(0.9)
self.frame1 = None
self.frame2 = None
if self.cam:
self.cam.release()
if with_threading:
self.cam.stop()
if window:
cv2.destroyAllWindows()
self._RECORD.release()
except Exception as e:
print(e)
def __del__(self):
try:
self.RELEASE_CAM = None
time.sleep(0.9)
self.frame1 = None
self.frame2 = None
if self.cam:
self.cam.release()
self._RECORD.release()
self._RECORD = None
self._ALERT = None
cv2.destroyAllWindows()
except Exception as e:
print(e) | [
"cv2.rectangle",
"core.record.Record",
"utils.settings.get_settings_camera",
"cv2.flip",
"cv2.imencode",
"cv2.threshold",
"core.alert.Alert",
"time.sleep",
"cv2.putText",
"cv2.contourArea",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.findContours",
"cv2.dilate",
"cv2.GaussianBlur",
"cv2.absdiff",
"cv2.boundingRect"
] | [((620, 627), 'core.alert.Alert', 'Alert', ([], {}), '()\n', (625, 627), False, 'from core.alert import Alert\n'), ((651, 659), 'core.record.Record', 'Record', ([], {}), '()\n', (657, 659), False, 'from core.record import Record\n'), ((734, 755), 'utils.settings.get_settings_camera', 'get_settings_camera', ([], {}), '()\n', (753, 755), False, 'from utils.settings import get_settings_camera\n'), ((981, 1010), 'cv2.VideoCapture', 'cv2.VideoCapture', (['self.cam_id'], {}), '(self.cam_id)\n', (997, 1010), False, 'import cv2\n'), ((1328, 1355), 'cv2.absdiff', 'cv2.absdiff', (['frame1', 'frame2'], {}), '(frame1, frame2)\n', (1339, 1355), False, 'import cv2\n'), ((1424, 1462), 'cv2.cvtColor', 'cv2.cvtColor', (['diff', 'cv2.COLOR_BGR2GRAY'], {}), '(diff, cv2.COLOR_BGR2GRAY)\n', (1436, 1462), False, 'import cv2\n'), ((1611, 1638), 'cv2.absdiff', 'cv2.absdiff', (['frame1', 'frame2'], {}), '(frame1, frame2)\n', (1622, 1638), False, 'import cv2\n'), ((1707, 1745), 'cv2.cvtColor', 'cv2.cvtColor', (['diff', 'cv2.COLOR_BGR2GRAY'], {}), '(diff, cv2.COLOR_BGR2GRAY)\n', (1719, 1745), False, 'import cv2\n'), ((1850, 1883), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gray', '(5, 5)', '(0)'], {}), '(gray, (5, 5), 0)\n', (1866, 1883), False, 'import cv2\n'), ((1991, 2038), 'cv2.threshold', 'cv2.threshold', (['blur', '(20)', '(255)', 'cv2.THRESH_BINARY'], {}), '(blur, 20, 255, cv2.THRESH_BINARY)\n', (2004, 2038), False, 'import cv2\n'), ((2183, 2210), 'cv2.absdiff', 'cv2.absdiff', (['frame1', 'frame2'], {}), '(frame1, frame2)\n', (2194, 2210), False, 'import cv2\n'), ((2279, 2317), 'cv2.cvtColor', 'cv2.cvtColor', (['diff', 'cv2.COLOR_BGR2GRAY'], {}), '(diff, cv2.COLOR_BGR2GRAY)\n', (2291, 2317), False, 'import cv2\n'), ((2422, 2455), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gray', '(5, 5)', '(0)'], {}), '(gray, (5, 5), 0)\n', (2438, 2455), False, 'import cv2\n'), ((2563, 2610), 'cv2.threshold', 'cv2.threshold', (['blur', '(20)', '(255)', 'cv2.THRESH_BINARY'], {}), '(blur, 20, 255, cv2.THRESH_BINARY)\n', (2576, 2610), False, 'import cv2\n'), ((2629, 2667), 'cv2.dilate', 'cv2.dilate', (['thresh', 'None'], {'iterations': '(4)'}), '(thresh, None, iterations=4)\n', (2639, 2667), False, 'import cv2\n'), ((2741, 2806), 'cv2.findContours', 'cv2.findContours', (['dilated', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (2757, 2806), False, 'import cv2\n'), ((4710, 4736), 'cv2.flip', 'cv2.flip', (['self.frame1', '(180)'], {}), '(self.frame1, 180)\n', (4718, 4736), False, 'import cv2\n'), ((6095, 6157), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 255), 2)\n', (6108, 6157), False, 'import cv2\n'), ((6162, 6236), 'cv2.putText', 'cv2.putText', (['frame', 'text', '(x + 5, y - 5)', 'self.font', '(1)', '(255, 255, 255)', '(2)'], {}), '(frame, text, (x + 5, y - 5), self.font, 1, (255, 255, 255), 2)\n', (6173, 6236), False, 'import cv2\n'), ((6293, 6317), 'cv2.contourArea', 'cv2.contourArea', (['contour'], {}), '(contour)\n', (6308, 6317), False, 'import cv2\n'), ((3328, 3353), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (3344, 3353), False, 'import cv2\n'), ((4134, 4159), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (4150, 4159), False, 'import cv2\n'), ((4986, 5011), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (5002, 5011), False, 'import cv2\n'), ((5728, 5753), 'cv2.boundingRect', 'cv2.boundingRect', (['contour'], {}), '(contour)\n', (5744, 5753), False, 'import cv2\n'), ((6981, 7008), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (6993, 7008), False, 'import cv2\n'), ((8223, 8238), 'time.sleep', 'time.sleep', (['(0.9)'], {}), '(0.9)\n', (8233, 8238), False, 'import time\n'), ((8667, 8682), 'time.sleep', 'time.sleep', (['(0.9)'], {}), '(0.9)\n', (8677, 8682), False, 'import time\n'), ((8915, 8938), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (8936, 8938), False, 'import cv2\n'), ((7139, 7166), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (7151, 7166), False, 'import cv2\n'), ((8471, 8494), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (8492, 8494), False, 'import cv2\n'), ((7296, 7323), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (7308, 7323), False, 'import cv2\n'), ((7442, 7469), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'frame'], {}), "('.jpg', frame)\n", (7454, 7469), False, 'import cv2\n')]
#!/usr/bin/env python3
from __future__ import annotations
from typing import Optional, List
from lxml import etree
from capybara_tw.model.capy_context import CapyContext
from capybara_tw.util.xliff_util import Xliff12Tag
class CapyContextGroup(object):
contexts: List[CapyContext]
def __init__(self):
self.contexts = []
@classmethod
def from_element(cls, elem) -> Optional[CapyContextGroup]:
if elem is None:
return None
obj = cls()
obj.contexts = [CapyContext.from_element(e) for e in elem.iterchildren(Xliff12Tag.context)]
return obj
def to_element(self):
root = etree.Element(Xliff12Tag.context_group)
for context in self.contexts:
root.append(context.to_element())
return root
| [
"lxml.etree.Element",
"capybara_tw.model.capy_context.CapyContext.from_element"
] | [((653, 692), 'lxml.etree.Element', 'etree.Element', (['Xliff12Tag.context_group'], {}), '(Xliff12Tag.context_group)\n', (666, 692), False, 'from lxml import etree\n'), ((516, 543), 'capybara_tw.model.capy_context.CapyContext.from_element', 'CapyContext.from_element', (['e'], {}), '(e)\n', (540, 543), False, 'from capybara_tw.model.capy_context import CapyContext\n')] |
import mimetypes
from datetime import datetime
import requests
import os
from loguru import logger
class ApiVideo:
    DATE_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
CHUNK_SIZE = 6000000
# Set up variables for endpoints (we will create the third URL programmatically later)
AUTH_URL = "https://ws.api.video/auth/api-key"
CREATE_URL = "https://ws.api.video/videos"
# Set up headers and payload for first authentication request
HEADERS = {
"Accept": "application/json",
"Content-Type": "application/json"
}
PAYLOAD = {"apiKey": os.environ["API_VIDEO_API_KEY"]}
def __init__(self):
self.token = None
def get_token(self):
# Send the first authentication request to get a token.
# The token can be used for one hour with the rest of the API endpoints.
response = requests.request("POST", self.AUTH_URL, json=self.PAYLOAD, headers=self.HEADERS)
response = response.json()
return response.get("access_token")
def create_video(self):
# Set up headers for authentication - the rest of the endpoints use Bearer authentication.
headers = {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": self.__auth_token()
}
# Create the video container payload, you can add more parameters if you like,
# check out the docs at https://docs.api.video
payload = {
"title": f"Tesla Video {datetime.now().strftime(self.DATE_TIME_FORMAT)}",
"description": f"Tesla video captured at {datetime.now().strftime(self.DATE_TIME_FORMAT)}"
}
# Send the request to create the container, and retrieve the videoId from the response.
response = requests.request("POST", self.CREATE_URL, json=payload, headers=headers)
response = response.json()
return response["videoId"]
def upload_video(self, file_path: str):
# Set up the chunk size. This is how much you want to read from the file every time you grab a new chunk of
# your file to read. If you're doing a big upload, the recommendation is 50 - 80 MB (50000000-80000000 bytes).
# It's listed at 6MB (6000000 bytes) because then you can try this sample code with a small file just to see how
# it will work. The minimum size for a chunk is 5 MB.
logger.info(f"Uploading {file_path} to api.video")
if self.__is_video(file_path):
# This is our chunk reader. This is what gets the next chunk of data ready to send.
def read_in_chunks(file_object):
while True:
data = file_object.read(self.CHUNK_SIZE)
if not data:
break
yield data
# Create endpoint to upload video to - you have to add the videoId into the URL
video_id = self.create_video()
upload_url = f"{self.CREATE_URL}/{video_id}/source"
content_size = os.stat(file_path).st_size
logger.info(f"{file_path} {content_size / 1024 / 1024} MB")
file = open(file_path, "rb")
index = 0
headers = {}
for chunk in read_in_chunks(file):
offset = index + len(chunk)
headers["Content-Range"] = "bytes %s-%s/%s" % (index, offset - 1, content_size)
headers["Authorization"] = self.__auth_token()
index = offset
try:
file = {"file": chunk}
requests.post(upload_url, files=file, headers=headers)
except Exception as e:
logger.error(e)
logger.info(f"Finished uploading {file_path} to api.video")
else:
logger.debug("Skipping uploading non video file!")
def __auth_token(self):
if not self.token:
self.token = self.get_token()
return "Bearer " + self.get_token()
@staticmethod
def __is_video(file_path):
file_type = mimetypes.guess_type(file_path)[0]
if file_type is not None and file_type.split("/")[0] == "video":
return True
else:
return False
| [
"requests.post",
"loguru.logger.debug",
"loguru.logger.info",
"requests.request",
"loguru.logger.error",
"datetime.datetime.now",
"mimetypes.guess_type",
"os.stat"
] | [((847, 932), 'requests.request', 'requests.request', (['"""POST"""', 'self.AUTH_URL'], {'json': 'self.PAYLOAD', 'headers': 'self.HEADERS'}), "('POST', self.AUTH_URL, json=self.PAYLOAD, headers=self.HEADERS\n )\n", (863, 932), False, 'import requests\n'), ((1783, 1855), 'requests.request', 'requests.request', (['"""POST"""', 'self.CREATE_URL'], {'json': 'payload', 'headers': 'headers'}), "('POST', self.CREATE_URL, json=payload, headers=headers)\n", (1799, 1855), False, 'import requests\n'), ((2399, 2449), 'loguru.logger.info', 'logger.info', (['f"""Uploading {file_path} to api.video"""'], {}), "(f'Uploading {file_path} to api.video')\n", (2410, 2449), False, 'from loguru import logger\n'), ((3082, 3141), 'loguru.logger.info', 'logger.info', (['f"""{file_path} {content_size / 1024 / 1024} MB"""'], {}), "(f'{file_path} {content_size / 1024 / 1024} MB')\n", (3093, 3141), False, 'from loguru import logger\n'), ((3739, 3798), 'loguru.logger.info', 'logger.info', (['f"""Finished uploading {file_path} to api.video"""'], {}), "(f'Finished uploading {file_path} to api.video')\n", (3750, 3798), False, 'from loguru import logger\n'), ((3825, 3875), 'loguru.logger.debug', 'logger.debug', (['"""Skipping uploading non video file!"""'], {}), "('Skipping uploading non video file!')\n", (3837, 3875), False, 'from loguru import logger\n'), ((4089, 4120), 'mimetypes.guess_type', 'mimetypes.guess_type', (['file_path'], {}), '(file_path)\n', (4109, 4120), False, 'import mimetypes\n'), ((3042, 3060), 'os.stat', 'os.stat', (['file_path'], {}), '(file_path)\n', (3049, 3060), False, 'import os\n'), ((3596, 3650), 'requests.post', 'requests.post', (['upload_url'], {'files': 'file', 'headers': 'headers'}), '(upload_url, files=file, headers=headers)\n', (3609, 3650), False, 'import requests\n'), ((3710, 3725), 'loguru.logger.error', 'logger.error', (['e'], {}), '(e)\n', (3722, 3725), False, 'from loguru import logger\n'), ((1504, 1518), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1516, 1518), False, 'from datetime import datetime\n'), ((1608, 1622), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1620, 1622), False, 'from datetime import datetime\n')] |
#! /usr/bin/env python
#################################################################################
# File Name : IPADS_GraphX_Plot_Shuffle.py
# Created By : xd
# Creation Date : [2014-08-14 21:12]
# Last Modified : [2014-08-14 22:09]
# Description : report on the shuffle (cross executor communication)
#################################################################################
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
N = 3
# partition
partition = [340400244, 333502667, 222239123, 244820041, 259525698, 255664500, 361246936, 357519858, 238320000, 258216451, 278573962, 280966093, 361764493, 360257019, 249256047, 259805173, 288140314, 288141909]
# execution
execution = [880857006, 795795372, 942038390, 1444367668, 1575656386, 1581155335, 937547349, 866032921, 1048610600, 1552889437, 1751384300, 1749533720, 1014937184, 972140679, 1278409479, 1670842576, 1952079232, 1951972584]
# convert unit
partition = map(lambda x: x/1024.0/1024/1024, partition)
execution = map(lambda x: x/1024.0/1024/1024, execution)
ind = np.arange(N) # the x locations for the groups
width = 0.1 # the width of the bars: can also be len(x) sequence
labelPartition = ["Partition"]
labelExecution = ["Execution"]
for i in range(0, 6):
if i<len(labelExecution):
plt.bar(ind+i*width, execution[i::6], width, color='b', bottom=partition[i::6], label=labelExecution[i])
else:
plt.bar(ind+i*width, execution[i::6], width, color='b', bottom=partition[i::6])
if i<len(labelPartition):
plt.bar(ind+i*width, partition[i::6], width, color='r', label=labelPartition[i])
else:
plt.bar(ind+i*width, partition[i::6], width, color='r')
plt.ylabel('Network Shuffle (GB)')
plt.xlabel('Number of Partitions')
plt.title('Network Shuffle')
plt.text(0.1,2.0,'\nEach group from left to right:\nHybrid(100), Hybrid(30),\nEdge2D, Edge1D, Random, CanonicalRandom\n')
plt.xticks(ind + 2 * width, ('16', '25', '48'))
plt.legend()
plt.autoscale(tight=False)
#plt.show()
plt.savefig('7.png')
| [
"matplotlib.pyplot.text",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.autoscale",
"matplotlib.pyplot.title",
"numpy.arange"
] | [((1146, 1158), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (1155, 1158), True, 'import numpy as np\n'), ((1787, 1821), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Network Shuffle (GB)"""'], {}), "('Network Shuffle (GB)')\n", (1797, 1821), True, 'import matplotlib.pyplot as plt\n'), ((1822, 1856), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of Partitions"""'], {}), "('Number of Partitions')\n", (1832, 1856), True, 'import matplotlib.pyplot as plt\n'), ((1857, 1885), 'matplotlib.pyplot.title', 'plt.title', (['"""Network Shuffle"""'], {}), "('Network Shuffle')\n", (1866, 1885), True, 'import matplotlib.pyplot as plt\n'), ((1886, 2018), 'matplotlib.pyplot.text', 'plt.text', (['(0.1)', '(2.0)', '"""\nEach group from left to right:\nHybrid(100), Hybrid(30),\nEdge2D, Edge1D, Random, CanonicalRandom\n"""'], {}), '(0.1, 2.0,\n """\nEach group from left to right:\nHybrid(100), Hybrid(30),\nEdge2D, Edge1D, Random, CanonicalRandom\n"""\n )\n', (1894, 2018), True, 'import matplotlib.pyplot as plt\n'), ((2008, 2055), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(ind + 2 * width)', "('16', '25', '48')"], {}), "(ind + 2 * width, ('16', '25', '48'))\n", (2018, 2055), True, 'import matplotlib.pyplot as plt\n'), ((2056, 2068), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2066, 2068), True, 'import matplotlib.pyplot as plt\n'), ((2069, 2095), 'matplotlib.pyplot.autoscale', 'plt.autoscale', ([], {'tight': '(False)'}), '(tight=False)\n', (2082, 2095), True, 'import matplotlib.pyplot as plt\n'), ((2109, 2129), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""7.png"""'], {}), "('7.png')\n", (2120, 2129), True, 'import matplotlib.pyplot as plt\n'), ((1389, 1502), 'matplotlib.pyplot.bar', 'plt.bar', (['(ind + i * width)', 'execution[i::6]', 'width'], {'color': '"""b"""', 'bottom': 'partition[i::6]', 'label': 'labelExecution[i]'}), "(ind + i * width, execution[i::6], width, color='b', bottom=\n partition[i::6], label=labelExecution[i])\n", (1396, 1502), True, 'import matplotlib.pyplot as plt\n'), ((1512, 1600), 'matplotlib.pyplot.bar', 'plt.bar', (['(ind + i * width)', 'execution[i::6]', 'width'], {'color': '"""b"""', 'bottom': 'partition[i::6]'}), "(ind + i * width, execution[i::6], width, color='b', bottom=\n partition[i::6])\n", (1519, 1600), True, 'import matplotlib.pyplot as plt\n'), ((1630, 1719), 'matplotlib.pyplot.bar', 'plt.bar', (['(ind + i * width)', 'partition[i::6]', 'width'], {'color': '"""r"""', 'label': 'labelPartition[i]'}), "(ind + i * width, partition[i::6], width, color='r', label=\n labelPartition[i])\n", (1637, 1719), True, 'import matplotlib.pyplot as plt\n'), ((1729, 1788), 'matplotlib.pyplot.bar', 'plt.bar', (['(ind + i * width)', 'partition[i::6]', 'width'], {'color': '"""r"""'}), "(ind + i * width, partition[i::6], width, color='r')\n", (1736, 1788), True, 'import matplotlib.pyplot as plt\n')] |
"""Position sensor
==================
"""
from __future__ import print_function
import os
import signal
import atexit
import sys
import numpy as np
import u3
from fluiddyn.util import time_as_str
path_save = os.path.join(os.path.expanduser("~"), ".fluidlab")
if not os.path.exists(path_save):
os.mkdir(path_save)
def sig_handler(signo, frame):
sys.exit(0)
class PositionSensor:
"""Communicate with the position sensor (2 output signals A and B in
quadrature) via an acquisition card LabJack U3-HV.
Parameters
----------
port : {4, integer}
Number of the FIO port on the card for A signal. B will be on
        the next port. Note that you can't use AIN0-AIN3, which are
        dedicated to analog inputs.
Notes
-----
The acquisition card considers these 2 signals as timers. They have to be
configured in quadrature (mode 8).
The sensor does not provide an absolute position but only a
    relative displacement. This class saves the absolute position in
    the file positions_sensor.txt.
"""
def __init__(self, port=4):
print("start init position sensor")
self.daq_u3 = u3.U3()
self.daq_u3.configIO(
TimerCounterPinOffset=port, NumberOfTimersEnabled=2, FIOAnalog=0
)
self.daq_u3.getFeedback(u3.Timer0Config(8), u3.Timer1Config(8))
self.meter_per_increment = 0.000105 / 4
try:
self._shift_absolute_pos, self._shift_relative_pos = self.load()
except IOError:
self._shift_relative_pos = 0.0
self._shift_absolute_pos = 0.0
atexit.register(self.save)
signal.signal(signal.SIGTERM, sig_handler)
print(time_as_str() + ": position sensor initialized.")
sys.stdout.flush()
def get_value_counter(self):
print("get_value_counter")
return self.daq_u3.getFeedback(u3.QuadratureInputTimer())[0]
def get_relative_position(self):
counter = self.get_value_counter()
return counter * self.meter_per_increment + self._shift_relative_pos
def get_absolute_position(self):
counter = self.get_value_counter()
return counter * self.meter_per_increment + self._shift_absolute_pos
def reset_counter_to_zero(self):
self._shift_relative_pos = self.get_relative_position()
self._shift_absolute_pos = self.get_absolute_position()
self.daq_u3.getFeedback(
u3.Timer0(UpdateReset=True), u3.Timer1(UpdateReset=True)
)
def set_relative_origin(self, value=0.0):
rel_pos = self.get_relative_position()
self._shift_relative_pos += value - rel_pos
def set_absolute_origin(self, value=0.0):
abs_pos = self.get_absolute_position()
self._shift_absolute_pos += value - abs_pos
def save(self):
abs_pos = self.get_absolute_position()
rel_pos = self.get_relative_position()
print("save positions:", [abs_pos, rel_pos])
data = np.array([abs_pos, rel_pos])
date = time_as_str()
path = os.path.join(path_save, "positions_sensor.txt")
np.savetxt(
path,
data,
fmt="%.8e",
header=date + ": positions (absolute and relative, m)",
)
def load(self):
path = os.path.join(path_save, "positions_sensor.txt")
return np.loadtxt(path)
if __name__ == "__main__":
sensor = PositionSensor()
| [
"u3.Timer1",
"os.path.exists",
"signal.signal",
"u3.Timer1Config",
"u3.Timer0",
"os.path.join",
"u3.Timer0Config",
"u3.U3",
"fluiddyn.util.time_as_str",
"u3.QuadratureInputTimer",
"numpy.array",
"numpy.loadtxt",
"os.mkdir",
"numpy.savetxt",
"sys.exit",
"sys.stdout.flush",
"atexit.register",
"os.path.expanduser"
] | [((230, 253), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (248, 253), False, 'import os\n'), ((275, 300), 'os.path.exists', 'os.path.exists', (['path_save'], {}), '(path_save)\n', (289, 300), False, 'import os\n'), ((306, 325), 'os.mkdir', 'os.mkdir', (['path_save'], {}), '(path_save)\n', (314, 325), False, 'import os\n'), ((363, 374), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (371, 374), False, 'import sys\n'), ((1161, 1168), 'u3.U3', 'u3.U3', ([], {}), '()\n', (1166, 1168), False, 'import u3\n'), ((1617, 1643), 'atexit.register', 'atexit.register', (['self.save'], {}), '(self.save)\n', (1632, 1643), False, 'import atexit\n'), ((1652, 1694), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'sig_handler'], {}), '(signal.SIGTERM, sig_handler)\n', (1665, 1694), False, 'import signal\n'), ((1767, 1785), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1783, 1785), False, 'import sys\n'), ((2993, 3021), 'numpy.array', 'np.array', (['[abs_pos, rel_pos]'], {}), '([abs_pos, rel_pos])\n', (3001, 3021), True, 'import numpy as np\n'), ((3037, 3050), 'fluiddyn.util.time_as_str', 'time_as_str', ([], {}), '()\n', (3048, 3050), False, 'from fluiddyn.util import time_as_str\n'), ((3066, 3113), 'os.path.join', 'os.path.join', (['path_save', '"""positions_sensor.txt"""'], {}), "(path_save, 'positions_sensor.txt')\n", (3078, 3113), False, 'import os\n'), ((3123, 3217), 'numpy.savetxt', 'np.savetxt', (['path', 'data'], {'fmt': '"""%.8e"""', 'header': "(date + ': positions (absolute and relative, m)')"}), "(path, data, fmt='%.8e', header=date +\n ': positions (absolute and relative, m)')\n", (3133, 3217), True, 'import numpy as np\n'), ((3309, 3356), 'os.path.join', 'os.path.join', (['path_save', '"""positions_sensor.txt"""'], {}), "(path_save, 'positions_sensor.txt')\n", (3321, 3356), False, 'import os\n'), ((3372, 3388), 'numpy.loadtxt', 'np.loadtxt', (['path'], {}), '(path)\n', (3382, 3388), True, 'import numpy as np\n'), ((1318, 1336), 'u3.Timer0Config', 'u3.Timer0Config', (['(8)'], {}), '(8)\n', (1333, 1336), False, 'import u3\n'), ((1338, 1356), 'u3.Timer1Config', 'u3.Timer1Config', (['(8)'], {}), '(8)\n', (1353, 1356), False, 'import u3\n'), ((2451, 2478), 'u3.Timer0', 'u3.Timer0', ([], {'UpdateReset': '(True)'}), '(UpdateReset=True)\n', (2460, 2478), False, 'import u3\n'), ((2480, 2507), 'u3.Timer1', 'u3.Timer1', ([], {'UpdateReset': '(True)'}), '(UpdateReset=True)\n', (2489, 2507), False, 'import u3\n'), ((1709, 1722), 'fluiddyn.util.time_as_str', 'time_as_str', ([], {}), '()\n', (1720, 1722), False, 'from fluiddyn.util import time_as_str\n'), ((1894, 1919), 'u3.QuadratureInputTimer', 'u3.QuadratureInputTimer', ([], {}), '()\n', (1917, 1919), False, 'import u3\n')] |