# ==== endlessm/endless-ndn :: eos_data_distribution/parallel.py (license: lgpl-3.0) ====
# -*- Mode:python; coding: utf-8; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (C) 2016 Endless Mobile, Inc.
# Author: Niv Sardi <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# A copy of the GNU Lesser General Public License is in the file COPYING.
import logging
from gi.repository import GObject
logger = logging.getLogger(__name__)
class Batch(GObject.GObject):
__gsignals__ = {
'complete': (GObject.SIGNAL_RUN_FIRST, None, ()),
}
def __init__(self, workers, type="Batch"):
super(Batch, self).__init__()
self._type = type
self._incomplete_workers = set(workers)
for worker in self._incomplete_workers:
worker.connect('complete', self._on_batch_complete)
def start(self):
if not self._incomplete_workers:
logger.info('%s complete: no workers', self._type)
self.emit('complete')
for worker in self._incomplete_workers:
worker.start()
def _on_batch_complete(self, worker):
logger.info("%s complete: %s", self._type, worker)
self._incomplete_workers.remove(worker)
if len(self._incomplete_workers) == 0:
self.emit('complete')
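# --- Illustrative sketch (added; not part of the original module) ---
# Batch only assumes that each worker is a GObject exposing a 'complete'
# signal and a start() method. A minimal compatible worker, for reference:
class ImmediateWorker(GObject.GObject):
    """Hypothetical worker that reports completion as soon as it starts."""
    __gsignals__ = {
        'complete': (GObject.SIGNAL_RUN_FIRST, None, ()),
    }
    def start(self):
        self.emit('complete')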
if __name__ == '__main__':
import argparse
from . import utils
from gi.repository import GLib
from ndn.file import FileConsumer
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output")
parser.add_argument("-c", "--count", default=10, type=int)
args = utils.parse_args(parser=parser)
loop = GLib.MainLoop()
consumers = [FileConsumer("%s-%s"%(args.name, i), "%s-%s"%(args.output, i))
for i in range(args.count)]
batch = Batch(workers=consumers)
batch.connect('complete', lambda *a: loop.quit())
batch.start()
loop.run()
# ==== SP2RC-Coding-Club/Codes :: 13_07_2017/3D_slab_modes.py (license: mit) ====
#import pdb # pause code for debugging at pdb.set_trace()
import numpy as np
import toolbox as tool
import slab_functions as sf
from pysac.plot.mayavi_seed_streamlines import SeedStreamline
import matplotlib.pyplot as plt
from mayavi import mlab
import gc
#import move_seed_points as msp
import mayavi_plotting_functions as mpf
import dispersion_diagram
import img2vid as i2v
from functools import partial
import os
# ================================
# Preamble: set mode options and view parameters
# ================================
# What mode do you want? OPTIONS:
mode_options = ['slow-kink-surf', 'slow-saus-surf', 'slow-saus-body-3',
'slow-kink-body-3', 'slow-saus-body-2', 'slow-kink-body-2',
'slow-saus-body-1', 'slow-kink-body-1', 'fast-saus-body-1',
'fast-kink-body-1', 'fast-saus-body-2', 'fast-kink-body-2',
'fast-saus-body-3', 'fast-kink-body-3', 'fast-kink-surf',
'fast-saus-surf', 'shear-alfven', 'shear-alfven-broadband']
# Which angle shall we view from? OPTIONS:
view_options = ['front', 'front-parallel', 'top', 'top-parallel', 'front-top',
'front-side', 'front-top-side']
# Uniform lighting?
#uniform_light = True
uniform_light = False
show_density = False
show_density_pert = False
show_mag = False
show_mag_scale = False
show_mag_fade = False
show_mag_vec = False
show_vel_front = False
show_vel_front_pert = False
show_vel_top = False
show_vel_top_pert = False
show_disp_top = False
show_disp_front = False
show_axes = False
show_axis_labels = False
show_mini_axis = False
show_boundary = False
# Uncomment the parameter you would like to see
# No density perturbations or vel/disp pert for alfven modes.
#show_density = True
#show_density_pert = True
show_mag = True
#show_mag_scale = True #must also have show_mag = True
#show_mag_fade = True
#show_mag_vec = True
#show_vel_front = True
#show_vel_front_pert = True
#show_vel_top = True
#show_vel_top_pert = True
#show_disp_top = True
#show_disp_front = True
show_axes = True
#show_axis_labels = True
show_mini_axis = True
show_boundary = True
# Visualisation modules in string form for file-names
vis_modules = [show_density, show_density_pert, show_mag, show_mag_scale,
show_mag_fade, show_mag_vec, show_vel_front, show_vel_front_pert,
show_vel_top, show_vel_top_pert, show_disp_top, show_disp_front]
vis_modules_strings = ['show_density', 'show_density_pert', 'show_mag', 'show_mag_scale',
'show_mag_fade', 'show_mag_vec', 'show_vel_front', 'show_vel_front_pert',
'show_vel_top', 'show_vel_top_pert', 'show_disp_top', 'show_disp_front']
vis_mod_string = ''
for i, j in enumerate(vis_modules):
if vis_modules[i]:
vis_mod_string = vis_mod_string + vis_modules_strings[i][5:] + '_'
# Set to True if you would like the dispersion diagram with chosen mode highlighted.
show_dispersion = False
#show_dispersion = True
# Wanna see the animation? Of course you do
#show_animation = False
show_animation = True
# Basic plot to see which eigensolutions have been found.
show_quick_plot = False
#show_quick_plot = True
# Video resolution
#res = (1920,1080) # There is a problem with this resolution- height must be odd number - Mayavi bug apparently
res = tuple(101 * np.array((16,9)))
#res = tuple(51 * np.array((16,9)))
#res = tuple(21 * np.array((16,9)))
number_of_frames = 1
# Frames per second of output video
fps = 20
#save_images = False
save_images = True
make_video = False
#make_video = True
# Where should I save the animation images/videos?
os.path.abspath(os.curdir)
os.chdir('..')
save_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_animations')
# Where should I save the dispersion diagrams?
save_dispersion_diagram_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_dispersion_diagrams')
# ================================
# Visualisation set-up
# ================================
# Variable definitions (for reference):
# x = k*x
# y = k*y
# z = k*z
# W = omega/k
# K = k*x_0
# t = omega*t
# Loop through selected modes
for mode_ind in [0]:#range(8,14): # for all others. REMEMBER SBB parameters
#for mode_ind in [14,15]: #for fast body surf. REMEMBER SBS parameters
#for mode_ind in [16, 17]:
#for mode_ind in [13]: #for an individual mode
#for mode_ind in range(2,14):
if mode_ind not in range(len(mode_options)):
raise NameError('Mode not in mode_options')
# (note that fast surface modes, i.e. 14 and 15, can only be
# found with SBS parameters in slab_functions...)
mode = mode_options[mode_ind]
# Specify oscillation parameters
if 'slow' in mode and 'surf' in mode or 'alfven' in mode:
K = 2.
elif 'slow' in mode and 'body' in mode:
K = 8.
elif 'fast' in mode and 'body-1' in mode:
K = 8.
elif 'fast' in mode and 'body-2' in mode:
K = 15.
elif 'fast' in mode and 'body-3' in mode:
K = 22.
elif 'fast' in mode and 'surf' in mode:
K = 8.
else:
raise NameError('Mode not found')
# Specify density ratio R1 := rho_1 / rho_0
# R1 = 1.5 # Higher density on left than right
# R1 = 1.8
# R1 = 1.9 # Disp_diagram will only work for R1=1.5, 1.8, 2.0
R1 = 2. # Symmetric slab
# Reduce number of variables in dispersion relation
disp_rel_partial = partial(sf.disp_rel_asym, R1=R1)
# find eigenfrequencies W (= omega/k) within the range Wrange for the given parameters.
Wrange1 = np.linspace(0., sf.cT, 11)
Wrange2 = np.linspace(sf.cT, sf.c0, 401)
Wrange3 = np.linspace(sf.c0, sf.c2, 11)
Woptions_slow_surf = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange1, args=None).transpose())
Woptions_slow_body = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange2, args=None).transpose())
Woptions_fast = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange3, args=None).transpose())
# Remove W values that are very close to characteristic speeds - these are spurious solutions
tol = 1e-2
indices_to_rm = []
for i, w in enumerate(Woptions_slow_surf):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < 0 or w > sf.cT:
indices_to_rm.append(i)
Woptions_slow_surf = np.delete(Woptions_slow_surf, indices_to_rm)
Woptions_slow_surf.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_slow_body):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.cT or w > sf.c0:
indices_to_rm.append(i)
Woptions_slow_body = np.delete(Woptions_slow_body, indices_to_rm)
Woptions_slow_body.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_fast):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.c0 or w > min(sf.c1(R1), sf.c2):
indices_to_rm.append(i)
Woptions_fast = np.delete(Woptions_fast, indices_to_rm)
Woptions_fast.sort()
# remove any higher order slow body modes - we only want to do the first 3 saus/kink
if len(Woptions_slow_body) > 6:
Woptions_slow_body = np.delete(Woptions_slow_body, range(len(Woptions_slow_body) - 6))
Woptions = np.concatenate((Woptions_slow_surf, Woptions_slow_body, Woptions_fast))
# set W to be the eigenfrequency for the requested mode
if 'fast-saus-body' in mode or 'fast-kink-surf' in mode:
W = Woptions_fast[-2]
elif 'fast-kink-body' in mode or 'fast-saus-surf' in mode:
W = Woptions_fast[-1]
elif 'slow' in mode and 'surf' in mode:
W = Woptions_slow_surf[mode_ind]
elif 'slow' in mode and 'body' in mode:
W = Woptions_slow_body[mode_ind-2]
if 'alfven' in mode:
W = sf.vA
else:
W = np.real(W)
# Quick plot to see if we are hitting correct mode
if show_quick_plot:
plt.plot([K] * len(Woptions), Woptions, '.')
plt.plot(K+0.5, W, 'go')
plt.xlim([0,23])
plt.show()
# ================================
# Dispersion diagram
# ================================
if show_dispersion:
if 'alfven' in mode:
raise NameError('Dispersion plot requested for an alfven mode. Cannot do that.')
dispersion_diagram.dispersion_diagram(mode_options, mode,
disp_rel_partial, K, W, R1)
# plt.tight_layout() # seems to make it chop the sides off with this
plt.savefig(os.path.join(save_dispersion_diagram_directory, 'R1_' + str(R1) + '_' + mode + '.png') )
plt.close()
# ================================
# Animation
# ================================
if show_animation:
print('Starting ' + mode)
# set grid parameters
xmin = -2.*K
xmax = 2.*K
ymin = 0.
ymax = 4.
zmin = 0.
zmax = 2*np.pi
# You can change ny but be careful changing nx, nz.
nx = 300  # 300 (rather than 100) reduces bouncing of field lines for the same video size, at significant computational cost.
ny = 300  # other values used previously: 20, 100
nz = 300  # other value used previously: 100
nt = number_of_frames
if nz % nt != 0:
print("nt doesn't divide nz so there may be a problem with chopping in z direction for each time step")
t_start = 0.
t_end = zmax
t = t_start
xvals = np.linspace(xmin, xmax, nx)
yvals = np.linspace(ymin, ymax, ny)
zvals = np.linspace(zmin, zmax, nz, endpoint=False) # A fudge to give the height as exactly one wavelength
x_spacing = max(nx, ny, nz) / nx
y_spacing = max(nx, ny, nz) / ny
z_spacing = max(nx, ny, nz) / nz
# For masking points for plotting vector fields - we have to do it manually due to a Mayavi bug
mod = int(4 * nx / 100)
mod_y = int(np.ceil(mod / y_spacing))
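# Worked example (added for clarity): with nx = ny = nz = 300 the spacings
# above are all 1.0, so mod = int(4 * 300 / 100) = 12 and mod_y = 12,
# i.e. roughly every 12th grid point carries a vector glyph.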
# Get the data xi=displacement, v=velocity, b=mag field
if show_disp_top or show_disp_front:
xixvals = np.real(np.repeat(sf.xix(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xizvals = np.real(np.repeat(sf.xiz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xiyvals = np.real(np.repeat(sf.xiy(mode, xvals, zvals, t, W, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front or show_vel_top:
vxvals = np.real(np.repeat(sf.vx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.real(np.repeat(sf.vy(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front_pert or show_vel_top_pert:
vxvals = np.real(np.repeat(sf.vx_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.zeros_like(vxvals)
# Axis is defined on the mag field so we have to set up this data
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
# displacement at the right and left boundaries
if show_boundary:
xix_boundary_r_vals = np.real(np.repeat(K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='r')[:, np.newaxis], ny, axis=1))
xix_boundary_l_vals = np.real(np.repeat(-K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='l')[:, np.newaxis], ny, axis=1))
if show_density:
rho_vals = np.real(np.repeat(sf.rho(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
if show_density_pert:
rho_vals = np.real(np.repeat(sf.rho_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
# ================================
# Starting figure and visualisation modules
# ================================
zgrid_zy, ygrid_zy = np.mgrid[0:nz:(nz)*1j,
0:ny:(ny)*1j]
fig = mlab.figure(size=res) # (1920, 1080) for 1080p , tuple(101 * np.array((16,9))) #16:9 aspect ratio for video upload
# Spacing of grid so that we can display a visualisation cube without having the same number of grid points in each dimension
spacing = np.array([x_spacing, z_spacing, y_spacing])
if show_density or show_density_pert:
# Scalar field density
rho = mlab.pipeline.scalar_field(rho_vals_t, name="density", figure=fig)
rho.spacing = spacing
mpf.volume_red_blue(rho, rho_vals_t)
#Masking points
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
xgrid, zgrid, ygrid = np.mgrid[0:nx:(nx)*1j,
0:nz:(nz)*1j,
0:ny:(ny)*1j]
field = mlab.pipeline.vector_field(bxvals_t, bzvals_t, byvals_t, name="B field",
figure=fig, scalars=zgrid)
field.spacing = spacing
if show_axes:
mpf.axes_no_label(field)
if show_mini_axis:
mpf.mini_axes()
if uniform_light:
#uniform lighting, but if we turn shading of volumes off, we are ok without
mpf.uniform_lighting(fig)
#Black background
mpf.background_colour(fig, (0., 0., 0.))
scalefactor = 8. * nx / 100. # scale factor for direction field vectors
# Set up visualisation modules
if show_mag_vec:
bdirfield_front = mlab.pipeline.vector_field(bxvals_mask_front_t, bzvals_mask_front_t,
byvals_mask_front_t, name="B field front",
figure=fig)
bdirfield_front.spacing = spacing
mpf.vector_cut_plane(bdirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_top or show_vel_top_pert:
vdirfield_top = mlab.pipeline.vector_field(vxvals_mask_top_t, np.zeros_like(vxvals_mask_top_t),
vyvals_mask_top_t, name="V field top",
figure=fig)
vdirfield_top.spacing = spacing
mpf.vector_cut_plane(vdirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_front or show_vel_front_pert:
vdirfield_front = mlab.pipeline.vector_field(vxvals_mask_front_t, vzvals_mask_front_t,
vyvals_mask_front_t, name="V field front",
figure=fig)
vdirfield_front.spacing = spacing
mpf.vector_cut_plane(vdirfield_front,'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_top:
xidirfield_top = mlab.pipeline.vector_field(xixvals_mask_top_t, np.zeros_like(xixvals_mask_top_t),
xiyvals_mask_top_t, name="Xi field top",
figure=fig)
xidirfield_top.spacing = spacing
mpf.vector_cut_plane(xidirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_front:
xidirfield_front = mlab.pipeline.vector_field(xixvals_mask_front_t, xizvals_mask_front_t,
xiyvals_mask_front_t, name="Xi field front",
figure=fig)
xidirfield_front.spacing = spacing
mpf.vector_cut_plane(xidirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
# Loop through time
for t_ind in range(nt):
if t_ind == 0:
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
else:
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
# Update mag field data
field.mlab_source.set(u=bxvals_t, v=bzvals_t, w=byvals_t)
# Update mag field visualisation module
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
bdirfield_front.mlab_source.set(u=bxvals_mask_front_t, v=bzvals_mask_front_t, w=byvals_mask_front_t)
# Update displacement field data
if show_disp_top or show_disp_front:
# Integer division keeps the split index an int (required by np.split under Python 3)
xixvals_split = np.split(xixvals, [nz - (nz // nt) * t_ind], axis=1)
xiyvals_split = np.split(xiyvals, [nz - (nz // nt) * t_ind], axis=1)
xizvals_split = np.split(xizvals, [nz - (nz // nt) * t_ind], axis=1)
xixvals_t = np.concatenate((xixvals_split[1], xixvals_split[0]), axis=1)
xiyvals_t = np.concatenate((xiyvals_split[1], xiyvals_split[0]), axis=1)
xizvals_t = np.concatenate((xizvals_split[1], xizvals_split[0]), axis=1)
# Update displacement field visualisation module
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
xidirfield_top.mlab_source.set(u=xixvals_mask_top_t, v=np.zeros_like(xixvals_mask_top_t), w=xiyvals_mask_top_t)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
xidirfield_front.mlab_source.set(u=xixvals_mask_front_t, v=xizvals_mask_front_t, w=xiyvals_mask_front_t)
# Update velocity field data
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_split = np.split(vxvals, [nz - (nz // nt) * t_ind], axis=1)
vyvals_split = np.split(vyvals, [nz - (nz // nt) * t_ind], axis=1)
vzvals_split = np.split(vzvals, [nz - (nz // nt) * t_ind], axis=1)
vxvals_t = np.concatenate((vxvals_split[1], vxvals_split[0]), axis=1)
vyvals_t = np.concatenate((vyvals_split[1], vyvals_split[0]), axis=1)
vzvals_t = np.concatenate((vzvals_split[1], vzvals_split[0]), axis=1)
# Update velocity field visualisation module
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
vdirfield_top.mlab_source.set(u=vxvals_mask_top_t, v=np.zeros_like(vxvals_mask_top_t), w=vyvals_mask_top_t)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
vdirfield_front.mlab_source.set(u=vxvals_mask_front_t, v=vzvals_mask_front_t, w=vyvals_mask_front_t)
# Update boundary displacement data
if show_boundary:
xix_boundary_r_vals_split = np.split(xix_boundary_r_vals, [nz - (nz // nt) * t_ind], axis=0)
xix_boundary_l_vals_split = np.split(xix_boundary_l_vals, [nz - (nz // nt) * t_ind], axis=0)
xix_boundary_r_vals_t = np.concatenate((xix_boundary_r_vals_split[1], xix_boundary_r_vals_split[0]), axis=0)
xix_boundary_l_vals_t = np.concatenate((xix_boundary_l_vals_split[1], xix_boundary_l_vals_split[0]), axis=0)
# Update density data
if show_density or show_density_pert:
rho_vals_split = np.split(rho_vals, [nz - (nz // nt) * t_ind], axis=1)
rho_vals_t = np.concatenate((rho_vals_split[1], rho_vals_split[0]), axis=1)
rho.mlab_source.set(scalars=rho_vals_t)
# Boundary data - Letting mayavi know where to plot the boundary
if show_boundary:
ext_min_r = ((nx) * (xix_boundary_r_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_r = ((nx) * (xix_boundary_r_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
ext_min_l = ((nx) * (xix_boundary_l_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_l = ((nx) * (xix_boundary_l_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
#Make field lines
if show_mag:
# move seed points up with phase speed. - Bit of a fudge.
# Create an array of points for which we want mag field seeds
nx_seed = 9
ny_seed = 13
start_x = 30. * nx / 100.
end_x = nx+1 - start_x
start_y = 1.
if ny == 20: # so that the lines don't go right up to the edge of the box
end_y = ny - 1.
elif ny == 100:
end_y = ny - 2.
elif ny == 300:
end_y = ny - 6.
else:
end_y = ny - 1
seeds=[]
dx_res = (end_x - start_x) / (nx_seed-1)
dy_res = (end_y - start_y) / (ny_seed-1)
for j in range(ny_seed):
for i in range(nx_seed):
x = start_x + (i * dx_res) * x_spacing
y = start_y + (j * dy_res) * y_spacing
z = 1. + (t_start + t_ind*(t_end - t_start)/nt)/zmax * nz
seeds.append((x,z,y))
if 'alfven' in mode:
for i in range(nx_seed):
del seeds[0]
del seeds[-1]
# Remove previous field lines - field lines cannot be updated, just the data that they are built from
if t_ind != 0:
field_lines.remove() # field_lines is defined in first go through loop
field_lines = SeedStreamline(seed_points=seeds)
# Field line visualisation tinkering
field_lines.stream_tracer.integration_direction='both'
field_lines.streamline_type = 'tube'
field_lines.stream_tracer.maximum_propagation = nz * 2
field_lines.tube_filter.number_of_sides = 20
field_lines.tube_filter.radius = 0.7 * max(nx, ny, nz) / 100.
field_lines.tube_filter.capping = True
field_lines.actor.property.opacity = 1.0
field.add_child(field_lines)
module_manager = field_lines.parent
# Colormap of magnetic field strength plotted on the field lines
if show_mag_scale:
module_manager.scalar_lut_manager.lut_mode = 'coolwarm'
module_manager.scalar_lut_manager.data_range=[7,18]
else:
mag_lut = module_manager.scalar_lut_manager.lut.table.to_array()
mag_lut[:,0] = [220]*256
mag_lut[:,1] = [20]*256
mag_lut[:,2] = [20]*256
module_manager.scalar_lut_manager.lut.table = mag_lut
if show_mag_fade:
mpf.colormap_fade(module_manager, fade_value=20)
# Which views do you want to show? Options are defined at the start
views_selected = [0]#[0,1,4,5,6] #range(7) #[2,3]
for view_ind, view_selected in enumerate(views_selected):
view = view_options[view_selected]
# Display boundary - cannot be updated each time
if show_boundary:
# Boundaries should look different depending on view
if view == 'front-parallel':
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 125
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
else:
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 20
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
# Set viewing angle - For some unknown reason we must redefine the camera position each time.
# This is something to do with the boundaries being replaced each time.
mpf.view_position(fig, view, nx, ny, nz)
if save_images:
prefix = 'R1_'+str(R1) + '_' + mode + '_' + vis_mod_string + view + '_'# + '_norho_'
mlab.savefig(os.path.join(save_directory, prefix + str(t_ind+1) + '.png'))
if t_ind == nt - 1:
if make_video:
i2v.image2video(filepath=save_directory, prefix=prefix,
output_name=prefix+'video', out_extension='mp4',
fps=fps, n_loops=4, delete_images=True,
delete_old_videos=True, res=res[1])
# Log: to keep us updated with progress
if t_ind % 5 == 4:
print('Finished frame number ' + str(t_ind + 1) + ' out of ' + str(number_of_frames))
#Release some memory after each time step
gc.collect()
#step t forward
t = t + (t_end - t_start) / nt
# Close the Mayavi window each time if we want to make a video
if make_video:
mlab.close(fig)
print('Finished ' + mode)
# ==== bundgus/python-playground :: ssh-playground/demo_sftp.py (license: mit) ====
#!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
# based on code provided by raymond mosteller (thanks!)
import base64
import getpass
import os
import socket
import sys
import traceback
import paramiko
from paramiko.py3compat import input
# setup logging
paramiko.util.log_to_file('demo_sftp.log')
hostname = '192.168.1.11'
port = 22
username = 'username'
password = 'password'
# Paramiko client configuration
UseGSSAPI = False # enable GSS-API / SSPI authentication
DoGSSAPIKeyExchange = False
# now, connect and use paramiko Transport to negotiate SSH2 across the connection
try:
t = paramiko.Transport((hostname, port))
t.connect(None, username, password, gss_host=socket.getfqdn(hostname),
gss_auth=UseGSSAPI, gss_kex=DoGSSAPIKeyExchange)
sftp = paramiko.SFTPClient.from_transport(t)
# dirlist on remote host
dirlist = sftp.listdir('.')
print("Dirlist: %s" % dirlist)
# copy this demo onto the server
try:
sftp.mkdir("demo_sftp_folder")
except IOError:
print('(assuming demo_sftp_folder/ already exists)')
with sftp.open('demo_sftp_folder/README', 'w') as f:
f.write('This was created by demo_sftp.py.\n')
with open('demo_sftp.py', 'r') as f:
data = f.read()
sftp.open('demo_sftp_folder/demo_sftp.py', 'w').write(data)
print('created demo_sftp_folder/ on the server')
# copy the README back here
with sftp.open('demo_sftp_folder/README', 'r') as f:
data = f.read()
with open('README_demo_sftp', 'w') as f:
f.write(data.decode('utf-8'))
print('copied README back here')
# BETTER: use the get() and put() methods
sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py')
sftp.get('demo_sftp_folder/README', 'README_demo_sftp')
t.close()
except Exception as e:
print('*** Caught exception: %s: %s' % (e.__class__, e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
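# --- Illustrative sketch (added; not part of the original demo) ---
# paramiko also ships a higher-level SSHClient wrapper that manages the
# Transport shown above. A minimal equivalent, assuming the same
# hostname/port/username/password placeholders as this demo:
def sftp_via_sshclient(hostname, port, username, password):
    """Return an SFTPClient built via SSHClient instead of a raw Transport.
    AutoAddPolicy blindly trusts unknown host keys - fine for a demo,
    unsafe in production."""
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname, port=port, username=username, password=password)
    return client.open_sftp()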
# ==== cmjatai/cmj :: cmj/cerimonial/models.py (license: gpl-3.0) ====
from django.contrib.auth.models import Group
from django.db import models
from django.db.models.deletion import SET_NULL, PROTECT, CASCADE
from django.utils.translation import ugettext_lazy as _
from cmj.core.models import CmjModelMixin, Trecho, Distrito, RegiaoMunicipal,\
CmjAuditoriaModelMixin, CmjSearchMixin, AreaTrabalho, Bairro, Municipio
from cmj.utils import YES_NO_CHOICES, NONE_YES_NO_CHOICES,\
get_settings_auth_user_model
from sapl.parlamentares.models import Parlamentar, Partido
from sapl.utils import LISTA_DE_UFS
FEMININO = 'F'
MASCULINO = 'M'
SEXO_CHOICE = ((FEMININO, _('Feminino')),
(MASCULINO, _('Masculino')))
IMP_BAIXA = 'B'
IMP_MEDIA = 'M'
IMP_ALTA = 'A'
IMP_CRITICA = 'C'
IMPORTANCIA_CHOICE = (
(IMP_BAIXA, _('Baixa')),
(IMP_MEDIA, _('Média')),
(IMP_ALTA, _('Alta')),
(IMP_CRITICA, _('Crítica')),
)
class DescricaoAbstractModel(models.Model):
descricao = models.CharField(
default='', max_length=254, verbose_name=_('Nome / Descrição'))
class Meta:
abstract = True
ordering = ('descricao',)
def __str__(self):
return self.descricao
class TipoTelefone(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Telefone')
verbose_name_plural = _('Tipos de Telefone')
class TipoEndereco(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Endereço')
verbose_name_plural = _('Tipos de Endereço')
class TipoEmail(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Email')
verbose_name_plural = _('Tipos de Email')
class Parentesco(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Parentesco')
verbose_name_plural = _('Parentescos')
class EstadoCivil(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Estado Civil')
verbose_name_plural = _('Estados Civis')
class PronomeTratamento(models.Model):
nome_por_extenso = models.CharField(
default='', max_length=254, verbose_name=_('Nome Por Extenso'))
abreviatura_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Singular Masculino'))
abreviatura_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Singular Feminino'))
abreviatura_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Plural Masculino'))
abreviatura_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Plural Feminino'))
vocativo_direto_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Singular Masculino'))
vocativo_direto_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Singular Feminino'))
vocativo_direto_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Plural Masculino'))
vocativo_direto_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Plural Feminino'))
vocativo_indireto_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Singular Masculino'))
vocativo_indireto_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Singular Feminino'))
vocativo_indireto_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Plural Masculino'))
vocativo_indireto_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Plural Feminino'))
enderecamento_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Singular Masculino'))
enderecamento_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Singular Feminino'))
enderecamento_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Plural Masculino'))
enderecamento_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Plural Feminino'))
prefixo_nome_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Singular Masculino'))
prefixo_nome_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Singular Feminino'))
prefixo_nome_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Plural Masculino'))
prefixo_nome_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Plural Feminino'))
class Meta:
verbose_name = _('Pronome de Tratamento')
verbose_name_plural = _('Pronomes de tratamento')
def __str__(self):
return self.nome_por_extenso
class TipoAutoridade(DescricaoAbstractModel):
pronomes = models.ManyToManyField(
PronomeTratamento,
related_name='tipoautoridade_set')
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Autoridade')
verbose_name_plural = _('Tipos de Autoridade')
class TipoLocalTrabalho(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo do Local de Trabalho')
verbose_name_plural = _('Tipos de Local de Trabalho')
class NivelInstrucao(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Nível de Instrução')
verbose_name_plural = _('Níveis de Instrução')
class OperadoraTelefonia(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Operadora de Telefonia')
verbose_name_plural = _('Operadoras de Telefonia')
class Contato(CmjSearchMixin, CmjAuditoriaModelMixin):
nome = models.CharField(max_length=100, verbose_name=_('Nome'))
nome_social = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
apelido = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Apelido'))
data_nascimento = models.DateField(
blank=True, null=True, verbose_name=_('Data de Nascimento'))
sexo = models.CharField(
max_length=1, blank=True,
verbose_name=_('Sexo Biológico'), choices=SEXO_CHOICE)
identidade_genero = models.CharField(
blank=True, default='',
max_length=100, verbose_name=_('Como se reconhece?'))
tem_filhos = models.NullBooleanField(
choices=NONE_YES_NO_CHOICES,
default=None, verbose_name=_('Tem Filhos?'))
quantos_filhos = models.PositiveSmallIntegerField(
default=0, blank=True, verbose_name=_('Quantos Filhos?'))
estado_civil = models.ForeignKey(
EstadoCivil,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Estado Civil'))
nivel_instrucao = models.ForeignKey(
NivelInstrucao,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Nivel de Instrução'))
naturalidade = models.CharField(
max_length=50, blank=True, verbose_name=_('Naturalidade'))
nome_pai = models.CharField(
max_length=100, blank=True, verbose_name=_('Nome do Pai'))
nome_mae = models.CharField(
max_length=100, blank=True, verbose_name=_('Nome da Mãe'))
numero_sus = models.CharField(
max_length=100, blank=True, verbose_name=_('Número do SUS'))
cpf = models.CharField(max_length=15, blank=True, verbose_name=_('CPF'))
titulo_eleitor = models.CharField(
max_length=15,
blank=True,
verbose_name=_('Título de Eleitor'))
rg = models.CharField(max_length=30, blank=True, verbose_name=_('RG'))
rg_orgao_expedidor = models.CharField(
max_length=20, blank=True, verbose_name=_('Órgão Expedidor'))
rg_data_expedicao = models.DateField(
blank=True, null=True, verbose_name=_('Data de Expedição'))
ativo = models.BooleanField(choices=YES_NO_CHOICES,
default=True, verbose_name=_('Ativo?'))
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='contato_set',
blank=True, null=True, on_delete=PROTECT)
perfil_user = models.ForeignKey(
get_settings_auth_user_model(),
verbose_name=_('Perfil do Usuário'),
related_name='contato_set',
blank=True, null=True, on_delete=CASCADE)
profissao = models.CharField(
max_length=254, blank=True, verbose_name=_('Profissão'))
tipo_autoridade = models.ForeignKey(
TipoAutoridade,
verbose_name=TipoAutoridade._meta.verbose_name,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL)
cargo = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Cargo/Função'))
pronome_tratamento = models.ForeignKey(
PronomeTratamento,
verbose_name=PronomeTratamento._meta.verbose_name,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
help_text=_('O pronome de tratamento é opcional, mas será \
obrigatório caso seja selecionado um tipo de autoridade.'))
observacoes = models.TextField(
blank=True, default='',
verbose_name=_('Outros observações sobre o Contato'))
@property
def fields_search(self):
return ['nome',
'nome_social',
'apelido']
class Meta:
verbose_name = _('Contato')
verbose_name_plural = _('Contatos')
ordering = ['nome']
permissions = (
('print_impressoenderecamento',
_('Pode Imprimir Impressos de Endereçamento')),
('print_rel_contato_agrupado_por_processo',
_('Pode Imprimir Relatório de Contatos Agrupados por Processo')),
('print_rel_contato_agrupado_por_grupo',
_('Pode Imprimir Relatório de Contatos Agrupados '
'Grupos de Contato')),
)
unique_together = (
('nome', 'data_nascimento', 'workspace', 'perfil_user'),)
def __str__(self):
return self.nome
class PerfilManager(models.Manager):
def for_user(self, user):
return super(
PerfilManager, self).get_queryset().get(
perfil_user=user)
class Perfil(Contato):
objects = PerfilManager()
class Meta:
proxy = True
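# Illustrative usage note (added; not part of the original module):
# PerfilManager.for_user() resolves the single Contato linked to a Django
# user, so a view can do, e.g.:
#
#     perfil = Perfil.objects.for_user(request.user)
#
# Since it delegates to QuerySet.get(), it raises Perfil.DoesNotExist when
# the user has no Contato and MultipleObjectsReturned on duplicate links.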
class Telefone(CmjAuditoriaModelMixin):
contato = models.ForeignKey(
Contato, on_delete=CASCADE,
verbose_name=_('Contato'),
related_name="telefone_set")
operadora = models.ForeignKey(
OperadoraTelefonia, on_delete=SET_NULL,
related_name='telefone_set',
blank=True, null=True,
verbose_name=OperadoraTelefonia._meta.verbose_name)
tipo = models.ForeignKey(
TipoTelefone,
blank=True, null=True,
on_delete=SET_NULL,
related_name='telefone_set',
verbose_name='Tipo')
telefone = models.CharField(max_length=100,
verbose_name='Número do Telefone')
proprio = models.NullBooleanField(
choices=NONE_YES_NO_CHOICES,
blank=True, null=True, verbose_name=_('Próprio?'))
de_quem_e = models.CharField(
max_length=40, verbose_name='De quem é?', blank=True,
help_text=_('Se não é próprio, de quem é?'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferêncial?'))
permissao = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Permissão:'),
help_text=_("Permite que nossa instituição entre em contato \
com você neste telefone?"))
@property
def numero_nome_contato(self):
return str(self)
class Meta:
verbose_name = _('Telefone')
verbose_name_plural = _('Telefones')
def __str__(self):
return self.telefone
class TelefonePerfil(Telefone):
class Meta:
proxy = True
verbose_name = _('Telefone do Perfil')
verbose_name_plural = _('Telefones do Perfil')
class Email(CmjAuditoriaModelMixin):
contato = models.ForeignKey(
Contato, on_delete=CASCADE,
verbose_name=_('Contato'),
related_name="email_set")
tipo = models.ForeignKey(
TipoEmail,
blank=True, null=True,
on_delete=SET_NULL,
related_name='email_set',
verbose_name='Tipo')
email = models.EmailField(verbose_name='Email')
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferêncial?'))
permissao = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Permissão:'),
help_text=_("Permite que nossa instituição envie informações \
para este email?"))
class Meta:
verbose_name = _('Email')
verbose_name_plural = _("Email's")
def __str__(self):
return self.email
class EmailPerfil(Email):
class Meta:
proxy = True
verbose_name = _('Email do Perfil')
verbose_name_plural = _("Email's do Perfil")
class Dependente(CmjAuditoriaModelMixin):
parentesco = models.ForeignKey(Parentesco,
on_delete=PROTECT,
related_name='+',
verbose_name=_('Parentesco'))
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='dependente_set',
on_delete=CASCADE)
nome = models.CharField(max_length=100, verbose_name=_('Nome'))
nome_social = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
apelido = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Apelido'))
sexo = models.CharField(
blank=True, max_length=1, verbose_name=_('Sexo Biológico'),
choices=SEXO_CHOICE)
data_nascimento = models.DateField(
blank=True, null=True, verbose_name=_('Data Nascimento'))
identidade_genero = models.CharField(
blank=True, default='',
max_length=100, verbose_name=_('Como se reconhece?'))
nivel_instrucao = models.ForeignKey(
NivelInstrucao,
related_name='dependente_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Nivel de Instrução'))
class Meta:
verbose_name = _('Dependente')
verbose_name_plural = _('Dependentes')
def __str__(self):
return self.nome
class DependentePerfil(Dependente):
class Meta:
proxy = True
verbose_name = _('Dependente do Perfil')
verbose_name_plural = _('Dependentes do Perfil')
class LocalTrabalho(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='localtrabalho_set',
on_delete=CASCADE)
nome = models.CharField(
max_length=254, verbose_name=_('Nome / Razão Social'))
nome_social = models.CharField(
blank=True, default='', max_length=254,
verbose_name=_('Nome Fantasia'))
tipo = models.ForeignKey(
TipoLocalTrabalho,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Tipo do Local de Trabalho'))
trecho = models.ForeignKey(
Trecho,
verbose_name=_('Trecho'),
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
verbose_name=_('Estado'))
municipio = models.ForeignKey(
Municipio,
verbose_name=Municipio._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
cep = models.CharField(max_length=9, blank=True, default='',
verbose_name=_('CEP'))
endereco = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Endereço'),
help_text=_('O campo endereço também é um campo de busca. Nele '
'você pode digitar qualquer informação, inclusive '
'digitar o cep para localizar o endereço, e vice-versa!'))
numero = models.CharField(max_length=50, blank=True, default='',
verbose_name=_('Número'))
bairro = models.ForeignKey(
Bairro,
verbose_name=Bairro._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
distrito = models.ForeignKey(
Distrito,
verbose_name=Distrito._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
regiao_municipal = models.ForeignKey(
RegiaoMunicipal,
verbose_name=RegiaoMunicipal._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
complemento = models.CharField(max_length=30, blank=True, default='',
verbose_name=_('Complemento'))
data_inicio = models.DateField(
blank=True, null=True, verbose_name=_('Data de Início'))
data_fim = models.DateField(
blank=True, null=True, verbose_name=_('Data de Fim'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferencial?'))
cargo = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Cargo/Função'),
help_text=_('Ao definir um cargo e função aqui, o '
'Cargo/Função preenchido na aba "Dados Básicos", '
'será desconsiderado ao gerar impressos!'))
class Meta:
verbose_name = _('Local de Trabalho')
verbose_name_plural = _('Locais de Trabalho')
def __str__(self):
return self.nome
class LocalTrabalhoPerfil(LocalTrabalho):
class Meta:
proxy = True
verbose_name = _('Local de Trabalho do Perfil')
verbose_name_plural = _('Locais de Trabalho do Perfil')
class Endereco(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='endereco_set',
on_delete=CASCADE)
tipo = models.ForeignKey(
TipoEndereco,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Tipo do Endereço'))
trecho = models.ForeignKey(
Trecho,
verbose_name=_('Trecho'),
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
verbose_name=_('Estado'))
municipio = models.ForeignKey(
Municipio,
verbose_name=_('Município'),
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
cep = models.CharField(max_length=9, blank=True, default='',
verbose_name=_('CEP'))
endereco = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Endereço'),
help_text=_('O campo endereço também é um campo de busca, nele '
'você pode digitar qualquer informação, inclusive '
'digitar o cep para localizar o endereço, e vice-versa!'))
numero = models.CharField(max_length=50, blank=True, default='',
verbose_name=_('Número'))
bairro = models.ForeignKey(
Bairro,
verbose_name=Bairro._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
distrito = models.ForeignKey(
Distrito,
verbose_name=Distrito._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
regiao_municipal = models.ForeignKey(
RegiaoMunicipal,
verbose_name=RegiaoMunicipal._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
complemento = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Complemento'))
ponto_referencia = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Pontos de Referência'))
observacoes = models.TextField(
blank=True, default='',
verbose_name=_('Outros observações sobre o Endereço'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferencial?'))
"""help_text=_('Correspondências automáticas serão geradas sempre '
'para os endereços preferenciais.')"""
class Meta:
verbose_name = _('Endereço')
verbose_name_plural = _('Endereços')
def __str__(self):
numero = (' - ' + self.numero) if self.numero else ''
return self.endereco + numero
class EnderecoPerfil(Endereco):
class Meta:
proxy = True
verbose_name = _('Endereço do Perfil')
verbose_name_plural = _('Endereços do Perfil')
class FiliacaoPartidaria(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='filiacaopartidaria_set',
on_delete=CASCADE)
data = models.DateField(verbose_name=_('Data de Filiação'))
partido = models.ForeignKey(Partido,
related_name='filiacaopartidaria_set',
verbose_name=Partido._meta.verbose_name,
on_delete=PROTECT)
data_desfiliacao = models.DateField(
blank=True, null=True, verbose_name=_('Data de Desfiliação'))
@property
def contato_nome(self):
return str(self.contato)
class Meta:
verbose_name = _('Filiação Partidária')
verbose_name_plural = _('Filiações Partidárias')
def __str__(self):
return str(self.partido)
# -----------------------------------------------------------------
# -----------------------------------------------------------------
# PROCESSOS
# -----------------------------------------------------------------
# -----------------------------------------------------------------
class StatusProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Status de Processo')
verbose_name_plural = _('Status de Processos')
class ClassificacaoProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Classificacao de Processo')
verbose_name_plural = _('Classificações de Processos')
class TopicoProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tópico de Processo')
verbose_name_plural = _('Tópicos de Processos')
class AssuntoProcesso(DescricaoAbstractModel, CmjAuditoriaModelMixin):
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='assuntoprocesso_set',
on_delete=PROTECT)
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Assunto de Processo')
verbose_name_plural = _('Assuntos de Processos')
class Processo(CmjSearchMixin, CmjAuditoriaModelMixin):
titulo = models.CharField(max_length=9999, verbose_name=_('Título'))
data = models.DateField(verbose_name=_('Data de Abertura'))
descricao = models.TextField(
blank=True, default='',
verbose_name=_('Descrição do Processo'))
observacoes = models.TextField(
blank=True, default='',
verbose_name=_('Outras observações sobre o Processo'))
solucao = models.TextField(
blank=True, default='',
verbose_name=_('Solução do Processo'))
contatos = models.ManyToManyField(Contato,
blank=True,
verbose_name=_(
'Contatos Interessados no Processo'),
related_name='processo_set',)
status = models.ForeignKey(StatusProcesso,
blank=True, null=True,
verbose_name=_('Status do Processo'),
related_name='processo_set',
on_delete=SET_NULL)
importancia = models.CharField(
max_length=1, blank=True,
verbose_name=_('Importância'), choices=IMPORTANCIA_CHOICE)
topicos = models.ManyToManyField(
TopicoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Tópicos'))
classificacoes = models.ManyToManyField(
ClassificacaoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Classificações'),)
assuntos = models.ManyToManyField(
AssuntoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Assuntos'),)
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='processo_set',
on_delete=PROTECT)
class Meta:
verbose_name = _('Processo')
verbose_name_plural = _('Processos')
ordering = ('titulo', )
def __str__(self):
return str(self.titulo)
@property
def fields_search(self):
return ['titulo',
'observacoes',
'descricao']
class ProcessoContato(Processo):
class Meta:
proxy = True
verbose_name = _('Processo')
verbose_name_plural = _('Processos')
class GrupoDeContatos(CmjAuditoriaModelMixin):
nome = models.CharField(max_length=100,
verbose_name=_('Nome do Grupo'))
contatos = models.ManyToManyField(Contato,
blank=True,
verbose_name=_(
'Contatos do Grupo'),
related_name='grupodecontatos_set',)
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='grupodecontatos_set',
on_delete=PROTECT)
class Meta:
verbose_name = _('Grupo de Contatos')
verbose_name_plural = _('Grupos de Contatos')
ordering = ('nome', )
def __str__(self):
return str(self.nome)
# ==== rajalokan/nova :: nova/tests/functional/notification_sample_tests/test_instance.py (license: apache-2.0) ====
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import mock
from nova import context
from nova import exception
from nova.tests import fixtures
from nova.tests.functional.notification_sample_tests \
import notification_sample_base
from nova.tests.unit import fake_notifier
class TestInstanceNotificationSample(
notification_sample_base.NotificationSampleTestBase):
def setUp(self):
self.flags(use_neutron=True)
super(TestInstanceNotificationSample, self).setUp()
self.neutron = fixtures.NeutronFixture(self)
self.useFixture(self.neutron)
self.cinder = fixtures.CinderFixture(self)
self.useFixture(self.cinder)
def _wait_until_swap_volume(self, server, volume_id):
for i in range(50):
volume_attachments = self.api.get_server_volumes(server['id'])
if len(volume_attachments) > 0:
for volume_attachment in volume_attachments:
if volume_attachment['volumeId'] == volume_id:
return
time.sleep(0.5)
self.fail('Volume swap operation failed.')
def _wait_until_swap_volume_error(self):
for i in range(50):
if self.cinder.swap_error:
return
time.sleep(0.5)
self.fail("Timed out waiting for volume swap error to occur.")
def test_instance_action(self):
# A single test case is used to test most of the instance action
# notifications to avoid booting up an instance for every action
# separately.
# Every instance action test function shall make sure that after the
# function the instance is in active state and usable by other actions.
# Therefore some action especially delete cannot be used here as
# recovering from that action would mean to recreate the instance and
# that would go against the whole purpose of this optimization
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
actions = [
self._test_power_off_on_server,
self._test_restore_server,
self._test_suspend_resume_server,
self._test_pause_unpause_server,
self._test_shelve_server,
self._test_shelve_offload_server,
self._test_unshelve_server,
self._test_resize_server,
self._test_revert_server,
self._test_resize_confirm_server,
self._test_snapshot_server,
self._test_rebuild_server,
self._test_reboot_server,
self._test_reboot_server_error,
self._test_trigger_crash_dump,
self._test_volume_attach_detach_server,
self._test_rescue_server,
self._test_unrescue_server,
self._test_soft_delete_server,
self._test_attach_volume_error,
]
for action in actions:
fake_notifier.reset()
action(server)
# Ensure that instance is in active state after an action
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
def test_create_delete_server(self):
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self.api.delete_server(server['id'])
self._wait_until_deleted(server)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
# This list needs to be in order.
expected_notifications = [
'instance-create-start',
'instance-create-end',
'instance-delete-start',
'instance-shutdown-start',
'instance-shutdown-end',
'instance-delete-end'
]
for idx, notification in enumerate(expected_notifications):
self._verify_notification(
notification,
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
@mock.patch('nova.compute.manager.ComputeManager._build_resources')
def test_create_server_error(self, mock_build):
def _build_resources(*args, **kwargs):
raise exception.FlavorDiskTooSmall()
mock_build.side_effect = _build_resources
server = self._boot_a_server(
expected_status='ERROR',
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-create-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-create-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _verify_instance_update_steps(self, steps, notifications,
initial=None):
replacements = {}
if initial:
replacements = initial
for i, step in enumerate(steps):
replacements.update(step)
self._verify_notification(
'instance-update',
replacements=replacements,
actual=notifications[i])
return replacements
def test_create_delete_server_with_instance_update(self):
# This makes server network creation synchronous which is necessary
# for notification samples that expect instance.info_cache.network_info
# to be set.
self.useFixture(fixtures.SpawnIsSynchronousFixture())
self.flags(notify_on_state_change='vm_and_task_state',
group='notifications')
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
instance_updates = self._wait_for_notifications('instance.update', 7)
        # The first notification comes from the nova-conductor; the
        # rest are from the nova-compute. To keep the test simpler,
        # assert this fact and then modify the publisher_id of the
        # first notification to match the template.
self.assertEqual('conductor:fake-mini',
instance_updates[0]['publisher_id'])
instance_updates[0]['publisher_id'] = 'nova-compute:fake-mini'
create_steps = [
# nothing -> scheduling
{'reservation_id': server['reservation_id'],
'uuid': server['id'],
'host': None,
'node': None,
'state_update.new_task_state': 'scheduling',
'state_update.old_task_state': 'scheduling',
'state_update.state': 'building',
'state_update.old_state': 'building',
'state': 'building'},
# scheduling -> building
{
'state_update.new_task_state': None,
'state_update.old_task_state': 'scheduling',
'task_state': None},
# scheduled
{'host': 'compute',
'node': 'fake-mini',
'state_update.old_task_state': None},
# building -> networking
{'state_update.new_task_state': 'networking',
'state_update.old_task_state': 'networking',
'task_state': 'networking'},
# networking -> block_device_mapping
{'state_update.new_task_state': 'block_device_mapping',
'state_update.old_task_state': 'networking',
'task_state': 'block_device_mapping',
'ip_addresses': [{
"nova_object.name": "IpPayload",
"nova_object.namespace": "nova",
"nova_object.version": "1.0",
"nova_object.data": {
"mac": "fa:16:3e:4c:2c:30",
"address": "192.168.1.3",
"port_uuid": "ce531f90-199f-48c0-816c-13e38010b442",
"meta": {},
"version": 4,
"label": "private-network",
"device_name": "tapce531f90-19"
}}]
},
# block_device_mapping -> spawning
{'state_update.new_task_state': 'spawning',
'state_update.old_task_state': 'block_device_mapping',
'task_state': 'spawning',
},
# spawning -> active
{'state_update.new_task_state': None,
'state_update.old_task_state': 'spawning',
'state_update.state': 'active',
'launched_at': '2012-10-29T13:42:11Z',
'state': 'active',
'task_state': None,
'power_state': 'running'},
]
replacements = self._verify_instance_update_steps(
create_steps, instance_updates)
fake_notifier.reset()
# Let's generate some bandwidth usage data.
# Just call the periodic task directly for simplicity
self.compute.manager._poll_bandwidth_usage(context.get_admin_context())
self.api.delete_server(server['id'])
self._wait_until_deleted(server)
instance_updates = self._get_notifications('instance.update')
self.assertEqual(2, len(instance_updates))
delete_steps = [
# active -> deleting
{'state_update.new_task_state': 'deleting',
'state_update.old_task_state': 'deleting',
'state_update.old_state': 'active',
'state': 'active',
'task_state': 'deleting',
'bandwidth': [
{'nova_object.namespace': 'nova',
'nova_object.name': 'BandwidthPayload',
'nova_object.data':
{'network_name': 'private-network',
'out_bytes': 0,
'in_bytes': 0},
'nova_object.version': '1.0'}],
'tags': ["tag1"]
},
# deleting -> deleted
{'state_update.new_task_state': None,
'state_update.old_task_state': 'deleting',
'state_update.old_state': 'active',
'state_update.state': 'deleted',
'state': 'deleted',
'task_state': None,
'terminated_at': '2012-10-29T13:42:11Z',
'ip_addresses': [],
'power_state': 'pending',
'bandwidth': [],
'tags': ["tag1"]
},
]
self._verify_instance_update_steps(delete_steps, instance_updates,
initial=replacements)
def _test_power_off_on_server(self, server):
self.api.post_server_action(server['id'], {'os-stop': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHUTOFF')
self.api.post_server_action(server['id'], {'os-start': {}})
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-power_off-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-power_off-end',
replacements={
'reservation_id': server['reservation_id'],
'power_state': 'running',
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-power_on-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-power_on-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
def _test_shelve_server(self, server):
        self.flags(shelved_offload_time=-1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
def _test_shelve_offload_server(self, server):
self.flags(shelved_offload_time=-1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.api.post_server_action(server['id'], {'shelveOffload': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-shelve_offload-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-shelve_offload-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.api.post_server_action(server['id'], {'unshelve': None})
def _test_unshelve_server(self, server):
# setting the shelved_offload_time to 0 should set the
# instance status to 'SHELVED_OFFLOADED'
        self.flags(shelved_offload_time=0)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-unshelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-unshelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def _test_suspend_resume_server(self, server):
post = {'suspend': {}}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'SUSPENDED')
post = {'resume': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
        # Four versioned notifications are generated.
# 0. instance-suspend-start
# 1. instance-suspend-end
# 2. instance-resume-start
# 3. instance-resume-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-suspend-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-suspend-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-resume-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-resume-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.flags(reclaim_instance_interval=0)
def _test_pause_unpause_server(self, server):
self.api.post_server_action(server['id'], {'pause': {}})
self._wait_for_state_change(self.api, server, 'PAUSED')
self.api.post_server_action(server['id'], {'unpause': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
# Four versioned notifications are generated
# 0. instance-pause-start
# 1. instance-pause-end
# 2. instance-unpause-start
# 3. instance-unpause-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-pause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-pause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-unpause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-unpause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
def _test_resize_server(self, server):
self.flags(allow_resize_to_same_host=True)
other_flavor_body = {
'flavor': {
'name': 'other_flavor',
'ram': 256,
'vcpus': 1,
'disk': 1,
'id': 'd5a8bb54-365a-45ae-abdb-38d249df7845'
}
}
other_flavor_id = self.api.post_flavor(other_flavor_body)['id']
extra_specs = {
"extra_specs": {
"hw:watchdog_action": "reset"}}
self.admin_api.post_extra_spec(other_flavor_id, extra_specs)
# Ignore the create flavor notification
fake_notifier.reset()
post = {
'resize': {
'flavorRef': other_flavor_id
}
}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.api, server, 'VERIFY_RESIZE')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
# This list needs to be in order.
expected_notifications = [
'instance-resize-start',
'instance-resize-end',
'instance-resize_finish-start',
'instance-resize_finish-end'
]
for idx, notification in enumerate(expected_notifications):
self._verify_notification(
notification,
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
post = {'revertResize': None}
self.api.post_server_action(server['id'], post)
def _test_snapshot_server(self, server):
post = {'createImage': {'name': 'test-snap'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.snapshot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-snapshot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-snapshot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _test_rebuild_server(self, server):
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
# Before going back to ACTIVE state
# server state need to be changed to REBUILD state
self._wait_for_state_change(self.api, server,
expected_status='REBUILD')
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-rebuild-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_rebuild_instance_with_claim')
def test_rebuild_server_exc(self, mock_rebuild):
def _compute_resources_unavailable(*args, **kwargs):
raise exception.ComputeResourcesUnavailable(
reason="fake-resource")
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
fake_notifier.reset()
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
mock_rebuild.side_effect = _compute_resources_unavailable
self._wait_for_state_change(self.api, server, expected_status='ERROR')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
def _test_restore_server(self, server):
self.flags(reclaim_instance_interval=30)
self.api.delete_server(server['id'])
self._wait_for_state_change(self.api, server, 'SOFT_DELETED')
self.api.post_server_action(server['id'], {'restore': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-restore-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-restore-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self.flags(reclaim_instance_interval=0)
def _test_reboot_server(self, server):
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.virt.fake.SmallFakeDriver.reboot')
def _test_reboot_server_error(self, server, mock_reboot):
def _hard_reboot(*args, **kwargs):
raise exception.UnsupportedVirtType(virt="FakeVirt")
mock_reboot.side_effect = _hard_reboot
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.error')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _attach_volume_to_server(self, server, volume_id):
self.api.post_server_volume(
server['id'], {"volumeAttachment": {"volumeId": volume_id}})
self._wait_for_notification('instance.volume_attach.end')
def _detach_volume_from_server(self, server, volume_id):
self.api.delete_server_volume(server['id'], volume_id)
self._wait_for_notification('instance.volume_detach.end')
    def _volume_swap_server(self, server, attachment_id, volume_id):
        self.api.put_server_volume(server['id'], attachment_id, volume_id)
def test_volume_swap_server(self):
server = self._boot_a_server(
extra_params={'networks':
[{'port': self.neutron.port_1['id']}]})
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
self.cinder.swap_volume_instance_uuid = server['id']
self._volume_swap_server(server, self.cinder.SWAP_OLD_VOL,
self.cinder.SWAP_NEW_VOL)
self._wait_until_swap_volume(server, self.cinder.SWAP_NEW_VOL)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_swap-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-volume_swap-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def test_volume_swap_server_with_error(self):
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self._attach_volume_to_server(server, self.cinder.SWAP_ERR_OLD_VOL)
self.cinder.swap_volume_instance_error_uuid = server['id']
self._volume_swap_server(server, self.cinder.SWAP_ERR_OLD_VOL,
self.cinder.SWAP_ERR_NEW_VOL)
self._wait_until_swap_volume_error()
# Seven versioned notifications are generated. We only rely on the
# first six because _wait_until_swap_volume_error will return True
# after volume_api.unreserve is called on the cinder fixture, and that
# happens before the instance fault is handled in the compute manager
# which generates the last notification (compute.exception).
# 0. instance-create-start
# 1. instance-create-end
# 2. instance-volume_attach-start
# 3. instance-volume_attach-end
# 4. instance-volume_swap-start
# 5. instance-volume_swap-error
# 6. compute.exception
self.assertTrue(len(fake_notifier.VERSIONED_NOTIFICATIONS) >= 6,
'Unexpected number of versioned notifications. '
'Expected at least 6, got: %s' %
len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_swap-start',
replacements={
'new_volume_id': self.cinder.SWAP_ERR_NEW_VOL,
'old_volume_id': self.cinder.SWAP_ERR_OLD_VOL,
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-volume_swap-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def _test_revert_server(self, server):
pass
def _test_resize_confirm_server(self, server):
pass
def _test_trigger_crash_dump(self, server):
pass
def _test_volume_attach_detach_server(self, server):
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_attach-start
# 1. volume_attach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_attach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_attach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
fake_notifier.reset()
self._detach_volume_from_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_detach-start
# 1. volume_detach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_detach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_detach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _test_rescue_server(self, server):
pass
def _test_unrescue_server(self, server):
pass
def _test_soft_delete_server(self, server):
pass
@mock.patch('nova.volume.cinder.API.attach')
def _test_attach_volume_error(self, server, mock_attach):
def attach_volume(*args, **kwargs):
raise exception.CinderConnectionFailed(
reason="Connection timed out")
mock_attach.side_effect = attach_volume
post = {"volumeAttachment": {"volumeId": self.cinder.SWAP_OLD_VOL}}
self.api.post_server_volume(server['id'], post)
self._wait_for_notification('instance.volume_attach.error')
# 0. volume_attach-start
# 1. volume_attach-error
# 2. compute.exception
        # We only rely on the first 2 notifications; in this case we don't
        # care about the exception notification.
self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_attach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_attach-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
| apache-2.0 | 4,255,457,280,156,007,400 | 39.988263 | 79 | 0.566777 | false |
stchepanhagn/domain-learning | plan_learning.py | 1 | 2767 | """ plan_learning.py
- This module contains the procedure used for learning plans from experience.
Copyright (C) 2016 Stephan Chang
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, located in the root of this repository.
If not, see <http://www.gnu.org/licenses/>.
"""
import pdb
import planning
import sys
import random
def main(args):
verbose = '-v' in args
n_arg = '-n' in args
try:
i = 1 + int(verbose)
examples_file = args[i]
domain_name = args[i+1]
except:
print "usage: {cmd} [-v] examples_file"\
" domain_name".format(cmd=args[0])
return
print """
PDDL Domain Learning Copyright (C) 2016 Stephan Chang
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
"""
examples = []
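    # Each line of the examples file is expected to look like
    # "pre1,pre2|op1|eff1,eff2" -- three '|'-separated fields holding
    # comma-separated preconditions, operators and effects (inferred from
    # the parsing below).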
print "Parsing examples..."
with open(examples_file) as f:
line = f.readline().replace('\n', '')
while line:
triple = line.split('|')
example = (triple[0], triple[1], triple[2])
examples.append(example)
line = f.readline().replace('\n', '')
print "Done reading {n_examples} training examples!".format(n_examples=len(examples))
if not f.closed:
print "Warning: file stream is still open."
if n_arg:
n_examples = int(args[i+3])
else:
n_examples = len(examples)
print "Creating domain..."
domain = planning.Domain(domain_name)
# random.shuffle(examples)
for i in range(n_examples):
preconditions = examples[i][0].split(',')
operators = examples[i][1].split(',')
effects = examples[i][2].split(',')
domain.add_all_predicates(preconditions)
domain.add_all_predicates(effects)
domain.add_actions(operators, preconditions, effects)
print "Done!"
if verbose:
print str(domain)
else:
print "Outputting to file..."
output_file_name = "{domain_name}.pddl".format(domain_name=domain_name)
with open(output_file_name, 'w') as f:
f.write(str(domain))
print "Done!"
if __name__ == '__main__':
main(sys.argv)
| gpl-3.0 | 8,032,233,054,267,307,000 | 29.744444 | 89 | 0.647271 | false |
dcy/epush | examples/rabbitmq/xiaomi.py | 1 | 1181 | #!/usr/bin/env python
#coding:utf-8
import pika
import json
HOST = 'localhost'
USERNAME = 'hisir'
PASSWORD = 'hisir123'
class Xiaomi():
def __init__(self):
credentials = pika.PlainCredentials(USERNAME, PASSWORD)
self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=HOST, credentials=credentials))
self.channel = self.connection.channel()
def notification_send(self):
data = {'push_method': 'notification_send',
'title': 'Test 中文',
'description': 'Content',
'registration_id': 'go6VssZlTDDypm+hxYdaxycXtqM7M9NsTPbCjzyIyh0='}
self.in_mq(data)
def all(self):
data = {'push_method':'all',
'title':'Test中文',
'description':'Test'}
self.in_mq(data)
def end(self):
self.channel.close()
self.connection.close()
def in_mq(self, data):
self.channel.basic_publish(exchange='',
routing_key='xiaomi_c',
body=json.dumps(data))
if __name__ == "__main__":
xiaomi = Xiaomi()
xiaomi.notification_send()
#xiaomi.all()
xiaomi.end()
| bsd-3-clause | -3,308,385,428,919,708,000 | 23.4375 | 112 | 0.57971 | false |
iShoto/testpy | codes/20200104_metric_learning_mnist/src/train_mnist_original_center.py | 1 | 5545 | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.utils.data import DataLoader
import torch.optim.lr_scheduler as lr_scheduler
from torch.autograd.function import Function
import torchvision
import os
import matplotlib.pyplot as plt
import argparse
from tqdm import trange
import numpy as np
from sklearn.metrics import classification_report
from losses import CenterLoss
from mnist_net import Net
import mnist_loader
# cf. https://cpp-learning.com/center-loss/
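# Rough sketch of the objective optimized below (the standard center-loss
# formulation; the actual implementation lives in losses.CenterLoss):
#   L = L_nll + loss_weight * L_center,  L_center = 1/2 * sum_i ||f(x_i) - c_{y_i}||^2
# where f(x_i) is the 2-D feature produced by Net and c_{y_i} is a learnable
# center for class y_i.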
def main():
args = parse_args()
# Dataset
train_loader, test_loader, classes = mnist_loader.load_dataset(args.dataset_dir, img_show=True)
# Device
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Model
model = Net().to(device)
print(model)
# Loss
nllloss = nn.NLLLoss().to(device) # CrossEntropyLoss = log_softmax + NLLLoss
loss_weight = 1
centerloss = CenterLoss(10, 2).to(device)
# Optimizer
dnn_optimizer = optim.SGD(model.parameters(), lr=0.001, momentum=0.9, weight_decay=0.0005)
    scheduler = lr_scheduler.StepLR(dnn_optimizer, 20, gamma=0.8)
    center_optimizer = optim.SGD(centerloss.parameters(), lr=0.5)
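    # The class centers are learnable parameters themselves, so they get their
    # own SGD instance; the comparatively large lr (0.5) acts as the center
    # update rate (alpha in the center-loss paper).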
print('Start training...')
for epoch in range(100):
# Update parameters.
epoch += 1
        scheduler.step()
# Train and test a model.
train_acc, train_loss, feat, labels = train(device, train_loader, model, nllloss, loss_weight, centerloss, dnn_optimizer, center_optimizer)
test_acc, test_loss = test(device, test_loader, model, nllloss, loss_weight, centerloss)
stdout_temp = 'Epoch: {:>3}, train acc: {:<8}, train loss: {:<8}, test acc: {:<8}, test loss: {:<8}'
print(stdout_temp.format(epoch, train_acc, train_loss, test_acc, test_loss))
# Visualize features of each class.
vis_img_path = args.vis_img_path_temp.format(str(epoch).zfill(3))
visualize(feat.data.cpu().numpy(), labels.data.cpu().numpy(), epoch, vis_img_path)
# Save a trained model.
model_path = args.model_path_temp.format(str(epoch).zfill(3))
torch.save(model.state_dict(), model_path)
def train(device, train_loader, model, nllloss, loss_weight, centerloss, dnn_optimizer, center_optimizer):
running_loss = 0.0
pred_list = []
label_list = []
ip1_loader = []
idx_loader = []
model.train()
for i,(imgs, labels) in enumerate(train_loader):
# Set batch data.
imgs, labels = imgs.to(device), labels.to(device)
# Predict labels.
ip1, pred = model(imgs)
# Calculate loss.
loss = nllloss(pred, labels) + loss_weight * centerloss(labels, ip1)
# Initilize gradient.
dnn_optimizer.zero_grad()
center_optimizer.zero_grad()
# Calculate gradient.
loss.backward()
# Update parameters.
dnn_optimizer.step()
center_optimizer.step()
# For calculation.
running_loss += loss.item()
pred_list += [int(p.argmax()) for p in pred]
label_list += [int(l) for l in labels]
# For visualization.
ip1_loader.append(ip1)
idx_loader.append((labels))
# Calculate training accurary and loss.
result = classification_report(pred_list, label_list, output_dict=True)
train_acc = round(result['weighted avg']['f1-score'], 6)
train_loss = round(running_loss / len(train_loader.dataset), 6)
# Concatinate features and labels.
feat = torch.cat(ip1_loader, 0)
labels = torch.cat(idx_loader, 0)
return train_acc, train_loss, feat, labels
def test(device, test_loader, model, nllloss, loss_weight, centerloss):
model = model.eval()
# Prediciton
running_loss = 0.0
pred_list = []
label_list = []
for i,(imgs, labels) in enumerate(test_loader):
with torch.no_grad():
# Set batch data.
imgs, labels = imgs.to(device), labels.to(device)
# Predict labels.
ip1, pred = model(imgs)
# Calculate loss.
loss = nllloss(pred, labels) + loss_weight * centerloss(labels, ip1)
# Append predictions and labels.
running_loss += loss.item()
pred_list += [int(p.argmax()) for p in pred]
label_list += [int(l) for l in labels]
# Calculate accuracy.
result = classification_report(pred_list, label_list, output_dict=True)
test_acc = round(result['weighted avg']['f1-score'], 6)
test_loss = round(running_loss / len(test_loader.dataset), 6)
return test_acc, test_loss
def visualize(feat, labels, epoch, vis_img_path):
colors = ['#ff0000', '#ffff00', '#00ff00', '#00ffff', '#0000ff',
'#ff00ff', '#990000', '#999900', '#009900', '#009999']
plt.figure()
for i in range(10):
plt.plot(feat[labels==i, 0], feat[labels==i, 1], '.', color=colors[i])
plt.legend(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'], loc='best')
plt.xlim(left=-8, right=8)
plt.ylim(bottom=-8, top=8)
plt.text(-7.8, 7.3, "epoch=%d" % epoch)
plt.savefig(vis_img_path)
plt.clf()
def parse_args():
arg_parser = argparse.ArgumentParser(description="parser for focus one")
arg_parser.add_argument("--dataset_dir", type=str, default='../inputs/')
arg_parser.add_argument("--model_dir", type=str, default='../outputs/models/checkpoints/')
arg_parser.add_argument("--model_path_temp", type=str, default='../outputs/models/checkpoints/mnist_original_softmax_center_epoch_{}.pth')
arg_parser.add_argument("--vis_img_dir", type=str, default='../outputs/visual/')
arg_parser.add_argument("--vis_img_path_temp", type=str, default='../outputs/visual/epoch_{}.png')
args = arg_parser.parse_args()
os.makedirs(args.dataset_dir, exist_ok=True)
os.makedirs(args.model_dir, exist_ok=True)
os.makedirs(args.vis_img_dir, exist_ok=True)
return args
if __name__ == "__main__":
main()
| mit | -1,161,285,678,805,685,000 | 30.867816 | 141 | 0.690532 | false |
iw3hxn/LibrERP | account_payment_term_month/models/inherit_account_invoice.py | 1 | 3307 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Micronaet SRL (<http://www.micronaet.it>).
# Copyright (C) 2014 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import orm
from tools.translate import _
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def action_move_create(self, cr, uid, ids, context=None):
context = context or self.pool['res.users'].context_get(cr, uid)
ait_obj = self.pool['account.invoice.tax']
amount_tax = 0.0
if isinstance(ids, (int, long)):
ids = [ids]
for inv in self.browse(cr, uid, ids, context=context):
amount_tax = context.get('amount_tax', 0.0)
if not amount_tax:
compute_taxes = ait_obj.compute(cr, uid, inv.id, context=context)
for tax in compute_taxes:
amount_tax += compute_taxes[tax]['amount']
context.update({'amount_tax': amount_tax})
super(account_invoice, self).action_move_create(cr, uid, [inv.id], context=context)
return True
def onchange_payment_term_date_invoice(self, cr, uid, ids, payment_term_id, date_invoice):
res = {'value': {}}
if not ids:
return res
if not payment_term_id:
return res
context = self.pool['res.users'].context_get(cr, uid)
pt_obj = self.pool['account.payment.term']
ait_obj = self.pool['account.invoice.tax']
if not date_invoice:
date_invoice = time.strftime('%Y-%m-%d')
compute_taxes = ait_obj.compute(cr, uid, ids, context=context)
amount_tax = 0
for tax in compute_taxes:
amount_tax += compute_taxes[tax]['amount']
context.update({'amount_tax': amount_tax})
pterm_list = pt_obj.compute(cr, uid, payment_term_id, value=1, date_ref=date_invoice, context=context)
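        # pterm_list is a list of (due_date, amount) pairs computed from the
        # payment term lines; the invoice due date is the latest of them.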
if pterm_list:
pterm_list = [line[0] for line in pterm_list]
pterm_list.sort()
res = {'value': {'date_due': pterm_list[-1]}}
else:
payment = self.pool['account.payment.term'].browse(cr, uid, payment_term_id, context)
raise orm.except_orm(_('Data Insufficient "{0}" !'.format(payment.name)),
_('The payment term of supplier does not have a payment term line!'))
return res
| agpl-3.0 | 286,080,595,633,511,650 | 40.3375 | 110 | 0.586332 | false |
lablup/sorna-agent | src/ai/backend/kernel/vendor/aws_polly/__init__.py | 1 | 3171 | import asyncio
import ctypes
import logging
import os
import threading
import janus
from ... import BaseRunner
from .inproc import PollyInprocRunner
log = logging.getLogger()
class Runner(BaseRunner):
log_prefix = 'vendor.aws_polly-kernel'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.inproc_runner = None
self.sentinel = object()
self.input_queue = None
self.output_queue = None
# NOTE: If credentials are missing,
# boto3 will try to use the instance role.
self.access_key = \
self.child_env.get('AWS_ACCESS_KEY_ID', None)
self.secret_key = \
self.child_env.get('AWS_SECRET_ACCESS_KEY', None)
os.environ['AWS_DEFAULT_REGION'] = \
self.child_env.get('AWS_DEFAULT_REGION', 'ap-northeast-2')
async def init_with_loop(self):
self.input_queue = janus.Queue()
self.output_queue = janus.Queue()
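        # janus queues expose two endpoints of the same queue: async_q for
        # this event loop and sync_q for the blocking in-process runner thread.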
async def build_heuristic(self) -> int:
raise NotImplementedError
async def execute_heuristic(self) -> int:
raise NotImplementedError
async def query(self, code_text) -> int:
self.ensure_inproc_runner()
await self.input_queue.async_q.put(code_text)
# Read the generated outputs until done
while True:
try:
msg = await self.output_queue.async_q.get()
except asyncio.CancelledError:
break
self.output_queue.async_q.task_done()
if msg is self.sentinel:
break
self.outsock.send_multipart(msg)
return 0
async def complete(self, data):
self.outsock.send_multipart([
b'completion',
[],
])
async def interrupt(self):
if self.inproc_runner is None:
log.error('No user code is running!')
return
# A dirty hack to raise an exception inside a running thread.
target_tid = self.inproc_runner.ident
if target_tid not in {t.ident for t in threading.enumerate()}:
log.error('Interrupt failed due to missing thread.')
return
affected_count = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(target_tid),
ctypes.py_object(KeyboardInterrupt))
if affected_count == 0:
log.error('Interrupt failed due to invalid thread identity.')
elif affected_count > 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(target_tid),
ctypes.c_long(0))
log.error('Interrupt broke the interpreter state -- '
'recommended to reset the session.')
async def start_service(self, service_info):
return None, {}
def ensure_inproc_runner(self):
if self.inproc_runner is None:
self.inproc_runner = PollyInprocRunner(
self.input_queue.sync_q,
self.output_queue.sync_q,
self.sentinel,
self.access_key,
self.secret_key)
self.inproc_runner.start()
| lgpl-3.0 | -6,937,433,989,262,053,000 | 31.690722 | 73 | 0.584358 | false |
zetaops/ulakbus | ulakbus/views/reports/base.py | 1 | 6017 | # -*- coding: utf-8 -*-
"""
"""
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from io import BytesIO
from zengine.lib.translation import gettext as _, gettext_lazy
import six
from zengine.forms import JsonForm
from zengine.forms import fields
from zengine.views.base import BaseView
import re
import base64
from datetime import datetime
try:
from ulakbus.lib.pdfdocument.document import PDFDocument, register_fonts_from_paths
except:
print("Warning: Reportlab module not found")
from ulakbus.lib.s3_file_manager import S3FileManager
from ulakbus.lib.common import get_file_url
class ReporterRegistry(type):
registry = {}
_meta = None
def __new__(mcs, name, bases, attrs):
# for key, prop in attrs.items():
# if hasattr(prop, 'view_method'):
if name == 'Reporter':
ReporterRegistry._meta = attrs['Meta']
if 'Meta' not in attrs:
attrs['Meta'] = type('Meta', (object,), ReporterRegistry._meta.__dict__)
else:
for k, v in ReporterRegistry._meta.__dict__.items():
if k not in attrs['Meta'].__dict__:
setattr(attrs['Meta'], k, v)
new_class = super(ReporterRegistry, mcs).__new__(mcs, name, bases, attrs)
if name != 'Reporter':
ReporterRegistry.registry[name] = new_class
return new_class
@staticmethod
def get_reporters():
return [{"text": v.get_title(),
"wf": 'generic_reporter',
"model": k,
"kategori": 'Raporlar',
"param": 'id'} for k, v in ReporterRegistry.registry.items()]
@staticmethod
def get_permissions():
return [("report.%s" % k, v.get_title(), "") for k, v in ReporterRegistry.registry.items()]
@staticmethod
def get_reporter(name):
return ReporterRegistry.registry[name]
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
@six.add_metaclass(ReporterRegistry)
class Reporter(BaseView):
TITLE = ''
class Meta:
pass
def __init__(self, current):
super(Reporter, self).__init__(current)
self.cmd = current.input.get('cmd', 'show')
# print("CMD", self.cmd)
if self.cmd == 'show':
self.show()
elif self.cmd == 'printout':
self.printout()
class ReportForm(JsonForm):
printout = fields.Button(gettext_lazy(u"Yazdır"), cmd="printout")
def show(self):
objects = self.get_objects()
frm = self.ReportForm(current=self.current, title=self.get_title())
if objects:
frm.help_text = ''
if isinstance(objects[0], dict):
self.output['object'] = {'fields': objects, 'type': 'table-multiRow'}
else:
objects = dict((k, str(v)) for k, v in objects)
self.output['object'] = objects
else:
frm.help_text = _(u'Kayıt bulunamadı')
self.output['object'] = {}
self.set_client_cmd('form', 'show')
self.output['forms'] = frm.serialize()
self.output['forms']['constraints'] = {}
self.output['forms']['grouping'] = []
self.output['meta'] = {}
def printout(self):
register_fonts_from_paths('Vera.ttf',
'VeraIt.ttf',
'VeraBd.ttf',
'VeraBI.ttf',
'Vera')
objects = self.get_objects()
f = BytesIO()
pdf = PDFDocument(f, font_size=14)
pdf.init_report()
pdf.h1(self.tr2ascii(self.get_title()))
ascii_objects = []
if isinstance(objects[0], dict):
headers = objects[0].keys()
ascii_objects.append([self.tr2ascii(h) for h in headers])
for obj in objects:
ascii_objects.append([self.tr2ascii(k) for k in obj.values()])
else:
for o in objects:
ascii_objects.append((self.tr2ascii(o[0]), self.tr2ascii(o[1])))
pdf.table(ascii_objects)
pdf.generate()
download_url = self.generate_temp_file(
name=self.generate_file_name(),
content=base64.b64encode(f.getvalue()),
file_type='application/pdf',
ext='pdf'
)
self.set_client_cmd('download')
self.output['download_url'] = download_url
@staticmethod
def generate_temp_file(name, content, file_type, ext):
f = S3FileManager()
key = f.store_file(name=name, content=content, type=file_type, ext=ext)
return get_file_url(key)
def generate_file_name(self):
return "{0}-{1}".format(
FILENAME_RE.sub('-', self.tr2ascii(self.get_title()).lower()),
datetime.now().strftime("%d.%m.%Y-%H.%M.%S")
)
@staticmethod
def convert_choices(choices_dict_list):
result = []
for d in choices_dict_list:
try:
k = int(d[0])
except:
k = d[0]
result.append((k, d[1]))
return dict(result)
def get_headers(self):
return self.HEADERS
@classmethod
def get_title(cls):
return six.text_type(cls.TITLE)
def get_objects(self):
raise NotImplementedError
def tr2ascii(self, inp):
inp = six.text_type(inp)
shtlst = [
('ğ', 'g'),
('ı', 'i'),
('İ', 'I'),
('ç', 'c'),
('ö', 'o'),
('ü', 'u'),
('ş', 's'),
('Ğ', 'G'),
('Ş', 'S'),
('Ö', 'O'),
('Ü', 'U'),
('Ç', 'C'),
]
for t, a in shtlst:
inp = inp.replace(t, a)
return inp
def ReportDispatcher(current):
ReporterRegistry.get_reporter(current.input['model'])(current)
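# A minimal sketch (hypothetical names) of how a concrete reporter plugs in:
# subclassing Reporter is enough, since the ReporterRegistry metaclass
# registers every subclass and exposes it through get_reporters():
#
# class DummyCountReporter(Reporter):
#     TITLE = 'Dummy Counts'
#
#     def get_objects(self):
#         # Either a list of dicts (rendered as a multi-row table)
#         # or a list of (key, value) pairs.
#         return [('total', 42)]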
| gpl-3.0 | -235,994,614,447,476,320 | 29.467005 | 99 | 0.530157 | false |
pombredanne/nTLP | examples/gridworlds/gw_bm_analysis.py | 1 | 4888 | # Copyright (c) 2011, 2012 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $Id$
# Take averages of the output from the gridworld benchmark script.
import numpy as np
import sys
import os
import string
expform = (string.Template("exp(${SOLVER}_a*x + ${SOLVER}_b)"), "exp(%.3f*x + %.3f)")
linform = (string.Template("${SOLVER}_a*x + ${SOLVER}_b"), "%.3f*x + %.3f")
plotfit = string.Template("""${SOLVER}_a = ${SOLVER}_b = 0.5
${SOLVER}_f(x) = $FORMULA
fit ${SOLVER}_f(x) \"$FILENAME\" using $XCOL:((stringcolumn(1) eq "$SOLVER") ? $$$YCOL : 1/0) via ${SOLVER}_a, ${SOLVER}_b
""")
plottpl = string.Template("\"$FILENAME\" using $XCOL:((stringcolumn(1) eq \"$SOLVER\") ? $$$YCOL : 1/0):$ERRCOL with errorbars \
title \"$SOLVER\" lt $COLOR, ${SOLVER}_f(x) title sprintf(\"$SOLVER fit: $FORMULA\", ${SOLVER}_a, ${SOLVER}_b) lt $COLOR")
pf = string.Template("""
set xlabel "$XAXIS"
set ylabel "$YAXIS"
set terminal png font "" 10
set output "$FN_PNG"
""")
columns = ["", "Solver", "Cells", "Goals", "WDensity", "AvgTime", "StDevTime", "AvgStates", "StDevStates"]
colnames = ["", "Solver", "Grid cells", "Number of goals", "Wall Density", "CPU time (s)", "", "Number of states", ""]
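# Map each averaged column to the column holding its standard error, so the
# gnuplot errorbars template can pick the matching pair.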
err = { columns.index("AvgTime") : columns.index("StDevTime"),
columns.index("AvgStates") : columns.index("StDevStates") }
if len(sys.argv) < 4:
print "Usage: gw_bm_analysis.py [data file] [x-col] [y-col] <exp/lin>"
sys.exit(0)
d = np.genfromtxt(sys.argv[1], dtype="S16, i4, i4, i4, f8, f8, i4", names=True)
xcol = columns.index(sys.argv[2])
ycol = columns.index(sys.argv[3])
if len(sys.argv) >= 5:
EXP = (sys.argv[4] == "exp")
else:
# Default linear fit
EXP = False
if EXP: eqn = expform
else: eqn = linform
avgs = []
solvers = ["NuSMV", "jtlv", "gr1c", "SPIN"]
# List of columns specifying dimension of a grid
dimension = ["W", "H", "Goals", "WDensity"]
for solver in solvers:
s_data = d[d["Solver"] == solver]
for dim in np.unique(s_data[dimension]):
# Mean & error in the mean
times = s_data[s_data[dimension] == dim]["Time"]
time_mean = times.mean()
time_stdev = times.std()/np.sqrt(len(times))
states = s_data[s_data[dimension] == dim]["NStates"]
states_mean = states.mean()
states_stdev = states.std()/np.sqrt(len(states))
avgs.append((solver, dim[0] * dim[1], dim[2], dim[3], time_mean,
time_stdev, states_mean, states_stdev))
(prefix, ext) = os.path.splitext(sys.argv[1])
outfile = prefix + ".avg" + ext
pltfile = prefix + ".avg.plt"
pngfile = prefix + ".png"
with open(outfile, "w") as f:
f.write(" ".join(columns[1:]) + "\n")
for a in avgs:
f.write("%s %d %d %.4f %.4f %.4f %.4f %.4f\n" % a)
with open(pltfile, "w") as f:
pl = []
for (n, solver) in enumerate(solvers):
fx = eqn[0].substitute(SOLVER=solver)
s = plotfit.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
YCOL=ycol, FORMULA=fx)
f.write(s)
s = plottpl.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
YCOL=ycol, ERRCOL=err[ycol], COLOR=n, FORMULA=eqn[1])
pl.append(s)
s = pf.safe_substitute(FN_PNG=pngfile, XAXIS=colnames[xcol],
YAXIS=colnames[ycol])
f.write(s)
if EXP: f.write("set log y\n")
f.write("plot " + ", ".join(pl))
| bsd-3-clause | 3,587,470,938,751,121,400 | 40.07563 | 128 | 0.653642 | false |
mathiasertl/django-ca | ca/django_ca/deprecation.py | 1 | 1194 | # This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>.
"""Deprecation classes in django-ca."""
class RemovedInDjangoCA120Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.20."""
class RemovedInDjangoCA121Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.21."""
class RemovedInDjangoCA122Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.22."""
RemovedInNextVersionWarning = RemovedInDjangoCA120Warning
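# Typical usage elsewhere in the code base would look like (illustrative only):
#
#   import warnings
#   warnings.warn("feature X is deprecated", RemovedInDjangoCA120Warning)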
| gpl-3.0 | 4,251,496,230,839,164,400 | 40.172414 | 98 | 0.767169 | false |
DataDog/integrations-core | couchbase/tests/test_unit.py | 1 | 3050 | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy
import mock
import pytest
from datadog_checks.couchbase import Couchbase
def test_camel_case_to_joined_lower(instance):
couchbase = Couchbase('couchbase', {}, [instance])
CAMEL_CASE_TEST_PAIRS = {
'camelCase': 'camel_case',
'FirstCapital': 'first_capital',
'joined_lower': 'joined_lower',
'joined_Upper1': 'joined_upper1',
'Joined_upper2': 'joined_upper2',
'Joined_Upper3': 'joined_upper3',
'_leading_Underscore': 'leading_underscore',
'Trailing_Underscore_': 'trailing_underscore',
'DOubleCAps': 'd_ouble_c_aps',
'@@@super--$$-Funky__$__$$%': 'super_funky',
}
for test_input, expected_output in CAMEL_CASE_TEST_PAIRS.items():
test_output = couchbase.camel_case_to_joined_lower(test_input)
assert test_output == expected_output, 'Input was {}, expected output was {}, actual output was {}'.format(
test_input, expected_output, test_output
)
def test_extract_seconds_value(instance):
couchbase = Couchbase('couchbase', {}, [instance])
EXTRACT_SECONDS_TEST_PAIRS = {
'3.45s': 3.45,
'12ms': 0.012,
'700.5us': 0.0007005,
u'733.364\u00c2s': 0.000733364,
'0': 0,
}
for test_input, expected_output in EXTRACT_SECONDS_TEST_PAIRS.items():
test_output = couchbase.extract_seconds_value(test_input)
assert test_output == expected_output, 'Input was {}, expected output was {}, actual output was {}'.format(
test_input, expected_output, test_output
)
def test__get_query_monitoring_data(instance_query):
"""
`query_monitoring_url` can potentially fail, be sure we don't raise when the
endpoint is not reachable
"""
couchbase = Couchbase('couchbase', {}, [instance_query])
couchbase._get_query_monitoring_data()
@pytest.mark.parametrize(
'test_case, extra_config, expected_http_kwargs',
[
(
"new auth config",
{'username': 'new_foo', 'password': 'bar', 'tls_verify': False},
{'auth': ('new_foo', 'bar'), 'verify': False},
),
("legacy config", {'user': 'new_foo', 'ssl_verify': False}, {'auth': ('new_foo', 'password'), 'verify': False}),
],
)
def test_config(test_case, extra_config, expected_http_kwargs, instance):
instance = deepcopy(instance)
instance.update(extra_config)
check = Couchbase('couchbase', {}, [instance])
with mock.patch('datadog_checks.base.utils.http.requests') as r:
r.get.return_value = mock.MagicMock(status_code=200)
check.check(instance)
http_wargs = dict(
auth=mock.ANY, cert=mock.ANY, headers=mock.ANY, proxies=mock.ANY, timeout=mock.ANY, verify=mock.ANY
)
http_wargs.update(expected_http_kwargs)
r.get.assert_called_with('http://localhost:8091/pools/default/tasks', **http_wargs)
| bsd-3-clause | -6,643,786,405,936,923,000 | 34.057471 | 120 | 0.623279 | false |
roshchupkin/hase | tools/VCF2hdf5.py | 1 | 4024 |
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from config import PYTHON_PATH
if PYTHON_PATH is not None:
for i in PYTHON_PATH: sys.path.insert(0,i)
import argparse
import h5py
import pandas as pd
import numpy as np
from hdgwas.tools import Timer
import tables
import glob
def probes_VCF2hdf5(data_path, save_path,study_name, chunk_size=1000000):
if os.path.isfile(os.path.join(save_path,'probes',study_name+'.h5')):
os.remove(os.path.join(save_path,'probes',study_name+'.h5'))
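    # Alleles are variable-length strings, so only their integer hashes are
    # stored in the HDF5 table; this dict accumulates the hash -> allele
    # mapping, which is written out separately as a compressed CSV below.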
hash_table={'keys':np.array([],dtype=np.int),'allele':np.array([])}
df=pd.read_csv(data_path,sep='\t',chunksize=chunk_size, header=None,index_col=None)
for i,chunk in enumerate(df):
print 'add chunk {}'.format(i)
print chunk.head()
chunk.columns=[ "CHR","bp" ,"ID",'allele1','allele2','QUAL','FILTER','INFO'] #TODO (high) parse INFO
hash_1=chunk.allele1.apply(hash)
hash_2=chunk.allele2.apply(hash)
k,indices=np.unique(np.append(hash_1,hash_2),return_index=True)
s=np.append(chunk.allele1,chunk.allele2)[indices]
ind=np.invert(np.in1d(k,hash_table['keys']))
hash_table['keys']=np.append(hash_table['keys'],k[ind])
hash_table['allele']=np.append(hash_table['allele'],s[ind])
chunk.allele1=hash_1
chunk.allele2=hash_2
chunk.to_hdf(os.path.join(save_path,'probes',study_name+'.h5'),data_columns=["CHR","bp" ,"ID",'allele1','allele2'], key='probes',format='table',append=True,
min_itemsize = 25, complib='zlib',complevel=9 )
pd.DataFrame.from_dict(hash_table).to_csv(os.path.join(save_path,'probes',study_name+'_hash_table.csv.gz'),index=False,compression='gzip', sep='\t')
def ind_VCF2hdf5(data_path, save_path,study_name):
if os.path.isfile(os.path.join(save_path,'individuals',study_name+'.h5')):
os.remove(os.path.join(save_path,'individuals',study_name+'.h5'))
n=[]
f=open(data_path,'r')
for i,j in enumerate(f):
n.append((j[:-1]))
f.close()
n=np.array(n)
chunk=pd.DataFrame.from_dict({"individual":n})
chunk.to_hdf(os.path.join(save_path,'individuals',study_name+'.h5'), key='individuals',format='table',
min_itemsize = 25, complib='zlib',complevel=9 )
def genotype_VCF2hdf5(data_path,id, save_path, study_name):
df=pd.read_csv(data_path, header=None, index_col=None,sep='\t', dtype=np.float16)
data=df.as_matrix()
print data.shape
print 'Saving chunk...{}'.format(os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'))
h5_gen_file = tables.open_file(
os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'), 'w', title=study_name)
atom = tables.Float16Atom()
genotype = h5_gen_file.create_carray(h5_gen_file.root, 'genotype', atom,
(data.shape),
title='Genotype',
filters=tables.Filters(complevel=9, complib='zlib'))
genotype[:] = data
h5_gen_file.close()
os.remove(data_path)
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Script to convert VCF data')
parser.add_argument("-study_name", required=True, type=str, help="Study specific name")
parser.add_argument("-id", type=str, help="subject id")
parser.add_argument("-data",required=True, type=str, help="path to file")
parser.add_argument("-out",required=True, type=str, help="path to results save folder")
parser.add_argument("-flag",required=True,type=str,choices=['individuals','probes','chunk'], help="path to file with SNPs info")
args = parser.parse_args()
print args
try:
print ('Creating directories...')
os.mkdir(os.path.join(args.out,'genotype') )
os.mkdir(os.path.join(args.out,'individuals') )
os.mkdir(os.path.join(args.out,'probes') )
os.mkdir(os.path.join(args.out,'tmp_files'))
except:
print('Directories "genotype","probes","individuals" are already exist in {}...'.format(args.out))
if args.flag=='probes':
probes_VCF2hdf5(args.data, args.out, args.study_name)
elif args.flag=='individuals':
ind_VCF2hdf5(args.data, args.out,args.study_name)
elif args.flag=='chunk':
genotype_VCF2hdf5(args.data,args.id, args.out,args.study_name)
| gpl-3.0 | 3,613,012,578,314,009,000 | 36.962264 | 158 | 0.696571 | false |
joanayma/pyautorunalize | pyautorunanalize.py | 1 | 5119 | #! /bin/env python
"""
PyAutorunalizer 0.1
Python script for autorunalize: lists Windows autorun items using Sysinternals'
Autoruns (https://live.sysinternals.com/autorunsc.exe). Version 11.6 or greater is needed.
Virustotal.com is used as an external database of viruses.
original idea: http://trustedsignal.blogspot.com.es/2012/02/finding-evil-automating-autoruns.html
original implementation uses cygwin32, bash and other blobs.
Virustotal API reference: https://github.com/botherder/virustotal/
Autoruns is part of the Sysinternals suite, which owns the copyright. Windows is a trademark of Microsoft.
Licence: GPLv2
#Use this script at your own risk.
This script is not intended as a substitute for any antivirus. It is just a sanity check.
Individuals that do not comply with the Virustotal or Sysinternals terms, or that harm the
antivirus industry, are outside my responsibility.
"""
import xml.etree.ElementTree as ElementTree
import json
import urllib,urllib.request
import sys,os,getopt,subprocess
fnull = open(os.devnull, "w")
def runanalizer(API_KEY):
#Check for autorunsc.exe
try:
with open('./autorunsc.exe'): pass
except IOError:
print('autorunsc.exe binary not found! Download from https://live.sysinternals.com/autorunsc.exe')
sys.exit(3)
try:
if os.environ['HTTP_PROXY'] != None:
proxies = {'https': 'http://{0}'.format(os.environ['HTTP_PROXY'])}
            opener = urllib.request.build_opener(urllib.request.ProxyHandler(proxies))
            urllib.request.install_opener(opener)
print("[Info] Going through proxies: ",proxies)
except KeyError:
#not defined
pass
    print('[Info] Getting list of files to analyze from Autoruns ...')
autoruns_proc = subprocess.Popen(['autorunsc.exe', "/accepteula", '-xaf'], stdout=subprocess.PIPE, stderr = fnull)
autoruns_xml = (autoruns_proc.communicate()[0].decode("utf_16"))
    autoruns_xml = autoruns_xml.replace('\r\n', '\n')
#parse XML output
#items =[[]]
try:
autoruns_tree = ElementTree.fromstring(autoruns_xml)
    except ElementTree.ParseError as e:
        print('[Error] Error parsing Autoruns\' XML output.\n Is Autoruns the latest version?\n', e)
sys.exit(1002)
for item in autoruns_tree:
text = "[Object]"
if item is None:
text = text + " Invalid item (mostly not a binary image)\n"
break
imagepath = item.findtext('imagepath')
name = item.findtext('itemname')
if imagepath is not None:
sha256hash = item.findtext('sha256hash')
text = text + '' + name + '\n ' + imagepath + '\n ' + sha256hash + '\n scanning... '
print(text)
result = scan(sha256hash, API_KEY)
print(result)
def scan(sha256hash, API_KEY):
VIRUSTOTAL_REPORT_URL = 'https://www.virustotal.com/vtapi/v2/file/report'
VIRUSTOTAL_SCAN_URL = 'https://www.virustotal.com/vtapi/v2/file/scan'
if sha256hash == None:
response = "No valid hash for this file"
return response
data = urllib.parse.urlencode({
'resource' : sha256hash,
'apikey' : API_KEY
})
data = data.encode('utf-8')
try:
request = urllib.request.Request(VIRUSTOTAL_REPORT_URL, data)
reply = urllib.request.urlopen(request)
        answer = reply.read().decode("utf-8")
report = json.loads(answer)
except Exception as e:
error = "\n[Error] Cannot obtain results from VirusTotal: {0}\n".format(e)
        return error
    if int(report['response_code']) == 0:
        response = report['verbose_msg']
    elif int(report['response_code']) < 0:
        response = 'Not found on Virustotal database!'
        # We could submit the file when it is not on Virustotal.
    else:
        response = 'FOUND'
        if int(report['positives']) == 0:
            response = response + ' but not infected.'
        else:
            for av, scan in report['scans'].items():
                if scan['detected']:
                    response = response + ' INFECTED!\n engine:' + av + ',\n malware:' + scan['result'] + '\n'
return response
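# For reference, the relevant shape of a VirusTotal v2 file report consumed by
# scan() looks roughly like this (abridged):
#   {"response_code": 1,
#    "verbose_msg": "Scan finished, information embedded",
#    "positives": 2,
#    "scans": {"SomeAV": {"detected": True, "result": "Trojan.Generic"}}}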
def help():
print(main.__doc__)
sys.exit(0)
def main(argv):
"""\n
Script for Windows basic security check using Sysinternal\'s Autoruns
and Virustotal.com\n
    Therefore, you need to get a public API key from http://www.virustotal.com for your
scripting analysis\n
and autorunsc.exe binary.\n
Usage:\n
autorunalize.exe [--help] --API-KEY YOUR_API_KEY\n
-h, --help Shows this help.\n
-a, --API-KEY Your public API key from Virustotal.
This a 64 characters hexadecimal string.\n
Example:\n
./autorunalize.exe --API-KEY YOUR_API_KEY\n
"""
API_KEY = ''
try:
opts, args = getopt.getopt(argv,"ha:",["help","API-KEY="])
except getopt.GetoptError:
print('pyrunanalizer.py --API-KEY YOUR_API_KEY_HERE')
sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h','--help'):
            help()
        elif opt in ("-a", "--API-KEY"):
            API_KEY = arg
    if API_KEY:
        runanalizer(API_KEY)
    else:
        help()
if __name__ == "__main__":
main(sys.argv[1:])
| gpl-2.0 | -8,408,628,864,717,367,000 | 33.355705 | 116 | 0.62942 | false |
bilke/OpenSG-1.8 | SConsLocal/scons-local-0.96.1/SCons/Tool/__init__.py | 2 | 13279 | """SCons.Tool
SCons tool selection.
This looks for modules that define a callable object that can modify
a construction environment as appropriate for a given tool (or tool
chain).
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "tool specification" in an arbitrary callable function. No
one needs to use or tie in to this subsystem in order to roll their own
tool definition.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/baseline/src/engine/SCons/Tool/__init__.py 0.96.1.D001 2004/08/23 09:55:29 knight"
import imp
import sys
import SCons.Errors
import SCons.Defaults
class ToolSpec:
def __init__(self, name):
self.name = name
def __call__(self, env, *args, **kw):
env.Append(TOOLS = [ self.name ])
apply(self.generate, ( env, ) + args, kw)
def __str__(self):
return self.name
def Tool(name, toolpath=[]):
"Select a canned Tool specification, optionally searching in toolpath."
try:
file, path, desc = imp.find_module(name, toolpath)
try:
module = imp.load_module(name, file, path, desc)
spec = ToolSpec(name)
spec.generate = module.generate
spec.exists = module.exists
return spec
finally:
if file:
file.close()
except ImportError, e:
pass
full_name = 'SCons.Tool.' + name
if not sys.modules.has_key(full_name):
try:
file, path, desc = imp.find_module(name,
sys.modules['SCons.Tool'].__path__)
mod = imp.load_module(full_name, file, path, desc)
setattr(SCons.Tool, name, mod)
except ImportError, e:
raise SCons.Errors.UserError, "No tool named '%s': %s" % (name, e)
if file:
file.close()
spec = ToolSpec(name)
spec.generate = sys.modules[full_name].generate
spec.exists = sys.modules[full_name].exists
return spec
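# Usage sketch (illustrative only; assumes an SCons construction environment
# `env` created elsewhere, e.g. via SCons.Environment.Environment()):
#   spec = Tool('gcc')      # locate the gcc tool module
#   if spec.exists(env):
#       spec(env)           # apply it: env['TOOLS'] gains 'gcc'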
def createProgBuilder(env):
"""This is a utility function that creates the Program
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
program = env['BUILDERS']['Program']
except KeyError:
program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction,
emitter = '$PROGEMITTER',
prefix = '$PROGPREFIX',
suffix = '$PROGSUFFIX',
src_suffix = '$OBJSUFFIX',
src_builder = 'Object',
target_scanner = SCons.Defaults.ProgScan)
env['BUILDERS']['Program'] = program
return program
def createStaticLibBuilder(env):
"""This is a utility function that creates the StaticLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
static_lib = env['BUILDERS']['StaticLibrary']
except KeyError:
static_lib = SCons.Builder.Builder(action = SCons.Defaults.ArAction,
emitter = '$LIBEMITTER',
prefix = '$LIBPREFIX',
suffix = '$LIBSUFFIX',
src_suffix = '$OBJSUFFIX',
src_builder = 'StaticObject')
env['BUILDERS']['StaticLibrary'] = static_lib
env['BUILDERS']['Library'] = static_lib
return static_lib
def createSharedLibBuilder(env):
"""This is a utility function that creates the SharedLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
shared_lib = env['BUILDERS']['SharedLibrary']
except KeyError:
action_list = [ SCons.Defaults.SharedCheck,
SCons.Defaults.ShLinkAction ]
shared_lib = SCons.Builder.Builder(action = action_list,
emitter = "$SHLIBEMITTER",
prefix = '$SHLIBPREFIX',
suffix = '$SHLIBSUFFIX',
target_scanner = SCons.Defaults.ProgScan,
src_suffix = '$SHOBJSUFFIX',
src_builder = 'SharedObject')
env['BUILDERS']['SharedLibrary'] = shared_lib
return shared_lib
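# Note: SharedCheck runs before ShLinkAction in the action list above; it is
# the guard that refuses to link non-shared objects into a shared library.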
def createObjBuilders(env):
"""This is a utility function that creates the StaticObject
and SharedObject Builders in an Environment if they
are not there already.
If they are there already, we return the existing ones.
This is a separate function because soooo many Tools
use this functionality.
The return is a 2-tuple of (StaticObject, SharedObject)
"""
try:
static_obj = env['BUILDERS']['StaticObject']
except KeyError:
static_obj = SCons.Builder.Builder(action = {},
emitter = {},
prefix = '$OBJPREFIX',
suffix = '$OBJSUFFIX',
src_builder = ['CFile', 'CXXFile'],
source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
env['BUILDERS']['StaticObject'] = static_obj
env['BUILDERS']['Object'] = static_obj
try:
shared_obj = env['BUILDERS']['SharedObject']
except KeyError:
shared_obj = SCons.Builder.Builder(action = {},
emitter = {},
prefix = '$SHOBJPREFIX',
suffix = '$SHOBJSUFFIX',
src_builder = ['CFile', 'CXXFile'],
source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
env['BUILDERS']['SharedObject'] = shared_obj
return (static_obj, shared_obj)
def createCFileBuilders(env):
"""This is a utility function that creates the CFile/CXXFile
Builders in an Environment if they
are not there already.
If they are there already, we return the existing ones.
This is a separate function because soooo many Tools
use this functionality.
The return is a 2-tuple of (CFile, CXXFile)
"""
try:
c_file = env['BUILDERS']['CFile']
except KeyError:
c_file = SCons.Builder.Builder(action = {},
emitter = {},
suffix = {None:'$CFILESUFFIX'})
env['BUILDERS']['CFile'] = c_file
env['CFILESUFFIX'] = '.c'
try:
cxx_file = env['BUILDERS']['CXXFile']
except KeyError:
cxx_file = SCons.Builder.Builder(action = {},
emitter = {},
suffix = {None:'$CXXFILESUFFIX'})
env['BUILDERS']['CXXFile'] = cxx_file
env['CXXFILESUFFIX'] = '.cc'
return (c_file, cxx_file)
def FindTool(tools, env):
for tool in tools:
t = Tool(tool)
if t.exists(env):
return tool
return None
def FindAllTools(tools, env):
def ToolExists(tool, env=env):
return Tool(tool).exists(env)
return filter (ToolExists, tools)
def tool_list(platform, env):
# XXX this logic about what tool to prefer on which platform
# should be moved into either the platform files or
# the tool files themselves.
# The search orders here are described in the man page. If you
# change these search orders, update the man page as well.
if str(platform) == 'win32':
"prefer Microsoft tools on Windows"
linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ]
c_compilers = ['msvc', 'mingw', 'gcc', 'icl', 'icc', 'cc', 'bcc32' ]
cxx_compilers = ['msvc', 'icc', 'g++', 'c++', 'bcc32' ]
assemblers = ['masm', 'nasm', 'gas', '386asm' ]
fortran_compilers = ['g77', 'ifl', 'cvf', 'fortran']
ars = ['mslib', 'ar', 'tlib']
elif str(platform) == 'os2':
"prefer IBM tools on OS/2"
linkers = ['ilink', 'gnulink', 'mslink']
c_compilers = ['icc', 'gcc', 'msvc', 'cc']
cxx_compilers = ['icc', 'g++', 'msvc', 'c++']
assemblers = ['nasm', 'masm', 'gas']
fortran_compilers = ['ifl', 'g77']
ars = ['ar', 'mslib']
elif str(platform) == 'irix':
"prefer MIPSPro on IRIX"
linkers = ['sgilink', 'gnulink']
c_compilers = ['sgicc', 'gcc', 'cc']
cxx_compilers = ['sgic++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['sgiar']
elif str(platform) == 'sunos':
"prefer Forte tools on SunOS"
linkers = ['sunlink', 'gnulink']
c_compilers = ['suncc', 'gcc', 'cc']
cxx_compilers = ['sunc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['sunar']
elif str(platform) == 'hpux':
"prefer aCC tools on HP-UX"
linkers = ['hplink', 'gnulink']
c_compilers = ['hpcc', 'gcc', 'cc']
cxx_compilers = ['hpc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['ar']
elif str(platform) == 'aix':
"prefer AIX Visual Age tools on AIX"
linkers = ['aixlink', 'gnulink']
c_compilers = ['aixcc', 'gcc', 'cc']
cxx_compilers = ['aixc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['aixf77', 'g77', 'fortran']
ars = ['ar']
else:
"prefer GNU tools on all other platforms"
linkers = ['gnulink', 'mslink', 'ilink']
c_compilers = ['gcc', 'msvc', 'icc', 'cc']
cxx_compilers = ['g++', 'msvc', 'icc', 'c++']
assemblers = ['gas', 'nasm', 'masm']
fortran_compilers = ['g77', 'ifort', 'ifl', 'fortran']
ars = ['ar', 'mslib']
c_compiler = FindTool(c_compilers, env) or c_compilers[0]
# XXX this logic about what tool provides what should somehow be
# moved into the tool files themselves.
if c_compiler and c_compiler == 'mingw':
# MinGW contains a linker, C compiler, C++ compiler,
# Fortran compiler, archiver and assembler:
cxx_compiler = None
linker = None
assembler = None
fortran_compiler = None
ar = None
else:
# Don't use g++ if the C compiler has built-in C++ support:
if c_compiler in ('msvc', 'icc'):
cxx_compiler = None
else:
cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0]
linker = FindTool(linkers, env) or linkers[0]
assembler = FindTool(assemblers, env) or assemblers[0]
fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0]
ar = FindTool(ars, env) or ars[0]
other_tools = FindAllTools(['BitKeeper', 'CVS',
'dmd',
'dvipdf', 'dvips', 'gs',
'jar', 'javac', 'javah',
'latex', 'lex', 'm4', 'midl', 'msvs',
'pdflatex', 'pdftex', 'Perforce',
'RCS', 'rmic', 'SCCS',
# 'Subversion',
'swig',
'tar', 'tex', 'yacc', 'zip'],
env)
tools = ([linker, c_compiler, cxx_compiler,
fortran_compiler, assembler, ar]
+ other_tools)
return filter(lambda x: x, tools)
| lgpl-2.1 | 5,232,730,326,358,949,000 | 37.827485 | 125 | 0.540252 | false |
brennie/reviewboard | reviewboard/oauth/forms.py | 1 | 11912 | """Forms for OAuth2 applications."""
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from django.forms import widgets
from django.utils.translation import ugettext, ugettext_lazy as _
from djblets.forms.widgets import CopyableTextInput, ListEditWidget
from oauth2_provider.generators import (generate_client_id,
generate_client_secret)
from oauth2_provider.validators import URIValidator
from reviewboard.admin.form_widgets import RelatedUserWidget
from reviewboard.oauth.models import Application
from reviewboard.oauth.widgets import OAuthSecretInputWidget
from reviewboard.site.urlresolvers import local_site_reverse
class ApplicationChangeForm(forms.ModelForm):
"""A form for updating an Application.
This form is intended to be used by the admin site.
"""
DISABLED_FOR_SECURITY_ERROR = _(
'This Application has been disabled to keep your server secure. '
'It cannot be re-enabled until its client secret changes.'
)
client_id = forms.CharField(
label=_('Client ID'),
help_text=_(
'The client ID. Your application will use this in OAuth2 '
'authentication to identify itself.',
),
widget=CopyableTextInput(attrs={
'readonly': True,
'size': 100,
}),
required=False,
)
def __init__(self, data=None, initial=None, instance=None):
"""Initialize the form:
Args:
data (dict, optional):
The provided form data.
initial (dict, optional):
The initial form values.
instance (Application, optional):
The application to edit.
"""
super(ApplicationChangeForm, self).__init__(data=data,
initial=initial,
instance=instance)
if instance and instance.pk:
# If we are creating an application (as the
# ApplicationCreationForm is a subclass of this class), the
            # client_secret won't be present, so we don't have to initialize the
# widget.
client_secret = self.fields['client_secret']
client_secret.widget = OAuthSecretInputWidget(
attrs=client_secret.widget.attrs,
api_url=local_site_reverse('oauth-app-resource',
local_site=instance.local_site,
kwargs={'app_id': instance.pk}),
)
def clean_extra_data(self):
"""Prevent ``extra_data`` from being an empty string.
Returns:
unicode:
Either a non-zero length string of JSON-encoded data or ``None``.
"""
return self.cleaned_data['extra_data'] or None
def clean_redirect_uris(self):
"""Clean the ``redirect_uris`` field.
This method will ensure that all the URIs are valid by validating
each of them, as well as removing unnecessary whitespace.
Returns:
unicode:
A space-separated list of URIs.
Raises:
django.core.exceptions.ValidationError:
Raised when one or more URIs are invalid.
"""
validator = URIValidator()
redirect_uris = self.cleaned_data.get('redirect_uris', '').split()
errors = []
for uri in redirect_uris:
try:
validator(uri)
except ValidationError as e:
errors.append(e)
if errors:
raise ValidationError(errors)
# We join the list instead of returning the initial value because the
# the original value may have had multiple adjacent whitespace
# characters.
return ' '.join(redirect_uris)
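    # For illustration (hypothetical values): cleaning
    # "  https://example.com/cb \n https://example.com/alt" yields
    # "https://example.com/cb https://example.com/alt" once each URI validates.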
def clean(self):
"""Validate the form.
This will validate the relationship between the
``authorization_grant_type`` and ``redirect_uris`` fields to ensure the
values are compatible.
This method is very similar to
:py:func:`Application.clean
<oauth2_provider.models.AbstractApplication.clean>`, but the data will
be verified by the form instead of the model to allow error messages to
be usable by consumers of the form.
        This method does not raise an exception when a field fails validation.
        Instead, it records errors internally so that they are related to the
        pertinent field instead of the form as a whole. (The
        disabled-for-security check is the exception: it raises because it
        applies to the form as a whole.)
Returns:
dict:
The cleaned form data.
"""
super(ApplicationChangeForm, self).clean()
grant_type = self.cleaned_data.get('authorization_grant_type')
# redirect_uris will not be present in cleaned_data if validation
# failed.
redirect_uris = self.cleaned_data.get('redirect_uris')
if (redirect_uris is not None and
len(redirect_uris) == 0 and
grant_type in (Application.GRANT_AUTHORIZATION_CODE,
Application.GRANT_IMPLICIT)):
# This is unfortunately not publicly exposed in Django 1.6, but it
# is exposed in later versions (as add_error).
self._errors['redirect_uris'] = self.error_class([
ugettext(
'The "redirect_uris" field may not be blank when '
'"authorization_grant_type" is "%s"'
)
% grant_type
])
self.cleaned_data.pop('redirect_uris')
if (self.instance and
self.instance.pk and
self.instance.is_disabled_for_security and
self.cleaned_data['enabled']):
raise ValidationError(self.DISABLED_FOR_SECURITY_ERROR)
if 'client_id' in self.cleaned_data:
del self.cleaned_data['client_id']
if 'client_secret' in self.cleaned_data:
del self.cleaned_data['client_secret']
return self.cleaned_data
class Meta:
model = Application
fields = '__all__'
help_texts = {
'authorization_grant_type': _(
'How the authorization is granted to the application.'
),
'client_secret': _(
'The client secret. This should only be known to Review Board '
'and your application.'
),
'client_type': _(
"The type of client. Confidential clients must be able to "
"keep users' passwords secure."
),
'name': _(
'The application name.'
),
'redirect_uris': _(
'A list of allowed URIs to redirect to.',
),
'skip_authorization': _(
'Whether or not users will be prompted for authentication. '
'This should most likely be unchecked.'
),
'user': _(
'The user who created the application. The selected user will '
'be able to change these settings from their account settings.'
),
}
widgets = {
'client_secret': CopyableTextInput(attrs={
'readonly': True,
'size': 100,
}),
'name': widgets.TextInput(attrs={'size': 60}),
'redirect_uris': ListEditWidget(attrs={'size': 60}, sep=' '),
'user': RelatedUserWidget(multivalued=False),
'original_user': RelatedUserWidget(multivalued=False),
}
labels = {
'authorization_grant_type': _('Authorization Grant Type'),
'client_secret': _('Client Secret'),
'client_type': _('Client Type'),
'name': _('Name'),
'redirect_uris': _('Redirect URIs'),
'skip_authorization': _('Skip Authorization'),
'user': _('User'),
}
class ApplicationCreationForm(ApplicationChangeForm):
"""A form for creating an Application.
This is meant to be used by the admin site.
"""
def save(self, commit=True):
"""Save the form.
This method will generate the ``client_id`` and ``client_secret``
fields.
Args:
commit (bool, optional):
Whether or not the Application should be saved to the database.
Returns:
reviewboard.oauth.models.Application:
The created Application.
"""
instance = super(ApplicationCreationForm, self).save(commit=False)
instance.client_id = generate_client_id()
instance.client_secret = generate_client_secret()
if commit:
instance.save()
return instance
class Meta(ApplicationChangeForm.Meta):
exclude = (
'client_id',
'client_secret',
)
class UserApplicationChangeForm(ApplicationChangeForm):
"""A form for an end user to change an Application."""
def __init__(self, user, data=None, initial=None, instance=None):
"""Initialize the form.
Args:
user (django.contrib.auth.models.User):
The user changing the form. Ignored, but included to match
:py:meth:`UserApplicationCreationForm.__init__`.
data (dict):
The provided data.
initial (dict, optional):
The initial form values.
instance (reviewboard.oauth.models.Application):
The Application that is to be edited.
"""
super(UserApplicationChangeForm, self).__init__(data=data,
initial=initial,
instance=instance)
class Meta(ApplicationChangeForm.Meta):
exclude = (
'extra_data',
'local_site',
'original_user',
'skip_authorization',
'user',
)
class UserApplicationCreationForm(ApplicationCreationForm):
"""A form for an end user to update an Application."""
def __init__(self, user, data, initial=None, instance=None):
"""Initialize the form.
Args:
user (django.contrib.auth.models.User):
                The user creating the application. It will be set as the
                application's owner when the form is saved.
data (dict):
The provided data.
initial (dict, optional):
The initial form values.
instance (reviewboard.oauth.models.Application, optional):
The Application that is to be edited.
This should always be ``None``.
"""
assert instance is None
super(UserApplicationCreationForm, self).__init__(data=data,
initial=initial,
instance=instance)
self.user = user
def save(self, commit=True):
"""Save the form.
This method will associate the user creating the application as its
owner.
Args:
commit (bool, optional):
Whether or not the Application should be saved to the database.
Returns:
reviewboard.oauth.models.Application:
The created Application.
"""
instance = super(UserApplicationCreationForm, self).save(commit=False)
instance.user = self.user
if commit:
instance.save()
return instance
class Meta(ApplicationCreationForm.Meta):
exclude = (ApplicationCreationForm.Meta.exclude +
UserApplicationChangeForm.Meta.exclude)
| mit | 8,614,490,914,715,181,000 | 33.034286 | 79 | 0.561702 | false |
Aloomaio/googleads-python-lib | examples/ad_manager/v201808/creative_template_service/get_system_defined_creative_templates.py | 1 | 2005 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all system defined creative templates.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
creative_template_service = client.GetService(
'CreativeTemplateService', version='v201808')
# Create a statement to select creative templates.
statement = (ad_manager.StatementBuilder(version='v201808')
.Where('type = :type')
.WithBindVariable('type', 'SYSTEM_DEFINED'))
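  # The builder above yields PQL roughly equivalent to
  # "WHERE type = :type LIMIT 500 OFFSET 0", with the bind variable bound to
  # 'SYSTEM_DEFINED' (500/0 are the statement builder's paging defaults).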
# Retrieve a small amount of creative templates at a time, paging
# through until all creative templates have been retrieved.
while True:
response = creative_template_service.getCreativeTemplatesByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for creative_template in response['results']:
# Print out some information for each creative template.
print('Creative template with ID "%d" and name "%s" was found.\n' %
(creative_template['id'], creative_template['name']))
statement.offset += statement.limit
else:
break
  print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| apache-2.0 | -8,350,638,428,948,109,000 | 37.557692 | 75 | 0.715711 | false |
urthbound/google-python-exercises | basic/string1.py | 1 | 3701 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
output = 'Number of donuts: '
if count >= 10:
output += 'many'
else:
output += str(count)
return output
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
  output = ''
  if len(s) >= 2:
    output += s[:2]
    output += s[-2:]
  return output
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
  match = s[0]
  output = match + s[1:].replace(match, '*')
  return output
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
output = '%s%s %s%s' %(b[:2], a[2:], a[:2], b[2:])
return output
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print 'donuts'
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print
print 'both_ends'
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print
print 'fix_start'
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print
print 'mix_up'
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| apache-2.0 | 3,103,572,609,937,760,000 | 27.689922 | 78 | 0.658741 | false |
praekelt/nurseconnect | nurseconnect/tests/test_utils.py | 1 | 4506 | from freezegun import freeze_time
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.models import SiteLanguageRelation, Languages, Main
from molo.surveys.models import MoloSurveyPage, MoloSurveySubmission
from molo.surveys.tests.test_models import create_survey
from nurseconnect.utils import (
get_period_date_format,
convert_string_to_boolean_list,
get_survey_results_for_user,
)
class UtilsTestCase(TestCase):
@freeze_time("2018-02-01")
def test_get_period_date_format_1(self):
self.assertEqual(
get_period_date_format(),
"201802"
)
@freeze_time("2012-12-01")
def test_get_period_date_format_2(self):
self.assertEqual(
get_period_date_format(),
"201212"
)
def test_convert_string_to_boolean_list_1(self):
self.assertEqual(
convert_string_to_boolean_list("true"),
[True]
)
def test_convert_string_to_boolean_list_2(self):
self.assertEqual(
convert_string_to_boolean_list("true,false"),
[True, False]
)
def test_convert_string_to_boolean_list_3(self):
self.assertEqual(
convert_string_to_boolean_list(" true, false"),
[True, False]
)
def test_convert_string_to_boolean_list_4(self):
self.assertEqual(
convert_string_to_boolean_list("TRUE,FalSE"),
[True, False]
)
def test_convert_string_to_boolean_list_5(self):
self.assertEqual(
convert_string_to_boolean_list("true,BANANA,false"),
[True, False]
)
def test_convert_string_to_boolean_list_6(self):
self.assertEqual(
convert_string_to_boolean_list("false , True"),
[False, True]
)
def test_convert_string_to_boolean_list_7(self):
self.assertEqual(
convert_string_to_boolean_list("false;true"),
[]
)
class SurveyUtilsTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
self.user = User.objects.create_user(
username='tester',
email='[email protected]',
password='tester')
def test_get_survey_results_for_user_1(self):
create_survey([
{
"question": "The sky is blue",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
}
])
survey = MoloSurveyPage.objects.first()
survey.thank_you_text = "true"
survey.save()
MoloSurveySubmission.objects.create(
page=survey, user=self.user,
form_data='{"the-sky-is-blue": "True"}')
self.assertEqual(
get_survey_results_for_user(survey, self.user),
[{
"question": "The sky is blue",
"user_answer": True,
"correct_answer": True,
}]
)
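    # Note: these tests encode the expected answers in the survey's
    # thank_you_text as a comma-separated boolean string (e.g. "true,false"),
    # the same format convert_string_to_boolean_list() handles above.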
def test_get_survey_results_for_user_2(self):
create_survey([
{
"question": "The sky is blue",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
},
{
"question": "The grass is purple",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
}
])
survey = MoloSurveyPage.objects.first()
survey.thank_you_text = "true,false"
survey.save()
MoloSurveySubmission.objects.create(
page=survey, user=self.user,
form_data=('{"the-sky-is-blue": "True", '
'"the-grass-is-purple": "True"}'))
self.assertEqual(
get_survey_results_for_user(survey, self.user),
[
{
"question": "The sky is blue",
"user_answer": True,
"correct_answer": True,
},
{
"question": "The grass is purple",
"user_answer": True,
"correct_answer": False,
},
]
)
| bsd-2-clause | -6,023,230,630,583,244,000 | 29.04 | 68 | 0.517976 | false |
Patreon/cartographer | cartographer/field_types/schema_relationship.py | 1 | 4047 | from cartographer.resources import get_resource_registry
from cartographer.resources.resource_registry import ResourceRegistryKeys
class SchemaRelationship(object):
"""
`SchemaRelationship` describes how to translate related resources to and from JSON API and our Python models.
`SchemaRelationship` is has one primary method,
`related_serializer`, for creating a `JSONAPISerializer` instance based on its input arguments.
Subclasses of `SchemaSerializer` can override this method
to customize serialization behavior.
Parsing of related resources is not currently handled by this class,
and instead is handled by the `PostedDocument` class (or, more typically, its subclass `SchemaParser`.
"""
def __init__(self, model_type, id_attribute=None, model_property=None,
model_method=None, serializer_method=None, includes=None):
"""
NOTE: only one of id_attribute, model_property, model_method, or serializer_method should be provided
:param model_type: the JSON API `type` string for the related model
        :param id_attribute: the foreign key column on the parent serializer's model which identifies the related model
        :param model_property: the property on the parent serializer's model which returns the related model
        :param model_method: the method on the parent serializer's model which returns the related model
:param serializer_method: the name of the method on the parent serializer object which uses this schema
which should be called to get the child serializer.
:return: an instance of SchemaRelationship,
which will later be used to serialize Python into JSON API.
"""
identifier_args = [id_attribute, model_property, model_method, serializer_method]
provided_identifiers = [identifier
for identifier in identifier_args
if identifier]
if len(provided_identifiers) > 1:
raise Exception("only one of [{}] should be provided".format(identifier_args.join(", ")))
self.model_type = model_type
self.id_attribute = id_attribute
self.model_property = model_property
self.model_method = model_method
self.serializer_method = serializer_method
self.includes = includes
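    # Usage sketch (hypothetical field names, for illustration only):
    #   SchemaRelationship(model_type='user', id_attribute='author_id')
    # would resolve parent.model.author_id through the 'user' entry in the
    # resource registry when related_serializer() is called.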
def related_serializer(self, parent_serializer, relationship_key):
"""
:param parent_serializer: The serializer which has our return value as a related resource
:param relationship_key: The name by which the parent serializer knows this child
:return: The child serializer which will later be used to serialize a related resource
"""
if self.serializer_method is not None:
return getattr(parent_serializer, self.serializer_method)()
model = None
if self.id_attribute is not None:
related_model_getter = self.resource_registry_entry().get(ResourceRegistryKeys.MODEL_GET)
model_id = getattr(parent_serializer.model, self.id_attribute)
if model_id is not None and related_model_getter is not None:
model = related_model_getter(model_id)
elif self.model_property is not None:
model = getattr(parent_serializer.model, self.model_property)
elif self.model_method is not None:
model = getattr(parent_serializer.model, self.model_method)()
if model:
serializer_class = self.resource_registry_entry().get(ResourceRegistryKeys.SERIALIZER)
return serializer_class(
model,
parent_serializer=parent_serializer,
relationship_name=relationship_key,
includes=self.includes
)
else:
from cartographer.serializers import JSONAPINullSerializer
return JSONAPINullSerializer()
def resource_registry_entry(self):
return get_resource_registry().get(self.model_type, {})
| apache-2.0 | 8,312,230,826,271,928,000 | 50.227848 | 122 | 0.679763 | false |
SamuelYvon/radish | docs/conf.py | 1 | 9715 | # -*- coding: utf-8 -*-
#
# radish documentation build configuration file, created by
# sphinx-quickstart on Sat Jun 13 21:58:55 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# sphinx rtd theme
# import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'radish'
copyright = u'2015, Timo Furrer'
author = u'Timo Furrer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5.1'
# The full version, including alpha/beta/rc tags.
release = '0.5.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"logo": "radish-bdd-logo.png",
"logo_name": "radish",
"github_user": "radish-bdd",
"github_repo": "radish",
"github_button": True,
"github_banner": True
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "_static/radish-bdd-logo-trans-bg.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/radish-bdd-logo.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'radishdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'radish.tex', u'radish Documentation',
u'Timo Furrer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'radish', u'radish Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'radish', u'radish Documentation',
author, 'radish', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| mit | -103,530,752,461,175,070 | 31.062706 | 79 | 0.704683 | false |
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/lib/tool_shed/tools/tool_validator.py | 1 | 19484 | import filecmp
import logging
import os
import tempfile
from galaxy.tools import Tool
from galaxy.tools import parameters
from galaxy.tools.parameters import dynamic_options
from tool_shed.tools import data_table_manager
from tool_shed.util import basic_util
from tool_shed.util import hg_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_util
from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
class ToolValidator( object ):
def __init__( self, app ):
self.app = app
self.tdtm = data_table_manager.ToolDataTableManager( self.app )
def can_use_tool_config_disk_file( self, repository, repo, file_path, changeset_revision ):
"""
Determine if repository's tool config file on disk can be used. This method
is restricted to tool config files since, with the exception of tool config
files, multiple files with the same name will likely be in various directories
in the repository and we're comparing file names only (not relative paths).
"""
if not file_path or not os.path.exists( file_path ):
# The file no longer exists on disk, so it must have been deleted at some previous
# point in the change log.
return False
if changeset_revision == repository.tip( self.app ):
return True
file_name = basic_util.strip_path( file_path )
latest_version_of_file = \
self.get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
try:
os.unlink( latest_version_of_file )
except:
pass
return can_use_disk_file
def check_tool_input_params( self, repo_dir, tool_config_name, tool, sample_files ):
"""
Check all of the tool's input parameters, looking for any that are dynamically
generated using external data files to make sure the files exist.
"""
invalid_files_and_errors_tups = []
correction_msg = ''
for input_param in tool.input_params:
if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
# If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml,
# make sure all requirements exist.
options = input_param.dynamic_options or input_param.options
if options and isinstance( options, dynamic_options.DynamicOptions ):
if options.tool_data_table or options.missing_tool_data_table_name:
# Make sure the repository contains a tool_data_table_conf.xml.sample file.
sample_tool_data_table_conf = hg_util.get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
if sample_tool_data_table_conf:
error, correction_msg = \
self.tdtm.handle_sample_tool_data_table_conf_file( sample_tool_data_table_conf,
persist=False )
if error:
invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
else:
options.missing_tool_data_table_name = None
else:
correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. "
correction_msg += "Upload a file named tool_data_table_conf.xml.sample to the repository "
correction_msg += "that includes the required entry to correct this error.<br/>"
invalid_tup = ( tool_config_name, correction_msg )
if invalid_tup not in invalid_files_and_errors_tups:
invalid_files_and_errors_tups.append( invalid_tup )
if options.index_file or options.missing_index_file:
# Make sure the repository contains the required xxx.loc.sample file.
index_file = options.index_file or options.missing_index_file
index_file_name = basic_util.strip_path( index_file )
sample_found = False
for sample_file in sample_files:
sample_file_name = basic_util.strip_path( sample_file )
if sample_file_name == '%s.sample' % index_file_name:
options.index_file = index_file_name
options.missing_index_file = None
if options.tool_data_table:
options.tool_data_table.missing_index_file = None
sample_found = True
break
if not sample_found:
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file_name )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % \
str( index_file_name )
invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
return invalid_files_and_errors_tups
def concat_messages( self, msg1, msg2 ):
if msg1:
if msg2:
message = '%s %s' % ( msg1, msg2 )
else:
message = msg1
elif msg2:
message = msg2
else:
message = ''
return message
def copy_disk_sample_files_to_dir( self, repo_files_dir, dest_path ):
"""
Copy all files currently on disk that end with the .sample extension to the
directory to which dest_path refers.
"""
sample_files = []
for root, dirs, files in os.walk( repo_files_dir ):
if root.find( '.hg' ) < 0:
for name in files:
if name.endswith( '.sample' ):
relative_path = os.path.join( root, name )
tool_util.copy_sample_file( self.app, relative_path, dest_path=dest_path )
sample_files.append( name )
return sample_files
def get_latest_tool_config_revision_from_repository_manifest( self, repo, filename, changeset_revision ):
"""
Get the latest revision of a tool config file named filename from the repository
manifest up to the value of changeset_revision. This method is restricted to tool_config
files rather than any file since it is likely that, with the exception of tool config
files, multiple files will have the same name in various directories within the repository.
"""
stripped_filename = basic_util.strip_path( filename )
for changeset in hg_util.reversed_upper_bounded_changelog( repo, changeset_revision ):
manifest_ctx = repo.changectx( changeset )
for ctx_file in manifest_ctx.files():
ctx_file_name = basic_util.strip_path( ctx_file )
if ctx_file_name == stripped_filename:
try:
fctx = manifest_ctx[ ctx_file ]
except LookupError:
# The ctx_file may have been moved in the change set. For example,
# 'ncbi_blastp_wrapper.xml' was moved to 'tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml',
# so keep looking for the file until we find the new location.
continue
fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-gltcrfrm" )
tmp_filename = fh.name
fh.close()
fh = open( tmp_filename, 'wb' )
fh.write( fctx.data() )
fh.close()
return tmp_filename
return None
def get_list_of_copied_sample_files( self, repo, ctx, dir ):
"""
Find all sample files (files in the repository with the special .sample extension)
in the reversed repository manifest up to ctx. Copy each discovered file to dir and
return the list of filenames. If a .sample file was added in a changeset and then
deleted in a later changeset, it will be returned in the deleted_sample_files list.
The caller will set the value of app.config.tool_data_path to dir in order to load
the tools and generate metadata for them.
"""
deleted_sample_files = []
sample_files = []
for changeset in hg_util.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
ctx_file_name = basic_util.strip_path( ctx_file )
                # If we decide in the future that files deleted later in the
                # changelog should not be used, we can instead use:
                #   if ctx_file_name.endswith( '.sample' ) and \
                #           ctx_file_name not in sample_files and \
                #           ctx_file_name not in deleted_sample_files:
if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
fctx = hg_util.get_file_context_from_ctx( changeset_ctx, ctx_file )
if fctx in [ 'DELETED' ]:
# Since the possibly future used if statement above is commented out, the
# same file that was initially added will be discovered in an earlier changeset
# in the change log and fall through to the else block below. In other words,
# if a file named blast2go.loc.sample was added in change set 0 and then deleted
# in changeset 3, the deleted file in changeset 3 will be handled here, but the
# later discovered file in changeset 0 will be handled in the else block below.
# In this way, the file contents will always be found for future tools even though
# the file was deleted.
if ctx_file_name not in deleted_sample_files:
deleted_sample_files.append( ctx_file_name )
else:
sample_files.append( ctx_file_name )
tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
fh = open( tmp_ctx_file_name, 'wb' )
fh.write( fctx.data() )
fh.close()
return sample_files, deleted_sample_files
def handle_sample_files_and_load_tool_from_disk( self, repo_files_dir, repository_id, tool_config_filepath, work_dir ):
"""
Copy all sample files from disk to a temporary directory since the sample files may
be in multiple directories.
"""
message = ''
sample_files = self.copy_disk_sample_files_to_dir( repo_files_dir, work_dir )
if sample_files:
if 'tool_data_table_conf.xml.sample' in sample_files:
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
persist=False )
tool, valid, message2 = self.load_tool_from_config( repository_id, tool_config_filepath )
message = self.concat_messages( message, message2 )
return tool, valid, message, sample_files
def handle_sample_files_and_load_tool_from_tmp_config( self, repo, repository_id, changeset_revision,
tool_config_filename, work_dir ):
tool = None
message = ''
ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
# We're not currently doing anything with the returned list of deleted_sample_files here. It is
# intended to help handle sample files that are in the manifest, but have been deleted from disk.
sample_files, deleted_sample_files = self.get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
self.app.config.tool_data_path = work_dir
if 'tool_data_table_conf.xml.sample' in sample_files:
# Load entries into the tool_data_tables if the tool requires them.
tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
if tool_data_table_config:
error, message = self.tdtm.handle_sample_tool_data_table_conf_file( tool_data_table_config,
persist=False )
if error:
log.debug( message )
manifest_ctx, ctx_file = hg_util.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
if manifest_ctx and ctx_file:
tool, message2 = self.load_tool_from_tmp_config( repo, repository_id, manifest_ctx, ctx_file, work_dir )
message = self.concat_messages( message, message2 )
return tool, message, sample_files
def load_tool_from_changeset_revision( self, repository_id, changeset_revision, tool_config_filename ):
"""
Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value
of tool_config_filename. The value of changeset_revision is a valid (downloadable)
changeset revision. The tool config will be located in the repository manifest between
the received valid changeset revision and the first changeset revision in the repository,
searching backwards.
"""
original_tool_data_path = self.app.config.tool_data_path
repository = suc.get_repository_in_tool_shed( self.app, repository_id )
repo_files_dir = repository.repo_path( self.app )
repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_files_dir, create=False )
message = ''
tool = None
can_use_disk_file = False
tool_config_filepath = suc.get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename )
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ltfcr" )
can_use_disk_file = self.can_use_tool_config_disk_file( repository,
repo,
tool_config_filepath,
changeset_revision )
if can_use_disk_file:
self.app.config.tool_data_path = work_dir
tool, valid, message, sample_files = \
self.handle_sample_files_and_load_tool_from_disk( repo_files_dir,
repository_id,
tool_config_filepath,
work_dir )
if tool is not None:
invalid_files_and_errors_tups = \
self.check_tool_input_params( repo_files_dir,
tool_config_filename,
tool,
sample_files )
if invalid_files_and_errors_tups:
message2 = tool_util.generate_message_for_invalid_tools( self.app,
invalid_files_and_errors_tups,
repository,
metadata_dict=None,
as_html=True,
displaying_invalid_tool=True )
message = self.concat_messages( message, message2 )
else:
tool, message, sample_files = \
self.handle_sample_files_and_load_tool_from_tmp_config( repo,
repository_id,
changeset_revision,
tool_config_filename,
work_dir )
basic_util.remove_dir( work_dir )
self.app.config.tool_data_path = original_tool_data_path
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
self.tdtm.reset_tool_data_tables()
return repository, tool, message
def load_tool_from_config( self, repository_id, full_path ):
try:
tool = self.app.toolbox.load_tool( full_path, repository_id=repository_id )
valid = True
error_message = None
except KeyError as e:
tool = None
valid = False
error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
error_message += 'this error. '
except Exception as e:
tool = None
valid = False
error_message = str( e )
return tool, valid, error_message
def load_tool_from_tmp_config( self, repo, repository_id, ctx, ctx_file, work_dir ):
tool = None
message = ''
tmp_tool_config = hg_util.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
if tmp_tool_config:
tool_element, error_message = xml_util.parse_xml( tmp_tool_config )
if tool_element is None:
return tool, message
# Look for external files required by the tool config.
tmp_code_files = []
external_paths = Tool.get_externally_referenced_paths( tmp_tool_config )
for path in external_paths:
tmp_code_file_name = hg_util.copy_file_from_manifest( repo, ctx, path, work_dir )
if tmp_code_file_name:
tmp_code_files.append( tmp_code_file_name )
tool, valid, message = self.load_tool_from_config( repository_id, tmp_tool_config )
for tmp_code_file in tmp_code_files:
try:
os.unlink( tmp_code_file )
except OSError:
pass
try:
os.unlink( tmp_tool_config )
except OSError:
pass
return tool, message
| gpl-3.0 | -622,752,748,711,907,300 | 57.161194 | 131 | 0.543831 | false |
ArseniyK/Sunflower | application/operation.py | 1 | 48288 | import os
import gtk
import gobject
import fnmatch
from threading import Thread, Event
from gui.input_dialog import OverwriteFileDialog, OverwriteDirectoryDialog, OperationError, QuestionOperationError
from gui.operation_dialog import CopyDialog, MoveDialog, DeleteDialog, RenameDialog
from gui.error_list import ErrorList
from plugin_base.provider import Mode as FileMode, TrashError, Support as ProviderSupport
from plugin_base.monitor import MonitorSignals
from common import format_size
from queue import OperationQueue
# import constants
from gui.input_dialog import OverwriteOption
class BufferSize:
LOCAL = 4096 * 1024
REMOTE = 100 * 1024
class Option:
FILE_TYPE = 0
DESTINATION = 1
SET_OWNER = 2
SET_MODE = 3
SET_TIMESTAMP = 4
SILENT = 5
SILENT_MERGE = 6
SILENT_OVERWRITE = 7
class Skip:
TRASH = 0
REMOVE = 1
WRITE = 2
CREATE = 3
MODE_SET = 4
MOVE = 5
RENAME = 6
READ = 7
class OperationType:
COPY = 0
MOVE = 1
DELETE = 2
RENAME = 3
LINK = 4
class Operation(Thread):
"""Parent class for all operation threads"""
def __init__(self, application, source, destination=None, options=None, destination_path=None):
Thread.__init__(self, target=self)
self._can_continue = Event()
self._abort = Event()
self._application = application
self._source = source
self._destination = destination
self._options = options
self._source_queue = None
self._destination_queue = None
self._merge_all = None
self._overwrite_all = None
self._response_cache = {}
# operation queue
self._operation_queue = None
self._operation_queue_name = None
# daemonize
self.daemon = True
# create operation dialog
self._dialog = None
self._create_dialog()
self._dir_list = []
self._file_list = []
self._error_list = []
self._selection_list = []
# store initial paths
self._source_path = self._source.get_path()
if self._destination is not None:
self._destination_path = destination_path or self._destination.get_path()
self._can_continue.set()
def _create_dialog(self):
"""Create operation dialog"""
pass
def _destroy_ui(self):
"""Destroy user interface"""
if self._dialog is not None:
with gtk.gdk.lock:
self._dialog.destroy()
def _get_free_space_input(self, needed, available):
"""Get user input when there is not enough space"""
size_format = self._application.options.get('size_format')
space_needed = format_size(needed, size_format)
space_available = format_size(available, size_format)
if self._options is not None and self._options[Option.SILENT]:
# silent option is enabled, we skip operation by default
self._error_list.append(_(
'Aborted. Not enough free space on target file system.\n'
'Needed: {0}\n'
'Available: {1}'
).format(space_needed, space_available))
should_continue = False
else:
# ask user what to do
with gtk.gdk.lock:
dialog = gtk.MessageDialog(
self._dialog.get_window(),
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING,
gtk.BUTTONS_YES_NO,
_(
'Target file system does not have enough '
'free space for this operation to continue.\n\n'
'Needed: {0}\n'
'Available: {1}\n\n'
'Do you wish to continue?'
).format(space_needed, space_available)
)
dialog.set_default_response(gtk.RESPONSE_YES)
result = dialog.run()
dialog.destroy()
should_continue = result == gtk.RESPONSE_YES
return should_continue
def _get_merge_input(self, path):
"""Get merge confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
merge = self._options[Option.SILENT_MERGE]
self._merge_all = merge
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OverwriteDirectoryDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
merge = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._merge_all = merge
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
return merge # return only response for current directory
def _get_overwrite_input(self, path):
"""Get overwrite confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
overwrite = self._options[Option.SILENT_OVERWRITE]
self._overwrite_all = overwrite
options = (False, '', True) # no rename, apply to all
else:
# we are not in silent mode, ask user what to do
with gtk.gdk.lock:
dialog = OverwriteFileDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
overwrite = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._overwrite_all = overwrite
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
# pass options from input dialog
options = result[1]
return overwrite, options
def _get_write_error_input(self, error):
"""Get user response for write error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There is a problem writing data to destination '
'file. What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.WRITE] = response
# abort operation if user requested
if response == OperationError.RESPONSE_CANCEL:
self.cancel()
return response
def _get_create_error_input(self, error, is_directory=False):
"""Get user response for create error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
if not is_directory:
# set message for file
dialog.set_message(_(
'An error occurred while trying to create specified '
'file. What would you like to do?'
))
else:
# set message for directory
dialog.set_message(_(
'An error occurred while trying to create specified '
'directory. What would you like to do?'
))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.CREATE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_mode_set_error_input(self, error):
"""Get user response for mode set error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem setting attributes for the '
'specified path. What would you like to do?'
))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MODE_SET] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_remove_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem removing specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.REMOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_trash_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = QuestionOperationError(self._application)
dialog.set_message(_(
'There was a problem trashing specified path. '
'Would you like to try removing it instead?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.TRASH] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_move_error_input(self, error):
"""Get user response for move error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem moving specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_rename_error_input(self, error):
"""Get user response for rename error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem renaming specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.RENAME] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_read_error_input(self, error):
"""Get user response for directory listing error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem with reading specified directory. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.READ] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def set_selection(self, item_list):
"""Set list of selected items"""
self._selection_list.extend(item_list)
def set_operation_queue(self, queue_name):
"""Set operation to wait for queue."""
if queue_name is None:
return
# create new queue
self._operation_queue = Event()
self._operation_queue_name = queue_name
# schedule operation
OperationQueue.add(queue_name, self._operation_queue)
def set_source_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._source_queue = queue
def set_destination_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._destination_queue = queue
def pause(self):
"""Pause current operation"""
self._can_continue.clear()
def resume(self):
"""Resume current operation"""
self._can_continue.set()
def cancel(self):
"""Set an abort switch"""
self._abort.set()
# release lock set by the pause
if not self._can_continue.is_set():
self.resume()
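# Hedged sketch of the thread-control contract above (names are illustrative;
# in practice the operation dialog drives these calls):
#
#   operation.start()    # begin work in the background thread
#   operation.pause()    # clears _can_continue; worker loops block on wait()
#   operation.resume()   # sets _can_continue; blocked loops proceed
#   operation.cancel()   # sets _abort and releases a paused thread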
class CopyOperation(Operation):
"""Operation thread used for copying files"""
def __init__(self, application, source, destination, options, destination_path=None):
Operation.__init__(self, application, source, destination, options, destination_path)
self._merge_all = None
self._overwrite_all = None
self._dir_list_create = []
self._total_count = 0
self._total_size = 0
self._buffer_size = 0
# cache settings
should_reserve = self._application.options.section('operations').get('reserve_size')
supported_by_provider = ProviderSupport.RESERVE_SIZE in self._destination.get_support()
self._reserve_size = should_reserve and supported_by_provider
# detect buffer size
if self._source.is_local and self._destination.is_local:
system_stat = self._destination.get_system_size(self._destination_path)
if system_stat.block_size:
self._buffer_size = system_stat.block_size * 1024
else:
self._buffer_size = BufferSize.LOCAL
else:
self._buffer_size = BufferSize.REMOTE
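# Note: copying in multiples of the destination file system's block size keeps
# local transfers aligned with the device, while the smaller remote buffer
# keeps progress updates responsive on slow links. A rough stand-alone
# equivalent using os.statvfs (Unix, local paths only; path is hypothetical):
#
#   import os
#   block_size = os.statvfs('/destination').f_bsize
#   buffer_size = block_size * 1024 if block_size else BufferSize.LOCAL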
def _create_dialog(self):
"""Create progress dialog"""
self._dialog = CopyDialog(self._application, self)
def _update_status(self, status):
"""Set status and reset progress bars"""
self._dialog.set_status(status)
self._dialog.set_current_file("")
self._dialog.set_current_file_fraction(0)
def _get_lists(self):
"""Find all files for copying"""
gobject.idle_add(self._update_status, _('Searching for files...'))
# exclude files already selected with parent directory
for file_name in self._selection_list:
self._selection_list = filter(
lambda item: not item.startswith(file_name + os.path.sep),
self._selection_list
)
# traverse through the rest of the items
for item in self._selection_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# update current file label
gobject.idle_add(self._dialog.set_current_file, item)
gobject.idle_add(self._dialog.pulse)
if os.path.sep in item:
relative_path, item = os.path.split(item)
source_path = os.path.join(self._source_path, relative_path)
else:
relative_path = None
source_path = self._source_path
if self._source.is_dir(item, relative_to=source_path):
# item is directory
can_procede = True
can_create = True
# check if directory exists on destination
if self._destination.exists(item, relative_to=self._destination_path):
can_create = False
if self._merge_all is not None:
can_procede = self._merge_all
else:
can_procede = self._get_merge_input(item)
# if user didn't skip directory, scan and update lists
if can_procede:
self._dir_list.append((item, relative_path))
if can_create: self._dir_list_create.append((item, relative_path))
self._scan_directory(item, relative_path)
elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
# item is a file, get stats and update lists
item_stat = self._source.get_stat(item, relative_to=source_path)
gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
gobject.idle_add(self._dialog.increment_total_count, 1)
self._total_count += 1
self._total_size += item_stat.size
self._file_list.append((item, relative_path))
def _set_mode(self, path, mode):
"""Set mode for specified path"""
if not self._options[Option.SET_MODE]: return
try:
# try to set mode for specified path
self._destination.set_mode(
path,
mode,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem setting mode, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set mode again
if response == OperationError.RESPONSE_RETRY:
self._set_mode(path, mode)
return
def _set_owner(self, path, user_id, group_id):
"""Set owner and group for specified path"""
if not self._options[Option.SET_OWNER]: return
try:
# try set owner of specified path
self._destination.set_owner(
path,
user_id,
group_id,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with setting owner, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set owner again
if response == OperationError.RESPONSE_RETRY:
self._set_owner(path, user_id, group_id)
return
def _set_timestamp(self, path, access_time, modify_time, change_time):
"""Set timestamps for specified path"""
if not self._options[Option.SET_TIMESTAMP]: return
try:
# try setting timestamp
self._destination.set_timestamp(
path,
access_time,
modify_time,
change_time,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with setting owner, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set timestamp again
if response == OperationError.RESPONSE_RETRY:
self._set_timestamp(path, access_time, modify_time, change_time)
return
def _scan_directory(self, directory, relative_path=None):
"""Recursively scan directory and populate list"""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
try:
# try to get listing from directory
item_list = self._source.list_dir(directory, relative_to=source_path)
except StandardError as error:
# problem with reading specified directory, ask user
if Skip.READ in self._response_cache:
response = self._response_cache[Skip.READ]
else:
response = self._get_read_error_input(error)
# try to scan specified directory again
if response == OperationError.RESPONSE_RETRY:
self._scan_directory(directory, relative_path)
return
for item in item_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, os.path.join(directory, item))
gobject.idle_add(self._dialog.pulse)
full_name = os.path.join(directory, item)
# item is a directory, scan it
if self._source.is_dir(full_name, relative_to=source_path):
can_procede = True
can_create = True
if self._destination.exists(full_name, relative_to=self._destination_path):
can_create = False
if self._merge_all is not None:
can_procede = self._merge_all
else:
can_procede = self._get_merge_input(full_name)
if can_procede:
# allow processing specified directory
self._dir_list.append((full_name, source_path))
if can_create: self._dir_list_create.append((full_name, source_path))
self._scan_directory(full_name, relative_path)
elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
# item is a file, update global statistics
item_stat = self._source.get_stat(full_name, relative_to=source_path)
gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
gobject.idle_add(self._dialog.increment_total_count, 1)
self._total_count += 1
self._total_size += item_stat.size
self._file_list.append((full_name, relative_path))
def _create_directory(self, directory, relative_path=None):
"""Create specified directory"""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
file_stat = self._source.get_stat(directory, relative_to=source_path)
mode = file_stat.mode if self._options[Option.SET_MODE] else 0755
try:
# try to create a directory
if self._destination.exists(directory, relative_to=self._destination_path):
if not self._destination.is_dir(directory, relative_to=self._destination_path):
raise StandardError(_(
'Unable to create directory because file with the same name '
'already exists in target directory.'
))
else:
# inode with specified name doesn't exist, create directory
self._destination.create_directory(
directory,
mode,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, directory, None)
self._destination_queue.put(event, False)
except StandardError as error:
# there was a problem creating directory
if Skip.CREATE in self._response_cache:
response = self._response_cache[Skip.CREATE]
else:
response = self._get_create_error_input(error, True)
# try to create directory again
if response == OperationError.RESPONSE_RETRY:
self._create_directory(directory)
# exit method
return
# set owner
self._set_owner(directory, file_stat.user_id, file_stat.group_id)
def _copy_file(self, file_name, relative_path=None):
"""Copy file content"""
can_procede = True
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
dest_file = file_name
sh = None
dh = None
# check if destination file exists
if self._destination.exists(file_name, relative_to=self._destination_path):
if self._overwrite_all is not None:
can_procede = self._overwrite_all
else:
can_procede, options = self._get_overwrite_input(file_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
dest_file = os.path.join(
os.path.dirname(file_name),
options[OverwriteOption.NEW_NAME]
)
elif source_path == self._destination_path:
can_procede = False
# if user skipped this file return
if not can_procede:
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# update total size
file_stat = self._source.get_stat(file_name, relative_to=source_path)
gobject.idle_add(self._dialog.increment_current_size, file_stat.size)
return
try:
# get file stats
destination_size = 0L
file_stat = self._source.get_stat(file_name, relative_to=source_path, extended=True)
# get file handles
sh = self._source.get_file_handle(file_name, FileMode.READ, relative_to=source_path)
dh = self._destination.get_file_handle(dest_file, FileMode.WRITE, relative_to=self._destination_path)
# report error properly
if sh is None:
raise StandardError('Unable to open source file in read mode.')
if dh is None:
raise StandardError('Unable to open destination file in write mode.')
# reserve file size
if self._reserve_size:
# try to reserve file size in advance,
# can be slow on memory cards and network
try:
dh.truncate(file_stat.size)
except:
dh.truncate()
else:
# just truncate file to 0 size in case source file is smaller
dh.truncate()
dh.seek(0)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, dest_file, None)
self._destination_queue.put(event, False)
except StandardError as error:
# close handles if they exist
if hasattr(sh, 'close'): sh.close()
if hasattr(dh, 'close'): dh.close()
if Skip.CREATE in self._response_cache:
response = self._response_cache[Skip.CREATE]
else:
response = self._get_create_error_input(error)
# try to create file again and copy contents
if response == OperationError.RESPONSE_RETRY:
self._copy_file(dest_file)
else:
# user didn't want to retry, remove file from list
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# remove amount of copied bytes from total size
gobject.idle_add(self._dialog.increment_current_size, -destination_size)
# exit method
return
while True:
if self._abort.is_set(): break
self._can_continue.wait() # pause lock
data = sh.read(self._buffer_size)
if data:
try:
# try writing data to destination
dh.write(data)
except IOError as error:
# handle error
if Skip.WRITE in self._response_cache:
response = self._response_cache[Skip.WRITE]
else:
response = self._get_write_error_input(error)
# try to write data again
if response == OperationError.RESPONSE_RETRY:
gobject.idle_add(self._dialog.increment_current_size, -dh.tell())
if hasattr(sh, 'close'): sh.close()
if hasattr(dh, 'close'): dh.close()
self._copy_file(dest_file)
return
destination_size += len(data)
gobject.idle_add(self._dialog.increment_current_size, len(data))
if file_stat.size > 0: # ensure we don't end up with error on 0 size files
gobject.idle_add(
self._dialog.set_current_file_fraction,
destination_size / float(file_stat.size)
)
else:
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CHANGED, dest_file, None)
self._destination_queue.put(event, False)
else:
sh.close()
dh.close()
# set file parameters
self._set_mode(dest_file, file_stat.mode)
self._set_owner(dest_file, file_stat.user_id, file_stat.group_id)
self._set_timestamp(
dest_file,
file_stat.time_access,
file_stat.time_modify,
file_stat.time_change
)
break
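# Stripped of progress reporting, error dialogs and monitor events, the loop
# above reduces to a standard chunked copy (illustrative, local files only):
#
#   with open(source_name, 'rb') as sh, open(dest_name, 'wb') as dh:
#       while True:
#           data = sh.read(buffer_size)
#           if not data:
#               break
#           dh.write(data)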
def _create_directory_list(self):
"""Create all directories in list"""
gobject.idle_add(self._update_status, _('Creating directories...'))
for number, directory in enumerate(self._dir_list_create, 0):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, directory[0])
self._create_directory(directory[0], directory[1]) # create directory
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(self._dir_list_create)
)
def _copy_file_list(self):
"""Copy list of files to destination path"""
# update status
gobject.idle_add(self._update_status, _('Copying files...'))
item_list = self._file_list[:]
# copy all the files in list
for file_name, source_path in item_list:
# abort operation if requested
if self._abort.is_set(): break
self._can_continue.wait() # pause lock
# copy file
gobject.idle_add(self._dialog.set_current_file, file_name)
self._copy_file(file_name, source_path)
gobject.idle_add(self._dialog.increment_current_count, 1)
def run(self):
"""Main thread method, this is where all the stuff is happening"""
# set dialog info
with gtk.gdk.lock:
self._dialog.set_source(self._source_path)
self._dialog.set_destination(self._destination_path)
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
# get list of items to copy
self._get_lists()
# check for available free space
system_info = self._destination.get_system_size(self._destination_path)
if ProviderSupport.SYSTEM_SIZE in self._destination.get_support() \
and self._total_size > system_info.size_available:
should_continue = self._get_free_space_input(self._total_size, system_info.size_available)
# exit if user chooses to
if not should_continue:
self.cancel()
# clear selection on source directory
with gtk.gdk.lock:
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# perform operation
self._create_directory_list()
self._copy_file_list()
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Copy Operation')
message = ngettext(
'Copying of {0} item from "{1}" to "{2}" is completed!',
'Copying of {0} items from "{1}" to "{2}" is completed!',
len(self._file_list) + len(self._dir_list)
).format(
len(self._file_list) + len(self._dir_list),
os.path.basename(self._source_path),
os.path.basename(self._destination_path)
)
# queue notification
notify_manager.notify(title, message)
# show error list if needed
if len(self._error_list) > 0:
error_list = ErrorList(self._dialog)
error_list.set_operation_name(_('Copy Operation'))
error_list.set_source(self._source_path)
error_list.set_destination(self._destination_path)
error_list.set_errors(self._error_list)
error_list.show()
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
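# Hedged usage sketch (providers, options and queue name are illustrative;
# the selection normally comes from the active file panel):
#
#   operation = CopyOperation(application, source_provider, destination_provider, options)
#   operation.set_selection(['notes.txt', 'photos'])
#   operation.set_operation_queue('default')  # optional: serialize with other jobs
#   operation.start()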
class MoveOperation(CopyOperation):
"""Operation thread used for moving files"""
def _remove_path(self, path, item_list, relative_path=None):
"""Remove path specified path."""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
try:
# try removing specified path
self._source.remove_path(path, relative_to=source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except StandardError as error:
# problem removing path, ask user what to do
if Skip.REMOVE in self._response_cache:
response = self._response_cache[Skip.REMOVE]
else:
response = self._get_remove_error_input(error)
# try removing path again
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path, item_list)
else:
# user didn't want to retry, remove path from item_list
item_list.pop(item_list.index(path))
def _create_dialog(self):
"""Create progress dialog"""
self._dialog = MoveDialog(self._application, self)
def _move_file(self, file_name, relative_path=None):
"""Move specified file using provider rename method"""
can_procede = True
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
dest_file = file_name
# check if destination file exists
if self._destination.exists(file_name, relative_to=self._destination_path):
if self._overwrite_all is not None:
can_procede = self._overwrite_all
else:
can_procede, options = self._get_overwrite_input(file_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
dest_file = os.path.join(
os.path.dirname(file_name),
options[OverwriteOption.NEW_NAME]
)
# if user skipped this file return
if not can_procede:
self._file_list.pop(self._file_list.index((file_name, relative_path)))
return
# move file
try:
self._source.move_path(
file_name,
os.path.join(self._destination_path, dest_file),
relative_to=source_path
)
# push events to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, file_name, None)
self._source_queue.put(event, False)
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, dest_file, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with moving file, ask user what to do
if Skip.MOVE in self._response_cache:
response = self._response_cache[Skip.MOVE]
else:
response = self._get_move_error_input(error)
# try moving file again
if response == OperationError.RESPONSE_RETRY:
self._move_file(dest_file)
else:
# user didn't want to retry, remove file from list
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# exit method
return
def _move_file_list(self):
"""Move files from the list"""
gobject.idle_add(self._update_status, _('Moving files...'))
item_list = self._file_list[:]
for file_name, source_path in item_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# move file
gobject.idle_add(self._dialog.set_current_file, file_name)
self._move_file(file_name, source_path)
gobject.idle_add(self._dialog.increment_current_count, 1)
def _delete_file_list(self):
"""Remove files from source list"""
gobject.idle_add(self._update_status, _('Deleting source files...'))
item_list = self._file_list[:]
for number, item in enumerate(item_list, 0):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# remove path
gobject.idle_add(self._dialog.set_current_file, item[0])
self._remove_path(item[0], self._file_list, item[1])
# update current count
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(item_list)
)
self._delete_directories()
def _delete_directories(self):
"""Remove empty directories after moving files"""
gobject.idle_add(self._update_status, _('Deleting source directories...'))
dir_list = self._dir_list[:]
dir_list.reverse() # remove deepest directories first
for number, directory in enumerate(dir_list, 0):
source_path = self._source_path if directory[1] is None else os.path.join(self._source_path, directory[1])
directory = directory[0]
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
if self._source.exists(directory, relative_to=source_path):
gobject.idle_add(self._dialog.set_current_file, directory)
# try to get a list of items inside of directory
try:
item_list = self._source.list_dir(directory, relative_to=source_path)
except:
item_list = None
# remove directory if empty
if item_list is not None and len(item_list) == 0:
self._remove_path(directory, dir_list, relative_path=source_path)
# update current count
if len(dir_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(dir_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
def _check_devices(self):
"""Check if source and destination are on the same file system"""
dev_source = self._source.get_stat(self._source.get_path(), extended=True).device
dev_destination = self._destination.get_stat(self._destination.get_path(), extended=True).device
return dev_source == dev_destination
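# For local providers the extended stat above boils down to comparing st_dev
# from os.stat; a minimal stand-alone illustration (paths are hypothetical):
#
#   import os
#   same_file_system = os.stat('/src').st_dev == os.stat('/dst').st_dev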
def run(self):
"""Main thread method
We override this method from CopyOperation in order to provide
a bit smarter move operation.
"""
# set dialog info
with gtk.gdk.lock:
self._dialog.set_source(self._source_path)
self._dialog.set_destination(self._destination_path)
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
# get list of items
self._get_lists()
# check for available free space
system_info = self._destination.get_system_size(self._destination_path)
if self._total_size > system_info.size_available and not self._check_devices():
should_continue = self._get_free_space_input(self._total_size, system_info.size_available)
# exit if user chooses to
if not should_continue:
self.cancel()
# clear selection on source directory
with gtk.gdk.lock:
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# create directories
self._create_directory_list()
# copy/move files
if self._check_devices():
# both paths are on the same file system, move instead of copy
self._move_file_list()
self._delete_directories()
else:
# paths are located on different file systems, copy and remove
self._copy_file_list()
self._delete_file_list()
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Move Operation')
message = ngettext(
'Moving of {0} item from "{1}" to "{2}" is completed!',
'Moving of {0} items from "{1}" to "{2}" is completed!',
len(self._file_list) + len(self._dir_list)
).format(
len(self._file_list) + len(self._dir_list),
os.path.basename(self._source_path),
os.path.basename(self._destination_path)
)
# queue notification
notify_manager.notify(title, message)
# show error list if needed
if len(self._error_list) > 0:
error_list = ErrorList(self._dialog)
error_list.set_operation_name(_('Move Operation'))
error_list.set_source(self._source_path)
error_list.set_destination(self._destination_path)
error_list.set_errors(self._error_list)
error_list.show()
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
class DeleteOperation(Operation):
"""Operation thread used for deleting files"""
def __init__(self, application, provider):
Operation.__init__(self, application, provider)
# allow users to force deleting items
self._force_delete = False
def _create_dialog(self):
"""Create operation dialog"""
self._dialog = DeleteDialog(self._application, self)
def _remove_path(self, path):
"""Remove path"""
try:
# try removing specified path
self._source.remove_path(path, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except StandardError as error:
# problem removing path, ask user what to do
if Skip.REMOVE in self._response_cache:
response = self._response_cache[Skip.REMOVE]
else:
response = self._get_remove_error_input(error)
# try removing path again
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path)
def _trash_path(self, path):
"""Move path to the trash"""
try:
# try trashing specified path
self._source.trash_path(path, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except TrashError as error:
# problem removing path, ask user what to do
if Skip.TRASH in self._response_cache:
response = self._response_cache[Skip.TRASH]
else:
response = self._get_trash_error_input(error)
# remove path instead of trashing it, as offered by the dialog
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path)
def set_force_delete(self, force):
"""Set forced deletion instead of trashing files"""
self._force_delete = force
def run(self):
"""Main thread method, this is where all the stuff is happening"""
self._file_list = self._selection_list[:] # use predefined selection list
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
with gtk.gdk.lock:
# clear selection on source directory
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# select removal method
trash_files = self._application.options.section('operations').get('trash_files')
trash_available = ProviderSupport.TRASH in self._source.get_support()
if self._force_delete:
remove_method = self._remove_path
else:
remove_method = (
self._remove_path,
self._trash_path
)[trash_files and trash_available]
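# Note: the tuple index above is an old Python idiom equivalent to
# "self._trash_path if (trash_files and trash_available) else self._remove_path";
# a False flag selects element 0, a True flag selects element 1.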
# remove them
for index, item in enumerate(self._file_list, 1):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, item)
remove_method(item)
# update current count
if len(self._file_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(index) / len(self._file_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Delete Operation')
message = ngettext(
'Removal of {0} item from "{1}" is completed!',
'Removal of {0} items from "{1}" is completed!',
len(self._file_list)
).format(
len(self._file_list),
os.path.basename(self._source_path)
)
# queue notification
notify_manager.notify(title, message)
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
class RenameOperation(Operation):
"""Thread used for rename of large number of files"""
def __init__(self, application, provider, path, file_list):
Operation.__init__(self, application, provider)
self._destination = provider
self._destination_path = path
self._source_path = path
self._file_list = file_list
def _create_dialog(self):
"""Create operation dialog"""
self._dialog = RenameDialog(self._application, self)
def _rename_path(self, old_name, new_name, index):
"""Rename specified path"""
can_procede = True
try:
# check if specified path already exists
if self._destination.exists(new_name, relative_to=self._source_path):
can_procede, options = self._get_overwrite_input(new_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
new_name = os.path.join(
os.path.dirname(new_name),
options[OverwriteOption.NEW_NAME]
)
if not can_procede:
# user canceled overwrite, skip the file
self._file_list.pop(index)
return
else:
# rename path
self._source.rename_path(old_name, new_name, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
delete_event = (MonitorSignals.DELETED, old_name, None)
create_event = (MonitorSignals.CREATED, new_name, None)
self._source_queue.put(delete_event, False)
self._source_queue.put(create_event, False)
except StandardError as error:
# problem renaming path, ask user what to do
if Skip.RENAME in self._response_cache:
response = self._response_cache[Skip.RENAME]
else:
response = self._get_rename_error_input(error)
# try renaming path again
if response == OperationError.RESPONSE_RETRY:
self._rename_path(old_name, new_name, index)
else:
# user didn't want to retry, remove path from list
self._file_list.pop(index)
def run(self):
"""Main thread method, this is where all the stuff is happening"""
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
for index, item in enumerate(self._file_list, 1):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, item[0])
self._rename_path(item[0], item[1], index-1)
# update current count
if len(self._file_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(index) / len(self._file_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Rename Operation')
message = ngettext(
'Rename of {0} item from "{1}" is completed!',
'Rename of {0} items from "{1}" is completed!',
len(self._file_list)
).format(
len(self._file_list),
os.path.basename(self._source_path)
)
# queue notification
notify_manager.notify(title, message)
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
| gpl-3.0 | -8,770,339,951,091,888,000 | 29.236694 | 114 | 0.676711 | false |
sujithshankar/anaconda | pyanaconda/constants.py | 1 | 6817 | #
# constants.py: anaconda constants
#
# Copyright (C) 2001 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Used for digits, ascii_letters, punctuation constants
import string # pylint: disable=deprecated-module
from pyanaconda.i18n import N_
# Use -1 to indicate that the SELinux configuration is unset
SELINUX_DEFAULT = -1
# where to look for 3rd party addons
ADDON_PATHS = ["/usr/share/anaconda/addons"]
from pykickstart.constants import AUTOPART_TYPE_LVM
# common string needs to be easy to change
from pyanaconda import product
productName = product.productName
productVersion = product.productVersion
productArch = product.productArch
bugzillaUrl = product.bugUrl
isFinal = product.isFinal
# for use in device names, eg: "fedora", "rhel"
shortProductName = productName.lower() # pylint: disable=no-member
if productName.count(" "): # pylint: disable=no-member
shortProductName = ''.join(s[0] for s in shortProductName.split())
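# e.g. a productName of "Red Hat Enterprise Linux" yields "rhel", while a
# single-word name such as "Fedora" is simply lower-cased to "fedora"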
# DriverDisc Paths
DD_ALL = "/tmp/DD"
DD_FIRMWARE = "/tmp/DD/lib/firmware"
DD_RPMS = "/tmp/DD-*"
TRANSLATIONS_UPDATE_DIR = "/tmp/updates/po"
ANACONDA_CLEANUP = "anaconda-cleanup"
MOUNT_DIR = "/run/install"
DRACUT_REPODIR = "/run/install/repo"
DRACUT_ISODIR = "/run/install/source"
ISO_DIR = MOUNT_DIR + "/isodir"
IMAGE_DIR = MOUNT_DIR + "/image"
INSTALL_TREE = MOUNT_DIR + "/source"
BASE_REPO_NAME = "anaconda"
# NOTE: this should be LANG_TERRITORY.CODESET, e.g. en_US.UTF-8
DEFAULT_LANG = "en_US.UTF-8"
DEFAULT_VC_FONT = "eurlatgr"
DEFAULT_KEYBOARD = "us"
DRACUT_SHUTDOWN_EJECT = "/run/initramfs/usr/lib/dracut/hooks/shutdown/99anaconda-eject.sh"
# VNC questions
USEVNC = N_("Start VNC")
USETEXT = N_("Use text mode")
# Runlevel files
RUNLEVELS = {3: 'multi-user.target', 5: 'graphical.target'}
# Network
NETWORK_CONNECTION_TIMEOUT = 45 # in seconds
NETWORK_CONNECTED_CHECK_INTERVAL = 0.1 # in seconds
# DBus
DEFAULT_DBUS_TIMEOUT = -1 # use default
# Thread names
THREAD_EXECUTE_STORAGE = "AnaExecuteStorageThread"
THREAD_STORAGE = "AnaStorageThread"
THREAD_STORAGE_WATCHER = "AnaStorageWatcher"
THREAD_CHECK_STORAGE = "AnaCheckStorageThread"
THREAD_CUSTOM_STORAGE_INIT = "AnaCustomStorageInit"
THREAD_WAIT_FOR_CONNECTING_NM = "AnaWaitForConnectingNMThread"
THREAD_PAYLOAD = "AnaPayloadThread"
THREAD_PAYLOAD_RESTART = "AnaPayloadRestartThread"
THREAD_INPUT_BASENAME = "AnaInputThread"
THREAD_SYNC_TIME_BASENAME = "AnaSyncTime"
THREAD_EXCEPTION_HANDLING_TEST = "AnaExceptionHandlingTest"
THREAD_LIVE_PROGRESS = "AnaLiveProgressThread"
THREAD_SOFTWARE_WATCHER = "AnaSoftwareWatcher"
THREAD_CHECK_SOFTWARE = "AnaCheckSoftwareThread"
THREAD_SOURCE_WATCHER = "AnaSourceWatcher"
THREAD_INSTALL = "AnaInstallThread"
THREAD_CONFIGURATION = "AnaConfigurationThread"
THREAD_FCOE = "AnaFCOEThread"
THREAD_ISCSI_DISCOVER = "AnaIscsiDiscoverThread"
THREAD_ISCSI_LOGIN = "AnaIscsiLoginThread"
THREAD_GEOLOCATION_REFRESH = "AnaGeolocationRefreshThread"
THREAD_DATE_TIME = "AnaDateTimeThread"
THREAD_TIME_INIT = "AnaTimeInitThread"
THREAD_DASDFMT = "AnaDasdfmtThread"
THREAD_KEYBOARD_INIT = "AnaKeyboardThread"
THREAD_ADD_LAYOUTS_INIT = "AnaAddLayoutsInitThread"
# Geolocation constants
# geolocation providers
# - values are used by the geoloc CLI/boot option
GEOLOC_PROVIDER_FEDORA_GEOIP = "provider_fedora_geoip"
GEOLOC_PROVIDER_HOSTIP = "provider_hostip"
GEOLOC_PROVIDER_GOOGLE_WIFI = "provider_google_wifi"
# geocoding provider
GEOLOC_GEOCODER_NOMINATIM = "geocoder_nominatim"
# default providers
GEOLOC_DEFAULT_PROVIDER = GEOLOC_PROVIDER_FEDORA_GEOIP
GEOLOC_DEFAULT_GEOCODER = GEOLOC_GEOCODER_NOMINATIM
# timeout (in seconds)
GEOLOC_TIMEOUT = 3
ANACONDA_ENVIRON = "anaconda"
FIRSTBOOT_ENVIRON = "firstboot"
# Tainted hardware
UNSUPPORTED_HW = 1 << 28
# Password validation
PASSWORD_MIN_LEN = 8
PASSWORD_EMPTY_ERROR = N_("The password is empty.")
PASSWORD_CONFIRM_ERROR_GUI = N_("The passwords do not match.")
PASSWORD_CONFIRM_ERROR_TUI = N_("The passwords you entered were different. Please try again.")
PASSWORD_WEAK = N_("The password you have provided is weak. %s")
PASSWORD_WEAK_WITH_ERROR = N_("The password you have provided is weak: %s.")
PASSWORD_WEAK_CONFIRM = N_("You have provided a weak password. Press Done again to use anyway.")
PASSWORD_WEAK_CONFIRM_WITH_ERROR = N_("You have provided a weak password: %s. Press Done again to use anyway.")
PASSWORD_ASCII = N_("The password you have provided contains non-ASCII characters. You may not be able to switch between keyboard layouts to login. Press Done to continue.")
PASSWORD_DONE_TWICE = N_("You will have to press Done twice to confirm it.")
PASSWORD_STRENGTH_DESC = [N_("Empty"), N_("Weak"), N_("Fair"), N_("Good"), N_("Strong")]
# the number of seconds we consider a noticeable freeze of the UI
NOTICEABLE_FREEZE = 0.1
# all ASCII characters
PW_ASCII_CHARS = string.digits + string.ascii_letters + string.punctuation + " "
# Recognizing a tarfile
TAR_SUFFIX = (".tar", ".tbz", ".tgz", ".txz", ".tar.bz2", "tar.gz", "tar.xz")
# screenshots
SCREENSHOTS_DIRECTORY = "/tmp/anaconda-screenshots"
SCREENSHOTS_TARGET_DIRECTORY = "/root/anaconda-screenshots"
# cmdline arguments that append instead of overwrite
CMDLINE_APPEND = ["modprobe.blacklist", "ifname"]
DEFAULT_AUTOPART_TYPE = AUTOPART_TYPE_LVM
# Default to these units when reading user input when no units given
SIZE_UNITS_DEFAULT = "MiB"
# Constants for reporting status to IPMI. These are from the IPMI spec v2 rev1.1, page 512.
IPMI_STARTED = 0x7 # installation started
IPMI_FINISHED = 0x8 # installation finished successfully
IPMI_ABORTED = 0x9 # installation finished unsuccessfully, due to some non-exn error
IPMI_FAILED = 0xA # installation hit an exception
# for how long (in seconds) we try to wait for enough entropy for LUKS
# keep this a multiple of 60 (minutes)
MAX_ENTROPY_WAIT = 10 * 60
# X display number to use
X_DISPLAY_NUMBER = 1
# Payload status messages
PAYLOAD_STATUS_PROBING_STORAGE = N_("Probing storage...")
PAYLOAD_STATUS_PACKAGE_MD = N_("Downloading package metadata...")
PAYLOAD_STATUS_GROUP_MD = N_("Downloading group metadata...")
# Window title text
WINDOW_TITLE_TEXT = N_("Anaconda Installer")
| gpl-2.0 | 920,207,107,352,325,500 | 35.068783 | 173 | 0.74945 | false |
mwclient/mwclient | mwclient/page.py | 1 | 20723 | import six
from six import text_type
import time
from mwclient.util import parse_timestamp
import mwclient.listing
import mwclient.errors
class Page(object):
def __init__(self, site, name, info=None, extra_properties=None):
if type(name) is type(self):
self.__dict__.update(name.__dict__)
return
self.site = site
self.name = name
self._textcache = {}
if not info:
if extra_properties:
prop = 'info|' + '|'.join(six.iterkeys(extra_properties))
extra_props = []
for extra_prop in six.itervalues(extra_properties):
extra_props.extend(extra_prop)
else:
prop = 'info'
extra_props = ()
if type(name) is int:
info = self.site.get('query', prop=prop, pageids=name,
inprop='protection', *extra_props)
else:
info = self.site.get('query', prop=prop, titles=name,
inprop='protection', *extra_props)
info = six.next(six.itervalues(info['query']['pages']))
self._info = info
if 'invalid' in info:
raise mwclient.errors.InvalidPageTitle(info.get('invalidreason'))
self.namespace = info.get('ns', 0)
self.name = info.get('title', u'')
if self.namespace:
self.page_title = self.strip_namespace(self.name)
else:
self.page_title = self.name
self.base_title = self.page_title.split('/')[0]
self.base_name = self.name.split('/')[0]
self.touched = parse_timestamp(info.get('touched'))
self.revision = info.get('lastrevid', 0)
self.exists = 'missing' not in info
self.length = info.get('length')
self.protection = {
i['type']: (i['level'], i['expiry'])
for i in info.get('protection', ())
if i
}
self.redirect = 'redirect' in info
self.pageid = info.get('pageid', None)
self.contentmodel = info.get('contentmodel', None)
self.pagelanguage = info.get('pagelanguage', None)
self.restrictiontypes = info.get('restrictiontypes', None)
self.last_rev_time = None
self.edit_time = None
def redirects_to(self):
""" Get the redirect target page, or None if the page is not a redirect."""
info = self.site.get('query', prop='pageprops', titles=self.name, redirects='')
if 'redirects' in info['query']:
for page in info['query']['redirects']:
if page['from'] == self.name:
return Page(self.site, page['to'])
return None
else:
return None
def resolve_redirect(self):
""" Get the redirect target page, or the current page if its not a redirect."""
target_page = self.redirects_to()
if target_page is None:
return self
else:
return target_page
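# Hedged usage sketch (page title is illustrative):
#
#   page = site.pages['Old title']
#   target = page.resolve_redirect()  # returns the page itself when not a redirect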
def __repr__(self):
return "<Page object '%s' for %s>" % (self.name.encode('utf-8'), self.site)
def __unicode__(self):
return self.name
@staticmethod
def strip_namespace(title):
if title[0] == ':':
title = title[1:]
return title[title.find(':') + 1:]
@staticmethod
def normalize_title(title):
# TODO: Make site dependent
title = title.strip()
if title[0] == ':':
title = title[1:]
title = title[0].upper() + title[1:]
title = title.replace(' ', '_')
return title
def can(self, action):
"""Check if the current user has the right to carry out some action
with the current page.
Example:
>>> page.can('edit')
True
"""
level = self.protection.get(action, (action,))[0]
if level == 'sysop':
level = 'editprotected'
return level in self.site.rights
def get_token(self, type, force=False):
return self.site.get_token(type, force, title=self.name)
def text(self, section=None, expandtemplates=False, cache=True, slot='main'):
"""Get the current wikitext of the page, or of a specific section.
If the page does not exist, an empty string is returned. By
default, results will be cached and if you call text() again
with the same section and expandtemplates the result will come
from the cache. The cache is stored on the instance, so it
lives as long as the instance does.
Args:
section (int): Section number, to only get text from a single section.
expandtemplates (bool): Expand templates (default: `False`)
cache (bool): Use in-memory caching (default: `True`)
slot (str): Revision slot to read from (default: `'main'`); only
relevant on wikis with multi-content revision support.
"""
if not self.can('read'):
raise mwclient.errors.InsufficientPermission(self)
if not self.exists:
return u''
if section is not None:
section = text_type(section)
key = hash((section, expandtemplates))
if cache and key in self._textcache:
return self._textcache[key]
revs = self.revisions(prop='content|timestamp', limit=1, section=section,
slots=slot)
try:
rev = next(revs)
if 'slots' in rev:
text = rev['slots'][slot]['*']
else:
text = rev['*']
self.last_rev_time = rev['timestamp']
except StopIteration:
text = u''
self.last_rev_time = None
if not expandtemplates:
self.edit_time = time.gmtime()
else:
# The 'rvexpandtemplates' option was removed in MediaWiki 1.32, so we have to
# make an extra API call, see https://github.com/mwclient/mwclient/issues/214
text = self.site.expandtemplates(text)
if cache:
self._textcache[key] = text
return text
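# Hedged usage sketch (host and page name are illustrative):
#
#   site = mwclient.Site('en.wikipedia.org')
#   page = site.pages['Sandbox']
#   lead = page.text(section=0)                       # lead section only
#   expanded = page.text(expandtemplates=True, cache=False)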
def save(self, *args, **kwargs):
"""Alias for edit, for maintaining backwards compatibility."""
return self.edit(*args, **kwargs)
def edit(self, text, summary=u'', minor=False, bot=True, section=None, **kwargs):
"""Update the text of a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, text=text, **kwargs)
def append(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Append text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, appendtext=text, **kwargs)
def prepend(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Prepend text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, prependtext=text, **kwargs)
def _edit(self, summary, minor, bot, section, **kwargs):
if not self.site.logged_in and self.site.force_login:
raise mwclient.errors.AssertUserFailedError()
if self.site.blocked:
raise mwclient.errors.UserBlocked(self.site.blocked)
if not self.can('edit'):
raise mwclient.errors.ProtectedPageError(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if minor:
data['minor'] = '1'
if not minor:
data['notminor'] = '1'
if self.last_rev_time:
data['basetimestamp'] = time.strftime('%Y%m%d%H%M%S', self.last_rev_time)
if self.edit_time:
data['starttimestamp'] = time.strftime('%Y%m%d%H%M%S', self.edit_time)
if bot:
data['bot'] = '1'
if section is not None:
data['section'] = section
data.update(kwargs)
if self.site.force_login:
data['assert'] = 'user'
def do_edit():
result = self.site.post('edit', title=self.name, summary=summary,
token=self.get_token('edit'),
**data)
if result['edit'].get('result').lower() == 'failure':
raise mwclient.errors.EditError(self, result['edit'])
return result
try:
result = do_edit()
except mwclient.errors.APIError as e:
if e.code == 'badtoken':
# Retry, but only once to avoid an infinite loop
self.get_token('edit', force=True)
try:
result = do_edit()
except mwclient.errors.APIError as e:
self.handle_edit_error(e, summary)
else:
self.handle_edit_error(e, summary)
# 'newtimestamp' is not included if no change was made
if 'newtimestamp' in result['edit'].keys():
self.last_rev_time = parse_timestamp(result['edit'].get('newtimestamp'))
# Workaround for https://phabricator.wikimedia.org/T211233
for cookie in self.site.connection.cookies:
if 'PostEditRevision' in cookie.name:
self.site.connection.cookies.clear(cookie.domain, cookie.path,
cookie.name)
# clear the page text cache
self._textcache = {}
return result['edit']
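    # Illustrative usage of the edit helpers above (added; not part of mwclient --
    # `page` is assumed to be an existing Page the logged-in user may edit):
    #
    #     page.edit(u'New wikitext', summary=u'rewrite', minor=True)
    #     page.append(u'\n== Notes ==\n', summary=u'add section')
    #     page.prepend(u'{{hatnote}}\n', summary=u'add hatnote')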
def handle_edit_error(self, e, summary):
if e.code == 'editconflict':
raise mwclient.errors.EditError(self, summary, e.info)
elif e.code in {'protectedtitle', 'cantcreate', 'cantcreate-anon',
'noimageredirect-anon', 'noimageredirect', 'noedit-anon',
'noedit', 'protectedpage', 'cascadeprotected',
'customcssjsprotected',
'protectednamespace-interface', 'protectednamespace'}:
raise mwclient.errors.ProtectedPageError(self, e.code, e.info)
elif e.code == 'assertuserfailed':
raise mwclient.errors.AssertUserFailedError()
else:
raise e
def touch(self):
"""Perform a "null edit" on the page to update the wiki's cached data of it.
This is useful in contrast to purge when needing to update stored data on a wiki,
for example Semantic MediaWiki properties or Cargo table values, since purge
only forces update of a page's displayed values and not its store.
"""
if not self.exists:
return
self.append('')
def move(self, new_title, reason='', move_talk=True, no_redirect=False):
"""Move (rename) page to new_title.
If user account is an administrator, specify no_redirect as True to not
leave a redirect.
If user does not have permission to move page, an InsufficientPermission
exception is raised.
"""
if not self.can('move'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if move_talk:
data['movetalk'] = '1'
if no_redirect:
data['noredirect'] = '1'
result = self.site.post('move', ('from', self.name), to=new_title,
token=self.get_token('move'), reason=reason, **data)
return result['move']
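    # Illustrative usage (added; not part of mwclient): administrators can
    # suppress the redirect that a rename would normally leave behind:
    #
    #     page.move('New title', reason='cleanup', no_redirect=True)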
def delete(self, reason='', watch=False, unwatch=False, oldimage=False):
"""Delete page.
If user does not have permission to delete page, an InsufficientPermission
exception is raised.
"""
if not self.can('delete'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if watch:
data['watch'] = '1'
if unwatch:
data['unwatch'] = '1'
if oldimage:
data['oldimage'] = oldimage
result = self.site.post('delete', title=self.name,
token=self.get_token('delete'),
reason=reason, **data)
return result['delete']
def purge(self):
"""Purge server-side cache of page. This will re-render templates and other
dynamic content.
"""
self.site.post('purge', titles=self.name)
# def watch: requires 1.14
# Properties
def backlinks(self, namespace=None, filterredir='all', redirect=False,
limit=None, generator=True):
"""List pages that link to the current page, similar to Special:Whatlinkshere.
API doc: https://www.mediawiki.org/wiki/API:Backlinks
"""
prefix = mwclient.listing.List.get_prefix('bl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, namespace=namespace, filterredir=filterredir,
))
if redirect:
kwargs['%sredirect' % prefix] = '1'
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'backlinks', 'bl', limit=limit, return_values='title',
**kwargs
)
def categories(self, generator=True, show=None):
"""List categories used on the current page.
API doc: https://www.mediawiki.org/wiki/API:Categories
Args:
generator (bool): Return generator (Default: True)
show (str): Set to 'hidden' to only return hidden categories
or '!hidden' to only return non-hidden ones.
Returns:
mwclient.listings.PagePropertyGenerator
"""
prefix = mwclient.listing.List.get_prefix('cl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, show=show
))
if generator:
return mwclient.listing.PagePropertyGenerator(
self, 'categories', 'cl', **kwargs
)
else:
# TODO: return sortkey if wanted
return mwclient.listing.PageProperty(
self, 'categories', 'cl', return_values='title', **kwargs
)
def embeddedin(self, namespace=None, filterredir='all', limit=None, generator=True):
"""List pages that transclude the current page.
API doc: https://www.mediawiki.org/wiki/API:Embeddedin
Args:
namespace (int): Restricts search to a given namespace (Default: None)
filterredir (str): How to filter redirects, either 'all' (default),
'redirects' or 'nonredirects'.
limit (int): Maximum amount of pages to return per request
generator (bool): Return generator (Default: True)
Returns:
mwclient.listings.List: Page iterator
"""
prefix = mwclient.listing.List.get_prefix('ei', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace,
filterredir=filterredir))
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'embeddedin', 'ei', limit=limit, return_values='title',
**kwargs
)
def extlinks(self):
"""List external links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Extlinks
"""
return mwclient.listing.PageProperty(self, 'extlinks', 'el', return_values='*')
def images(self, generator=True):
"""List files/images embedded in the current page.
API doc: https://www.mediawiki.org/wiki/API:Images
"""
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'images', '')
else:
return mwclient.listing.PageProperty(self, 'images', '',
return_values='title')
def iwlinks(self):
"""List interwiki links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Iwlinks
"""
return mwclient.listing.PageProperty(self, 'iwlinks', 'iw',
return_values=('prefix', '*'))
def langlinks(self, **kwargs):
"""List interlanguage links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Langlinks
"""
return mwclient.listing.PageProperty(self, 'langlinks', 'll',
return_values=('lang', '*'),
**kwargs)
def links(self, namespace=None, generator=True, redirects=False):
"""List links to other pages from the current page.
API doc: https://www.mediawiki.org/wiki/API:Links
"""
prefix = mwclient.listing.List.get_prefix('pl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace))
if redirects:
kwargs['redirects'] = '1'
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'links', 'pl', **kwargs)
else:
return mwclient.listing.PageProperty(self, 'links', 'pl',
return_values='title', **kwargs)
def revisions(self, startid=None, endid=None, start=None, end=None,
dir='older', user=None, excludeuser=None, limit=50,
prop='ids|timestamp|flags|comment|user',
expandtemplates=False, section=None,
diffto=None, slots=None, uselang=None):
"""List revisions of the current page.
API doc: https://www.mediawiki.org/wiki/API:Revisions
Args:
startid (int): Revision ID to start listing from.
endid (int): Revision ID to stop listing at.
start (str): Timestamp to start listing from.
end (str): Timestamp to end listing at.
dir (str): Direction to list in: 'older' (default) or 'newer'.
user (str): Only list revisions made by this user.
excludeuser (str): Exclude revisions made by this user.
limit (int): The maximum number of revisions to return per request.
prop (str): Which properties to get for each revision,
default: 'ids|timestamp|flags|comment|user'
expandtemplates (bool): Expand templates in rvprop=content output
section (int): Section number. If rvprop=content is set, only the contents
of this section will be retrieved.
diffto (str): Revision ID to diff each revision to. Use "prev", "next" and
"cur" for the previous, next and current revision respectively.
slots (str): The content slot (Mediawiki >= 1.32) to retrieve content from.
uselang (str): Language to use for parsed edit comments and other localized
messages.
Returns:
mwclient.listings.List: Revision iterator
"""
kwargs = dict(mwclient.listing.List.generate_kwargs(
'rv', startid=startid, endid=endid, start=start, end=end, user=user,
excludeuser=excludeuser, diffto=diffto, slots=slots
))
if self.site.version[:2] < (1, 32) and 'rvslots' in kwargs:
# https://github.com/mwclient/mwclient/issues/199
del kwargs['rvslots']
kwargs['rvdir'] = dir
kwargs['rvprop'] = prop
kwargs['uselang'] = uselang
if expandtemplates:
kwargs['rvexpandtemplates'] = '1'
if section is not None:
kwargs['rvsection'] = section
return mwclient.listing.RevisionsIterator(self, 'revisions', 'rv', limit=limit,
**kwargs)
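    # Illustrative usage (added; not part of mwclient): walk the last five
    # revisions -- include 'content' in prop if the wikitext is needed too:
    #
    #     for rev in page.revisions(limit=5, prop='ids|timestamp|comment'):
    #         print(rev['revid'], rev['comment'])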
def templates(self, namespace=None, generator=True):
"""List templates used on the current page.
API doc: https://www.mediawiki.org/wiki/API:Templates
"""
prefix = mwclient.listing.List.get_prefix('tl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace))
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'templates', prefix,
**kwargs)
else:
return mwclient.listing.PageProperty(self, 'templates', prefix,
return_values='title', **kwargs)
| mit | 138,916,023,745,932,510 | 37.304991 | 90 | 0.564059 | false |
dmittov/AlcoBot | bot.py | 1 | 2312 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import logging
import telegram
import cocktail
from time import sleep
from urllib2 import URLError
def main():
logging.basicConfig(
level=logging.DEBUG,
filename='debug.log',
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# Telegram Bot Authorization Token
TOKEN = None
with open('prod.token') as fh:
TOKEN = fh.readline()
logging.info(TOKEN)
bot = telegram.Bot(TOKEN)
try:
update_id = bot.getUpdates()[0].update_id
except IndexError:
update_id = None
while True:
try:
update_id = response(bot, update_id)
except telegram.TelegramError as e:
# These are network problems with Telegram.
if e.message in ("Bad Gateway", "Timed out"):
sleep(1)
elif e.message == "Unauthorized":
# The user has removed or blocked the bot.
update_id += 1
else:
raise e
except URLError as e:
sleep(1)
def response(bot, update_id):
# Request updates after the last update_id
for update in bot.getUpdates(offset=update_id, timeout=10):
# chat_id is required to reply to any message
chat_id = update.message.chat_id
update_id = update.update_id + 1
try:
message = cocktail.coctail_msg(update.message.text)
except Exception as e:
message = e.message
if message:
bot.sendMessage(chat_id=chat_id,
text=message)
return update_id
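# Added commentary (not original code): getUpdates(offset=update_id)
# acknowledges every update below `offset`, so setting update_id to
# update.update_id + 1 both advances the cursor and marks the message as
# processed -- a message that makes cocktail.coctail_msg() raise is therefore
# answered with the error text once rather than retried forever.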
if __name__ == '__main__':
main()
| gpl-3.0 | 4,792,354,141,523,781,000 | 30.243243 | 71 | 0.62154 | false |
nullzero/wprobot | wp/ltime.py | 1 | 1990 | # -*- coding: utf-8 -*-
"""
Library to manage everything about date and time.
"""
__version__ = "1.0.2"
__author__ = "Sorawee Porncharoenwase"
import datetime
import time
def wrapMonth(m):
"""Convert zero-based month number to zero-based month number."""
m -= 1
if m < 0:
m += 12
if m >= 12:
m -= 12
return m
def weekdayThai(d):
"""Return Thai name of days of the week."""
return map(lambda x: u"วัน" + x,
[u"จันทร์", u"อังคาร", u"พุธ", u"พฤหัสบดี", u"ศุกร์",
u"เสาร์", u"อาทิตย์"])[d]
def monthEng(m):
"""Return English name of month."""
return [u"January", u"February", u"March", u"April", u"May", u"June",
u"July", u"August", u"September", u"October", u"November",
u"December"][wrapMonth(m)]
def monthThai(m):
"""Return Thai name of month."""
return [u"มกราคม", u"กุมภาพันธ์", u"มีนาคม", u"เมษายน", u"พฤษภาคม",
u"มิถุนายน", u"กรกฎาคม", u"สิงหาคม", u"กันยายน", u"ตุลาคม",
u"พฤศจิกายน", u"ธันวาคม"][wrapMonth(m)]
def monthThaiAbbr(m):
"""Return Thai abbreviated name of month."""
return [u"ม.ค.", u"ก.พ.", u"มี.ค.", u"เม.ย.", u"พ.ค.", u"มิ.ย.",
u"ก.ค.", u"ส.ค.", u"ก.ย.", u"ต.ค.", u"พ.ย.", u"ธ.ค."][wrapMonth(m)]
def getNumDay(year, month):
"""Return length of day in given month"""
if month == 2:
if year % 400 == 0:
return 29
elif year % 100 == 0:
return 28
elif year % 4 == 0:
return 29
else:
return 28
return [0, 31, 0, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month]
td = datetime.timedelta
sleep = time.sleep
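# Illustrative examples (added; not part of the original module):
#     wrapMonth(1) == 0 and wrapMonth(13) == 0   # January, with wrap-around
#     monthThai(4) == u"เมษายน"                   # one-based month lookup
#     getNumDay(2000, 2) == 29                   # leap year (divisible by 400)
#     getNumDay(1900, 2) == 28                   # not a leap year (divisible by 100)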
| mit | 5,983,318,943,164,025,000 | 27.440678 | 79 | 0.51907 | false |
Gr8z/Legend-Cogs | profanity/profanity.py | 1 | 2085 | import discord
from discord.ext import commands
from .utils.dataIO import dataIO, fileIO
import os
import asyncio
BOTCOMMANDER_ROLES = ["Family Representative", "Clan Manager", "Clan Deputy", "Co-Leader", "Hub Officer", "admin"]
class profanity:
"""profanity!"""
def __init__(self, bot):
self.bot = bot
self.bannedwords = dataIO.load_json('data/Profanity/banned_words.json')
async def banned_words(self, message):
word_set = set(self.bannedwords)
phrase_set = set(message.content.replace("*", "").replace("_", "").replace("#", "").split())
if word_set.intersection(phrase_set):
await self.bot.delete_message(message)
msg = await self.bot.send_message(
message.channel,
"{}, **We do not allow Hateful, obscene, offensive, racist, sexual, or violent words in any public channels.**".format(
message.author.mention
)
)
await asyncio.sleep(6)
await self.bot.delete_message(msg)
return
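    # Added commentary (not original code): the check above is a plain set
    # intersection -- the message is split on whitespace after stripping the
    # markdown characters * _ #, so "*bad*" still matches a banned word "bad",
    # while embedded substrings such as "badge" do not trigger a deletion.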
async def on_message_edit(self, before, after):
await self.banned_words(after)
async def on_message(self, message):
server = message.server
author = message.author
if message.author.id == self.bot.user.id:
return
botcommander_roles = [discord.utils.get(server.roles, name=r) for r in BOTCOMMANDER_ROLES]
botcommander_roles = set(botcommander_roles)
author_roles = set(author.roles)
if len(author_roles.intersection(botcommander_roles)):
return
await self.banned_words(message)
def check_folders():
if not os.path.exists("data/Profanity"):
print("Creating data/Profanity folder...")
os.makedirs("data/Profanity")
def check_files():
f = "data/Profanity/banned_words.json"
if not fileIO(f, "check"):
print("Creating empty banned_words.json...")
fileIO(f, "save", [])
def setup(bot):
check_folders()
check_files()
    bot.add_cog(profanity(bot))
| mit | 7,216,413,436,609,016,000 | 31.092308 | 135 | 0.61295 | false |
samuelfekete/Pythonometer | tests/test_questions.py | 1 | 1786 | """Test all questions."""
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pythonometer.quiz import all_questions
from pythonometer.questions.base import WrongAnswer
class TestQuestions(unittest.TestCase):
"""Test the questions.
All question tests are the same, so they are loaded dynamically.
"""
pass
# Add a test for every question.
for question in all_questions():
def question_test(self, question=question):
current_question = question()
# Assert that a question string is supplied.
question_string = current_question.get_question_text()
self.assertIsInstance(question_string, basestring)
# Assert that at least one correct answer is given.
self.assert_(current_question.get_correct_answers())
# Assert that checking with the correct answers returns True.
for correct_answer in current_question.get_correct_answers():
self.assert_(current_question.check_answer(correct_answer))
# Assert that checking with the wrong answers raises WrongAnswer.
for wrong_answer in current_question.get_wrong_answers():
with self.assertRaises(WrongAnswer):
current_question.check_answer(wrong_answer)
# Assert that checking a wrong answer raises WrongAnswer.
with self.assertRaises(WrongAnswer):
current_question.check_answer('')
# Assert that checking the answer with bad code raises WrongAnswer.
with self.assertRaises(WrongAnswer):
current_question.check_answer('raise Exception')
setattr(TestQuestions, 'test_{}'.format(question.__name__), question_test)
if __name__ == '__main__':
unittest.main()
| mit | 8,631,838,962,788,521,000 | 32.074074 | 82 | 0.68645 | false |
iocast/poiservice | lib/FilterEncodingWizard.py | 1 | 2742 | '''
Created on May 16, 2011
@author: michel
'''
import json
class FilterEncodingWizard(object):
comparision = [{
'value' : 'PropertyIsEqualTo',
'display' : '=',
'xml' : '<PropertyIsEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsEqualTo>'},
{'value' : 'PropertyIsNotEqualTo',
'display' : '!=',
'xml' : '<PropertyIsNotEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsNotEqualTo>'},
{'value' : 'PropertyIsLessThan',
'display' : '<',
'xml' : '<PropertyIsLessThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThan>'},
{'value' : 'PropertyIsGreaterThan',
'display' : '>',
'xml' : '<PropertyIsGreaterThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThan>'},
{'value' : 'PropertyIsLessThanOrEqualTo',
'display' : '<=',
'xml' : '<PropertyIsLessThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThanOrEqualTo>'},
{'value' : 'PropertyIsGreaterThanOrEqualTo',
'display' : '>=',
'xml' : '<PropertyIsGreaterThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThanOrEqualTo>'}
#{'value' : 'PropertyIsLike',
# 'display' : 'Like',
# 'xml' : ''},
#{'value' : 'PropertyIsBetween',
# 'display' : 'Between',
# 'xml' : ''},
#{'value' : 'PropertyIsNull',
# 'display' : 'Nul',
# 'xml' : ''}
]
logical = [
{'value' : 'Or',
'display' : 'or',
'xml' : '<Or>${statement}</Or>'},
{
'value' : 'And',
'display' : 'and',
'xml' : '<And>${statement}</And>'}
]
def comparisonToJson(self):
return json.dumps(self.comparision)
def comparisonToHTML(self):
html = '<select onChange="javascript:queryBuilder.operatorChanged(this);">'
for value in self.comparision:
html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
html += '</select>'
return html
def logicalToJson(self):
return json.dumps(self.logical)
def logicalToHTML(self):
html = '<select>'
for value in self.logical:
html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
html += '</select>'
        return html
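# Illustrative usage (added; not part of the original module):
#     wizard = FilterEncodingWizard()
#     wizard.comparisonToJson()   # JSON list of operators for a client script
#     wizard.comparisonToHTML()   # <select> element with one <option> per operator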
| mit | -6,222,942,528,908,910,000 | 39.338235 | 155 | 0.522611 | false |
cardmaster/makeclub | controlers/activity.py | 1 | 10024 | '''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api.users import get_current_user, create_login_url, User
from google.appengine.ext import webapp
from google.appengine.ext import db
from errors import errorPage
from infopage import infoPage
from access import hasActPrivilige, hasClubPrivilige
from models import Activity, Membership, Club, ActivityParticipator, ActivityBill
from url import urldict
from template import render
class ActivityBase(webapp.RequestHandler):
def __init__(self, *args, **kw):
super(ActivityBase, self).__init__(*args, **kw)
self.actobj = None
def getActModel(self):
aid, = self.urlcfg.analyze(self.request.path)
if (aid):
id = int(aid)
return Activity.get_by_id(id)
else:
return None
def actionPath(self):
return self.request.path
def templateParams(self):
act = self.actobj
club = act.club
cluburl = urldict['ClubView'].path(club.slug)
templateVars = dict(club = club, cluburl = cluburl, act = act, action = self.actionPath() )
return templateVars
def makeResponseText(self, act):
templateVars = self.templateParams()
return render(self.template, templateVars, self.request.url)
def checkPrivilige(self):
user = get_current_user()
if (not user):
errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
return False
if (not hasActPrivilige(user, self.actobj, self.actOperation)):
errorPage ( self.response, "Not authorrized", urldict['ClubView'].path(self.actobj.club.slug), 403)
return False
return True
def dbg(self, *args):
return #Clean up debug code
self.response.out.write (" ".join([str(arg) for arg in args]))
self.response.out.write ("<br />\n")
def get(self, *args):
actobj = self.getActModel()
if (actobj):
self.actobj = actobj
if (self.checkPrivilige()):
self.response.out.write (self.makeResponseText(actobj))
else:
return
else:
return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class SpecialOp:
def __init__(self, oper = '', url = '', needPost = False, data = [], display = ''):
self.oper = oper
if (not display):
display = oper
self.display = display
self.url = url
self.needPost = needPost
self.data = data
class ActivityView(ActivityBase):
def __init__(self, *args, **kw):
super (ActivityView, self).__init__(*args, **kw)
self.template = 'activity_view.html'
self.urlcfg = urldict['ActivityView']
self.actOperation = "view"
def templateParams(self):
defaults = super (ActivityView, self).templateParams()
user = get_current_user();
aid = self.actobj.key().id()
specialOps = []
if (hasActPrivilige(user, self.actobj, "edit" )):
sop = SpecialOp('edit', urldict['ActivityEdit'].path(aid), False)
specialOps.append(sop)
urlcfg = urldict['ActivityParticipate']
soplist = ['join', 'quit', 'confirm']
if (self.actobj.isBilled):
soplist.append("rebill")
else:
soplist.append("bill")
for oper in soplist:
if (hasActPrivilige(user, self.actobj, oper) ):
data = [('target', user.email()), ]
sop = SpecialOp(oper, urlcfg.path(aid, oper), True, data)
specialOps.append(sop)
defaults['specialOps'] = specialOps
participatorOps = []
for oper in ('confirm', ):
if (hasActPrivilige(user, self.actobj, oper) ):
sop = SpecialOp(oper, urlcfg.path(aid, oper), True, [])
participatorOps.append(sop)
defaults['participatorOps'] = participatorOps
apq = ActivityParticipator.all()
apq.filter ('activity = ', self.actobj)
defaults['participators'] = apq
return defaults
class ActivityParticipate(webapp.RequestHandler):
def getActModel(self, id):
try:
iid = int(id)
except:
return None
actobj = Activity.get_by_id(iid)
return actobj
def get(self, *args):
urlcfg = urldict['ActivityParticipate']
id, oper = urlcfg.analyze(self.request.path)
self.response.out.write (
'on id %s, operation %s' % (id, oper)
)
def post(self, *args):
urlcfg = urldict['ActivityParticipate']
id, oper = urlcfg.analyze(self.request.path)
id = int(id)
actobj = self.getActModel(id)
if (not actobj):
return errorPage (self.response, urldict['ClubList'].path(), "No such activity", 404 )
user = get_current_user();
if (not user):
return errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
target = self.request.get ('target')
cluburl = urldict['ClubView'].path(actobj.club.slug)
if (not hasActPrivilige(user, actobj, oper,target) ):
return errorPage ( self.response, "Can not access", cluburl, 403)
if (target):
targetUser = User(target)
if(not targetUser):
return errorPage ( self.response, "Illegal access", cluburl, 403)
else: #if target omitted, use current user as target
targetUser = user
mem = Membership.between (targetUser, actobj.club)
if (not mem):
return errorPage ( self.response, "Not a member", cluburl, 403)
acturl = urldict['ActivityView'].path(id)
if (oper == 'join'):
actp = ActivityParticipator.between (mem, actobj)
if (not actp):
actp = ActivityParticipator(member = mem, activity = actobj)
actp.put()
return infoPage (self.response, "Successfully Joined", "%s has join activity %s" % (mem.name, actobj.name), acturl)
elif (oper == 'quit'):
actp = ActivityParticipator.between(mem, actobj)
if (actp):
if (actp.confirmed):
return errorPage ( self.response, "Cannot delete confirmed participator", acturl, 403)
else:
actp.delete()
return infoPage (self.response, "Successfully Quited", "%s success quit activity %s" % (mem.name, actobj.name), acturl)
elif (oper == 'confirm'):
actp = ActivityParticipator.between(mem, actobj)
if (actp):
actp.confirmed = not actp.confirmed
actp.put()
return infoPage (self.response, "Successfully Confirmed", "success confirmed %s join activity %s" % (mem.name, actobj.name), acturl)
else:
return errorPage ( self.response, "No Such a Member", acturl, 404)
elif (oper == 'bill' or oper == "rebill"):
billobj = ActivityBill.generateBill(actobj, oper == "rebill")#If in rebill operation, we could enable rebill
if (billobj):
billobj.put()
billDict = dict(billobj = billobj)
return infoPage (self.response, "Successfully Billded", str(billobj.memberBill), acturl)
else:
return errorPage (self.response, "Error Will Generate Bill", acturl, 501)
def extractRequestData(request, interested, dbg=None):
retval = dict()
for (key, valid) in interested.iteritems() :
val = valid (request.get(key))
if (dbg):
dbg ( "Extract:", key, "=", val)
if (val):
retval [key] = val
return retval
import re
def parseDuration(times):
    # Supports only a trailing hour suffix, e.g. "1.5h" -> 1.5
    tstr = times[:-1]
    print "Times String: ", tstr
    return float(tstr)
def parseBill (billstr, dbg = None):
    """Parse 'name, amount, name, amount, ...' into a list of (name, amount) pairs."""
    entries = billstr.split (',')
    ary = []
    if (dbg):
        dbg ("Bill String:", billstr)
        dbg ("Splitted:", entries)
    key = None
    for i, ent in enumerate(entries):
        ent = ent.strip()
        if i % 2 == 0:
            key = ent
        else:
            # Every odd entry closes the pair opened by the preceding name.
            ary.append ( (key, ent) )
    return ary
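# Illustrative example (added): parseBill("Field Expense, 80, Balls Expense, 30")
# yields [('Field Expense', '80'), ('Balls Expense', '30')] -- names and amounts
# alternate, and amounts stay strings until the caller converts them.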
class ActivityEdit(ActivityBase):
def __init__(self, *args, **kw):
super (ActivityEdit, self).__init__(*args, **kw)
self.template = 'activity_edit.html'
self.urlcfg = urldict['ActivityEdit']
self.actobj = None
self.actOperation = "edit"
def parseBillDbg(self, billstr):
return parseBill(billstr, self.dbg)
def updateObject(self, actobj):
interested = dict (name = str, intro = str, duration = parseDuration, bill = self.parseBillDbg)
reqs = extractRequestData (self.request, interested, self.dbg)
for (key, val) in reqs.iteritems():
self.dbg (key, "=", val)
setattr (actobj, key, val)
        # Reads data from the POST request and updates the passed-in actobj.
def post(self, *args):
actobj = self.getActModel()
if (actobj):
self.actobj = actobj
if (self.checkPrivilige()):
if (self.request.get ('delete', False)):
actobj.delete()
return infoPage (self.response, "Successful deleted", "Deleted Activity %s" % actobj.name, "/")
self.updateObject(actobj)
key = actobj.put()
if (key):
return errorPage( self.response, "Successfully storing this Activity", urldict['ActivityView'].path(key.id()), 200)
else:
return errorPage( self.response, "Error while storing this Activity", urldict['ActivityEdit'].path(actobj.key().id()), 501)
else:
return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class ActivityNew(ActivityEdit):
def getActModel(self):
urlcfg = urldict['ActivityNew']
slug, = urlcfg.analyze(self.request.path)
user = get_current_user()
club = Club.getClubBySlug(slug)
if (user and club):
newact = Activity.createDefault(user, club)
            if (newact): newact.bill = [('Field Expense', 80), ('Balls Expense', 30)]
return newact
else:
return None
def checkPrivilige(self):
user = get_current_user()
if (not user):
errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
return False
if (not hasClubPrivilige(user, self.actobj.club, "newact")):
errorPage ( self.response, "Not Authorized to edit", urldict['ClubView'].path(self.actobj.club.slug), 403)
return False
return True
| agpl-3.0 | -6,981,099,863,504,542,000 | 34.048951 | 142 | 0.680467 | false |
hotsyk/declaration-widgets | setup.py | 1 | 1627 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='declaration-widgets',
version='0.1.0',
description="Python Boilerplate contains all the boilerplate you need to create a Python package.",
long_description=readme + '\n\n' + history,
author="Volodymyr Hotsyk",
author_email='[email protected]',
url='https://github.com/hotsyk/declaration-widgets',
packages=[
'declaration-widgets',
],
package_dir={'declaration-widgets':
'declaration-widgets'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='declaration-widgets',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=test_requirements
)
| bsd-3-clause | 130,641,730,173,071,650 | 27.54386 | 103 | 0.631838 | false |
pedrogazquez/appBares | rango/forms.py | 1 | 3240 | from django import forms
from django.contrib.auth.models import User
from rango.models import Tapa, Bar, UserProfile
class BarForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre del bar")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
# An inline class to provide additional information on the form.
class Meta:
# Provide an association between the ModelForm and a model
model = Bar
# class TapaForm(forms.ModelForm):
# nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
# url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
# views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
# def clean(self):
# cleaned_data = self.cleaned_data
# url = cleaned_data.get('url')
# # If url is not empty and doesn't start with 'http://' add 'http://' to the beginning
# if url and not url.startswith('http://'):
# url = 'http://' + url
# cleaned_data['url'] = url
# return cleaned_data
# class Meta:
# # Provide an association between the ModelForm and a model
# model = Tapa
# # What fields do we want to include in our form?
# # This way we don't need every field in the model present.
# # Some fields may allow NULL values, so we may not want to include them...
# # Here, we are hiding the foreign keys
# fields = ('nombre', 'url','views')
class TapaForm(forms.ModelForm):
nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
# Provide an association between the ModelForm and a model
model = Tapa
# What fields do we want to include in our form?
# This way we don't need every field in the model present.
# Some fields may allow NULL values, so we may not want to include them...
# Here, we are hiding the foreign key.
# we can either exclude the category field from the form,
exclude = ('bar',)
#or specify the fields to include (i.e. not include the category field)
fields = ('nombre', 'url','views')
class UserForm(forms.ModelForm):
username = forms.CharField(help_text="Please enter a username.")
email = forms.CharField(help_text="Please enter your email.")
password = forms.CharField(widget=forms.PasswordInput(), help_text="Please enter a password.")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
website = forms.URLField(help_text="Please enter your website.", required=False)
picture = forms.ImageField(help_text="Select a profile image to upload.", required=False)
class Meta:
model = UserProfile
        fields = ('website', 'picture')
| gpl-3.0 | 233,954,924,696,194,240 | 41.810811 | 113 | 0.659877 | false |
EntPack/SilentDune-Client | silentdune_client/modules/automation/auto_discovery/updates.py | 1 | 25425 | #
# Authors: Ma He <[email protected]>
# Robert Abram <[email protected]>
# Copyright (C) 2015-2017 EntPack
# see file 'LICENSE' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from datetime import datetime
import fnmatch
import glob
import logging
import os
import platform
import shlex
import socket
import subprocess
import time
try:
from urllib import parse
except ImportError:
# < 3.0
import urlparse as parse
import ConfigParser
from silentdune_client.modules import QueueTask
from silentdune_client.modules.firewall.manager import SilentDuneClientFirewallModule, \
TASK_FIREWALL_INSERT_RULES
from silentdune_client.builders import iptables as ipt
from silentdune_client.modules.automation.auto_discovery.base_discovery_service import BaseDiscoveryService
from silentdune_client.modules.firewall.manager.iptables_utils import create_iptables_egress_ingress_rule, \
create_iptables_egress_rule_dest, create_iptables_ingress_rule_source
from silentdune_client.modules.firewall.manager.slots import Slots
from silentdune_client.utils.misc import which
_logger = logging.getLogger('sd-client')
class SystemUpdatesDiscovery(BaseDiscoveryService):
"""
Auto discover SystemUpdates like apt-get and yum.
"""
_rebuild_rules_interval = 5 # Rebuild the updates cache rules every 5 days.
_slot = Slots.updates
_config_section_name = 'auto_discovery'
_config_property_name = '_disable_auto_updates'
_dist = ''
_dist_version = ''
_machine = ''
_disable_auto_updates_ftp = None
_disable_auto_updates_rsync = None
_rebuild_rules = True # Rebuild the rules
_rebuild_cache = False # Should we rebuild the package manager cache?
_cache_last_rebuilt = None # Time stamp last time cache was rebuilt.
_hostnames = list() # Previously added hostnames
_repo_manager = None
_repo_cache_base = ''
_repo_config_base = ''
_repo_service_base = '' # openSUSE/SLES systems
_saved_rules = None
def __init__(self, config):
super(SystemUpdatesDiscovery, self).__init__(config)
self._disable_auto_updates_ftp = True if self.config.get(
self._config_section_name, 'disable_auto_updates_ftp').lower() == 'yes' else False
def _discover_iptables(self):
"""
Virtual Override
:return: Firewall Rules
"""
rules = list()
# Find the system package manager executable and setup our environment info
if not self._repo_manager and not self.discover_pkg_manager():
_logger.error('{0}: unable to continue setting up rules for updates.'.format(self.get_name()))
return None
# Test to see if DNS lookup is available right now. If not, just return and wait until DNS is working.
if not self.resolve_hostname('example.org', 80):
return None
# Every 5 days rebuild the rules.
if self._cache_last_rebuilt:
days = int((datetime.now() - self._cache_last_rebuilt).days)
if days >= self._rebuild_rules_interval:
self._rebuild_rules = True
rules.append(self.iptables_updates())
return rules
def discover_pkg_manager(self):
"""
Find the system package manager executable
:return: True if found, otherwise False
"""
self._dist = platform.dist()[0].lower()
self._dist_version = platform.dist()[1]
self._dist_version = self._dist_version.split('.')[0]
self._machine = platform.machine()
if self._dist in 'ubuntu debian':
self._repo_manager = which('apt-get')
# Nothing else to do.
elif self._dist in 'centos redhat fedora':
self._repo_config_base = '/etc/yum.repos.d/*.repo'
self._repo_manager = which('dnf')
self._repo_cache_base = '/var/cache/dnf'
if not self._repo_manager:
self._repo_manager = which('yum')
self._repo_cache_base = '/var/cache/yum/{0}/{1}'.format(self._machine, self._dist_version)
elif self._dist in 'suse':
self._repo_manager = which('zypper')
self._repo_config_base = '/etc/zypp/repos.d/*.repo'
self._repo_service_base = '/etc/zypp/services.d/*.service'
# No metalink cache until suse implements metalinks in zypper
else:
_logger.error('{0}: unsupported distribution ({1})'.format(self.get_name(), self._dist))
return False
if not self._repo_manager:
_logger.error('{0}: unable to find package manager executable for {1}'.format(self.get_name(), self._dist))
return False
return True
def iptables_updates(self):
rules = list()
if self._dist in 'ubuntu debian':
rules.append(self.iptables_updates_apt())
elif self._dist in 'centos redhat fedora':
rules.append(self.add_repository_rules())
elif self._dist in 'suse':
rules.append(self.add_repository_rules())
return rules
def iptables_updates_apt(self):
rules = list()
if not os.path.exists('/etc/apt/sources.list'):
_logger.error('{0}: /etc/apt/sources.list not found.'.format(self.get_name()))
return None
# Get all nameserver ip address values
with open('/etc/apt/sources.list') as handle:
for line in handle:
if line.startswith('deb '):
url = line.split()[1]
if url:
rules.append(self.add_rule_by_url(url))
return rules
def add_repository_rules(self):
"""
Add repository rules for rpm based systems.
"""
# If it is not time to rebuild the package manager cache, just return the rules we have.
if not self._rebuild_rules:
return self._saved_rules
self._rebuild_cache = False
# reset hostnames list
self._hostnames = list()
rules = list()
base_urls = list()
mirror_urls = list()
_logger.debug('{0}: adding rules for {1} repositories'.format(self.get_name(), self._dist))
# Loop through all the repo files and gather url information
repofiles = glob.glob(self._repo_config_base)
# Add in any zypper service files.
if self._dist in 'suse':
repofiles += glob.glob(self._repo_service_base)
for repofile in repofiles:
config = ConfigParser.ConfigParser()
if config.read(repofile):
sections = config.sections()
# Loop through sections looking for enabled repositories.
for section in sections:
if config.has_option(section, 'enabled'):
enabled = config.getint(section, 'enabled')
else:
enabled = 1
if not enabled:
continue
_logger.debug('{0}: adding urls for section: {1}'.format(self.get_name(), section))
url = None
if config.has_option(section, 'metalink'):
url = config.get(section, 'metalink')
self._rebuild_cache = True
if url:
mirror_urls.append([section, url])
elif config.has_option(section, 'mirrorlist'):
url = config.get(section, 'mirrorlist')
self._rebuild_cache = True
if url:
mirror_urls.append([section, url])
elif config.has_option(section, 'baseurl'):
url = config.get(section, 'baseurl')
if url:
base_urls.append([section, url])
# Handle zypper service files.
elif config.has_option(section, 'url'):
url = config.get(section, 'url')
if url:
base_urls.append([section, url])
if not url:
_logger.debug('{0}: could not find repo section ({1}) url?'.format(self.get_name(), section))
# Loop through all the urls and add rules for them.
for section, url in base_urls:
# TODO: Add support for mirrorbrain style mirrorlists.
rules.append(self.add_rule_by_url(url))
# If we don't need to rebuild the package manager cache, just return the rules we have.
if not self._rebuild_cache:
return rules
for section, url in mirror_urls:
rules.append(self.add_rule_by_url(url))
# Rebuild the package manager cache
# Open up all port 80 and port 443 connections so cache rebuild succeeds.
all_access = list()
all_access.append(create_iptables_egress_ingress_rule(
'', 80, 'tcp', self._slot, transport=ipt.TRANSPORT_IPV4))
all_access.append(create_iptables_egress_ingress_rule(
'', 80, 'tcp', self._slot, transport=ipt.TRANSPORT_IPV6))
all_access.append(create_iptables_egress_ingress_rule(
'', 443, 'tcp', self._slot, transport=ipt.TRANSPORT_IPV4))
all_access.append(create_iptables_egress_ingress_rule(
'', 443, 'tcp', self._slot, transport=ipt.TRANSPORT_IPV6))
# our parent will take care of clearing these rules once we return the real rules.
self.add_url_rule_to_firewall(all_access)
time.sleep(2) # Give the firewall manager time to add the rules.
if not self.rebuild_package_manager_cache():
return rules
# Check to see if we know where the package manage cache data is.
if not self._repo_cache_base:
return rules
# loop through the mirror list and parse the mirrorlist or metalink file.
for section, url in mirror_urls:
file, file_type = self.get_cache_file(section)
if file:
if file_type == 'mirrorlist':
urls = self.get_mirrorlist_urls_from_file(file, section)
else:
urls = self.get_metalink_urls_from_file(file, section)
if urls:
for url in urls:
if url:
rules.append(self.add_rule_by_url(url))
self._cache_last_rebuilt = datetime.now()
self._saved_rules = rules
return rules
def get_cache_file(self, section):
"""
Auto detect the mirror list path, file and type.
:param section: config section name
:return: file and file type
"""
if 'yum' in self._repo_manager:
cachepath = '{0}/{1}'.format(self._repo_cache_base, section)
elif 'dnf' in self._repo_manager:
# Get all the files and directories in the cache base dir
files = os.listdir(self._repo_cache_base)
# Filter out only the directories we are looking for
files = fnmatch.filter(files, '{0}-????????????????'.format(section))
# Now figure out which path is the newest one.
            # Use full paths so getmtime resolves correctly regardless of the process cwd.
            cachepath = '{0}/{1}/'.format(
                self._repo_cache_base,
                max(files, key=lambda f: os.path.getmtime(os.path.join(self._repo_cache_base, f))))
else:
return None, ''
if not os.path.isdir(cachepath):
_logger.error('{0}: calculated cache path is invalid ({1})'.format(self.get_name(), cachepath))
return None, ''
# detect url cache file and type
if os.path.isfile('{0}/mirrorlist.txt'.format(cachepath)):
return '{0}/mirrorlist.txt'.format(cachepath), 'mirrorlist'
if os.path.isfile('{0}/mirrorlist'.format(cachepath)):
return '{0}/mirrorlist'.format(cachepath), 'mirrorlist'
if os.path.isfile('{0}/metalink.xml'.format(cachepath)):
return '{0}/metalink.xml'.format(cachepath), 'metalink'
_logger.error('{0}: cache file is not found ({1})'.format(self.get_name(), cachepath))
return None, ''
def rebuild_package_manager_cache(self):
"""
Have the package manager clean and rebuild it's cache information
:return: True if successful, otherwise False
"""
if self._dist in 'suse':
cmd_clean = '{0} clean'.format(self._repo_manager)
cmd_make = '{0} refresh'.format(self._repo_manager)
elif self._dist in 'centos redhat fedora':
cmd_clean = '{0} clean metadata'.format(self._repo_manager)
cmd_make = '{0} makecache fast'.format(self._repo_manager)
else:
_logger.error('{0}: unsupported package manager, unable to rebuild cache.')
return False
_logger.debug('{0}: rebuilding {1} package manager cache data'.format(self.get_name(), self._dist))
# Clean package manager cache
p = subprocess.Popen(shlex.split(cmd_clean), stdout=subprocess.PIPE)
stdoutdata, stderrdata = p.communicate()
p.wait()
if stderrdata:
_logger.error('{0}: cleaning package manager cache failed.'.format(self.get_name()))
return False
# Rebuild the package manager cache
p = subprocess.Popen(shlex.split(cmd_make), stdout=subprocess.PIPE)
stdoutdata, stderrdata = p.communicate()
p.wait()
if stderrdata:
_logger.error('{0}: rebuilding package manager cache failed.'.format(self.get_name()))
return False
return True
def add_url_rule_to_firewall(self, rules):
"""
Add rules allowing access immediately to the firewall.
:param rules: rules list
"""
if not rules:
return
# Notify the firewall module to reload the rules.
task = QueueTask(TASK_FIREWALL_INSERT_RULES,
src_module=self._parent.get_name(),
dest_module=SilentDuneClientFirewallModule().get_name(),
data=rules)
self._parent.send_parent_task(task)
time.sleep(2) # Give the firewall manager time to add the rule to the kernel
def resolve_hostname(self, hostname, port):
"""
Return a single or multiple IP addresses from the hostname parameter
:param hostname: hostname string, IE: www.example.org
:param port:
:return: list of IP addresses
"""
ipaddrs = list()
if hostname:
if port:
if 1 < port <= 65536:
try:
ais = socket.getaddrinfo(hostname, port, 0, 0, socket.IPPROTO_TCP)
except:
_logger.debug('{0}: error resolving host: {1}:{2}'.format(self.get_name(), hostname, port))
return None
for result in ais:
ipaddr = result[-1][0]
ipaddrs.append(ipaddr)
return ipaddrs
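    # Illustrative usage (added; not part of the original module):
    #     ips = self.resolve_hostname('mirror.example.org', 443)
    #     # -> e.g. ['203.0.113.10', '2001:db8::10'], or None when DNS fails;
    #     # getaddrinfo() returns both IPv4 and IPv6 results here.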
def add_rule_by_url(self, url):
"""
:param url: Complete url string
:return: urlparse URI
"""
rules = list()
if not url:
_logger.debug('{0}: empty url given, unable to add rule'.format(self.get_name()))
return None
try:
uri = parse.urlparse(url)
except:
_logger.error('{0}: error parsing url ({1})'.format(self.get_name(), url))
return None
if not uri.scheme or not uri.hostname or uri.scheme not in ['http', 'https', 'ftp', 'rsync']:
return None
# If this is an FTP url...
if uri.scheme == 'ftp':
return self.add_ftp_rule_by_url(uri)
port = uri.port
if not port:
if uri.scheme == 'http':
port = 80
elif uri.scheme == 'https':
port = 443
elif uri.scheme == 'rsync':
port = 873
key = uri.hostname + ':' + str(port)
# Avoid duplicate urls.
if key not in self._hostnames:
self._hostnames.append(key)
ipaddrs = self.resolve_hostname(uri.hostname, port)
if ipaddrs:
for ipaddr in ipaddrs:
# _logger.debug('{0}: adding ip: {1} from hostname: {2}'.format(
# self.get_name(), uri.scheme + '://' + ipaddr, uri.hostname))
rules.append(create_iptables_egress_ingress_rule(
ipaddr, port, 'tcp', self._slot, transport=ipt.TRANSPORT_AUTO))
_logger.debug('{0}: host: {1} ip: {2}:{3}'.format(self.get_name(), uri.hostname, ipaddr, port))
return rules
return None
def add_ftp_rule_by_url(self, uri):
"""
Add rules to allow FTP access based on uri value
:param uri: urlparse uri value
:return: rules
"""
# Check to make sure we can add ftp rules.
if self._disable_auto_updates_ftp:
return None
rules = list()
ipaddrs = self.resolve_hostname(uri.hostname, 21)
if ipaddrs:
for ipaddr in ipaddrs:
_logger.debug('{0}: adding ip: {1} from hostname: {2}'.format(
self.get_name(), uri.scheme + '://' + ipaddr, uri.hostname))
# FTP control
rules.append(create_iptables_egress_ingress_rule(ipaddr, 21, 'tcp', self._slot,
transport=ipt.TRANSPORT_AUTO))
# FTP data transfer
rules.append(create_iptables_egress_rule_dest(ipaddr, 20, 'tcp', self._slot, 'ESTABLISHED',
transport=ipt.TRANSPORT_AUTO))
rules.append(
create_iptables_ingress_rule_source(ipaddr, 20, 'tcp', self._slot, 'ESTABLISHED,RELATED',
transport=ipt.TRANSPORT_AUTO))
rules.append(
create_iptables_egress_rule_dest(ipaddr, None, 'tcp', self._slot, 'ESTABLISHED,RELATED',
transport=ipt.TRANSPORT_AUTO))
rules.append(
create_iptables_ingress_rule_source(ipaddr, None, 'tcp', self._slot, 'ESTABLISHED',
transport=ipt.TRANSPORT_AUTO))
return rules
def get_mirrorlist_urls_from_file(self, file, section):
urls = list()
if not file or not os.path.isfile(file):
_logger.debug('{0}: unable to locate mirrorlist file ({1})'.format(self.get_name(), file))
return urls
with open(file) as handle:
lines = handle.read().split('\n')
if lines:
for line in lines:
if line.strip():
urls.append(line.strip())
_logger.debug('{0}: retrieving mirrorlist urls for "{1}" succeeded'.format(self.get_name(), section))
return urls
def get_metalink_urls_from_file(self, file, section):
urls = list()
if not file or not os.path.isfile(file):
_logger.debug('{0}: unable to locate metalink file ({1})'.format(self.get_name(), file))
return urls
with open(file) as handle:
lines = handle.read().split('\n')
if lines:
for line in lines:
if line.find('<url protocol=') != -1:
url = line.split(' >')[1].split('</url>')[0].strip()
if url:
urls.append(url)
if line.find('xmlns="') != -1:
url = line.split('xmlns="')[1].split('"')[0].strip()
if url:
urls.append(url)
if line.find('xmlns:mm0="') != -1:
url = line.split('xmlns:mm0="')[1].split('"')[0].strip()
if url:
urls.append(url)
else:
_logger.debug('{0}: no data read from metalink file ({1})'.format(self.get_name(), file))
return None
_logger.debug('{0}: retrieving metalink urls for "{1}" succeeded'.format(self.get_name(), section))
return urls
# def grab_mirror_list_urls(self, mirrorlist, section):
# """
# Attempt to download repository mirrorlist data using curl and return the urls.
# :param mirrorlist:
# :return: url list
# """
# urls = list()
#
# if mirrorlist:
#
# mirrorlist = mirrorlist.replace('$releasever', self._dist_version)
# mirrorlist = mirrorlist.replace('$basearch', self._machine)
# mirrorlist = mirrorlist.replace('$infra', 'stock')
#
# c = pycurl.Curl()
# transport = c.IPRESOLVE_V4
#
# # Force curl IPv4 DNS lookup, if that fails try IPv6 DNS lookup.
# while True:
# try:
# storage = StringIO()
# c = pycurl.Curl()
# c.setopt(c.URL, mirrorlist.encode('utf-8'))
# c.setopt(c.IPRESOLVE, transport)
# c.setopt(c.WRITEFUNCTION, storage.write)
# c.perform()
# c.close()
# content = storage.getvalue()
# urllist = content.split('\n')
# for url in urllist:
# if url.strip():
# urls.append(url.strip())
# break
# except pycurl.error as e:
#
# if transport == c.IPRESOLVE_V4:
# transport = c.IPRESOLVE_V6
# continue
#
# _logger.debug('{0}: {1}: retrieving mirror list failed ({2})'.format(self.get_name(), section, e))
# return None
#
# _logger.debug('{0}: retrieving mirror list for section "{1}" succeeded'.format(self.get_name(), section))
# return urls
# def grab_metalink_urls(self, metalink, section):
# """
# Attempt to download repository metalink data using curl and return the urls.
# :param metalink: metalink url
# :return: url list
# """
#
# urls = list()
# content = None
#
# if metalink:
# metalink = metalink.replace('$releasever', self._dist_version)
# metalink = metalink.replace('$basearch', self._machine)
#
# c = pycurl.Curl()
# transport = c.IPRESOLVE_V4
#
# # Force curl IPv4 DNS lookup, if that fails try IPv6 DNS lookup.
# while True:
# try:
# storage = StringIO()
# c = pycurl.Curl()
# c.setopt(c.URL, metalink.encode('utf-8'))
# c.setopt(c.IPRESOLVE, transport)
# c.setopt(c.WRITEFUNCTION, storage.write)
# c.perform()
# c.close()
# content = storage.getvalue()
# break
# except pycurl.error as e:
#
# if transport == c.IPRESOLVE_V4:
# transport = c.IPRESOLVE_V6
# continue
#
# _logger.debug('{0}: retrieving metalink list failed ({1})'.format(self.get_name(), e))
# return None
#
# if content:
# lines = content.split('\n')
# for line in lines:
# if line.find('<url protocol=') != -1:
# url = line.split(' >')[1].split('</url>')[0]
# if url:
# urls.append(url.strip())
# if line.find('xmlns="') != -1:
# url = line.split('xmlns="')[1].split('"')[0]
# if url:
# urls.append(url.strip())
# if line.find('xmlns:mm0="') != -1:
# url = line.split('xmlns:mm0="')[1].split('"')[0]
# if url:
# urls.append(url.strip())
#
# _logger.debug('{0}: retrieving metalink list for section "{1}" succeeded'.format(self.get_name(), section))
# return urls
| lgpl-3.0 | 8,512,857,766,906,700,000 | 35.321429 | 120 | 0.536716 | false |
google-research/disentanglement_lib | disentanglement_lib/data/ground_truth/cars3d.py | 1 | 4067 | # coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cars3D data set."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.data.ground_truth import ground_truth_data
from disentanglement_lib.data.ground_truth import util
import numpy as np
import PIL
import scipy.io as sio
from six.moves import range
from sklearn.utils import extmath
from tensorflow.compat.v1 import gfile
CARS3D_PATH = os.path.join(
os.environ.get("DISENTANGLEMENT_LIB_DATA", "."), "cars")
class Cars3D(ground_truth_data.GroundTruthData):
"""Cars3D data set.
The data set was first used in the paper "Deep Visual Analogy-Making"
(https://papers.nips.cc/paper/5845-deep-visual-analogy-making) and can be
downloaded from http://www.scottreed.info/. The images are rescaled to 64x64.
The ground-truth factors of variation are:
0 - elevation (4 different values)
1 - azimuth (24 different values)
2 - object type (183 different values)
"""
def __init__(self):
self.factor_sizes = [4, 24, 183]
features = extmath.cartesian(
[np.array(list(range(i))) for i in self.factor_sizes])
self.latent_factor_indices = [0, 1, 2]
self.num_total_factors = features.shape[1]
self.index = util.StateSpaceAtomIndex(self.factor_sizes, features)
self.state_space = util.SplitDiscreteStateSpace(self.factor_sizes,
self.latent_factor_indices)
self.data_shape = [64, 64, 3]
self.images = self._load_data()
@property
def num_factors(self):
return self.state_space.num_latent_factors
@property
def factors_num_values(self):
return self.factor_sizes
@property
def observation_shape(self):
return self.data_shape
def sample_factors(self, num, random_state):
"""Sample a batch of factors Y."""
return self.state_space.sample_latent_factors(num, random_state)
def sample_observations_from_factors(self, factors, random_state):
"""Sample a batch of observations X given a batch of factors Y."""
all_factors = self.state_space.sample_all_factors(factors, random_state)
indices = self.index.features_to_index(all_factors)
return self.images[indices].astype(np.float32)
def _load_data(self):
dataset = np.zeros((24 * 4 * 183, 64, 64, 3))
all_files = [x for x in gfile.ListDirectory(CARS3D_PATH) if ".mat" in x]
for i, filename in enumerate(all_files):
data_mesh = _load_mesh(filename)
factor1 = np.array(list(range(4)))
factor2 = np.array(list(range(24)))
all_factors = np.transpose([
np.tile(factor1, len(factor2)),
np.repeat(factor2, len(factor1)),
np.tile(i,
len(factor1) * len(factor2))
])
indexes = self.index.features_to_index(all_factors)
dataset[indexes] = data_mesh
return dataset
def _load_mesh(filename):
"""Parses a single source file and rescales contained images."""
with gfile.Open(os.path.join(CARS3D_PATH, filename), "rb") as f:
mesh = np.einsum("abcde->deabc", sio.loadmat(f)["im"])
flattened_mesh = mesh.reshape((-1,) + mesh.shape[2:])
rescaled_mesh = np.zeros((flattened_mesh.shape[0], 64, 64, 3))
for i in range(flattened_mesh.shape[0]):
pic = PIL.Image.fromarray(flattened_mesh[i, :, :, :])
pic.thumbnail((64, 64, 3), PIL.Image.ANTIALIAS)
rescaled_mesh[i, :, :, :] = np.array(pic)
return rescaled_mesh * 1. / 255
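# Illustrative usage (added; not part of disentanglement_lib): sample a batch
# of factors and render the matching observations. Assumes the .mat files are
# present under $DISENTANGLEMENT_LIB_DATA/cars.
#
#     data = Cars3D()
#     random_state = np.random.RandomState(0)
#     factors = data.sample_factors(4, random_state)          # shape (4, 3)
#     images = data.sample_observations_from_factors(factors, random_state)
#     # images.shape == (4, 64, 64, 3), float32 in [0, 1]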
| apache-2.0 | 5,423,659,911,183,028,000 | 35.3125 | 79 | 0.687239 | false |
google/latexify_py | tests/node_visitor_base_test.py | 1 | 2191 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for node_visitor_base."""
import pytest
from latexify import node_visitor_base
class MockVisitor(node_visitor_base.NodeVisitorBase):
"""Mock visitor class."""
def __init__(self):
# Dummy member to fail visitor invocation.
self.visit_Baz = None # pylint: disable=invalid-name
def generic_visit(self, node, action):
return 'generic_visit: {}, {}'.format(node.__class__.__name__, action)
def visit_Foo(self, node, action): # pylint: disable=invalid-name
del node
return 'visit_Foo: {}'.format(action)
def visit_Foo_abc(self, node): # pylint: disable=invalid-name
del node
return 'visit_Foo_abc'
def visit_Foo_xyz(self, node): # pylint: disable=invalid-name
del node
return 'visit_Foo_xyz'
class Foo:
pass
class Bar:
pass
class Baz:
pass
def test_generic_visit():
visitor = MockVisitor()
assert visitor.visit(Bar()) == 'generic_visit: Bar, None'
assert visitor.visit(Bar(), 'unknown') == 'generic_visit: Bar, unknown'
assert visitor.visit(Bar(), '123') == 'generic_visit: Bar, 123'
def test_visit_node():
visitor = MockVisitor()
assert visitor.visit(Foo()) == 'visit_Foo: None'
assert visitor.visit(Foo(), 'unknown') == 'visit_Foo: unknown'
assert visitor.visit(Foo(), '123') == 'visit_Foo: 123'
def test_visit_node_action():
visitor = MockVisitor()
assert visitor.visit(Foo(), 'abc') == 'visit_Foo_abc'
assert visitor.visit(Foo(), 'xyz') == 'visit_Foo_xyz'
def test_invalid_visit():
visitor = MockVisitor()
with pytest.raises(AttributeError, match='visit_Baz is not callable'):
visitor.visit(Baz())
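# --- Editor's note (illustrative, not part of the original tests) ---
# The dispatch order these tests pin down appears to be:
#   visit_<Class>_<action>  ->  visit_<Class>  ->  generic_visit
# e.g. (derived from the assertions above):
#   MockVisitor().visit(Foo(), 'abc')    # -> 'visit_Foo_abc'
#   MockVisitor().visit(Foo(), 'other')  # -> 'visit_Foo: other'
#   MockVisitor().visit(Bar(), 'abc')    # -> 'generic_visit: Bar, abc'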
| apache-2.0 | -3,868,527,890,570,488,300 | 26.734177 | 74 | 0.691465 | false |
JudoWill/ResearchNotebooks | GA-PhredProcessing.py | 1 | 1153 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import os, os.path
import shutil
import glob
import sys
from subprocess import check_call, check_output
os.chdir('/home/will/Dropbox/PhredDirectory/')
staden_path = '/home/will/staden-2.0.0b9.x86_64/bin/'
sys.path.append('/home/will/PySeqUtils/')
# <codecell>
from GeneralSeqTools import call_muscle, fasta_reader, fasta_writer
# <codecell>
#from Bio import SeqIO
from Bio.SeqIO.AbiIO import AbiIterator
files = glob.glob('../Wigdahl Trace files/2:11:11/*.ab1')
seqs = []
for f in files:
rec = AbiIterator(open(f, mode = 'rb'), trim = True).next()
seqs.append( (rec.id, rec.seq.tostring()) )
# <codecell>
!/home/will/staden-2.0.0b9.x86_64/bin/convert_trace --help
# <codecell>
res = call_muscle(seqs)
with open('align_data.fasta', 'w') as handle:
fasta_writer(handle, res)
# <codecell>
from HIVTransTool import process_seqs
results = list(process_seqs(seqs[:50], extract_regions = True, known_names = 50))
# <codecell>
for row in results:
if row['RegionName'] == 'LTR5':
print row['Name'], row['QueryNuc']
# <codecell>
results[:5]
# <codecell>
| mit | 8,730,449,133,413,499,000 | 18.87931 | 81 | 0.679965 | false |
intel-hpdd/intel-manager-for-lustre | chroma_core/services/job_scheduler/job_scheduler_client.py | 1 | 10557 | # Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""
The service `job_scheduler` handles both RPCs (JobSchedulerRpc) and a queue (NotificationQueue).
The RPCs are used for explicit requests to modify the system or run a particular task, while the queue
is used for updates received from agent reports. Access to both of these, along with some additional
non-remote functionality, is wrapped in JobSchedulerClient.
"""
from django import db
from chroma_core.services import log_register
from chroma_core.services.rpc import ServiceRpcInterface
from chroma_core.models import ManagedHost, Command
log = log_register(__name__)
class JobSchedulerRpc(ServiceRpcInterface):
methods = [
"set_state",
"run_jobs",
"cancel_job",
"create_host_ssh",
"test_host_contact",
"create_filesystem",
"create_ostpool",
"create_task",
"remove_task",
"update_ostpool",
"delete_ostpool",
"create_client_mount",
"create_copytool",
"register_copytool",
"unregister_copytool",
"update_nids",
"trigger_plugin_update",
"update_lnet_configuration",
"create_host",
"create_targets",
"available_transitions",
"available_jobs",
"get_locks",
"update_corosync_configuration",
"get_transition_consequences",
"configure_stratagem",
"update_stratagem",
"run_stratagem",
]
class JobSchedulerClient(object):
"""Because there are some tasks which are the domain of the job scheduler but do not need to
be run in the context of the service, the RPCs and queue operations are accompanied in this
class by some operations that run locally. The local operations are
read-only operations such as querying what operations are possible for a particular object.
"""
@classmethod
def command_run_jobs(cls, job_dicts, message):
"""Create and run some Jobs, within a single Command.
:param job_dicts: List of 1 or more dicts like {'class_name': 'MyJobClass', 'args': {<dict of arguments to Job constructor>}}
:param message: User-visible string describing the operation, e.g. "Detecting filesystems"
:return: The ID of a new Command
"""
return JobSchedulerRpc().run_jobs(job_dicts, message)
@classmethod
def command_set_state(cls, object_ids, message, run=True):
"""Modify the system in whatever way is necessary to reach the state
specified in `object_ids`. Creates Jobs under a single Command. May create
no Jobs if the system is already in the state, or already scheduled to be
in that state. If the system is already scheduled to be in that state, then
the returned Command will be connected to the existing Jobs which take the system to
the desired state.
:param cls:
:param object_ids: List of three-tuples (natural_key, object_id, new_state)
:param message: User-visible string describing the operation, e.g. "Starting filesystem X"
:param run: Test only. Schedule jobs without starting them.
:return: The ID of a new Command
"""
return JobSchedulerRpc().set_state(object_ids, message, run)
@classmethod
def available_transitions(cls, object_list):
"""Return the transitions available for each object in list
See the Job Scheduler method of the same name for details.
"""
return JobSchedulerRpc().available_transitions(object_list)
@classmethod
def available_jobs(cls, object_list):
"""Query which jobs (other than changes to state) can be run on this object.
See the Job Scheduler method of the same name for details.
"""
return JobSchedulerRpc().available_jobs(object_list)
@classmethod
def get_transition_consequences(cls, stateful_object, new_state):
"""Query what the side effects of a state transition are. Effectively does
a dry run of scheduling jobs for the transition.
The return format is like this:
::
{
'transition_job': <job dict>,
'dependency_jobs': [<list of job dicts>]
}
# where each job dict is like
{
'class': '<job class name>',
'requires_confirmation': <boolean, whether to prompt for confirmation>,
'confirmation_prompt': <string, confirmation prompt>,
'description': <string, description of the job>,
'stateful_object_id': <ID of the object modified by this job>,
'stateful_object_content_type_id': <Content type ID of the object modified by this job>
}
:param stateful_object: A StatefulObject instance
:param new_state: Hypothetical new value of the 'state' attribute
"""
return JobSchedulerRpc().get_transition_consequences(
stateful_object.__class__.__name__, stateful_object.id, new_state
)
@classmethod
def cancel_job(cls, job_id):
"""Attempt to cancel a job which is already scheduled (and possibly running)
:param job_id: ID of a Job object
"""
JobSchedulerRpc().cancel_job(job_id)
@classmethod
def create_host_ssh(cls, address, server_profile, root_pw, pkey, pkey_pw):
"""
Create a host which will be set up using SSH
:param address: SSH address
:return: (<ManagedHost instance>, <Command instance>)
"""
host_id, command_id = JobSchedulerRpc().create_host_ssh(address, server_profile, root_pw, pkey, pkey_pw)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
@classmethod
def test_host_contact(cls, address, root_pw=None, pkey=None, pkey_pw=None):
command_id = JobSchedulerRpc().test_host_contact(address, root_pw, pkey, pkey_pw)
return Command.objects.get(pk=command_id)
@classmethod
def update_corosync_configuration(cls, corosync_configuration_id, mcast_port, network_interface_ids):
command_id = JobSchedulerRpc().update_corosync_configuration(
corosync_configuration_id, mcast_port, network_interface_ids
)
return Command.objects.get(pk=command_id)
@classmethod
def create_filesystem(cls, fs_data):
return JobSchedulerRpc().create_filesystem(fs_data)
@classmethod
def create_ostpool(cls, pool_data):
return JobSchedulerRpc().create_ostpool(pool_data)
@classmethod
def update_ostpool(cls, pool_data):
return JobSchedulerRpc().update_ostpool(pool_data)
@classmethod
def delete_ostpool(cls, pool):
return JobSchedulerRpc().delete_ostpool(pool)
@classmethod
def create_task(cls, task_data):
return JobSchedulerRpc().create_task(task_data)
@classmethod
def remove_task(cls, task_id):
        return JobSchedulerRpc().remove_task(task_id)
@classmethod
def update_nids(cls, nid_data):
return JobSchedulerRpc().update_nids(nid_data)
@classmethod
def trigger_plugin_update(cls, include_host_ids, exclude_host_ids, plugin_names):
"""
Cause the plugins on the hosts passed to send an update irrespective of whether any
changes have occurred.
:param include_host_ids: List of host ids to include in the trigger update.
        :param exclude_host_ids: List of host ids to exclude from the include list (this makes usage easier).
:param plugin_names: list of plugins to trigger update on - empty list means all.
:return: command id that caused updates to be sent.
"""
assert isinstance(include_host_ids, list)
assert isinstance(exclude_host_ids, list)
assert isinstance(plugin_names, list)
return JobSchedulerRpc().trigger_plugin_update(include_host_ids, exclude_host_ids, plugin_names)
@classmethod
def update_lnet_configuration(cls, lnet_configuration_list):
return JobSchedulerRpc().update_lnet_configuration(lnet_configuration_list)
@classmethod
def create_host(cls, fqdn, nodename, address, server_profile_id):
# The address of a host isn't something we can learn from it (the
# address is specifically how the host is to be reached from the manager
# for outbound connections, not just its FQDN). If during creation we know
# the address, then great, accept it. Else default to FQDN, it's a reasonable guess.
if address is None:
address = fqdn
host_id, command_id = JobSchedulerRpc().create_host(fqdn, nodename, address, server_profile_id)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
@classmethod
def create_targets(cls, targets_data):
from chroma_core.models import ManagedTarget, Command
target_ids, command_id = JobSchedulerRpc().create_targets(targets_data)
return (list(ManagedTarget.objects.filter(id__in=target_ids)), Command.objects.get(pk=command_id))
@classmethod
def create_client_mount(cls, host, filesystem_name, mountpoint):
from chroma_core.models import LustreClientMount
client_mount_id = JobSchedulerRpc().create_client_mount(host.id, filesystem_name, mountpoint)
return LustreClientMount.objects.get(id=client_mount_id)
@classmethod
def create_copytool(cls, copytool_data):
from chroma_core.models import Copytool
copytool_id = JobSchedulerRpc().create_copytool(copytool_data)
return Copytool.objects.get(id=copytool_id)
@classmethod
def register_copytool(cls, copytool_id, uuid):
JobSchedulerRpc().register_copytool(copytool_id, uuid)
@classmethod
def unregister_copytool(cls, copytool_id):
JobSchedulerRpc().unregister_copytool(copytool_id)
@classmethod
def get_locks(cls):
return JobSchedulerRpc().get_locks()
@classmethod
def configure_stratagem(cls, stratagem_data):
return JobSchedulerRpc().configure_stratagem(stratagem_data)
@classmethod
def update_stratagem(cls, stratagem_data):
return JobSchedulerRpc().update_stratagem(stratagem_data)
@classmethod
def run_stratagem(cls, mdts, fs_id, stratagem_data):
return JobSchedulerRpc().run_stratagem(mdts, fs_id, stratagem_data)
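# --- Usage sketch (editor's addition; illustrative only) ---
# A hedged example of driving the client from other service code. The natural
# key, id and state below are placeholders, not values taken from this module.
def _example_start_filesystem(filesystem_id):
    """Schedule whatever jobs are needed to bring a filesystem up."""
    command_id = JobSchedulerClient.command_set_state(
        [("ManagedFilesystem", filesystem_id, "available")],
        "Starting filesystem",
    )
    return command_id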
| mit | -5,882,066,424,023,178,000 | 36.703571 | 133 | 0.666856 | false |
fsmMLK/inkscapeMadeEasy | examples/iME_Draw_lineStyle_and_markers.py | 1 | 4006 | #!/usr/bin/python
import inkex
import inkscapeMadeEasy_Base as inkBase
import inkscapeMadeEasy_Draw as inkDraw
import math
class myExtension(inkBase.inkscapeMadeEasy):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--myColorPicker", action="store", type="string", dest="lineColorPickerVar", default='0')
self.OptionParser.add_option("--myColorOption", action="store", type="string", dest="lineColorOptionVar", default='0')
def effect(self):
# sets the position to the viewport center, round to next 10.
position=[self.view_center[0],self.view_center[1]]
position[0]=int(math.ceil(position[0] / 10.0)) * 10
position[1]=int(math.ceil(position[1] / 10.0)) * 10
# creates a dot marker, with red stroke color and gray (40%) filling color
myDotMarker = inkDraw.marker.createDotMarker(self,
nameID='myDot' ,
RenameMode=1, # overwrite an eventual markers with the same name
scale=0.2,
strokeColor=inkDraw.color.defined('red'),
fillColor=inkDraw.color.gray(0.4))
# parses the input options to get the color of the line
lineColor = inkDraw.color.parseColorPicker(self.options.lineColorOptionVar, self.options.lineColorPickerVar)
# create a new line style with a 2.0 pt line and the marker just defined at both ends
myLineStyleDot = inkDraw.lineStyle.set(lineWidth=2.0,
lineColor=lineColor,
fillColor=inkDraw.color.defined('blue'),
lineJoin='round',
lineCap='round',
markerStart=myDotMarker,
markerMid=myDotMarker,
markerEnd=myDotMarker,
strokeDashArray=None)
#root_layer = self.current_layer
root_layer = self.document.getroot()
# draws a line using the new line style. (see inkscapeMadeEasy_Draw.line class for further info on this function
inkDraw.line.relCoords(root_layer,coordsList= [[0,100],[100,0]],offset=position,lineStyle=myLineStyleDot)
# -- Creates a second line style with ellipsis and
# creates a ellipsis marker with default values
infMarkerStart,infMarkerEnd = inkDraw.marker.createElipsisMarker(self,
nameID='myEllipsis' ,
RenameMode=1) # overwrite an eventual markers with the same name
# create a new line style
myStyleInf = inkDraw.lineStyle.set(lineWidth=1.0,
lineColor=lineColor,
fillColor=None,
lineJoin='round',
lineCap='round',
markerStart=infMarkerStart,
markerMid=None,
markerEnd=infMarkerEnd,
strokeDashArray=None)
# draws a line using the new line style. (see inkscapeMadeEasy_Draw.line class for further info on this function
inkDraw.line.relCoords(root_layer,coordsList= [[0,100],[100,0]],offset=[position[0]+300,position[1]],lineStyle=myStyleInf)
if __name__ == '__main__':
x = myExtension()
x.affect()
| gpl-3.0 | -872,509,245,772,700,800 | 53.876712 | 148 | 0.496006 | false |
wetek-enigma/enigma2 | lib/python/Screens/ButtonSetup.py | 1 | 29746 | from GlobalActions import globalActionMap
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.Button import Button
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.SystemInfo import SystemInfo
from Components.config import config, ConfigSubsection, ConfigText, ConfigYesNo
from Components.PluginComponent import plugins
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Tools.BoundFunction import boundFunction
from ServiceReference import ServiceReference
from enigma import eServiceReference, eActionMap
from Components.Label import Label
import os
def getButtonSetupKeys():
return [(_("Red"), "red", ""),
(_("Red long"), "red_long", ""),
(_("Green"), "green", ""),
(_("Green long"), "green_long", ""),
(_("Yellow"), "yellow", ""),
(_("Yellow long"), "yellow_long", ""),
(_("Blue"), "blue", ""),
(_("Blue long"), "blue_long", ""),
(_("Info (EPG)"), "info", "Infobar/InfoPressed/1"),
(_("Info (EPG) Long"), "info_long", "Infobar/showEventInfoPlugins/1"),
(_("Epg/Guide"), "epg", "Infobar/EPGPressed/1"),
(_("Epg/Guide long"), "epg_long", "Infobar/showEventGuidePlugins/1"),
(_("Left"), "cross_left", ""),
(_("Right"), "cross_right", ""),
(_("Up"), "cross_up", ""),
(_("Down"), "cross_down", ""),
(_("PageUp"), "pageup", ""),
(_("PageUp long"), "pageup_long", ""),
(_("PageDown"), "pagedown", ""),
(_("PageDown long"), "pagedown_long", ""),
(_("Channel up"), "channelup", ""),
(_("Channel down"), "channeldown", ""),
(_("TV"), "showTv", ""),
(_("Radio"), "radio", ""),
(_("Radio long"), "radio_long", ""),
(_("Rec"), "rec", ""),
(_("Rec long"), "rec_long", ""),
(_("Teletext"), "text", ""),
(_("Help"), "displayHelp", ""),
(_("Help long"), "displayHelp_long", ""),
(_("Subtitle"), "subtitle", ""),
(_("Subtitle long"), "subtitle_long", ""),
(_("Menu"), "mainMenu", ""),
(_("List/Fav"), "list", ""),
(_("List/Fav long"), "list_long", ""),
(_("PVR"), "pvr", ""),
(_("PVR long"), "pvr_long", ""),
(_("Favorites"), "favorites", ""),
(_("Favorites long"), "favorites_long", ""),
(_("File"), "file", ""),
(_("File long"), "file_long", ""),
(_("OK long"), "ok_long", ""),
(_("Media"), "media", ""),
(_("Media long"), "media_long", ""),
(_("Open"), "open", ""),
(_("Open long"), "open_long", ""),
(_("Www"), "www", ""),
(_("Www long"), "www_long", ""),
(_("Directory"), "directory", ""),
(_("Directory long"), "directory_long", ""),
(_("Back/Recall"), "back", ""),
(_("Back/Recall") + " " + _("long"), "back_long", ""),
(_("Home"), "home", ""),
(_("End"), "end", ""),
(_("Next"), "next", ""),
(_("Previous"), "previous", ""),
(_("Audio"), "audio", ""),
(_("Play"), "play", ""),
(_("Playpause"), "playpause", ""),
(_("Stop"), "stop", ""),
(_("Pause"), "pause", ""),
(_("Rewind"), "rewind", ""),
(_("Fastforward"), "fastforward", ""),
(_("Skip back"), "skip_back", ""),
(_("Skip forward"), "skip_forward", ""),
(_("activatePiP"), "activatePiP", ""),
(_("Timer"), "timer", ""),
(_("Playlist"), "playlist", ""),
(_("Playlist long"), "playlist_long", ""),
(_("Timeshift"), "timeshift", ""),
(_("Homepage"), "homep", ""),
(_("Homepage long"), "homep_long", ""),
(_("Search/WEB"), "search", ""),
(_("Search/WEB long"), "search_long", ""),
(_("Slow"), "slow", ""),
(_("Mark/Portal/Playlist"), "mark", ""),
(_("Sleep"), "sleep", ""),
(_("Sleep long"), "sleep_long", ""),
(_("Power"), "power", ""),
(_("Power long"), "power_long", ""),
(_("HDMIin"), "HDMIin", "Infobar/HDMIIn"),
(_("HDMIin") + " " + _("long"), "HDMIin_long", (SystemInfo["LcdLiveTV"] and "Infobar/ToggleLCDLiveTV") or ""),
(_("Context"), "contextMenu", "Infobar/showExtensionSelection"),
(_("Context long"), "context_long", ""),
(_("SAT"), "sat", "Infobar/openSatellites"),
(_("SAT long"), "sat_long", ""),
(_("Prov"), "prov", ""),
(_("Prov long"), "prov_long", ""),
(_("F1/LAN"), "f1", ""),
(_("F1/LAN long"), "f1_long", ""),
(_("F2"), "f2", ""),
(_("F2 long"), "f2_long", ""),
(_("F3"), "f3", ""),
(_("F3 long"), "f3_long", ""),
(_("F4"), "f4", ""),
(_("F4 long"), "f4_long", ""),]
config.misc.ButtonSetup = ConfigSubsection()
config.misc.ButtonSetup.additional_keys = ConfigYesNo(default=True)
for x in getButtonSetupKeys():
exec "config.misc.ButtonSetup." + x[1] + " = ConfigText(default='" + x[2] + "')"
def getButtonSetupFunctions():
ButtonSetupFunctions = []
twinPlugins = []
twinPaths = {}
pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "EPG"))
twinPlugins.append(plugin.name)
pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO])
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "Plugins"))
twinPlugins.append(plugin.name)
ButtonSetupFunctions.append((_("Show graphical multi EPG"), "Infobar/openGraphEPG", "EPG"))
ButtonSetupFunctions.append((_("Main menu"), "Infobar/mainMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Show help"), "Infobar/showHelp", "InfoBar"))
ButtonSetupFunctions.append((_("Show extension selection"), "Infobar/showExtensionSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Zap down"), "Infobar/zapDown", "InfoBar"))
ButtonSetupFunctions.append((_("Zap up"), "Infobar/zapUp", "InfoBar"))
ButtonSetupFunctions.append((_("Volume down"), "Infobar/volumeDown", "InfoBar"))
ButtonSetupFunctions.append((_("Volume up"), "Infobar/volumeUp", "InfoBar"))
ButtonSetupFunctions.append((_("Show Infobar"), "Infobar/toggleShow", "InfoBar"))
ButtonSetupFunctions.append((_("Show service list"), "Infobar/openServiceList", "InfoBar"))
ButtonSetupFunctions.append((_("Show favourites list"), "Infobar/openBouquets", "InfoBar"))
ButtonSetupFunctions.append((_("Show satellites list"), "Infobar/openSatellites", "InfoBar"))
ButtonSetupFunctions.append((_("History back"), "Infobar/historyBack", "InfoBar"))
ButtonSetupFunctions.append((_("History next"), "Infobar/historyNext", "InfoBar"))
ButtonSetupFunctions.append((_("Show eventinfo plugins"), "Infobar/showEventInfoPlugins", "EPG"))
ButtonSetupFunctions.append((_("Show event details"), "Infobar/openEventView", "EPG"))
ButtonSetupFunctions.append((_("Show single service EPG"), "Infobar/openSingleServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show multi channel EPG"), "Infobar/openMultiServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show Audioselection"), "Infobar/audioSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Enable digital downmix"), "Infobar/audioDownmixOn", "InfoBar"))
ButtonSetupFunctions.append((_("Disable digital downmix"), "Infobar/audioDownmixOff", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to radio mode"), "Infobar/showRadio", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to TV mode"), "Infobar/showTv", "InfoBar"))
ButtonSetupFunctions.append((_("Show servicelist or movies"), "Infobar/showServiceListOrMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Show movies"), "Infobar/showMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Instant record"), "Infobar/instantRecord", "InfoBar"))
ButtonSetupFunctions.append((_("Start instant recording"), "Infobar/startInstantRecording", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift End"), "Infobar/activateTimeshiftEnd", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift end and pause"), "Infobar/activateTimeshiftEndAndPause", "InfoBar"))
ButtonSetupFunctions.append((_("Start timeshift"), "Infobar/startTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Stop timeshift"), "Infobar/stopTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Start teletext"), "Infobar/startTeletext", "InfoBar"))
ButtonSetupFunctions.append((_("Show subservice selection"), "Infobar/subserviceSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle selection"), "Infobar/subtitleSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle quick menu"), "Infobar/subtitleQuickMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Letterbox zoom"), "Infobar/vmodeSelection", "InfoBar"))
if SystemInfo["PIPAvailable"]:
ButtonSetupFunctions.append((_("Show PIP"), "Infobar/showPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Swap PIP"), "Infobar/swapPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Move PIP"), "Infobar/movePiP", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle PIPzap"), "Infobar/togglePipzap", "InfoBar"))
ButtonSetupFunctions.append((_("Activate HbbTV (Redbutton)"), "Infobar/activateRedButton", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In full screen"), "Infobar/HDMIInFull", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In PiP"), "Infobar/HDMIInPiP", "InfoBar"))
if SystemInfo["LcdLiveTV"]:
ButtonSetupFunctions.append((_("Toggle LCD LiveTV"), "Infobar/ToggleLCDLiveTV", "InfoBar"))
ButtonSetupFunctions.append((_("Hotkey Setup"), "Module/Screens.ButtonSetup/ButtonSetup", "Setup"))
ButtonSetupFunctions.append((_("Software update"), "Module/Screens.SoftwareUpdate/UpdatePlugin", "Setup"))
ButtonSetupFunctions.append((_("CI (Common Interface) Setup"), "Module/Screens.Ci/CiSelection", "Setup"))
ButtonSetupFunctions.append((_("Tuner Configuration"), "Module/Screens.Satconfig/NimSelection", "Scanning"))
ButtonSetupFunctions.append((_("Manual Scan"), "Module/Screens.ScanSetup/ScanSetup", "Scanning"))
ButtonSetupFunctions.append((_("Automatic Scan"), "Module/Screens.ScanSetup/ScanSimple", "Scanning"))
for plugin in plugins.getPluginsForMenu("scan"):
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/scan/" + plugin[2], "Scanning"))
ButtonSetupFunctions.append((_("Network setup"), "Module/Screens.NetworkSetup/NetworkAdapterSelection", "Setup"))
ButtonSetupFunctions.append((_("Network menu"), "Infobar/showNetworkMounts", "Setup"))
ButtonSetupFunctions.append((_("Plugin Browser"), "Module/Screens.PluginBrowser/PluginBrowser", "Setup"))
ButtonSetupFunctions.append((_("Channel Info"), "Module/Screens.ServiceInfo/ServiceInfo", "Setup"))
ButtonSetupFunctions.append((_("SkinSelector"), "Module/Screens.SkinSelector/SkinSelector", "Setup"))
ButtonSetupFunctions.append((_("LCD SkinSelector"), "Module/Screens.SkinSelector/LcdSkinSelector", "Setup"))
ButtonSetupFunctions.append((_("Timer"), "Module/Screens.TimerEdit/TimerEditList", "Setup"))
ButtonSetupFunctions.append((_("Open AutoTimer"), "Infobar/showAutoTimerList", "Setup"))
for plugin in plugins.getPluginsForMenu("system"):
if plugin[2]:
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/system/" + plugin[2], "Setup"))
ButtonSetupFunctions.append((_("Standby"), "Module/Screens.Standby/Standby", "Power"))
ButtonSetupFunctions.append((_("Restart"), "Module/Screens.Standby/TryQuitMainloop/2", "Power"))
ButtonSetupFunctions.append((_("Restart enigma"), "Module/Screens.Standby/TryQuitMainloop/3", "Power"))
ButtonSetupFunctions.append((_("Deep standby"), "Module/Screens.Standby/TryQuitMainloop/1", "Power"))
ButtonSetupFunctions.append((_("SleepTimer"), "Module/Screens.SleepTimerEdit/SleepTimerEdit", "Power"))
ButtonSetupFunctions.append((_("PowerTimer"), "Module/Screens.PowerTimerEdit/PowerTimerEditList", "Power"))
ButtonSetupFunctions.append((_("Usage Setup"), "Setup/usage", "Setup"))
ButtonSetupFunctions.append((_("User interface settings"), "Setup/userinterface", "Setup"))
ButtonSetupFunctions.append((_("Recording Setup"), "Setup/recording", "Setup"))
ButtonSetupFunctions.append((_("Harddisk Setup"), "Setup/harddisk", "Setup"))
ButtonSetupFunctions.append((_("Subtitles Settings"), "Setup/subtitlesetup", "Setup"))
ButtonSetupFunctions.append((_("Language"), "Module/Screens.LanguageSelection/LanguageSelection", "Setup"))
ButtonSetupFunctions.append((_("OscamInfo Mainmenu"), "Module/Screens.OScamInfo/OscamInfoMenu", "Plugins"))
ButtonSetupFunctions.append((_("CCcamInfo Mainmenu"), "Module/Screens.CCcamInfo/CCcamInfoMain", "Plugins"))
ButtonSetupFunctions.append((_("Movieplayer"), "Module/Screens.MovieSelection/MovieSelection", "Plugins"))
if os.path.isdir("/etc/ppanels"):
for x in [x for x in os.listdir("/etc/ppanels") if x.endswith(".xml")]:
x = x[:-4]
ButtonSetupFunctions.append((_("PPanel") + " " + x, "PPanel/" + x, "PPanels"))
if os.path.isdir("/usr/script"):
for x in [x for x in os.listdir("/usr/script") if x.endswith(".sh")]:
x = x[:-3]
ButtonSetupFunctions.append((_("Shellscript") + " " + x, "Shellscript/" + x, "Shellscripts"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
ButtonSetupFunctions.append((_("ScriptRunner"), "ScriptRunner/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
ButtonSetupFunctions.append((_("QuickMenu"), "QuickMenu/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
ButtonSetupFunctions.append((_("Kodi MediaCenter"), "Kodi/", "Plugins"))
return ButtonSetupFunctions
class ButtonSetup(Screen):
def __init__(self, session, args=None):
Screen.__init__(self, session)
self['description'] = Label(_('Click on your remote on the button you want to change'))
self.session = session
self.setTitle(_("Hotkey Setup"))
self["key_red"] = Button(_("Exit"))
self.list = []
self.ButtonSetupKeys = getButtonSetupKeys()
self.ButtonSetupFunctions = getButtonSetupFunctions()
for x in self.ButtonSetupKeys:
self.list.append(ChoiceEntryComponent('',(_(x[0]), x[1])))
self["list"] = ChoiceList(list=self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10], selection = 0)
self["choosen"] = ChoiceList(list=[])
self.getFunctions()
self["actions"] = ActionMap(["OkCancelActions"],
{
"cancel": self.close,
}, -1)
self["ButtonSetupButtonActions"] = ButtonSetupActionMap(["ButtonSetupActions"], dict((x[1], self.ButtonSetupGlobal) for x in self.ButtonSetupKeys))
self.longkeyPressed = False
self.onLayoutFinish.append(self.__layoutFinished)
self.onExecBegin.append(self.getFunctions)
self.onShown.append(self.disableKeyMap)
self.onClose.append(self.enableKeyMap)
def __layoutFinished(self):
self["choosen"].selectionEnabled(0)
def disableKeyMap(self):
globalActionMap.setEnabled(False)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)
def enableKeyMap(self):
globalActionMap.setEnabled(True)
eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")
def ButtonSetupGlobal(self, key):
if self.longkeyPressed:
self.longkeyPressed = False
else:
index = 0
for x in self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10]:
if key == x[0][1]:
self["list"].moveToIndex(index)
if key.endswith("_long"):
self.longkeyPressed = True
break
index += 1
self.getFunctions()
self.session.open(ButtonSetupSelect, self["list"].l.getCurrentSelection())
def getFunctions(self):
key = self["list"].l.getCurrentSelection()[0][1]
if key:
selected = []
for x in eval("config.misc.ButtonSetup." + key + ".value.split(',')"):
function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
if function:
selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
self["choosen"].setList(selected)
class ButtonSetupSelect(Screen):
def __init__(self, session, key, args=None):
Screen.__init__(self, session)
self.skinName="ButtonSetupSelect"
self['description'] = Label(_('Select the desired function and click on "OK" to assign it. Use "CH+/-" to toggle between the lists. Select an assigned function and click on "OK" to de-assign it. Use "Next/Previous" to change the order of the assigned functions.'))
self.session = session
self.key = key
self.setTitle(_("Hotkey Setup for") + ": " + key[0][0])
self["key_red"] = Button(_("Cancel"))
self["key_green"] = Button(_("Save"))
self.mode = "list"
self.ButtonSetupFunctions = getButtonSetupFunctions()
self.config = eval("config.misc.ButtonSetup." + key[0][1])
self.expanded = []
self.selected = []
for x in self.config.value.split(','):
function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
if function:
self.selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
self.prevselected = self.selected[:]
self["choosen"] = ChoiceList(list=self.selected, selection=0)
self["list"] = ChoiceList(list=self.getFunctionList(), selection=0)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "KeyboardInputActions"],
{
"ok": self.keyOk,
"cancel": self.cancel,
"red": self.cancel,
"green": self.save,
"up": self.keyUp,
"down": self.keyDown,
"left": self.keyLeft,
"right": self.keyRight,
"pageUp": self.toggleMode,
"pageDown": self.toggleMode,
"shiftUp": self.moveUp,
"shiftDown": self.moveDown,
}, -1)
self.onShown.append(self.enableKeyMap)
self.onClose.append(self.disableKeyMap)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self["choosen"].selectionEnabled(0)
def disableKeyMap(self):
globalActionMap.setEnabled(False)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)
def enableKeyMap(self):
globalActionMap.setEnabled(True)
eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")
	def getFunctionList(self):
		functionslist = []
		categories = {}
		for function in self.ButtonSetupFunctions:
			if not categories.has_key(function[2]):
				categories[function[2]] = []
			categories[function[2]].append(function)
		for category in sorted(list(categories)):
			if category in self.expanded:
				functionslist.append(ChoiceEntryComponent('expanded',((category), "Expander")))
				for function in categories[category]:
					functionslist.append(ChoiceEntryComponent('verticalline',((function[0]), function[1])))
			else:
				functionslist.append(ChoiceEntryComponent('expandable',((category), "Expander")))
		return functionslist
def toggleMode(self):
if self.mode == "list" and self.selected:
self.mode = "choosen"
self["choosen"].selectionEnabled(1)
self["list"].selectionEnabled(0)
elif self.mode == "choosen":
self.mode = "list"
self["choosen"].selectionEnabled(0)
self["list"].selectionEnabled(1)
def keyOk(self):
if self.mode == "list":
currentSelected = self["list"].l.getCurrentSelection()
if currentSelected[0][1] == "Expander":
if currentSelected[0][0] in self.expanded:
self.expanded.remove(currentSelected[0][0])
else:
self.expanded.append(currentSelected[0][0])
self["list"].setList(self.getFunctionList())
else:
if currentSelected[:2] in self.selected:
self.selected.remove(currentSelected[:2])
else:
self.selected.append(currentSelected[:2])
elif self.selected:
self.selected.remove(self["choosen"].l.getCurrentSelection())
if not self.selected:
self.toggleMode()
self["choosen"].setList(self.selected)
def keyLeft(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.pageUp)
def keyRight(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.pageDown)
def keyUp(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.moveUp)
def keyDown(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.moveDown)
def moveUp(self):
self.moveChoosen(self.keyUp)
def moveDown(self):
self.moveChoosen(self.keyDown)
def moveChoosen(self, direction):
if self.mode == "choosen":
currentIndex = self["choosen"].getSelectionIndex()
swapIndex = (currentIndex + (direction == self.keyDown and 1 or -1)) % len(self["choosen"].list)
self["choosen"].list[currentIndex], self["choosen"].list[swapIndex] = self["choosen"].list[swapIndex], self["choosen"].list[currentIndex]
self["choosen"].setList(self["choosen"].list)
direction()
else:
return 0
def save(self):
configValue = []
for x in self.selected:
configValue.append(x[0][1])
self.config.value = ",".join(configValue)
self.config.save()
self.close()
def cancel(self):
if self.selected != self.prevselected:
self.session.openWithCallback(self.cancelCallback, MessageBox, _("Are you sure to cancel all changes"), default=False)
else:
self.close()
def cancelCallback(self, answer):
answer and self.close()
class ButtonSetupActionMap(ActionMap):
def action(self, contexts, action):
if (action in tuple(x[1] for x in getButtonSetupKeys()) and self.actions.has_key(action)):
res = self.actions[action](action)
if res is not None:
return res
return 1
else:
return ActionMap.action(self, contexts, action)
class helpableButtonSetupActionMap(HelpableActionMap):
def action(self, contexts, action):
if (action in tuple(x[1] for x in getButtonSetupKeys()) and self.actions.has_key(action)):
res = self.actions[action](action)
if res is not None:
return res
return 1
else:
return ActionMap.action(self, contexts, action)
class InfoBarButtonSetup():
def __init__(self):
self.ButtonSetupKeys = getButtonSetupKeys()
self["ButtonSetupButtonActions"] = helpableButtonSetupActionMap(self, "ButtonSetupActions",
dict((x[1],(self.ButtonSetupGlobal, boundFunction(self.getHelpText, x[1]))) for x in self.ButtonSetupKeys), -10)
self.longkeyPressed = False
self.onExecEnd.append(self.clearLongkeyPressed)
def clearLongkeyPressed(self):
self.longkeyPressed = False
def getKeyFunctions(self, key):
if key in ("play", "playpause", "Stop", "stop", "pause", "rewind", "next", "previous", "fastforward", "skip_back", "skip_forward") and (self.__class__.__name__ == "MoviePlayer" or hasattr(self, "timeshiftActivated") and self.timeshiftActivated()):
return False
selection = eval("config.misc.ButtonSetup." + key + ".value.split(',')")
selected = []
for x in selection:
if x.startswith("ZapPanic"):
selected.append(((_("Panic to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
elif x.startswith("Zap"):
selected.append(((_("Zap to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
else:
function = list(function for function in getButtonSetupFunctions() if function[1] == x )
if function:
selected.append(function[0])
return selected
def getHelpText(self, key):
selected = self.getKeyFunctions(key)
if not selected:
return
if len(selected) == 1:
return selected[0][0]
else:
return _("ButtonSetup") + " " + tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
def ButtonSetupGlobal(self, key):
if self.longkeyPressed:
self.longkeyPressed = False
else:
selected = self.getKeyFunctions(key)
if not selected:
return 0
elif len(selected) == 1:
if key.endswith("_long"):
self.longkeyPressed = True
return self.execButtonSetup(selected[0])
else:
key = tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
self.session.openWithCallback(self.execButtonSetup, ChoiceBox, (_("Hotkey")) + " " + key, selected)
def execButtonSetup(self, selected):
if selected:
selected = selected[1].split("/")
if selected[0] == "Plugins":
twinPlugins = []
twinPaths = {}
pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
self.runPlugin(plugin)
return
twinPlugins.append(plugin.name)
pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU])
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
self.runPlugin(plugin)
return
twinPlugins.append(plugin.name)
elif selected[0] == "MenuPlugin":
for plugin in plugins.getPluginsForMenu(selected[1]):
if plugin[2] == selected[2]:
self.runPlugin(plugin[1])
return
elif selected[0] == "Infobar":
if hasattr(self, selected[1]):
exec "self." + ".".join(selected[1:]) + "()"
else:
return 0
elif selected[0] == "Module":
try:
exec "from " + selected[1] + " import *"
exec "self.session.open(" + ",".join(selected[2:]) + ")"
except:
print "[ButtonSetup] error during executing module %s, screen %s" % (selected[1], selected[2])
elif selected[0] == "Setup":
exec "from Screens.Setup import *"
exec "self.session.open(Setup, \"" + selected[1] + "\")"
elif selected[0].startswith("Zap"):
if selected[0] == "ZapPanic":
self.servicelist.history = []
self.pipShown() and self.showPiP()
self.servicelist.servicelist.setCurrent(eServiceReference("/".join(selected[1:])))
self.servicelist.zap(enable_pipzap = True)
if hasattr(self, "lastservice"):
self.lastservice = eServiceReference("/".join(selected[1:]))
self.close()
else:
self.show()
from Screens.MovieSelection import defaultMoviePath
moviepath = defaultMoviePath()
if moviepath:
config.movielist.last_videodir.value = moviepath
elif selected[0] == "PPanel":
ppanelFileName = '/etc/ppanels/' + selected[1] + ".xml"
if os.path.isfile(ppanelFileName) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
from Plugins.Extensions.PPanel.ppanel import PPanel
self.session.open(PPanel, name=selected[1] + ' PPanel', node=None, filename=ppanelFileName, deletenode=None)
elif selected[0] == "Shellscript":
command = '/usr/script/' + selected[1] + ".sh"
if os.path.isfile(command) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
from Plugins.Extensions.PPanel.ppanel import Execute
self.session.open(Execute, selected[1] + " shellscript", None, command)
else:
from Screens.Console import Console
exec "self.session.open(Console,_(selected[1]),[command])"
elif selected[0] == "EMC":
try:
from Plugins.Extensions.EnhancedMovieCenter.plugin import showMoviesNew
from Screens.InfoBar import InfoBar
open(showMoviesNew(InfoBar.instance))
except Exception as e:
print('[EMCPlayer] showMovies exception:\n' + str(e))
elif selected[0] == "ScriptRunner":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
from Plugins.Extensions.Infopanel.ScriptRunner import ScriptRunner
self.session.open (ScriptRunner)
elif selected[0] == "QuickMenu":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
self.session.open (QuickMenu)
elif selected[0] == "Kodi":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
from Plugins.Extensions.Kodi.plugin import KodiMainScreen
self.session.open(KodiMainScreen)
def showServiceListOrMovies(self):
if hasattr(self, "openServiceList"):
self.openServiceList()
elif hasattr(self, "showMovies"):
self.showMovies()
def ToggleLCDLiveTV(self):
config.lcd.showTv.value = not config.lcd.showTv.value
| gpl-2.0 | 6,284,683,870,160,974,000 | 45.917981 | 266 | 0.686042 | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/errors/types/feed_attribute_reference_error.py | 1 | 1293 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.errors',
marshal='google.ads.googleads.v7',
manifest={
'FeedAttributeReferenceErrorEnum',
},
)
class FeedAttributeReferenceErrorEnum(proto.Message):
r"""Container for enum describing possible feed attribute
reference errors.
"""
class FeedAttributeReferenceError(proto.Enum):
r"""Enum describing possible feed attribute reference errors."""
UNSPECIFIED = 0
UNKNOWN = 1
CANNOT_REFERENCE_REMOVED_FEED = 2
INVALID_FEED_NAME = 3
INVALID_FEED_ATTRIBUTE_NAME = 4
__all__ = tuple(sorted(__protobuf__.manifest))
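# --- Usage sketch (editor's addition; illustrative only) ---
# Enum values are addressed through the nested message type, e.g.:
#   error = (FeedAttributeReferenceErrorEnum.FeedAttributeReferenceError
#            .CANNOT_REFERENCE_REMOVED_FEED)
#   error.name   # 'CANNOT_REFERENCE_REMOVED_FEED'
#   error.value  # 2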
| apache-2.0 | -425,834,407,816,632,600 | 30.536585 | 74 | 0.702243 | false |
examachine/pisi | pisi/exml/xmlfilepiks.py | 1 | 2519 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# Author: Eray Ozkural <[email protected]>
"""
XmlFile class further abstracts a dom object using the
high-level dom functions provided in xmlext module (and sorely lacking
in xml.dom :( )
function names are mixedCase for compatibility with minidom,
an 'old library'
this implementation uses piksemel
"""
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
import codecs
import exceptions
import piksemel as iks
import pisi
from pisi.file import File
from pisi.util import join_path as join
class Error(pisi.Error):
pass
class XmlFile(object):
"""A class to help reading and writing an XML file"""
def __init__(self, tag):
self.rootTag = tag
def newDocument(self):
"""clear DOM"""
self.doc = iks.newDocument(self.rootTag)
def unlink(self):
"""deallocate DOM structure"""
del self.doc
def rootNode(self):
"""returns root document element"""
return self.doc
def readxmlfile(self, file):
raise Exception("not implemented")
try:
self.doc = iks.parse(file)
return self.doc
except Exception, e:
raise Error(_("File '%s' has invalid XML") % (localpath) )
def readxml(self, uri, tmpDir='/tmp', sha1sum=False,
compress=None, sign=None, copylocal = False):
uri = File.make_uri(uri)
#try:
localpath = File.download(uri, tmpDir, sha1sum=sha1sum,
compress=compress,sign=sign, copylocal=copylocal)
#except IOError, e:
# raise Error(_("Cannot read URI %s: %s") % (uri, unicode(e)) )
try:
self.doc = iks.parse(localpath)
return self.doc
except Exception, e:
raise Error(_("File '%s' has invalid XML") % (localpath) )
def writexml(self, uri, tmpDir = '/tmp', sha1sum=False, compress=None, sign=None):
f = File(uri, File.write, sha1sum=sha1sum, compress=compress, sign=sign)
f.write(self.doc.toPrettyString())
f.close()
def writexmlfile(self, f):
f.write(self.doc.toPrettyString())
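# --- Usage sketch (editor's addition; illustrative only) ---
# A minimal round trip, assuming a writable /tmp; the tag and path are
# placeholders, not values from the pisi code base.
def _example_roundtrip():
    xml = XmlFile('PISI')
    xml.newDocument()
    xml.writexml('/tmp/example.xml')
    xml.readxml('/tmp/example.xml')
    return xml.rootNode()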
| gpl-3.0 | -4,301,337,258,087,709,000 | 27.303371 | 86 | 0.628821 | false |
csm0042/rpihome_v3 | rpihome_v3/schedule_service/service_main.py | 1 | 6835 | #!/usr/bin/python3
""" service_main.py:
"""
# Import Required Libraries (Standard, Third Party, Local) ********************
import asyncio
import datetime
import logging
if __name__ == "__main__":
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from rpihome_v3.occupancy_service.msg_processing import create_heartbeat_msg
from rpihome_v3.occupancy_service.msg_processing import process_heartbeat_msg
from rpihome_v3.schedule_service.msg_processing import process_get_device_scheduled_state_msg
# Authorship Info *************************************************************
__author__ = "Christopher Maue"
__copyright__ = "Copyright 2017, The RPi-Home Project"
__credits__ = ["Christopher Maue"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Christopher Maue"
__email__ = "[email protected]"
__status__ = "Development"
# Internal Service Work Task **************************************************
class MainTask(object):
def __init__(self, log, **kwargs):
# Configure logger
self.log = log or logging.getLogger(__name__)
# Define instance variables
self.ref_num = None
self.msg_in_queue = None
self.msg_out_queue = None
self.schedule = []
self.service_addresses = []
self.message_types = []
self.last_check_hb = datetime.datetime.now()
self.out_msg = str()
self.out_msg_list = []
self.next_msg = str()
self.next_msg_split = []
self.msg_source_addr = str()
self.msg_type = str()
self.destinations = []
# Map input variables
if kwargs is not None:
for key, value in kwargs.items():
if key == "ref":
self.ref_num = value
self.log.debug('Ref number generator set during __init__ '
'to: %s', self.ref_num)
if key == "schedule":
self.schedule = value
self.log.debug('Schedule set during __init__ '
'to: %s', self.schedule)
if key == "msg_in_queue":
self.msg_in_queue = value
self.log.debug('Message in queue set during __init__ '
'to: %s', self.msg_in_queue)
if key == "msg_out_queue":
self.msg_out_queue = value
self.log.debug('Message out queue set during __init__ '
'to: %s', self.msg_out_queue)
if key == "service_addresses":
self.service_addresses = value
self.log.debug('Service address list set during __init__ '
'to: %s', self.service_addresses)
if key == "message_types":
self.message_types = value
self.log.debug('Message type list set during __init__ '
'to: %s', self.message_types)
@asyncio.coroutine
def run(self):
""" task to handle the work the service is intended to do """
self.log.info('Starting schedule service main task')
while True:
# Initialize result list
self.out_msg_list = []
# INCOMING MESSAGE HANDLING
if self.msg_in_queue.qsize() > 0:
self.log.debug('Getting Incoming message from queue')
self.next_msg = self.msg_in_queue.get_nowait()
self.log.debug('Message pulled from queue: [%s]', self.next_msg)
# Determine message type
self.next_msg_split = self.next_msg.split(',')
if len(self.next_msg_split) >= 6:
self.log.debug('Extracting source address and message type')
self.msg_source_addr = self.next_msg_split[1]
self.msg_type = self.next_msg_split[5]
self.log.debug('Source Address: %s', self.msg_source_addr)
self.log.debug('Message Type: %s', self.msg_type)
# Service Check (heartbeat)
if self.msg_type == self.message_types['heartbeat']:
self.log.debug('Message is a heartbeat')
self.out_msg_list = process_heartbeat_msg(
self.log,
self.ref_num,
self.next_msg,
self.message_types)
# Device scheduled command checks
if self.msg_type == self.message_types['get_device_scheduled_state']:
self.log.debug('Message is a get device scheduled state message')
self.out_msg_list = process_get_device_scheduled_state_msg(
self.log,
self.ref_num,
self.schedule,
self.next_msg,
self.message_types)
# Que up response messages in outgoing msg que
if len(self.out_msg_list) > 0:
self.log.debug('Queueing response message(s)')
for self.out_msg in self.out_msg_list:
self.msg_out_queue.put_nowait(self.out_msg)
self.log.debug('Message [%s] successfully queued', self.out_msg)
# PERIODIC TASKS
# Periodically send heartbeats to other services
if datetime.datetime.now() >= (self.last_check_hb + datetime.timedelta(seconds=120)):
self.destinations = [
(self.service_addresses['automation_addr'],
self.service_addresses['automation_port'])
]
self.out_msg_list = create_heartbeat_msg(
self.log,
self.ref_num,
self.destinations,
self.service_addresses['schedule_addr'],
self.service_addresses['schedule_port'],
self.message_types)
# Que up response messages in outgoing msg que
if len(self.out_msg_list) > 0:
self.log.debug('Queueing response message(s)')
for self.out_msg in self.out_msg_list:
self.msg_out_queue.put_nowait(self.out_msg)
self.log.debug('Response message [%s] successfully queued',
self.out_msg)
# Update last-check
self.last_check_hb = datetime.datetime.now()
# Yield to other tasks for a while
yield from asyncio.sleep(0.25)
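# --- Usage sketch (editor's addition; illustrative only) ---
# One way a service wrapper might wire up and run MainTask; the queues,
# addresses and message-type map below are placeholders.
def _example_run(log, ref_num, schedule, addresses, msg_types):
    loop = asyncio.get_event_loop()
    task = MainTask(
        log,
        ref=ref_num,
        schedule=schedule,
        msg_in_queue=asyncio.Queue(),
        msg_out_queue=asyncio.Queue(),
        service_addresses=addresses,
        message_types=msg_types)
    loop.run_until_complete(task.run())  # runs until cancelled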
| gpl-3.0 | -8,828,960,547,473,434,000 | 42.814103 | 97 | 0.501244 | false |
Ledoux/ShareYourSystem | Pythonlogy/ShareYourSystem/Standards/Classors/Switcher/Drafts/__init__ copy.py | 1 | 8024 | #<ImportSpecificModules>
import operator
,Doer,Representer
from ShareYourSystem.Functers import Functer,Triggerer,Hooker
BaseModuleStr="ShareYourSystem.Functers.Functer"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer")
#</ImportSpecificModules>
#<DefineLocals>
SYS.setSubModule(globals())
SwitchingBeforeStr='Before'
SwitchingAfterStr='After'
SwitchingBindStr='bind'
#</DefineLocals>
#<DefineClass>
@DecorationClass()
class SwitcherClass(BaseClass):
def default_init(self,**_KwargVariablesDict):
#<DefineSpecificDo>
self.SwitchingFunction=None #<NotRepresented>
self.SwitchedFunction=None #<NotRepresented>
self.SwitchedFunctionStr="" #<NotRepresented>
self.SwitchedBoolSuffixStr="" #<NotRepresented>
self.SwitchedClassBoolKeyStr="" #<NotRepresented>
self.SwitchedInstanceBoolKeyStr="" #<NotRepresented>
#</DefineSpecificDo>
#Call the parent init method
BaseClass.__init__(self,**_KwargVariablesDict)
def __call__(self,_Variable):
#Switch
self.switch(_Variable)
#Link
self.FunctedFunction=self.SwitchedFunction
#Call the call of the parent class
return BaseClass.__call__(self,self.SwitchingFunction)
def switch(self,_Variable=None):
#set the switching Function
if self.SwitchingFunction==None:
self.SwitchingFunction=_Variable
		#set the SwitchedFunctionStr: this is the functed function name, with any tagged Functer@ prefix removed
self.SwitchedFunctionStr=self.SwitchingFunction.__name__.split(Functer.FunctingDecorationStr)[-1]
#debug
self.debug(('self.',self,['SwitchedFunctionStr']))
#Cut the pre attributing part if there is one
if Functer.FunctingAttributeStr in self.SwitchedFunctionStr:
self.SwitchedFunctionStr=self.SwitchedFunctionStr.split(Functer.FunctingAttributeStr)[-1]
#self.SwitchedDoneFunctionStr=Doer.getDoneStrWithDoStr(self.SwitchedFunctionStr)
#SwitchedBoolSuffixStr=self.SwitchedDoneFunctionStr[0].upper()+self.SwitchedDoneFunctionStr[1:]
self.SwitchedBoolSuffixStr=self.SwitchedFunctionStr[0].upper()+self.SwitchedFunctionStr[1:]+'Bool'
self.SwitchedInstanceBoolKeyStr='Switching'+self.SwitchedBoolSuffixStr
#self.SwitchedInstanceBoolKeyStr='SwitchedInstance'+self.SwitchedBoolSuffixStr
self.SwitchedClassBoolKeyStr='SwitchedClass'+self.SwitchedBoolSuffixStr
#debug
self.debug(('self.',self,['SwitchedInstanceBoolKeyStr','SwitchedClassBoolKeyStr']))
#Definition the SwitchedFunction
def SwitchedFunction(*_LiargVariablesList,**_KwargVariablesDict):
#Alias
InstanceVariable=_LiargVariablesList[0]
#Append for debbuging
#if hasattr(InstanceVariable,'DebuggingNotFrameFunctionStrsList'):
# if 'SwitchedFunction' not in InstanceVariable.DebuggingNotFrameFunctionStrsList:
# InstanceVariable.DebuggingNotFrameFunctionStrsList.append('SwitchedFunction')
#debug
'''
self.debug(
[
('self.',self,['SwitchedClassBoolKeyStr','SwitchedInstanceBoolKeyStr']),
Representer.represent(InstanceVariable,**{'RepresentingAlineaIsBool':False})
]
)
'''
#set the SwitchedBool if it was not already
if hasattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr)==False:
#debug
'''
self.debug('The InstanceVariable has not the SwitchedBoolSuffixStr..so set it to False')
'''
#set
InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,False)
elif getattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr):
#debug
'''
self.debug('The Instance has already done this method')
'''
#Return
return InstanceVariable
#debug
'''
self.debug(('self.',self,['SwitchedBoolSuffixStr']))
'''
#At the level of the class set the new binding set function
if hasattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr)==False:
#Definition the binding function that will call the init one
def bindBefore(*_TriggeringVariablesList,**_TriggeringVariablesDict):
#Alias
TriggeredInstanceVariable=_TriggeringVariablesList[0]
#debug
'''
self.debug('Reinit with '+Representer.represent(
TriggeredInstanceVariable.SettingKeyVariable,**{'RepresentingAlineaIsBool':False}
)
)
'''
#Definition the init method to trigger
SwitchedInitMethod=Functer.getFunctingFunctionWithFuncFunction(
TriggeredInstanceVariable.__class__.init
)
#debug
'''
self.debug(
[
'SwitchedInitMethod is '+str(SwitchedInitMethod),
"SwitchedInitMethod.func_globals['__file__'] is "+SwitchedInitMethod.func_globals['__file__']
]
)
'''
#Call the init method (just at the level of this class definition) (so IMPORTANT this is init not __init__)
SwitchedInitMethod(TriggeredInstanceVariable)
#set the name
TriggeredBeforeMethodStr='bindBeforeWith'+self.SwitchedBoolSuffixStr
bindBefore.__name__=TriggeredBeforeMethodStr
#debug
'''
self.debug(
[
("self.",self,['SwitchedDoneFunctionStr','SwitchedBoolSuffixStr']),
("TriggeredMethodStr is "+TriggeredMethodStr)
]
)
'''
#Link the bindBefore function
setattr(
InstanceVariable.__class__,
TriggeredBeforeMethodStr,
Triggerer.TriggererClass(**
{
'TriggeringConditionVariable':[
(
'SettingKeyVariable',
(operator.eq,self.SwitchedInstanceBoolKeyStr)
),
(
self.SwitchedInstanceBoolKeyStr,
(operator.eq,True)
),
('SettingValueVariable',(operator.eq,False))
],
'TriggeringHookStr':"Before"
}
)(bindBefore)
)
#Call with a default instance this bind function to be installed
getattr(InstanceVariable.__class__(),TriggeredBeforeMethodStr)()
'''
#Definition the binding function that will set the switched bool to True
def bindAfter(*_TriggeringVariablesList,**_TriggeringVariablesDict):
#Alias
TriggeredInstanceVariable=_TriggeringVariablesList[0]
#Say that it is ok
setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,False)
setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,True)
#set the name
TriggeredAfterMethodStr='bindAfterWith'+self.SwitchedBoolSuffixStr
bindAfter.__name__=TriggeredAfterMethodStr
#Link the bindAfter function
setattr(
InstanceVariable.__class__,
TriggeredAfterMethodStr,
Triggerer.TriggererClass(**
{
'TriggeringConditionVariable':[
(
'SettingKeyVariable',
(operator.eq,self.SwitchedInstanceBoolKeyStr)
),
(
self.SwitchedInstanceBoolKeyStr,
(operator.eq,True)
),
('SettingValueVariable',(operator.eq,False))
],
'TriggeringHookStr':"After"
}
)(bindAfter)
)
#Call with a default instance this bind function to be installed
getattr(InstanceVariable.__class__(),TriggeredAfterMethodStr)()
'''
#Say that it is ok
setattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr,True)
#debug
'''
self.debug(
[
#('InstanceVariable is '+SYS._str(InstanceVariable)),
('_LiargVariablesList is '+str(_LiargVariablesList))
]
)
'''
#Call the SwitchingFunction
self.SwitchingFunction(*_LiargVariablesList,**_KwargVariablesDict)
#debug
'''
self.debug(('self.',self,['SwitchedBoolSuffixStr']))
'''
#set True for the Bool after the call
InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,True)
#debug
'''
self.debug(('InstanceVariable.',InstanceVariable,[self.SwitchedBoolSuffixStr]))
'''
#Return self for the wrapped method call
return InstanceVariable
#set
self.SwitchedFunction=SwitchedFunction
#Return self
return self
#</DefineClass>
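#<DefineExampleSketch>
#Hedged illustration (not part of the original module): a minimal,
#framework-free sketch of the run-once pattern that the Switcher above
#automates. The names run_once and the _DoneBool suffix are hypothetical.
def run_once(_Function):
	#per-instance flag key, analogous to SwitchedInstanceBoolKeyStr
	BoolKeyStr='_'+_Function.__name__+'DoneBool'
	def switchedFunction(_InstanceVariable,*_LiargVariablesList,**_KwargVariablesDict):
		#skip the call if this instance already ran the method
		if getattr(_InstanceVariable,BoolKeyStr,False):
			return _InstanceVariable
		_Function(_InstanceVariable,*_LiargVariablesList,**_KwargVariablesDict)
		#set True for the Bool after the call, as SwitchedFunction does
		setattr(_InstanceVariable,BoolKeyStr,True)
		return _InstanceVariable
	return switchedFunction
#</DefineExampleSketch>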
| mit | 331,411,898,130,539,900 | 28.284672 | 112 | 0.692672 | false |
bokeh-cookbook/bokeh-cookbook | plugins/ipynb/markup.py | 1 | 5935 | from __future__ import absolute_import, print_function, division
import os
import json
try:
# Py3k
from html.parser import HTMLParser
except ImportError:
# Py2.7
from HTMLParser import HTMLParser
from pelican import signals
from pelican.readers import MarkdownReader, HTMLReader, BaseReader
from .ipynb import get_html_from_filepath, fix_css
def register():
"""
Register the new "ipynb" reader
"""
def add_reader(arg):
arg.settings["READERS"]["ipynb"] = IPythonNB
signals.initialized.connect(add_reader)
class IPythonNB(BaseReader):
"""
    Extend the Pelican BaseReader so that `.ipynb` files can be recognized
    as a markup language:
Setup:
`pelicanconf.py`:
```
MARKUP = ('md', 'ipynb')
```
"""
enabled = True
file_extensions = ['ipynb']
def read(self, filepath):
metadata = {}
metadata['ipython'] = True
# Files
filedir = os.path.dirname(filepath)
filename = os.path.basename(filepath)
metadata_filename = filename.split('.')[0] + '.ipynb-meta'
metadata_filepath = os.path.join(filedir, metadata_filename)
if os.path.exists(metadata_filepath):
# Metadata is on a external file, process using Pelican MD Reader
md_reader = MarkdownReader(self.settings)
_content, metadata = md_reader.read(metadata_filepath)
else:
# Load metadata from ipython notebook file
ipynb_file = open(filepath)
notebook_metadata = json.load(ipynb_file)['metadata']
# Change to standard pelican metadata
for key, value in notebook_metadata.items():
key = key.lower()
if key in ("title", "date", "category", "tags", "slug", "author"):
metadata[key] = self.process_metadata(key, value)
keys = [k.lower() for k in metadata.keys()]
if not set(['title', 'date']).issubset(set(keys)):
# Probably using ipynb.liquid mode
md_filename = filename.split('.')[0] + '.md'
md_filepath = os.path.join(filedir, md_filename)
if not os.path.exists(md_filepath):
raise Exception("Could not find metadata in `.ipynb-meta`, inside `.ipynb` or external `.md` file.")
else:
raise Exception("""Could not find metadata in `.ipynb-meta` or inside `.ipynb` but found `.md` file,
                assuming that this notebook is for liquid tag usage; if so, ignore this error""")
content, info = get_html_from_filepath(filepath)
# Generate Summary: Do it before cleaning CSS
if 'summary' not in [key.lower() for key in self.settings.keys()]:
parser = MyHTMLParser(self.settings, filename)
if hasattr(content, 'decode'): # PY2
content = '<body>%s</body>' % content.encode('utf-8')
content = content.decode("utf-8")
else:
content = '<body>%s</body>' % content
parser.feed(content)
parser.close()
content = parser.body
if ('IPYNB_USE_META_SUMMARY' in self.settings.keys() and self.settings['IPYNB_USE_META_SUMMARY'] is False) or 'IPYNB_USE_META_SUMMARY' not in self.settings.keys():
metadata['summary'] = parser.summary
content = fix_css(content, info)
return content, metadata
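# Hedged illustration (not from the original plugin): a hypothetical
# `mypost.ipynb-meta` sidecar file in the Pelican Markdown metadata format
# that the external-metadata branch above hands to MarkdownReader:
#
#   Title: My notebook post
#   Date: 2015-01-01 10:00
#   Category: notebooks
#   Tags: ipython, jupyter
#   Slug: my-notebook-post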
class MyHTMLParser(HTMLReader._HTMLParser):
"""
Custom Pelican `HTMLReader._HTMLParser` to create the summary of the content
based on settings['SUMMARY_MAX_LENGTH'].
    Summary generation stops if it finds any div containing ipython notebook code cells.
    This is needed in order to generate valid HTML for the summary;
    a simple string split would break the HTML, generating errors in the theme.
    The downside is that the summary length is not exactly as specified: it stops at
    completed div/p/li/etc. tags.
"""
def __init__(self, settings, filename):
HTMLReader._HTMLParser.__init__(self, settings, filename)
self.settings = settings
self.filename = filename
self.wordcount = 0
self.summary = None
self.stop_tags = [('div', ('class', 'input')), ('div', ('class', 'output')), ('h2', ('id', 'Header-2'))]
if 'IPYNB_STOP_SUMMARY_TAGS' in self.settings.keys():
self.stop_tags = self.settings['IPYNB_STOP_SUMMARY_TAGS']
if 'IPYNB_EXTEND_STOP_SUMMARY_TAGS' in self.settings.keys():
self.stop_tags.extend(self.settings['IPYNB_EXTEND_STOP_SUMMARY_TAGS'])
def handle_starttag(self, tag, attrs):
HTMLReader._HTMLParser.handle_starttag(self, tag, attrs)
if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
mask = [stoptag[0] == tag and (stoptag[1] is None or stoptag[1] in attrs) for stoptag in self.stop_tags]
if any(mask):
self.summary = self._data_buffer
self.wordcount = self.settings['SUMMARY_MAX_LENGTH']
def handle_endtag(self, tag):
HTMLReader._HTMLParser.handle_endtag(self, tag)
if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
self.wordcount = len(strip_tags(self._data_buffer).split(' '))
if self.wordcount >= self.settings['SUMMARY_MAX_LENGTH']:
self.summary = self._data_buffer
def strip_tags(html):
"""
Strip html tags from html content (str)
Useful for summary creation
"""
s = HTMLTagStripper()
s.feed(html)
return s.get_data()
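def _strip_tags_example():
    """Hedged usage sketch (not in the original plugin), run on a
    hypothetical snippet; only the text nodes survive."""
    assert strip_tags('<p>Hello <b>world</b></p>') == 'Hello world'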
class HTMLTagStripper(HTMLParser):
"""
Custom HTML Parser to strip HTML tags
Useful for summary creation
"""
def __init__(self):
HTMLParser.__init__(self)
self.reset()
self.fed = []
def handle_data(self, html):
self.fed.append(html)
def get_data(self):
return ''.join(self.fed)
| agpl-3.0 | -8,813,531,780,843,853,000 | 34.969697 | 175 | 0.609436 | false |
mtlynch/ndt-e2e-clientworker | client_wrapper/install_selenium_extensions.py | 1 | 3193 | import argparse
import os
import platform
import urllib
import tempfile
import names
driver_urls = {
'chrome_os_x': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_mac32.zip',
'file_name': 'chromedriver_mac32.zip'
},
'chrome_ubuntu': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_linux64.zip',
'file_name': 'chromedriver_linux64.zip'
},
'chrome_windows_10': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_win32.zip',
'file_name': 'chromedriver_win32.zip'
},
'edge_windows_10': {
'url':
'https://download.microsoft.com/download/8/D/0/8D0D08CF-790D-4586-B726-C6469A9ED49C/MicrosoftWebDriver.msi',
'file_name': 'MicrosoftWebDriver.msi'
},
'safari_os_x': {
'url':
'http://selenium-release.storage.googleapis.com/2.48/SafariDriver.safariextz',
'file_name': 'SafariDriver.safariextz',
}
}
def _download_chrome_drivers():
"""Downloads Chrome drivers for Selenium."""
# Mac OS X
if platform.system() == 'Darwin':
remote_file = driver_urls['chrome_os_x']
elif platform.system() == 'Linux':
remote_file = driver_urls['chrome_ubuntu']
elif platform.system() == 'Windows':
remote_file = driver_urls['chrome_windows_10']
else:
raise ValueError('Unsupported OS specified: %s' % (platform.system()))
_download_temp_file(remote_file['url'], remote_file['file_name'])
def _download_temp_file(url, file_name):
"""Downloads file into temp directory.
Args:
url: A string representing the URL the file is to be downloaded from.
file_name: A string representing the name of the file to be downloaded.
"""
temp_dir = tempfile.mkdtemp()
download_path = os.path.join(temp_dir, file_name)
print('File downloading to %s' % download_path)
urllib.URLopener().retrieve(url, download_path)
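def _download_temp_file_py3(url, file_name):
    """Hedged sketch of a Python 3 equivalent of _download_temp_file above;
    urllib.URLopener only exists on Python 2. Not called by this script.

    Args:
        url: A string representing the URL the file is to be downloaded from.
        file_name: A string representing the name of the file to be downloaded.
    """
    import urllib.request
    temp_dir = tempfile.mkdtemp()
    download_path = os.path.join(temp_dir, file_name)
    print('File downloading to %s' % download_path)
    urllib.request.urlretrieve(url, download_path)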
def _download_edge_drivers():
"""Downloads Edge drivers for Selenium."""
remote_file = driver_urls['edge_windows_10']
_download_temp_file(remote_file['url'], remote_file['file_name'])
def _download_safari_drivers():
"""Downloads Safari drivers for Selenium."""
remote_file = driver_urls['safari_os_x']
_download_temp_file(remote_file['url'], remote_file['file_name'])
def main(args):
if args.browser == names.CHROME:
_download_chrome_drivers()
elif args.browser == names.EDGE:
_download_edge_drivers()
elif args.browser == names.SAFARI:
_download_safari_drivers()
elif args.browser == names.FIREFOX:
pass
else:
raise ValueError('Unsupported browser specified: %s' % (args.browser))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='NDT E2E Testing Client Selenium Extension Installer',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--browser',
help='Browser to run under (for browser-based client)',
choices=('chrome', 'firefox', 'safari', 'edge'))
main(parser.parse_args())
| apache-2.0 | 7,890,214,524,247,196,000 | 31.917526 | 116 | 0.634826 | false |
cwacek/python-jsonschema-objects | test/test_regression_126.py | 1 | 1829 | import pytest
import python_jsonschema_objects as pjs
import collections
@pytest.fixture
def schema():
return {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Test",
"definitions": {
"MyEnum1": {"type": "string", "enum": ["E_A", "E_B"]},
"MyEnum2": {"type": "string", "enum": ["F_A", "F_B", "F_C", "F_D"]},
"MyInt": {
"default": "0",
"type": "integer",
"minimum": 0,
"maximum": 4294967295,
},
"MyObj1": {
"type": "object",
"properties": {
"e1": {"$ref": "#/definitions/MyEnum1"},
"e2": {"$ref": "#/definitions/MyEnum2"},
"i1": {"$ref": "#/definitions/MyInt"},
},
"required": ["e1", "e2", "i1"],
},
"MyArray": {
"type": "array",
"items": {"$ref": "#/definitions/MyObj1"},
"minItems": 0,
"uniqueItems": True,
},
"MyMsg1": {
"type": "object",
"properties": {"a1": {"$ref": "#/definitions/MyArray"}},
},
"MyMsg2": {"type": "object", "properties": {"s1": {"type": "string"}}},
},
"type": "object",
"oneOf": [{"$ref": "#/definitions/MyMsg1"}, {"$ref": "#/definitions/MyMsg2"}],
}
def test_regression_126(schema):
builder = pjs.ObjectBuilder(schema)
ns = builder.build_classes(standardize_names=False)
Obj1 = ns.MyObj1
Array1 = ns.MyArray
Msg1 = ns.MyMsg1
o1 = Obj1(e1="E_A", e2="F_C", i1=2600)
o2 = Obj1(e1="E_B", e2="F_D", i1=2500)
objs = Array1([o1, o2])
msg = Msg1(a1=objs)
print(msg.serialize())
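def test_regression_126_msg2(schema):
    # Hedged companion sketch (not part of the original regression test):
    # build the second oneOf branch (MyMsg2) from the same schema; the
    # sample value is hypothetical.
    builder = pjs.ObjectBuilder(schema)
    ns = builder.build_classes(standardize_names=False)
    msg2 = ns.MyMsg2(s1="hello")
    assert msg2.s1 == "hello"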
| mit | -1,888,342,341,075,785,700 | 30.534483 | 86 | 0.42865 | false |
Krakn/learning | src/python/advent_of_code/2017/05/a_maze_of_twisty_trampolines_all_alike.py | 1 | 3322 | #!/usr/bin/env python3
'''
--- Day 5: A Maze of Twisty Trampolines, All Alike ---
'''
def load_input(filename):
'''
Parse input file, returning an array of maze offsets.
'''
maze = list()
with open(filename, 'r') as file_input:
for line in file_input.readlines():
maze.append(int(line.strip()))
return maze
def part1(maze):
'''
--- Part 1 ---
An urgent interrupt arrives from the CPU: it's trapped in a maze of jump
instructions, and it would like assistance from any programs with spare
cycles to help find the exit.
The message includes a list of the offsets for each jump. Jumps are
relative: -1 moves to the previous instruction, and 2 skips the next one.
Start at the first instruction in the list. The goal is to follow the jumps
until one leads outside the list.
In addition, these instructions are a little strange; after each jump, the
offset of that instruction increases by 1. So, if you come across an offset
of 3, you would move three instructions forward, but change it to a 4 for
the next time it is encountered.
For example, consider the following list of jump offsets:
    0 3 0 1 -3

    Positive jumps ("forward") move downward; negative jumps move upward.
    For legibility in this example, these offset values will be written all
    on one line, with the current instruction marked in parentheses. The
    following steps would be taken before an exit is found:
(0) 3 0 1 -3 - Before we have taken any steps.
(1) 3 0 1 -3 - Jump with offset 0 (that is, don't jump at all).
Fortunately, the instruction is then incremented
to 1.
2 (3) 0 1 -3 - Step forward because of the instruction we just modified.
The first instruction is incremented again, now to 2.
2 4 0 1 (-3) - Jump all the way to the end; leave a 4 behind.
2 (4) 0 1 -2 - Go back to where we just were; increment -3 to -2.
2 5 0 1 -2 - Jump 4 steps forward, escaping the maze. In this
example, the exit is reached in 5 steps.
How many steps does it take to reach the exit?
'''
index = 0
steps = 0
while index >= 0 and index < len(maze):
maze[index] += 1
index = index + maze[index] - 1
steps += 1
return steps
def part2(maze):
'''
--- Part Two ---
Now, the jumps are even stranger: after each jump, if the offset was three
or more, instead decrease it by 1. Otherwise, increase it by 1 as before.
Using this rule with the above example, the process now takes 10 steps,
and the offset values after finding the exit are left as 2 3 2 3 -1.
How many steps does it now take to reach the exit?
'''
index = 0
steps = 0
while index >= 0 and index < len(maze):
if maze[index] >= 3:
maze[index] -= 1
index = index + maze[index] + 1
else:
maze[index] += 1
index = index + maze[index] - 1
steps += 1
return steps
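def _check_docstring_examples():
    '''
    Hedged self-check (not part of the original solution): the worked
    example from the docstrings above takes 5 steps under part 1 and
    10 steps under part 2.
    '''
    assert part1([0, 3, 0, 1, -3]) == 5
    assert part2([0, 3, 0, 1, -3]) == 10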
if __name__ == "__main__":
MAZE1 = load_input('input.txt')
MAZE2 = load_input('input.txt')
print("Part 1:", part1(MAZE1))
print("Part 2:", part2(MAZE2))
| isc | -1,629,784,482,675,308,500 | 33.604167 | 79 | 0.609573 | false |
winterbird-code/adbb | adbb/__init__.py | 1 | 2124 | #!/usr/bin/env python
#
# This file is part of adbb.
#
# adbb is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# adbb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with adbb. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import logging
import logging.handlers
import sys
import adbb.db
from adbb.link import AniDBLink
from adbb.animeobjs import Anime, AnimeTitle, Episode, File
from adbb.anames import get_titles
anidb_client_name = "adbb"
anidb_client_version = 2
anidb_api_version = 3
log = None
_anidb = None
_sessionmaker = None
def init(
anidb_user,
anidb_pwd,
sql_db_url,
debug=False,
loglevel='info',
logger=None,
outgoing_udp_port=9876):
if logger is None:
logger = logging.getLogger(__name__)
logger.setLevel(loglevel.upper())
if debug:
logger.setLevel(logging.DEBUG)
lh = logging.StreamHandler()
lh.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s'))
logger.addHandler(lh)
lh = logging.handlers.SysLogHandler(address='/dev/log')
lh.setFormatter(logging.Formatter(
'adbb %(filename)s/%(funcName)s:%(lineno)d - %(message)s'))
logger.addHandler(lh)
global log, _anidb, _sessionmaker
log = logger
_sessionmaker = adbb.db.init_db(sql_db_url)
_anidb = adbb.link.AniDBLink(
anidb_user,
anidb_pwd,
myport=outgoing_udp_port)
def get_session():
return _sessionmaker()
def close_session(session):
session.close()
def close():
global _anidb
_anidb.stop()
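# Hedged usage sketch (not part of the module); the credentials and the
# database URL below are placeholders:
#
#   import adbb
#   adbb.init('anidb_user', 'anidb_password', 'sqlite:///adbb.db')
#   session = adbb.get_session()
#   ... work with adbb.Anime, adbb.Episode, adbb.File ...
#   adbb.close_session(session)
#   adbb.close()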
| gpl-3.0 | -7,247,581,232,844,061,000 | 25.222222 | 83 | 0.663842 | false |
ifxit/nidhogg | tests/test_get_best_volume.py | 1 | 9629 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from nidhogg import get_best_volume_by_quota, get_best_volume_by_size
from nidhogg.core import NidhoggException
from nidhogg.compatible import Volume, VolumeWithQuotaRatio
def check_volume(volume, size):
"""Helper function that is applied to check if the volume is suitable."""
size *= 1048576 # convert to byte
size *= 1.2 # add buffer to the given size
max_file_count = 32000000
quota_ratio_threshold = 1.2
# checks
check_1 = bool(volume["size_available"] >= size)
check_2 = bool(volume["files_used"] < max_file_count)
check_3 = bool(volume["quota_ratio"] < quota_ratio_threshold)
return check_1 and check_2 and check_3
def test_best_project_home_1():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 116086018048.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 193273528320.0, 'size_available': 77187489792.0, 'quota_size': 216895848448.0, 'state': u'online', 'quota_ratio': 1.1222222222222222, 'snapable': True, 'files_used': 1049599.0, 'name': u'proj000'}),
VolumeWithQuotaRatio(**{'size_used': 768038326272.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 1428076625920.0, 'size_available': 660038287360.0, 'quota_size': 1526860873728.0, 'state': u'online', 'quota_ratio': 1.069172932330827, 'snapable': True, 'files_used': 6377127.0, 'name': u'proj109'}),
VolumeWithQuotaRatio(**{'size_used': 168616095744.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 483183820800.0, 'size_available': 314567712768.0, 'quota_size': 558345748480.0, 'state': u'online', 'quota_ratio': 1.1555555555555554, 'snapable': True, 'files_used': 882234.0, 'name': u'proj013'}),
VolumeWithQuotaRatio(**{'size_used': 755761999872.0, 'filer': u'filer07.example.com', 'files_total': 44876648.0, 'size_total': 1122060206080.0, 'size_available': 366298185728.0, 'quota_size': 918049259518.0, 'state': u'online', 'quota_ratio': 0.8181818181800358, 'snapable': True, 'files_used': 35818461.0, 'name': u'proj090'}),
VolumeWithQuotaRatio(**{'size_used': 1775658102784.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2415919104000.0, 'size_available': 640259833856.0, 'quota_size': 2655363530744.0, 'state': u'online', 'quota_ratio': 1.0991111111077998, 'snapable': True, 'files_used': 19140696.0, 'name': u'proj320'}),
VolumeWithQuotaRatio(**{'size_used': 1592106135552.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2126008811520.0, 'size_available': 533902389248.0, 'quota_size': 2759516487680.0, 'state': u'online', 'quota_ratio': 1.297979797979798, 'snapable': True, 'files_used': 11719412.0, 'name': u'proj108'}), # quota over 1.2
]
    # 50 GB, smallest quota ratio, because proj090 has too many files (> 32 million)
assert 'proj109' == get_best_volume_by_quota(volumes, check_volume, size=50 * 1024)['name']
def test_best_project_home_2():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 755761999872.0, 'filer': u'filer07.example.com', 'files_total': 44876648.0, 'size_total': 1122060206080.0, 'size_available': 366298185728.0, 'quota_size': 918049259518.0, 'state': u'online', 'quota_ratio': 0.8181818181800358, 'snapable': True, 'files_used': 31999999.0, 'name': u'proj090'}),
VolumeWithQuotaRatio(**{'size_used': 1775658102784.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2415919104000.0, 'size_available': 640259833856.0, 'quota_size': 2655363530744.0, 'state': u'online', 'quota_ratio': 1.0991111111077998, 'snapable': True, 'files_used': 19140696.0, 'name': u'proj320'}),
VolumeWithQuotaRatio(**{'size_used': 1592106135552.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2126008811520.0, 'size_available': 533902389248.0, 'quota_size': 2759516487680.0, 'state': u'online', 'quota_ratio': 1.297979797979798, 'snapable': True, 'files_used': 11719412.0, 'name': u'proj108'}), # quota over 1.2
]
# 100 GB, netapp with sufficient space
assert 'proj090' == get_best_volume_by_quota(volumes, check_volume, size=100 * 1024)['name']
def test_best_project_home_big():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 755761999872.0, 'filer': u'filer07.example.com', 'files_total': 44876648.0, 'size_total': 1122060206080.0, 'size_available': 366298185728.0, 'quota_size': 918049259518.0, 'state': u'online', 'quota_ratio': 0.8181818181800358, 'snapable': True, 'files_used': 31999999.0, 'name': u'proj090'}),
VolumeWithQuotaRatio(**{'size_used': 1775658102784.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2415919104000.0, 'size_available': 640259833856.0, 'quota_size': 2655363530744.0, 'state': u'online', 'quota_ratio': 1.0991111111077998, 'snapable': True, 'files_used': 19140696.0, 'name': u'proj320'}),
VolumeWithQuotaRatio(**{'size_used': 1592106135552.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2126008811520.0, 'size_available': 533902389248.0, 'quota_size': 2759516487680.0, 'state': u'online', 'quota_ratio': 1.297979797979798, 'snapable': True, 'files_used': 11719412.0, 'name': u'proj108'}), # quota over 1.2
]
# 350 GB, netapp with sufficient space
assert 'proj320' == get_best_volume_by_quota(volumes, check_volume, size=350 * 1024)['name']
def test_best_project_home_too_big():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 755761999872.0, 'filer': u'filer07.example.com', 'files_total': 44876648.0, 'size_total': 1122060206080.0, 'size_available': 366298185728.0, 'quota_size': 918049259518.0, 'state': u'online', 'quota_ratio': 0.8181818181800358, 'snapable': True, 'files_used': 31999999.0, 'name': u'proj090'}),
]
with pytest.raises(NidhoggException):
# 350 GB, netapp with sufficient space
get_best_volume_by_quota(volumes, check_volume, size=350 * 1024)
def test_best_project_home_too_much_files():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 755761999872.0, 'filer': u'filer07.example.com', 'files_total': 44876648.0, 'size_total': 1122060206080.0, 'size_available': 366298185728.0, 'quota_size': 918049259518.0, 'state': u'online', 'quota_ratio': 0.8181818181800358, 'snapable': True, 'files_used': 35818461.0, 'name': u'proj090'}),
]
with pytest.raises(NidhoggException):
get_best_volume_by_quota(volumes, check_volume, size=1234)
def test_best_project_home_too_big_ratio_quota():
volumes = [
VolumeWithQuotaRatio(**{'size_used': 1592106135552.0, 'filer': u'filer07.example.com', 'files_total': 31876689.0, 'size_total': 2126008811520.0, 'size_available': 533902389248.0, 'quota_size': 2759516487680.0, 'state': u'online', 'quota_ratio': 1.297979797979798, 'snapable': True, 'files_used': 11719412.0, 'name': u'proj108'}), # quota over 1.2
]
with pytest.raises(NidhoggException):
get_best_volume_by_quota(volumes, check_volume, size=1234)
def test_best_user_home_1():
volumes = [
Volume(**{'size_used': 432169402368.0, 'filer': u'filer21.example.com', 'files_total': 21790707.0, 'size_total': 676457349120.0, 'size_available': 244287254528.0, 'state': u'online', 'snapable': True, 'files_used': 8648992.0, 'name': u'home000'}),
Volume(**{'size_used': 81415127040.0, 'filer': u'filer21.example.com', 'files_total': 3112959.0, 'size_total': 96636764160.0, 'size_available': 15221399552.0, 'state': u'online', 'snapable': True, 'files_used': 1413035.0, 'name': u'home002'}),
Volume(**{'size_used': 349094301696.0, 'filer': u'filer21.example.com', 'files_total': 15564791.0, 'size_total': 429496729600.0, 'size_available': 80396869632.0, 'state': u'online', 'snapable': True, 'files_used': 7136798.0, 'name': u'home050'}),
Volume(**{'size_used': 133556998144.0, 'filer': u'filer21.example.com', 'files_total': 26460144.0, 'size_total': 429496729600.0, 'size_available': 295939719168.0, 'state': u'online', 'snapable': True, 'files_used': 862642.0, 'name': u'home110'}),
]
assert 'home110' == get_best_volume_by_size(volumes)['name']
def test_best_user_home_2():
def check(volume):
if volume['name'] == 'home110':
return False
return True
volumes = [
Volume(**{'size_used': 432169402368.0, 'filer': u'filer21.example.com', 'files_total': 21790707.0, 'size_total': 676457349120.0, 'size_available': 244287254528.0, 'state': u'online', 'snapable': True, 'files_used': 8648992.0, 'name': u'home000'}),
Volume(**{'size_used': 81415127040.0, 'filer': u'filer21.example.com', 'files_total': 3112959.0, 'size_total': 96636764160.0, 'size_available': 15221399552.0, 'state': u'online', 'snapable': True, 'files_used': 1413035.0, 'name': u'home002'}),
Volume(**{'size_used': 349094301696.0, 'filer': u'filer21.example.com', 'files_total': 15564791.0, 'size_total': 429496729600.0, 'size_available': 80396869632.0, 'state': u'online', 'snapable': True, 'files_used': 7136798.0, 'name': u'home050'}),
Volume(**{'size_used': 133556998144.0, 'filer': u'filer21.example.com', 'files_total': 26460144.0, 'size_total': 429496729600.0, 'size_available': 295939719168.0, 'state': u'online', 'snapable': True, 'files_used': 862642.0, 'name': u'home110'}),
]
assert 'home000' == get_best_volume_by_size(volumes, check)['name']
def test_best_user_home_no_volumes():
volumes = []
with pytest.raises(NidhoggException):
get_best_volume_by_size(volumes)['name']
| mit | 9,026,829,610,576,834,000 | 85.747748 | 355 | 0.678367 | false |
jcfr/mystic | examples/TEST_ffitPP2_b.py | 1 | 1429 | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Testing the polynomial fitting problem of [1] using scipy's Nelder-Mead algorithm.
Reference:
[1] Storn, R. and Price, K. Differential Evolution - A Simple and Efficient
Heuristic for Global Optimization over Continuous Spaces. Journal of Global
Optimization 11: 341-359, 1997.
"""
from test_ffit import Chebyshev8, plot_solution, print_solution
from TEST_ffitPP_b import ChebyshevCost
if __name__ == '__main__':
import random
from mystic.solvers import fmin
#from mystic._scipyoptimize import fmin
from mystic.tools import random_seed
random_seed(123)
import pp
import sys
if len(sys.argv) > 1:
tunnelport = sys.argv[1]
ppservers = ("localhost:%s" % tunnelport,)
else:
ppservers = ()
myserver = pp.Server(ppservers=ppservers)
trials = []
for trial in range(8):
x = tuple([random.uniform(-100,100) + Chebyshev8[i] for i in range(9)])
trials.append(x)
results = [myserver.submit(fmin,(ChebyshevCost,x),(),()) for x in trials]
for solution in results:
print_solution(solution())
#plot_solution(solution)
# end of file
| bsd-3-clause | 4,351,184,427,107,404,300 | 27.58 | 82 | 0.687194 | false |
Aloomaio/googleads-python-lib | examples/ad_manager/v201805/creative_service/create_creative_from_template.py | 1 | 3666 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a new template creative for a given advertiser.
To determine which companies are advertisers, run get_advertisers.py.
To determine which creative templates exist, run
get_all_creative_templates.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import os
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set id of the advertiser (company) that the creative will be assigned to.
ADVERTISER_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_id):
# Initialize appropriate service.
creative_service = client.GetService('CreativeService', version='v201805')
# Use the image banner with optional third party tracking template.
creative_template_id = '10000680'
# Create image asset.
file_name = 'image%s.jpg' % uuid.uuid4()
image_data = open(os.path.join(os.path.split(__file__)[0], '..', '..', 'data',
'medium_rectangle.jpg'), 'r').read()
size = {
'width': '300',
'height': '250'
}
asset = {
'xsi_type': 'CreativeAsset',
'fileName': file_name,
'assetByteArray': image_data,
'size': size
}
# Create creative from templates.
creative = {
'xsi_type': 'TemplateCreative',
'name': 'Template Creative #%s' % uuid.uuid4(),
'advertiserId': advertiser_id,
'size': size,
'creativeTemplateId': creative_template_id,
'creativeTemplateVariableValues': [
{
'xsi_type': 'AssetCreativeTemplateVariableValue',
'uniqueName': 'Imagefile',
'asset': asset
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imagewidth',
'value': '300'
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imageheight',
'value': '250'
},
{
'xsi_type': 'UrlCreativeTemplateVariableValue',
'uniqueName': 'ClickthroughURL',
'value': 'www.google.com'
},
{
'xsi_type': 'StringCreativeTemplateVariableValue',
'uniqueName': 'Targetwindow',
'value': '_blank'
}
]
}
# Call service to create the creative.
creative = creative_service.createCreatives([creative])[0]
# Display results.
print ('Template creative with id "%s", name "%s", and type "%s" was '
'created and can be previewed at %s.'
% (creative['id'], creative['name'],
ad_manager.AdManagerClassType(creative), creative['previewUrl']))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, ADVERTISER_ID)
| apache-2.0 | 1,060,630,061,611,670,300 | 32.027027 | 80 | 0.638298 | false |
open-mmlab/mmdetection | mmdet/models/detectors/base.py | 1 | 14139 | from abc import ABCMeta, abstractmethod
from collections import OrderedDict
import mmcv
import numpy as np
import torch
import torch.distributed as dist
from mmcv.runner import BaseModule, auto_fp16
from mmdet.core.visualization import imshow_det_bboxes
class BaseDetector(BaseModule, metaclass=ABCMeta):
"""Base class for detectors."""
def __init__(self, init_cfg=None):
super(BaseDetector, self).__init__(init_cfg)
self.fp16_enabled = False
@property
def with_neck(self):
"""bool: whether the detector has a neck"""
return hasattr(self, 'neck') and self.neck is not None
# TODO: these properties need to be carefully handled
# for both single stage & two stage detectors
@property
def with_shared_head(self):
"""bool: whether the detector has a shared head in the RoI Head"""
return hasattr(self, 'roi_head') and self.roi_head.with_shared_head
@property
def with_bbox(self):
"""bool: whether the detector has a bbox head"""
return ((hasattr(self, 'roi_head') and self.roi_head.with_bbox)
or (hasattr(self, 'bbox_head') and self.bbox_head is not None))
@property
def with_mask(self):
"""bool: whether the detector has a mask head"""
return ((hasattr(self, 'roi_head') and self.roi_head.with_mask)
or (hasattr(self, 'mask_head') and self.mask_head is not None))
@abstractmethod
def extract_feat(self, imgs):
"""Extract features from images."""
pass
def extract_feats(self, imgs):
"""Extract features from multiple images.
Args:
imgs (list[torch.Tensor]): A list of images. The images are
augmented from the same image but in different ways.
Returns:
list[torch.Tensor]: Features of different images
"""
assert isinstance(imgs, list)
return [self.extract_feat(img) for img in imgs]
def forward_train(self, imgs, img_metas, **kwargs):
"""
Args:
img (list[Tensor]): List of tensors of shape (1, C, H, W).
Typically these should be mean centered and std scaled.
img_metas (list[dict]): List of image info dict where each dict
has: 'img_shape', 'scale_factor', 'flip', and may also contain
'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
For details on the values of these keys, see
:class:`mmdet.datasets.pipelines.Collect`.
kwargs (keyword arguments): Specific to concrete implementation.
"""
# NOTE the batched image size information may be useful, e.g.
# in DETR, this is needed for the construction of masks, which is
# then used for the transformer_head.
batch_input_shape = tuple(imgs[0].size()[-2:])
for img_meta in img_metas:
img_meta['batch_input_shape'] = batch_input_shape
async def async_simple_test(self, img, img_metas, **kwargs):
raise NotImplementedError
@abstractmethod
def simple_test(self, img, img_metas, **kwargs):
pass
@abstractmethod
def aug_test(self, imgs, img_metas, **kwargs):
"""Test function with test time augmentation."""
pass
async def aforward_test(self, *, img, img_metas, **kwargs):
for var, name in [(img, 'img'), (img_metas, 'img_metas')]:
if not isinstance(var, list):
raise TypeError(f'{name} must be a list, but got {type(var)}')
num_augs = len(img)
if num_augs != len(img_metas):
raise ValueError(f'num of augmentations ({len(img)}) '
f'!= num of image metas ({len(img_metas)})')
# TODO: remove the restriction of samples_per_gpu == 1 when prepared
samples_per_gpu = img[0].size(0)
assert samples_per_gpu == 1
if num_augs == 1:
return await self.async_simple_test(img[0], img_metas[0], **kwargs)
else:
raise NotImplementedError
def forward_test(self, imgs, img_metas, **kwargs):
"""
Args:
imgs (List[Tensor]): the outer list indicates test-time
augmentations and inner Tensor should have a shape NxCxHxW,
which contains all images in the batch.
img_metas (List[List[dict]]): the outer list indicates test-time
augs (multiscale, flip, etc.) and the inner list indicates
images in a batch.
"""
for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]:
if not isinstance(var, list):
raise TypeError(f'{name} must be a list, but got {type(var)}')
num_augs = len(imgs)
if num_augs != len(img_metas):
raise ValueError(f'num of augmentations ({len(imgs)}) '
f'!= num of image meta ({len(img_metas)})')
# NOTE the batched image size information may be useful, e.g.
# in DETR, this is needed for the construction of masks, which is
# then used for the transformer_head.
for img, img_meta in zip(imgs, img_metas):
batch_size = len(img_meta)
for img_id in range(batch_size):
img_meta[img_id]['batch_input_shape'] = tuple(img.size()[-2:])
if num_augs == 1:
# proposals (List[List[Tensor]]): the outer list indicates
# test-time augs (multiscale, flip, etc.) and the inner list
# indicates images in a batch.
# The Tensor should have a shape Px4, where P is the number of
# proposals.
if 'proposals' in kwargs:
kwargs['proposals'] = kwargs['proposals'][0]
return self.simple_test(imgs[0], img_metas[0], **kwargs)
else:
assert imgs[0].size(0) == 1, 'aug test does not support ' \
'inference with batch size ' \
f'{imgs[0].size(0)}'
# TODO: support test augmentation for predefined proposals
assert 'proposals' not in kwargs
return self.aug_test(imgs, img_metas, **kwargs)
@auto_fp16(apply_to=('img', ))
def forward(self, img, img_metas, return_loss=True, **kwargs):
"""Calls either :func:`forward_train` or :func:`forward_test` depending
on whether ``return_loss`` is ``True``.
Note this setting will change the expected inputs. When
``return_loss=True``, img and img_meta are single-nested (i.e. Tensor
        and List[dict]), and when ``return_loss=False``, img and img_meta
should be double nested (i.e. List[Tensor], List[List[dict]]), with
the outer list indicating test time augmentations.
"""
if torch.onnx.is_in_onnx_export():
assert len(img_metas) == 1
return self.onnx_export(img[0], img_metas[0])
if return_loss:
return self.forward_train(img, img_metas, **kwargs)
else:
return self.forward_test(img, img_metas, **kwargs)
def _parse_losses(self, losses):
"""Parse the raw outputs (losses) of the network.
Args:
            losses (dict): Raw output of the network, which usually contains
                losses and other necessary information.
Returns:
tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor \
which may be a weighted sum of all losses, log_vars contains \
all the variables to be sent to the logger.
"""
log_vars = OrderedDict()
for loss_name, loss_value in losses.items():
if isinstance(loss_value, torch.Tensor):
log_vars[loss_name] = loss_value.mean()
elif isinstance(loss_value, list):
log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
else:
raise TypeError(
f'{loss_name} is not a tensor or list of tensors')
loss = sum(_value for _key, _value in log_vars.items()
if 'loss' in _key)
log_vars['loss'] = loss
for loss_name, loss_value in log_vars.items():
# reduce loss when distributed training
if dist.is_available() and dist.is_initialized():
loss_value = loss_value.data.clone()
dist.all_reduce(loss_value.div_(dist.get_world_size()))
log_vars[loss_name] = loss_value.item()
return loss, log_vars
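    # Hedged illustration (not part of the original class): for a toy dict
    # such as the hypothetical
    #   dict(loss_cls=torch.tensor(1.0),
    #        loss_bbox=[torch.tensor(0.25), torch.tensor(0.25)],
    #        acc=torch.tensor(0.9))
    # _parse_losses returns log_vars
    #   {'loss_cls': 1.0, 'loss_bbox': 0.5, 'acc': 0.9, 'loss': 1.5}
    # and `loss` is the summed tensor of the entries whose key contains
    # 'loss' (here 1.0 + 0.5); 'acc' is logged but not back-propagated.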
def train_step(self, data, optimizer):
"""The iteration step during training.
This method defines an iteration step during training, except for the
back propagation and optimizer updating, which are done in an optimizer
hook. Note that in some complicated cases or models, the whole process
including back propagation and optimizer updating is also defined in
this method, such as GAN.
Args:
data (dict): The output of dataloader.
optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of
runner is passed to ``train_step()``. This argument is unused
and reserved.
Returns:
dict: It should contain at least 3 keys: ``loss``, ``log_vars``, \
``num_samples``.
- ``loss`` is a tensor for back propagation, which can be a \
weighted sum of multiple losses.
- ``log_vars`` contains all the variables to be sent to the
logger.
- ``num_samples`` indicates the batch size (when the model is \
DDP, it means the batch size on each GPU), which is used for \
averaging the logs.
"""
losses = self(**data)
loss, log_vars = self._parse_losses(losses)
outputs = dict(
loss=loss, log_vars=log_vars, num_samples=len(data['img_metas']))
return outputs
def val_step(self, data, optimizer=None):
"""The iteration step during validation.
This method shares the same signature as :func:`train_step`, but used
during val epochs. Note that the evaluation after training epochs is
not implemented with this method, but an evaluation hook.
"""
losses = self(**data)
loss, log_vars = self._parse_losses(losses)
outputs = dict(
loss=loss, log_vars=log_vars, num_samples=len(data['img_metas']))
return outputs
def show_result(self,
img,
result,
score_thr=0.3,
bbox_color=(72, 101, 241),
text_color=(72, 101, 241),
mask_color=None,
thickness=2,
font_size=13,
win_name='',
show=False,
wait_time=0,
out_file=None):
"""Draw `result` over `img`.
Args:
img (str or Tensor): The image to be displayed.
result (Tensor or tuple): The results to draw over `img`
bbox_result or (bbox_result, segm_result).
score_thr (float, optional): Minimum score of bboxes to be shown.
Default: 0.3.
bbox_color (str or tuple(int) or :obj:`Color`):Color of bbox lines.
The tuple of color should be in BGR order. Default: 'green'
text_color (str or tuple(int) or :obj:`Color`):Color of texts.
The tuple of color should be in BGR order. Default: 'green'
mask_color (None or str or tuple(int) or :obj:`Color`):
Color of masks. The tuple of color should be in BGR order.
Default: None
thickness (int): Thickness of lines. Default: 2
font_size (int): Font size of texts. Default: 13
win_name (str): The window name. Default: ''
wait_time (float): Value of waitKey param.
Default: 0.
show (bool): Whether to show the image.
Default: False.
out_file (str or None): The filename to write the image.
Default: None.
Returns:
img (Tensor): Only if not `show` or `out_file`
"""
img = mmcv.imread(img)
img = img.copy()
if isinstance(result, tuple):
bbox_result, segm_result = result
if isinstance(segm_result, tuple):
segm_result = segm_result[0] # ms rcnn
else:
bbox_result, segm_result = result, None
bboxes = np.vstack(bbox_result)
labels = [
np.full(bbox.shape[0], i, dtype=np.int32)
for i, bbox in enumerate(bbox_result)
]
labels = np.concatenate(labels)
# draw segmentation masks
segms = None
if segm_result is not None and len(labels) > 0: # non empty
segms = mmcv.concat_list(segm_result)
if isinstance(segms[0], torch.Tensor):
segms = torch.stack(segms, dim=0).detach().cpu().numpy()
else:
segms = np.stack(segms, axis=0)
# if out_file specified, do not show image in window
if out_file is not None:
show = False
# draw bounding boxes
img = imshow_det_bboxes(
img,
bboxes,
labels,
segms,
class_names=self.CLASSES,
score_thr=score_thr,
bbox_color=bbox_color,
text_color=text_color,
mask_color=mask_color,
thickness=thickness,
font_size=font_size,
win_name=win_name,
show=show,
wait_time=wait_time,
out_file=out_file)
if not (show or out_file):
return img
def onnx_export(self, img, img_metas):
raise NotImplementedError(f'{self.__class__.__name__} does '
f'not support ONNX EXPORT')
| apache-2.0 | 7,170,421,546,663,079,000 | 39.512894 | 79 | 0.561779 | false |
eduble/panteda | operators/map/heatmap.py | 1 | 4193 | #!/usr/bin/env python3
import numpy as np
import numpy.random
from time import time
# web mercator projection functions
# ---------------------------------
def linear_lat(lat, atanh = np.arctanh, sin = np.sin, radians = np.radians):
return atanh(sin(radians(lat)))
def inv_linear_lat(ll, asin = np.arcsin, tanh = np.tanh, degrees = np.degrees):
return degrees(asin(tanh(ll)))
def lng_to_x(w, lng_min, lng_max, lng):
return (lng - lng_min) * (w / (lng_max - lng_min))
def lat_to_y(h, lat_min, lat_max, lat):
return (linear_lat(lat) - linear_lat(lat_min)) * (h / (linear_lat(lat_max) - linear_lat(lat_min)))
def x_to_lng(w, lng_min, lng_max, x):
return x * ((lng_max - lng_min)/w) + lng_min
def y_to_lat(h, lat_min, lat_max, y):
return inv_linear_lat(y * ((linear_lat(lat_max) - linear_lat(lat_min))/h) + linear_lat(lat_min))
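# Hedged sanity sketch (not part of the original module): the pairs above are
# inverses of each other; for a hypothetical 256x256 web-mercator tile:
#
#   x = lng_to_x(256, -180, 180, 45.0)            # -> 160.0
#   lng = x_to_lng(256, -180, 180, x)             # -> 45.0
#   y = lat_to_y(256, -85.0511, 85.0511, 45.0)
#   lat = y_to_lat(256, -85.0511, 85.0511, y)     # -> 45.0 (up to float error)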
# heatmap data generation
# -----------------------
class HeatMap:
def __init__(self, lnglat, width, height, westlng, eastlng, southlat, northlat):
# compute pixel bounds of the map
x = np.append(np.arange(0, width, 5), width)
y = np.append(np.arange(0, height, 5), height)
# project pixel bounds coordinates (x, y -> lng, lat)
edgelng = x_to_lng(width, westlng, eastlng, x)
centerlng = x_to_lng(width, westlng, eastlng, (x[1:] + x[:-1])/2)
edgelat = y_to_lat(height, southlat, northlat, y)
centerlat = y_to_lat(height, southlat, northlat, (y[1:] + y[:-1])/2)
# prepare computation parameters
self.bins = edgelng, edgelat
self.range = (westlng, eastlng), (southlat, northlat)
self.iterator = lnglat.chunks()
self.heatmap = None
# prepare compression parameters
scalelat = (edgelat[1:] - edgelat[:-1]).min() / 2
self.approx_centerlat = numpy.rint((centerlat - centerlat[0]) / scalelat)
scalelng = edgelng[1] - edgelng[0] # longitude is linear
self.approx_centerlng = numpy.rint((centerlng - centerlng[0]) / scalelng)
self.scales = dict(lat=scalelat, lng=scalelng)
self.offsets = dict(lat=centerlat[0], lng=centerlng[0])
# stream status parameters
self.done = False
def compute(self, time_credit):
# make histogram:
# - create a pixel grid
# - given a tuple (lng, lat) increment the corresponding pixel
deadline = time() + time_credit
deadline_reached = False
for chunk in self.iterator:
lng, lat = chunk.columns
chunk_heatmap = np.histogram2d(lng, lat, bins=self.bins, range=self.range)[0]
if self.heatmap is None:
self.heatmap = chunk_heatmap.T
else:
self.heatmap += chunk_heatmap.T
if time() > deadline:
deadline_reached = True
break
if not deadline_reached:
# we left the loop because of the end of iteration
self.done = True
# get sparse matrix representation: (lat, lng, intensity) tuples.
# in order to lower network usage, we will transfer this data in a
# compressed form: lng & lat values will be transfered as integers
# together with a scaling factor and an offset to be applied.
def compressed_form(self):
# count number of points
count = int(self.heatmap.sum())
if count == 0:
# if no points, return empty data
data = dict(lat = [], lng = [], val = [])
else:
# apply threshold and
# compute approximated sparse matrix data
nonzero_xy = ((self.heatmap / self.heatmap.max()) > 0.05).nonzero()
nonzero_x = nonzero_xy[1]
nonzero_y = nonzero_xy[0]
data = dict(
lat = self.approx_centerlat[nonzero_y].astype(int).tolist(),
lng = self.approx_centerlng[nonzero_x].astype(int).tolist(),
val = self.heatmap[nonzero_xy].astype(int).tolist()
)
return dict(
data = data,
scales = self.scales,
offsets = self.offsets,
count = count,
done = self.done
)
| gpl-3.0 | -2,101,006,383,756,166,100 | 41.785714 | 102 | 0.577152 | false |
mohitreddy1996/Gender-Detection-from-Signature | src/train_test/random_forests.py | 1 | 1140 | from sklearn.metrics import precision_recall_fscore_support
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import MinMaxScaler, normalize
df = pd.read_csv('../../Dataset/dataset.csv', delimiter='\t')
dataset = df.values
mask = np.random.rand(len(df)) < .80
train = df[mask]
test = df[~mask]
X = pd.DataFrame()
Y = pd.DataFrame()
X = train.ix[:, 2:len(train.columns) - 1]
Y = train.ix[:, len(train.columns) - 1: len(train.columns)]
X_Test = pd.DataFrame()
Y_Test = pd.DataFrame()
# After Normalising
X_standard = normalize(X)
print X_standard.shape
X_Test = test.ix[:, 2:len(test.columns) - 1]
Y_Test = test.ix[:, len(test.columns) - 1: len(test.columns)]
X_Test_standard = normalize(X_Test)
print X_Test_standard.shape
print "Training Data Set Size : ", str(len(X))
print "Testing Data Set Size : ", str(len(X_Test))
# tune parameters here.
rf = RandomForestClassifier(n_estimators=150, max_features=20)
rf.fit(X_standard, Y)
# predict
Y_Result = rf.predict(X_Test_standard)
print precision_recall_fscore_support(Y_Test, Y_Result, average='micro')
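# Hedged note (not part of the original script): with average='micro' the
# three scores coincide with plain accuracy; average=None would instead
# return one precision/recall/F1/support array per class, e.g.
#   print precision_recall_fscore_support(Y_Test, Y_Result, average=None)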
| mit | 5,244,072,690,348,756,000 | 20.923077 | 72 | 0.711404 | false |
aquaya/ivrhub | ivrhub/models.py | 1 | 3129 | ''' mongoengine models
'''
from mongoengine import *
class User(Document):
''' some are admins some are not
'''
admin_rights = BooleanField(required=True)
api_id = StringField()
api_key = StringField()
email = EmailField(required=True, unique=True, max_length=254)
email_confirmation_code = StringField(required=True)
email_confirmed = BooleanField(required=True)
forgot_password_code = StringField()
last_login_time = DateTimeField(required=True)
name = StringField()
organizations = ListField(ReferenceField('Organization'))
password_hash = StringField(required=True)
registration_time = DateTimeField(required=True)
verified = BooleanField(required=True)
class Organization(Document):
''' people join orgs
'''
description = StringField(default='')
# url-safe version of the name
label = StringField(unique=True, required=True)
location = StringField(default='')
name = StringField(unique=True, required=True)
class Form(Document):
''' the heart of the system
'''
# unique code for requesting this form via sms or a call
calling_code = StringField()
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default = '')
# url-safe version of the name
label = StringField(unique_with='organization')
language = StringField(default = '')
name = StringField(unique_with='organization')
organization = ReferenceField(Organization)
# have to store questions here as well so we know the order
questions = ListField(ReferenceField('Question'))
class Question(Document):
''' connected to forms
'''
audio_filename = StringField()
audio_url = StringField()
creation_time = DateTimeField()
description = StringField()
form = ReferenceField(Form)
# url-safe version of the name
label = StringField(unique_with='form')
name = StringField(unique_with='form')
# 'text_prompt', 'audio_file' or 'audio_url'
prompt_type = StringField(default='text_prompt')
# 'keypad' or 'voice' or 'no response'
response_type = StringField(default='keypad')
s3_key = StringField()
s3_url = StringField()
text_prompt = StringField()
text_prompt_language = StringField(default='en')
class Response(Document):
''' individual response to a form
'''
call_sid = StringField()
completion_time = DateTimeField()
form = ReferenceField(Form)
# whether this was a 'call' or 'ringback' or 'scheduled call'
initiated_using = StringField()
initiation_time = DateTimeField()
# track the progress of the response
last_question_asked = ReferenceField(Question)
# any notes about the response as a whole
notes = StringField()
respondent_phone_number = StringField()
class Answer(Document):
''' connected to questions and responses
'''
audio_url = StringField()
keypad_input = StringField()
# any notes on this answer (like a transcription)
notes = StringField()
question = ReferenceField(Question)
response = ReferenceField(Response)
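# Hedged usage sketch (not part of the app); the labels and values below are
# placeholders:
#
#   form = Form.objects.get(label='water-survey')
#   response = Response(form=form, respondent_phone_number='+15551234567')
#   response.save()
#   for question in form.questions:
#       Answer(question=question, response=response, keypad_input='1').save()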
| mit | 7,868,206,963,524,489,000 | 31.59375 | 66 | 0.686801 | false |
tzangms/PyConTW | pycon_project/biblion/views.py | 1 | 3501 | from datetime import datetime
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import simplejson as json
from django.contrib.sites.models import Site
from biblion.exceptions import InvalidSection
from biblion.models import Post, FeedHit
from biblion.settings import ALL_SECTION_NAME
def blog_index(request):
posts = Post.objects.current()
posts = posts.filter(language=request.LANGUAGE_CODE)
return render_to_response("biblion/blog_list.html", {
"posts": posts,
}, context_instance=RequestContext(request))
def blog_section_list(request, section):
try:
posts = Post.objects.section(section)
except InvalidSection:
raise Http404()
return render_to_response("biblion/blog_section_list.html", {
"section_slug": section,
"section_name": dict(Post.SECTION_CHOICES)[Post.section_idx(section)],
"posts": posts,
}, context_instance=RequestContext(request))
def blog_post_detail(request, **kwargs):
if "post_pk" in kwargs:
if request.user.is_authenticated() and request.user.is_staff:
queryset = Post.objects.all()
post = get_object_or_404(queryset, pk=kwargs["post_pk"])
else:
raise Http404()
else:
queryset = Post.objects.current()
queryset = queryset.filter(
published__year = int(kwargs["year"]),
published__month = int(kwargs["month"]),
published__day = int(kwargs["day"]),
)
post = get_object_or_404(queryset, slug=kwargs["slug"])
post.inc_views()
return render_to_response("biblion/blog_post.html", {
"post": post,
}, context_instance=RequestContext(request))
def serialize_request(request):
data = {
"path": request.path,
"META": {
"QUERY_STRING": request.META.get("QUERY_STRING"),
"REMOTE_ADDR": request.META.get("REMOTE_ADDR"),
}
}
for key in request.META:
if key.startswith("HTTP"):
data["META"][key] = request.META[key]
return json.dumps(data)
def blog_feed(request, section=None):
try:
posts = Post.objects.section(section)
except InvalidSection:
raise Http404()
if section is None:
section = ALL_SECTION_NAME
current_site = Site.objects.get_current()
feed_title = "%s Blog: %s" % (current_site.name, section[0].upper() + section[1:])
blog_url = "http://%s%s" % (current_site.domain, reverse("blog"))
url_name, kwargs = "blog_feed", {"section": section}
feed_url = "http://%s%s" % (current_site.domain, reverse(url_name, kwargs=kwargs))
if posts:
feed_updated = posts[0].published
else:
feed_updated = datetime(2009, 8, 1, 0, 0, 0)
# create a feed hit
hit = FeedHit()
hit.request_data = serialize_request(request)
hit.save()
atom = render_to_string("biblion/atom_feed.xml", {
"feed_id": feed_url,
"feed_title": feed_title,
"blog_url": blog_url,
"feed_url": feed_url,
"feed_updated": feed_updated,
"entries": posts,
"current_site": current_site,
})
return HttpResponse(atom, mimetype="application/atom+xml")
| bsd-3-clause | -8,178,591,767,137,141,000 | 29.181034 | 86 | 0.623536 | false |
jseabold/statsmodels | statsmodels/sandbox/distributions/sppatch.py | 5 | 24020 | '''patching scipy to fit distributions and expect method
This adds new methods to estimate continuous distribution parameters with some
fixed/frozen parameters. It also contains functions that calculate the expected
value of a function for any continuous or discrete distribution
It temporarily also contains Bootstrap and Monte Carlo function for testing the
distribution fit, but these are neither general nor verified.
Author: josef-pktd
License: Simplified BSD
'''
from statsmodels.compat.python import lmap
import numpy as np
from scipy import stats, optimize, integrate
########## patching scipy
#vonmises does not define finite bounds, because it is intended for circular
#support which does not define a proper pdf on the real line
stats.distributions.vonmises.a = -np.pi
stats.distributions.vonmises.b = np.pi
#the next 3 functions are for fit with some fixed parameters
#As they are written, they do not work as functions, only as methods
def _fitstart(self, x):
'''example method, method of moment estimator as starting values
Parameters
----------
x : ndarray
data for which the parameters are estimated
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
This example was written for the gamma distribution, but not verified
with literature
'''
loc = np.min([x.min(),0])
a = 4/stats.skew(x)**2
scale = np.std(x) / np.sqrt(a)
return (a, loc, scale)
def _fitstart_beta(self, x, fixed=None):
'''method of moment estimator as starting values for beta distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
for method of moment estimator for known loc and scale
https://en.wikipedia.org/wiki/Beta_distribution#Parameter_estimation
http://www.itl.nist.gov/div898/handbook/eda/section3/eda366h.htm
NIST reference also includes reference to MLE in
Johnson, Kotz, and Balakrishan, Volume II, pages 221-235
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
a, b = x.min(), x.max()
    eps = (b - a)*0.01  #positive margin so the support strictly contains the data
if fixed is None:
#this part not checked with books
loc = a - eps
        scale = (b - a) + 2*eps  #support [loc, loc + scale] = [a - eps, b + eps]
else:
if np.isnan(fixed[-2]):
#estimate loc
loc = a - eps
else:
loc = fixed[-2]
if np.isnan(fixed[-1]):
#estimate scale
scale = (b + eps) - loc
else:
scale = fixed[-1]
#method of moment for known loc scale:
scale = float(scale)
xtrans = (x - loc)/scale
xm = xtrans.mean()
xv = xtrans.var()
tmp = (xm*(1-xm)/xv - 1)
p = xm * tmp
q = (1 - xm) * tmp
return (p, q, loc, scale) #check return type and should fixed be returned ?
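#Hedged numeric check (not part of the original module): for Beta(2, 3) the
#population moments are m = 0.4 and v = 0.04, so tmp = 0.4*0.6/0.04 - 1 = 5
#and the moment equations above recover p = 0.4*5 = 2 and q = 0.6*5 = 3.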
def _fitstart_poisson(self, x, fixed=None):
'''maximum likelihood estimator as starting values for Poisson distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
MLE :
https://en.wikipedia.org/wiki/Poisson_distribution#Maximum_likelihood
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
a = x.min()
eps = 0 # is this robust ?
if fixed is None:
#this part not checked with books
loc = a - eps
else:
if np.isnan(fixed[-1]):
#estimate loc
loc = a - eps
else:
loc = fixed[-1]
#MLE for standard (unshifted, if loc=0) Poisson distribution
xtrans = (x - loc)
lambd = xtrans.mean()
#second derivative d loglike/ dlambd Not used
#dlldlambd = 1/lambd # check
return (lambd, loc) #check return type and should fixed be returned ?
def nnlf_fr(self, thetash, x, frmask):
# new frozen version
# - sum (log pdf(x, theta),axis=0)
# where theta are the parameters (including loc and scale)
#
try:
if frmask is not None:
theta = frmask.copy()
theta[np.isnan(frmask)] = thetash
else:
theta = thetash
loc = theta[-2]
scale = theta[-1]
args = tuple(theta[:-2])
except IndexError:
raise ValueError("Not enough input arguments.")
if not self._argcheck(*args) or scale <= 0:
return np.inf
x = np.array((x-loc) / scale)
cond0 = (x <= self.a) | (x >= self.b)
if (np.any(cond0)):
return np.inf
else:
N = len(x)
#raise ValueError
return self._nnlf(x, *args) + N*np.log(scale)
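# Illustrative sketch of the frmask convention (hypothetical numbers): with
# frmask = np.array([np.nan, 0.0, 1.0]) only the shape parameter is free, so
# nnlf_fr(dist, [2.5], x, frmask) evaluates the negative loglikelihood at
# theta = (2.5, 0.0, 1.0), i.e. shape=2.5 with loc=0 and scale=1 held fixed.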
def fit_fr(self, data, *args, **kwds):
'''estimate distribution parameters by MLE taking some parameters as fixed
Parameters
----------
data : ndarray, 1d
data for which the distribution parameters are estimated,
args : list ? check
starting values for optimization
kwds :
- 'frozen' : array_like
values for frozen distribution parameters and, for elements with
np.nan, the corresponding parameter will be estimated
Returns
-------
argest : ndarray
estimated parameters
Examples
--------
generate random sample
>>> np.random.seed(12345)
>>> x = stats.gamma.rvs(2.5, loc=0, scale=1.2, size=200)
estimate all parameters
>>> stats.gamma.fit(x)
array([ 2.0243194 , 0.20395655, 1.44411371])
>>> stats.gamma.fit_fr(x, frozen=[np.nan, np.nan, np.nan])
array([ 2.0243194 , 0.20395655, 1.44411371])
keep loc fixed, estimate shape and scale parameters
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, np.nan])
array([ 2.45603985, 1.27333105])
keep loc and scale fixed, estimate shape parameter
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
array([ 3.00048828])
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.2])
array([ 2.57792969])
estimate only scale parameter for fixed shape and loc
>>> stats.gamma.fit_fr(x, frozen=[2.5, 0.0, np.nan])
array([ 1.25087891])
Notes
-----
self is an instance of a distribution class. This can be attached to
scipy.stats.distributions.rv_continuous
*Todo*
* check if docstring is correct
* more input checking, args is list ? might also apply to current fit method
'''
loc0, scale0 = lmap(kwds.get, ['loc', 'scale'],[0.0, 1.0])
Narg = len(args)
if Narg == 0 and hasattr(self, '_fitstart'):
x0 = self._fitstart(data)
elif Narg > self.numargs:
raise ValueError("Too many input arguments.")
else:
args += (1.0,)*(self.numargs-Narg)
# location and scale are at the end
x0 = args + (loc0, scale0)
if 'frozen' in kwds:
frmask = np.array(kwds['frozen'])
if len(frmask) != self.numargs+2:
raise ValueError("Incorrect number of frozen arguments.")
else:
# keep starting values for not frozen parameters
for n in range(len(frmask)):
# Troubleshooting ex_generic_mle_tdist
if isinstance(frmask[n], np.ndarray) and frmask[n].size == 1:
frmask[n] = frmask[n].item()
# If there were array elements, then frmask will be object-dtype,
# in which case np.isnan will raise TypeError
frmask = frmask.astype(np.float64)
x0 = np.array(x0)[np.isnan(frmask)]
else:
frmask = None
    #print(x0)
    #print(frmask)
return optimize.fmin(self.nnlf_fr, x0,
args=(np.ravel(data), frmask), disp=0)
#The next two functions/methods calculate expected value of an arbitrary
#function, however for the continuous functions intquad is use, which might
#require continuouity or smoothness in the function.
#TODO: add option for Monte Carlo integration
def expect(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
'''calculate expected value of a function with respect to the distribution
location and scale only tested on a few examples
Parameters
----------
all parameters are keyword parameters
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
lb, ub : numbers
lower and upper bound for integration, default is set to the support
of the distribution
conditional : bool (False)
If true then the integral is corrected by the conditional probability
of the integration interval. The return value is the expectation
of the function, conditional on being in the given interval.
Returns
-------
expected value : float
Notes
-----
    This function has not been checked for its behavior when the integral is
not finite. The integration behavior is inherited from scipy.integrate.quad.
'''
if fn is None:
def fun(x, *args):
return x*self.pdf(x, loc=loc, scale=scale, *args)
else:
def fun(x, *args):
return fn(x)*self.pdf(x, loc=loc, scale=scale, *args)
if lb is None:
lb = loc + self.a * scale #(self.a - loc)/(1.0*scale)
if ub is None:
ub = loc + self.b * scale #(self.b - loc)/(1.0*scale)
if conditional:
invfac = (self.sf(lb, loc=loc, scale=scale, *args)
- self.sf(ub, loc=loc, scale=scale, *args))
else:
invfac = 1.0
return integrate.quad(fun, lb, ub,
args=args)[0]/invfac
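# Example usage once attached as a method (see the patching section below);
# values are approximate and assume a standard normal:
#   stats.norm.expect(lambda x: x**2)                       # ~1.0 == Var(X)
#   stats.norm.expect(lambda x: x, lb=0, conditional=True)  # E[X | X > 0] ~ 0.8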
def expect_v2(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
'''calculate expected value of a function with respect to the distribution
location and scale only tested on a few examples
Parameters
----------
all parameters are keyword parameters
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
lb, ub : numbers
lower and upper bound for integration, default is set using
quantiles of the distribution, see Notes
conditional : bool (False)
If true then the integral is corrected by the conditional probability
of the integration interval. The return value is the expectation
of the function, conditional on being in the given interval.
Returns
-------
expected value : float
Notes
-----
    This function has not been checked for its behavior when the integral is
not finite. The integration behavior is inherited from scipy.integrate.quad.
The default limits are lb = self.ppf(1e-9, *args), ub = self.ppf(1-1e-9, *args)
For some heavy tailed distributions, 'alpha', 'cauchy', 'halfcauchy',
'levy', 'levy_l', and for 'ncf', the default limits are not set correctly
even when the expectation of the function is finite. In this case, the
integration limits, lb and ub, should be chosen by the user. For example,
for the ncf distribution, ub=1000 works in the examples.
There are also problems with numerical integration in some other cases,
for example if the distribution is very concentrated and the default limits
are too large.
'''
#changes: 20100809
#correction and refactoring how loc and scale are handled
#uses now _pdf
#needs more testing for distribution with bound support, e.g. genpareto
if fn is None:
def fun(x, *args):
return (loc + x*scale)*self._pdf(x, *args)
else:
def fun(x, *args):
return fn(loc + x*scale)*self._pdf(x, *args)
if lb is None:
#lb = self.a
try:
lb = self.ppf(1e-9, *args) #1e-14 quad fails for pareto
except ValueError:
lb = self.a
else:
lb = max(self.a, (lb - loc)/(1.0*scale)) #transform to standardized
if ub is None:
#ub = self.b
try:
ub = self.ppf(1-1e-9, *args)
except ValueError:
ub = self.b
else:
ub = min(self.b, (ub - loc)/(1.0*scale))
if conditional:
invfac = self._sf(lb,*args) - self._sf(ub,*args)
else:
invfac = 1.0
return integrate.quad(fun, lb, ub,
args=args, limit=500)[0]/invfac
### for discrete distributions
#TODO: check that for a distribution with finite support the calculations are
# done with one array summation (np.dot)
#based on _drv2_moment(self, n, *args), but streamlined
def expect_discrete(self, fn=None, args=(), loc=0, lb=None, ub=None,
conditional=False):
'''calculate expected value of a function with respect to the distribution
for discrete distribution
Parameters
----------
(self : distribution instance as defined in scipy stats)
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
optional keyword parameters
lb, ub : numbers
lower and upper bound for integration, default is set to the support
        of the distribution, lb and ub are inclusive (lb<=k<=ub)
conditional : bool (False)
If true then the expectation is corrected by the conditional
probability of the integration interval. The return value is the
expectation of the function, conditional on being in the given
        interval (k such that lb<=k<=ub).
Returns
-------
expected value : float
Notes
-----
* function is not vectorized
    * accuracy: uses self.moment_tol as stopping criterion
for heavy tailed distribution e.g. zipf(4), accuracy for
mean, variance in example is only 1e-5,
increasing precision (moment_tol) makes zipf very slow
* suppnmin=100 internal parameter for minimum number of points to evaluate
could be added as keyword parameter, to evaluate functions with
non-monotonic shapes, points include integers in (-suppnmin, suppnmin)
* uses maxcount=1000 limits the number of points that are evaluated
to break loop for infinite sums
(a maximum of suppnmin+1000 positive plus suppnmin+1000 negative integers
are evaluated)
'''
#moment_tol = 1e-12 # increase compared to self.moment_tol,
# too slow for only small gain in precision for zipf
#avoid endless loop with unbound integral, eg. var of zipf(2)
maxcount = 1000
suppnmin = 100 #minimum number of points to evaluate (+ and -)
if fn is None:
def fun(x):
#loc and args from outer scope
return (x+loc)*self._pmf(x, *args)
else:
def fun(x):
#loc and args from outer scope
return fn(x+loc)*self._pmf(x, *args)
# used pmf because _pmf does not check support in randint
# and there might be problems(?) with correct self.a, self.b at this stage
# maybe not anymore, seems to work now with _pmf
self._argcheck(*args) # (re)generate scalar self.a and self.b
if lb is None:
lb = (self.a)
else:
lb = lb - loc
if ub is None:
ub = (self.b)
else:
ub = ub - loc
if conditional:
invfac = self.sf(lb,*args) - self.sf(ub+1,*args)
else:
invfac = 1.0
tot = 0.0
low, upp = self._ppf(0.001, *args), self._ppf(0.999, *args)
low = max(min(-suppnmin, low), lb)
upp = min(max(suppnmin, upp), ub)
supp = np.arange(low, upp+1, self.inc) #check limits
    #print('low, upp', low, upp)
tot = np.sum(fun(supp))
diff = 1e100
pos = upp + self.inc
count = 0
#handle cases with infinite support
while (pos <= ub) and (diff > self.moment_tol) and count <= maxcount:
diff = fun(pos)
tot += diff
pos += self.inc
count += 1
if self.a < 0: #handle case when self.a = -inf
diff = 1e100
pos = low - self.inc
while (pos >= lb) and (diff > self.moment_tol) and count <= maxcount:
diff = fun(pos)
tot += diff
pos -= self.inc
count += 1
if count > maxcount:
# replace with proper warning
print('sum did not converge')
return tot/invfac
stats.distributions.rv_continuous.fit_fr = fit_fr
stats.distributions.rv_continuous.nnlf_fr = nnlf_fr
stats.distributions.rv_continuous.expect = expect
stats.distributions.rv_discrete.expect = expect_discrete
stats.distributions.beta_gen._fitstart = _fitstart_beta #not tried out yet
stats.distributions.poisson_gen._fitstart = _fitstart_poisson #not tried out yet
########## end patching scipy
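# After the patching above, every scipy distribution instance exposes these
# helpers (illustrative sketch; numbers depend on the random sample):
#   x = stats.gamma.rvs(2.5, loc=0, scale=1.2, size=200)
#   stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, np.nan])       # shape+scale, loc fixed
#   stats.poisson.expect(lambda k: k * (k - 1), args=(3.0,))  # ~9.0 = lambda**2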
def distfitbootstrap(sample, distr, nrepl=100):
'''run bootstrap for estimation of distribution parameters
hard coded: only one shape parameter is allowed and estimated,
loc=0 and scale=1 are fixed in the estimation
Parameters
----------
sample : ndarray
original sample data for bootstrap
distr : distribution instance with fit_fr method
nrepl : int
number of bootstrap replications
Returns
-------
res : array (nrepl,)
parameter estimates for all bootstrap replications
'''
nobs = len(sample)
res = np.zeros(nrepl)
for ii in range(nrepl):
rvsind = np.random.randint(nobs, size=nobs)
x = sample[rvsind]
res[ii] = distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
return res
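# Example (sketch): bootstrap distribution of the gamma shape estimate with
# loc=0 and scale=1 frozen, as hard coded above:
#   bres = distfitbootstrap(sample, stats.gamma, nrepl=100)  # sample: 1d ndarray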
def distfitmc(sample, distr, nrepl=100, distkwds={}):
'''run Monte Carlo for estimation of distribution parameters
hard coded: only one shape parameter is allowed and estimated,
loc=0 and scale=1 are fixed in the estimation
Parameters
----------
sample : ndarray
original sample data, in Monte Carlo only used to get nobs,
distr : distribution instance with fit_fr method
nrepl : int
number of Monte Carlo replications
Returns
-------
res : array (nrepl,)
parameter estimates for all Monte Carlo replications
'''
arg = distkwds.pop('arg')
nobs = len(sample)
res = np.zeros(nrepl)
for ii in range(nrepl):
x = distr.rvs(arg, size=nobs, **distkwds)
res[ii] = distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
return res
def printresults(sample, arg, bres, kind='bootstrap'):
    '''calculate and print Bootstrap or Monte Carlo results
Parameters
----------
sample : ndarray
original sample data
arg : float (for general case will be array)
bres : ndarray
parameter estimates from Bootstrap or Monte Carlo run
kind : {'bootstrap', 'montecarlo'}
        output is printed for Bootstrap (default) or Monte Carlo
Returns
-------
None, currently only printing
Notes
-----
    still a bit of a mess because it is used for both Bootstrap and Monte Carlo
made correction:
reference point for bootstrap is estimated parameter
not clear:
I'm not doing any ddof adjustment in estimation of variance, do we
need ddof>0 ?
todo: return results and string instead of printing
'''
print('true parameter value')
print(arg)
print('MLE estimate of parameters using sample (nobs=%d)'% (nobs))
argest = distr.fit_fr(sample, frozen=[np.nan, 0.0, 1.0])
print(argest)
if kind == 'bootstrap':
#bootstrap compares to estimate from sample
argorig = arg
arg = argest
print('%s distribution of parameter estimate (nrepl=%d)'% (kind, nrepl))
print('mean = %f, bias=%f' % (bres.mean(0), bres.mean(0)-arg))
print('median', np.median(bres, axis=0))
print('var and std', bres.var(0), np.sqrt(bres.var(0)))
bmse = ((bres - arg)**2).mean(0)
print('mse, rmse', bmse, np.sqrt(bmse))
bressorted = np.sort(bres)
print('%s confidence interval (90%% coverage)' % kind)
    print(bressorted[int(nrepl*0.05)], bressorted[int(nrepl*0.95)])
print('%s confidence interval (90%% coverage) normal approximation' % kind)
print(stats.norm.ppf(0.05, loc=bres.mean(), scale=bres.std()),)
print(stats.norm.isf(0.05, loc=bres.mean(), scale=bres.std()))
print('Kolmogorov-Smirnov test for normality of %s distribution' % kind)
print(' - estimated parameters, p-values not really correct')
print(stats.kstest(bres, 'norm', (bres.mean(), bres.std())))
if __name__ == '__main__':
examplecases = ['largenumber', 'bootstrap', 'montecarlo'][:]
if 'largenumber' in examplecases:
print('\nDistribution: vonmises')
for nobs in [200]:#[20000, 1000, 100]:
x = stats.vonmises.rvs(1.23, loc=0, scale=1, size=nobs)
print('\nnobs:', nobs)
print('true parameter')
print('1.23, loc=0, scale=1')
print('unconstrained')
print(stats.vonmises.fit(x))
print(stats.vonmises.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
print('with fixed loc and scale')
print(stats.vonmises.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))
print('\nDistribution: gamma')
distr = stats.gamma
arg, loc, scale = 2.5, 0., 20.
for nobs in [200]:#[20000, 1000, 100]:
x = distr.rvs(arg, loc=loc, scale=scale, size=nobs)
print('\nnobs:', nobs)
print('true parameter')
print('%f, loc=%f, scale=%f' % (arg, loc, scale))
print('unconstrained')
print(distr.fit(x))
print(distr.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
print('with fixed loc and scale')
print(distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))
print('with fixed loc')
print(distr.fit_fr(x, frozen=[np.nan, 0.0, np.nan]))
ex = ['gamma', 'vonmises'][0]
if ex == 'gamma':
distr = stats.gamma
arg, loc, scale = 2.5, 0., 1
elif ex == 'vonmises':
distr = stats.vonmises
arg, loc, scale = 1.5, 0., 1
else:
raise ValueError('wrong example')
nobs = 100
nrepl = 1000
sample = distr.rvs(arg, loc=loc, scale=scale, size=nobs)
print('\nDistribution:', distr)
if 'bootstrap' in examplecases:
print('\nBootstrap')
bres = distfitbootstrap(sample, distr, nrepl=nrepl )
printresults(sample, arg, bres)
if 'montecarlo' in examplecases:
print('\nMonteCarlo')
mcres = distfitmc(sample, distr, nrepl=nrepl,
distkwds=dict(arg=arg, loc=loc, scale=scale))
printresults(sample, arg, mcres, kind='montecarlo')
| bsd-3-clause | -3,253,983,564,312,369,000 | 32.03989 | 91 | 0.617527 | false |
codefisher/mozbutton_sdk | builder/restartless_button.py | 1 | 28578 | import os
import re
import json
import codecs
import lxml.etree as ET
from copy import deepcopy
from collections import namedtuple, defaultdict
try:
from PIL import Image
except ImportError:
pass
from builder.ext_button import Button, Option, ChromeString, ChromeFile
try:
basestring
except NameError:
basestring = str # py3
Keys = namedtuple("Keys", ['command', 'button'])
ExtraUI = namedtuple("ExtraUI", ["parent", "parent_id", "index", "code", "after"])
class RestartlessButton(Button):
def __init__(self, *args, **kwargs):
super(RestartlessButton, self).__init__(*args, **kwargs)
self._ui_ids = set()
self._included_js_files = []
self._bootstrap_globals = []
self._bootstrap_startup = []
self._bootstrap_shutdown = []
for folder, button, files in self._info:
if "bootstrap" in files:
for file_name in os.listdir(os.path.join(folder, "bootstrap")):
if file_name[0] != ".":
with open(os.path.join(folder, "bootstrap", file_name), "r") as js_fp:
data = js_fp.read()
if file_name == "global.js":
self._bootstrap_globals.append(data)
elif file_name == "startup.js":
self._bootstrap_startup.append(data)
elif file_name == "shutdown.js":
self._bootstrap_shutdown.append(data)
def get_files(self):
for file_name, data in self.get_jsm_files().items():
yield (file_name + ".jsm", data)
def locale_files(self, button_locales, *args, **kwargs):
dtd_data = button_locales.get_dtd_data(self.get_locale_strings(),
self, untranslated=False, format_type="properties")
for locale, data in dtd_data.items():
yield locale, "button_labels.properties", data
locales_inuse = dtd_data.keys()
key_strings = button_locales.get_string_data(self.get_key_strings(),
self, format_type="properties")
for locale, data in self.locale_file_filter(key_strings, locales_inuse):
yield locale, "keys.properties", data
for locale, file_name, data in super(RestartlessButton, self).locale_files(button_locales, locales_inuse):
yield locale, file_name, data
def jsm_keyboard_shortcuts(self, file_name):
if not self._settings.get("use_keyboard_shortcuts"):
return
for button in self._button_keys.keys():
func = self._button_commands.get(file_name, {}).get(button)
if func is not None:
yield Keys(self._patch_call(func), button)
def option_data(self):
scripts = []
if self._settings.get("use_keyboard_shortcuts"):
scripts.append("key-option.js")
with open(self.find_file("key-option.xul"), "r") as key_option_file:
key_option_template = key_option_file.read()
for button in self._button_keys.keys():
xul = self.format_string(key_option_template,
button=button,
menu_label=button + ".label")
applications = " ".join(self._button_applications[button])
self._button_options[button + "-key-item"].append(
Option("tb-key-shortcut.option.title:lightning.png:" + applications, xul))
self._button_applications[
button + "-key-item"] = self._button_applications[button]
files, javascript = super(RestartlessButton, self).option_data()
return files, javascript + scripts
def get_pref_list(self):
settings = super(RestartlessButton, self).get_pref_list()
pref_root = self._settings.get("pref_root")
if self._settings.get('use_keyboard_shortcuts'):
for button in self._button_keys.keys():
settings.append(("{}key-disabled.{}".format(pref_root, button), 'false'))
properties = self.pref_locale_file("'chrome://{chrome_name}/locale/{prefix}keys.properties'")
settings.append(("{}key.{}".format(pref_root, button), properties))
settings.append(("{}modifier.{}".format(pref_root, button), properties))
return settings
def get_js_files(self):
js_files = super(RestartlessButton, self).get_js_files()
if self._settings.get("use_keyboard_shortcuts"):
with open(self.find_file("key-option.js")) as key_option_fp:
js_files["key-option"] = self.string_subs(key_option_fp.read())
self._included_js_files = js_files.keys()
return js_files
def get_chrome_strings(self):
for chrome_string in super(RestartlessButton, self).get_chrome_strings():
yield chrome_string
yield ChromeString(file_name='bootstrap.js', data=self.create_bootstrap())
defaults = self.get_defaults()
if defaults:
yield ChromeString(file_name=os.path.join("chrome", "content", "defaultprefs.js"), data=defaults)
def get_chrome_files(self):
for chrome_file in super(RestartlessButton, self).get_chrome_files():
yield chrome_file
yield ChromeFile(file_name=os.path.join("chrome", "content", "customizable.jsm"), path=self.find_file('customizable.jsm'))
def create_bootstrap(self):
chrome_name = self._settings.get("chrome_name")
loaders = []
resource = ""
if self.resource_files:
resource = "createResource('{0}', 'chrome://{0}/content/resources/');".format(chrome_name)
window_modules = defaultdict(list)
for file_name in self._button_files:
for overlay in self._settings.get("files_to_window").get(file_name, ()):
window_modules[overlay].append(file_name)
for overlay, modules in window_modules.items():
mods = "\n\t\t".join(["modules.push('chrome://{0}/content/{1}.jsm');".format(chrome_name, file_name) for file_name in modules])
loaders.append("(uri == '{0}') {{\n\t\t{1}\n\t}}".format(overlay, mods))
if self._settings.get("show_updated_prompt"):
install_template = self.env.get_template('bootstrap.js')
install = install_template.render(**self._settings)
else:
install = ""
template = self.env.get_template('bootstrap.js')
return template.render(
resource=resource, install=install,
globals=self.string_subs("\n".join(self._bootstrap_globals)),
startup=self.string_subs("\n".join(self._bootstrap_startup)),
shutdown=self.string_subs("\n".join(self._bootstrap_shutdown)),
loaders = "if" + " else if".join(loaders),
**self._settings)
def _jsm_create_menu(self, file_name, buttons):
if not self._settings.get('menuitems'):
return ''
statements = []
data = self.create_menu_dom(file_name, buttons)
in_submenu = [menuitem for menuitem in data if menuitem.parent_id is None]
in_menu = [menuitem for menuitem in data if menuitem.parent_id is not None]
num = 0
template = self.env.get_template('menu.js')
if in_submenu:
menu_id, menu_label, locations = self._settings.get("menu_meta")
if isinstance(locations, basestring):
locations = [locations]
for i, location in enumerate(locations):
menu_id_num = "{0}_{1}".format(menu_id, i) if i else menu_id
meta = self._settings.get("file_to_menu").get(location, {}).get(file_name)
if meta:
menu_name, insert_after = meta
statements.append(template.render(**{
"menu_name": menu_name,
"menu_id": menu_id_num,
"label": menu_label,
"class": "menu-iconic",
"menu_label": menu_label,
"insert_after": insert_after,
"menuitems_sorted": self._settings.get("menuitems_sorted")
}))
num += 3
for item, _, _ in in_submenu:
item_statements, count, _ = self._create_dom(
item, top="menupopup_2", count=num, doc="document")
num = count + 1
statements.extend(item_statements)
for item, menu_name, insert_after in in_menu:
statements.append("var menupopup_{0} = document.getElementById('{1}');".format(num, menu_name))
var_name = "menupopup_%s" % num
num += 1
item.attrib["insertafter"] = insert_after
item_statements, count, _ = self._create_dom(item, top=var_name, count=num)
num = count + 1
statements.extend(item_statements)
return "\n\t".join(statements)
def _dom_string_lookup(self, value):
result = []
items = re.findall(r'&.+?;|[^&;]+', value)
for item in items:
if item == "&brandShortName;":
result.append("Cc['@mozilla.org/xre/app-info;1'].createInstance(Ci.nsIXULAppInfo).name")
elif item[0] == '&' and item[-1] == ';':
result.append("buttonStrings.get('%s')" % item[1:-1])
else:
result.append("'%s'" % item)
return ' + '.join(result)
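    # Example (sketch, hypothetical entity name): _dom_string_lookup("Open &myext.label; now")
    # returns "'Open ' + buttonStrings.get('myext.label') + ' now'", i.e. locale
    # entities become runtime string-bundle lookups concatenated with literals.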
def _create_dom(self, root, top=None, count=0, doc='document', child_parent=None, rename=None, append_children=True):
num = count
        if rename is None:
rename = {}
children = []
statements = [
"var %s_%s = %s.createElement('%s');" % (root.tag, num, doc, rename.get(root.tag, root.tag)),
]
javascript_object = self._settings.get("javascript_object")
for key, value in sorted(root.attrib.items(), key=self._attr_key):
if key == 'id':
statements.append("%s_%s.id = '%s';" % (root.tag, num, value))
elif key in ('label', 'tooltiptext') or (root.tag == 'key' and key in ('key', 'keycode', 'modifiers')):
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, self._dom_string_lookup(value))))
elif key == "class":
for val in value.split():
statements.append('%s_%s.classList.add("%s");' % (root.tag, num, val))
elif key[0:2] == 'on':
if key == 'oncommand' and root.tag == 'key':
# we do this because key elements without a oncommand are optimized away
# but we can't call our function, because that might not exist
# in the window scope, so the event listener has to be used
statements.append("%s_%s.setAttribute('oncommand', 'void(0);');" % (root.tag, num))
statements.append("%s_%s.addEventListener('%s', function(event) {\n\t\t\t\t%s\n\t\t\t}, false);" % (root.tag, num, key[2:], self._patch_call(value)))
elif key == "insertafter":
pass
elif key == "showamenu":
statements.append("{}_{}.addEventListener('DOMMenuItemActive', {}.menuLoaderEvent, false);".format(root.tag, num, javascript_object))
statements.append("%s_%s._handelMenuLoaders = true;" % (root.tag, num))
statements.append("%s_%s.setAttribute('%s', '%s');" % ((root.tag, num, key, value)))
elif key == "toolbarname":
# this is just for our custom toolbars which are named "Toolbar Buttons 1" and the like
name, sep, other = value.partition(' ')
other = " + '%s%s'" % (sep, other) if sep else ""
value = "buttonStrings.get('%s')%s" % (name, other)
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, value)))
elif key == "type" and value == "menu-button" and 'id' in root.attrib:
statements.append('''if(extensionPrefs.getPrefType('menupopup.hide.{0}') == extensionPrefs.PREF_INVALID || !extensionPrefs.getBoolPref('menupopup.hide.{0}')) {{\n\t\t\t\t{1}_{2}.setAttribute("{3}", "{4}");\n\t\t\t}}'''.format(root.attrib['id'], root.tag, num, key, value))
else:
statements.append('%s_%s.setAttribute("%s", "%s");' % ((root.tag, num, key, value)))
for node in root:
sub_nodes, count, _ = self._create_dom(node, '%s_%s' % (root.tag, num), count+1, doc=doc, rename=rename, child_parent=(child_parent if top == None else None))
if append_children:
statements.extend(sub_nodes)
else:
children = sub_nodes
if not top:
statements.append('return %s_%s;' % (root.tag, num))
else:
if "insertafter" in root.attrib:
statements.append("%s.insertBefore(%s_%s, %s.getElementById('%s').nextSibling);" % (top, root.tag, num, doc, root.attrib.get("insertafter")))
else:
statements.append('%s.appendChild(%s_%s);' % (top if not child_parent else child_parent, root.tag, num))
return statements, count, children
def _attr_key(self, attr):
order = ('id', 'defaultarea', 'type', 'label', 'tooltiptext', 'command', 'onclick', 'oncommand')
if attr[0].lower() in order:
return order.index(attr[0].lower())
return 100
def _create_dom_button(self, button_id, root, file_name, count, toolbar_ids):
add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
if 'viewid' in root.attrib:
self._ui_ids.add(root.attrib["viewid"])
statements, _, children = self._create_dom(root, child_parent="popupset", append_children=False)
children[0] = """var popupset = document.getElementById('PanelUI-multiView');
if(popupset) {
var menupopup_1 = document.createElement('panelview');
} else {
var menupopup_1 = document.createElement('menupopup');
popupset = document.documentElement;
}"""
data = {
"type": "'view'",
"onBeforeCreated": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(children),
}
elif 'usepanelview' in root.attrib:
self._ui_ids.add("{0}-panel-view".format(root.attrib["id"]))
root.attrib["onclick"] = """if(event.target != event.currentTarget || ('button' in event && event.button != 0)) {{
return;
}}
var item = event.target;
if(item.nodeName == 'key') {{
item = document.getElementById('{0}');
}}
if(item.getAttribute('cui-areatype') == 'menu-panel') {{
var win = item.ownerDocument.defaultView;
event.preventDefault();
event.stopPropagation();
item.ownerDocument.getElementById('{0}-panel-view').ownerButton = item;
win.PanelUI.showSubView('{0}-panel-view', item, CustomizableUI.AREA_PANEL);
}}""".format(root.attrib["id"])
if 'type' not in root.attrib:
popup_opener = """ else {
item.firstChild.openPopup(item, "after_start");
}"""
if 'oncommand' not in root.attrib:
root.attrib["oncommand"] = root.attrib["onclick"] + popup_opener
else:
root.attrib["onclick"] += popup_opener
statements, _, _ = self._create_dom(root)
root_clone = deepcopy(root)
popup = root_clone[0]
if root.attrib['usepanelview'] == 'button-menu':
del root_clone.attrib["type"]
popup.insert(0, ET.Element("menuseparator"))
popup.insert(0, ET.Element("menuitem", root_clone.attrib))
for node in popup:
node.attrib['class'] = 'subviewbutton'
if 'onpopupshowing' in popup.attrib:
popup.attrib['onViewShowing'] = popup.attrib['onpopupshowing']
del popup.attrib['onpopupshowing']
if 'onpopuphiding' in popup.attrib:
popup.attrib['onViewHiding'] = popup.attrib['onpopuphiding']
del popup.attrib['onpopuphiding']
_, _, children = self._create_dom(root_clone, child_parent="popupset", rename={'menuitem': 'toolbarbutton'}, append_children=False)
children.pop(0)
data = {
"type": "'custom'",
"onBuild": '''function (document) {
var window = document.defaultView;
var popupset = document.getElementById('PanelUI-multiView');
if(popupset) {
var menupopup_1 = document.createElement('panelview');
%s
menupopup_1.id = "%s-panel-view";
}
%s
}''' % ("\n\t\t\t\t\t".join(children), root.attrib['id'], "\n\t\t\t\t".join(statements))
}
else:
statements, _, _ = self._create_dom(root)
data = {
"type": "'custom'",
"onBuild": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(statements)
}
self._apply_toolbox(file_name, data)
toolbar_max_count = self._settings.get("buttons_per_toolbar")
if add_to_main_toolbar and button_id in add_to_main_toolbar:
data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
elif self._settings.get("put_button_on_toolbar"):
toolbar_index = count // toolbar_max_count
if len(toolbar_ids) > toolbar_index:
data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
for key, value in root.attrib.items():
if key in ('label', 'tooltiptext'):
data[key] = self._dom_string_lookup(value)
elif key == "id":
data[key] = "'%s'" % value
elif key == 'oncommand':
self._button_commands[file_name][button_id] = value
elif key == 'viewid':
data["viewId"] = "'%s'" % value
elif key == 'onviewshowing':
data["onViewShowing"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
elif key == 'onviewhideing':
data["onViewHiding"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
for js_file in self._get_js_file_list(file_name):
if self._button_js_setup.get(js_file, {}).get(button_id):
data["onCreated"] = "function(aNode){\n\t\t\tvar document = aNode.ownerDocument;\n\t\t\t%s\n\t\t}" % self._button_js_setup[js_file][button_id]
items = sorted(data.items(), key=self._attr_key)
return "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for key, value in items)
def _apply_toolbox(self, file_name, data):
toolbox_info = self._settings.get("file_to_toolbar_box2").get(file_name)
if toolbox_info:
window_file, toolbox_id = toolbox_info
data["toolbox"] = "'%s'" % toolbox_id
if window_file:
data["window"] = "'%s'" % window_file
def _patch_call(self, value):
data = []
if re.search(r'\bthis\b', value):
value = re.sub(r'\bthis\b', 'aThis', value)
data.append("var aThis = event.currentTarget;")
if re.search(r'\bdocument\b', value):
data.append("var document = event.target.ownerDocument;")
if re.search(r'\bwindow\b', value):
data.append("var window = event.target.ownerDocument.defaultView;")
data.append(value)
return "\n\t\t\t\t".join(data)
def _create_jsm_button(self, button_id, root, file_name, count, toolbar_ids):
toolbar_max_count = self._settings.get("buttons_per_toolbar")
add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
data = {}
attr = root.attrib
self._apply_toolbox(file_name, data)
if add_to_main_toolbar and button_id in add_to_main_toolbar:
data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
elif self._settings.get("put_button_on_toolbar"):
toolbar_index = count // toolbar_max_count
if len(toolbar_ids) > toolbar_index:
data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
for key, value in attr.items():
if key in ('label', 'tooltiptext'):
data[key] = self._dom_string_lookup(value)
elif key == "id":
data[key] = "'%s'" % value
elif key in ('onclick', 'oncommand'):
if key == 'oncommand':
self._button_commands[file_name][button_id] = value
key = 'onCommand' if key == 'oncommand' else 'onClick'
data[key] = "function(event) {\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
for js_file in self._get_js_file_list(file_name):
if self._button_js_setup.get(js_file, {}).get(button_id):
data["onCreated"] = "function(aNode) {\n\t\t\t\tvar document = aNode.ownerDocument;\n\t\t\t\t%s\n\t\t\t}" % self._button_js_setup[js_file][button_id]
items = sorted(data.items(), key=self._attr_key)
result = "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for (key, value) in items)
return result
def get_jsm_files(self):
result = {}
simple_attrs = {'label', 'tooltiptext', 'id', 'oncommand', 'onclick', 'key', 'class'}
button_hash, toolbar_template = self._get_toolbar_info()
template = self.env.get_template('button.jsm')
javascript_object = self._settings.get("javascript_object")
for file_name, values in self._button_xul.items():
jsm_buttons = []
js_includes = [js_file for js_file in self._get_js_file_list(file_name)
if js_file != "loader" and js_file in self._included_js_files]
toolbars, toolbar_ids = self._create_jsm_toolbar(button_hash, toolbar_template, file_name, values)
count = 0
modules = set()
for button_id, xul in values.items():
root = ET.fromstring(xul.replace('&', '&'))
modules.update(self._modules[button_id])
attr = root.attrib
if not len(root) and not set(attr.keys()).difference(simple_attrs) and (not "class" in attr or attr["class"] == "toolbarbutton-1 chromeclass-toolbar-additional"):
jsm_buttons.append(self._create_jsm_button(button_id, root, file_name, count, toolbar_ids))
else:
jsm_buttons.append(self._create_dom_button(button_id, root, file_name, count, toolbar_ids))
count += 1
default_mods = {
"resource://gre/modules/Services.jsm",
"resource:///modules/CustomizableUI.jsm",
"resource://services-common/stringbundle.js"
}
modules_import = "\n".join("try { Cu.import('%s'); } catch(e) {}" % mod for mod in modules if mod and mod not in default_mods)
if self._settings.get("menu_meta"):
menu_id, menu_label, _ = self._settings.get("menu_meta")
else:
menu_id, menu_label = "", ""
end = set()
menu = self._jsm_create_menu(file_name, values)
for js_file in set(self._get_js_file_list(file_name) + [file_name]):
if self._button_js_setup.get(js_file, {}):
end.update(self._button_js_setup[js_file].values())
if (self._settings.get("menuitems") and menu) or self._settings.get('location_placement'):
end.add(javascript_object + ".setUpMenuShower(document);")
extra_ui = self.create_extra_ui(file_name, values)
result[file_name] = template.render(
modules=modules_import,
locale_file_prefix=self._settings.get("locale_file_prefix"),
scripts=js_includes,
button_ids=json.dumps(list(values.keys())),
toolbar_ids=json.dumps(toolbar_ids),
toolbars=toolbars,
menu_id=menu_id,
ui_ids=json.dumps(list(self._ui_ids)),
toolbox=self._settings.get("file_to_toolbar_box").get(file_name, ('', ''))[1],
menu=menu,
keys=list(self.jsm_keyboard_shortcuts(file_name)),
end="\n\t".join(end),
buttons=jsm_buttons,
extra_ui=extra_ui,
javascript_object=self._settings.get("javascript_object"),
pref_root=self._settings.get("pref_root"),
chrome_name=self._settings.get("chrome_name")
)
return result
def create_extra_ui(self, file_name, values):
location = self._settings.get("location_placement")
result = []
if location and file_name in self._settings.get("file_to_location", {}).get(location):
for index, (button_id, xul) in enumerate(values.items()):
parent, parent_id, after, attrib = self._settings.get("file_to_location").get(location).get(file_name)
root = ET.fromstring(xul.replace('&', '&'))
root.attrib["insertafter"] = after
root.attrib["id"] += "-extra-ui"
self._ui_ids.add(root.attrib["id"])
if attrib:
for name, value in attrib.items():
if value is None:
del root.attrib[name]
else:
root.attrib[name] = value
parent_var = "{}_{}".format(parent, index)
statements, _, _ = self._create_dom(root, top=parent_var)
result.append(ExtraUI(parent, parent_id, index, "\n\t\t".join(statements), after))
return result
def _create_jsm_toolbar(self, button_hash, toolbar_template, file_name, values):
toolbar_ids = []
toolbars = []
if file_name in self._settings.get("extra_toolbars_disabled"):
return '', []
count = 0
max_count = self._settings.get("buttons_per_toolbar")
buttons = list(values.keys())
for box_setting, include_setting in [("file_to_toolbar_box", "include_toolbars"),
("file_to_bottom_box", "include_satusbars")]:
toolbar_node, toolbar_box = self._settings.get(box_setting).get(file_name, ('', ''))
data = {
"defaultset": "",
"persist": "collapsed,hidden",
"context": "toolbar-context-menu",
"class": "toolbar-buttons-toolbar chromeclass-toolbar",
"mode": "icons",
"iconsize": "small",
"customizable": "true",
}
if self._settings.get(include_setting) and toolbar_box:
number = self.toolbar_count(include_setting, values, max_count)
for i in range(number):
if self._settings.get("put_button_on_toolbar"):
data["defaultset"] = ",".join(buttons[i * max_count:(i + 1) * max_count])
button_hash.update(bytes(i))
hash = button_hash.hexdigest()[:6]
label_number = "" if (number + count) == 1 else " %s" % (i + count + 1)
toolbar_ids.append("tb-toolbar-%s" % hash)
if include_setting != "include_toolbars":
data["toolboxid"] = toolbar_box
data["id"] = "tb-toolbar-%s" % hash
toolbarname = self._dom_string_lookup("&tb-toolbar-buttons-toggle-toolbar.name;%s" % label_number)
values["tb-toolbar-buttons-toggle-toolbar-%s" % hash] = toolbar_template.replace("{{hash}}", hash).replace("{{ number }}", label_number)
toolbars.append("""createToolbar(document, '%s', %s, %s)""" % (toolbar_box, json.dumps(data), toolbarname))
count += number
return "\n\t\t".join(toolbars), toolbar_ids
| mit | 4,315,887,822,240,294,400 | 52.217877 | 288 | 0.549689 | false |
soybean217/lora-python | UServer/admin_server/admin_http_api/api/api_group.py | 1 | 3730 | import json
from wtforms import ValidationError
from userver.object.application import Application
from . import api, root
from flask import request, Response
from userver.object.group import Group
from binascii import hexlify
from utils.errors import KeyDuplicateError, PatchError
from .decorators import group_filter_valid, group_exists
from .forms import get_formdata_from_json_or_form
from .forms.form_group import AddGroupForm, PatchGroup, device_operate
from ..http_auth import auth
@api.route(root + 'groups', methods=['GET'])
@auth.auth_required
@group_filter_valid
def group_list(user=None, app=None):
if request.method == 'GET':
if app is not None:
groups = Group.objects.all(app_eui=app.app_eui)
elif user is not None:
groups = []
apps = Application.query.filter_by(user_id=user.id)
for app in apps:
groups += Group.objects.all(app.app_eui)
else:
groups = Group.objects.all()
groups = [group.obj_to_dict() for group in groups]
groups_json = json.dumps(groups)
return Response(status=200, response=groups_json)
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# add_group = AddGroupForm(formdata)
# try:
# if add_group.validate():
# if len(add_group['appskey'].data) != 0:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data, appskey=add_group['appskey'].data)
# else:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data)
# group.save()
# return Response(status=201, response=json.dumps(group.obj_to_dict()))
# else:
# return Response(status=406, response=json.dumps({'errors': add_group.errors,
# 'succeed': False}))
# except KeyDuplicateError as error:
# return Response(status=403, response=json.dumps({"error": str(error),
# "succeed": False}))
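# Request/response sketch (representation depends on Group.obj_to_dict()):
#   GET <root>groups             -> 200, JSON array of all visible groups
#   GET <root>groups/<group_id>  -> 200, JSON object for a single group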
@api.route(root + 'groups/<group_id>', methods=['GET'])
@auth.auth_required
@group_exists
def group_index(group):
if request.method == 'GET':
group_json = json.dumps(group.obj_to_dict())
return group_json, 200
# elif request.method == 'PATCH':
# try:
# formdata = get_formdata_from_json_or_form(request)
# PatchGroup.patch(group, formdata)
# return Response(status=200, response=json.dumps(group.obj_to_dict()))
# except (AssertionError, ValidationError, PatchError) as e:
# return json.dumps({"error": str(e)}), 406
# elif request.method == 'POST':
# POST Down Msg
# pass
# elif request.method == 'DELETE':
# try:
# group.delete()
# return json.dumps({'errors': "Group: %s deleted." % hexlify(group.id).decode(),
# 'succeed': False}), 200
# except Exception as e:
# return json.dumps({'errors': "Fail to delete group: %s.\n%s" % (hexlify(group.id).decode(), str(e)),
# 'succeed': False}), 400
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# error = device_operate(group, formdata)
# if error is None or len(error) == 0:
# return json.dumps({'success': True}), 200
# else:
# return json.dumps({'error': str(error)}), 406
#
| mit | -6,227,232,879,890,345,000 | 43.404762 | 172 | 0.574263 | false |
spino327/sdr_testbed | DistributedTestbed/SlaveRX.py | 1 | 6293 | '''
Copyright (c) 2011, Universidad Industrial de Santander, Colombia
University of Delaware
All rights reserved.
@author: Sergio Pino
@author: Henry Arguello
Website: http://www.eecis.udel.edu/
emails : [email protected] - [email protected]
Date : Feb, 2011
'''
import socket
import time
import sys
from receiver.RXApp import RXApp
from util.PropertyReader import readProperties
from util import Utils
class SlaveRX(object):
'''
SlaveRX is responsible of control the RX USRP node.
'''
def __init__(self, host, port, path):
'''
Constructor
@param host: refers to the local host address
@param port: port for the server to listen
@param path: File system path where the data will be stored
'''
# server
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind((host, port))
self.server.listen(1)
self.path = path
self.app = None
def setRXProperties(self, lo_off, fc, dec, gain, sync):
'''
Set the USRP RX properties
@param lo_off: local oscillator offset (int)
@param fc: Center frequency (float)
@param dec: Decimation factor (int)
@param gain: Gain of the receiver in dB (int)
@param sync: True if the Hardware will use the GPSDO (boolean)
'''
self.lo_off = lo_off
self.fc = fc
self.dec = dec
self.gain = gain
self.sync = sync
def launch(self):
'''
calls startup
'''
print("i: launch SlaveRX")
while True:
sc, addr = self.server.accept()
sc.settimeout(10*60)
print("\n\ni: SlaveRX Connection from " + str(addr) + ", time " + time.strftime("%d-%m-%y/%H:%M:%S"))
tic = time.time()
try:
self.__startup__(sc, addr)
except Exception, e:
print("e: " + str(e))
sc.close()
print("i: SlaveRX Connection closed, duration: " + str(time.time() - tic) + " [seg]\n\n")
print("i: SlaveRX end launch")
def record(self, prefix, at, signame):
"""
@param prefix: prefix path folder where the signals are stored, e.g. /home/folder/
@param at: attenuation factor
@param signame: filename of the signal
Start recording
"""
# creating the folder
folder = self.path + prefix
folder = folder if (folder.endswith("/")) else folder + "/"
Utils.ensure_dir(folder)
# signal file
filename = folder + signame + "_at" + str(at) +"_G" + str(self.gain) + ".dat"
print("i: record filename = " + filename)
self.app = RXApp(self.fc, self.dec, self.gain, "addr=192.168.10.2", self.sync, filename, self.lo_off)
self.app.launch()
def __startup__(self, sc, addr):
'''
Responsible for starting the application; for creating and showing
the initial GUI.
'''
print("i: startup")
msg = sc.recv(1024)
if msg == "start":
sc.send("ok")
print("i: start ok")
msg = sc.recv(1024)
print("i: msg = " + msg)
while msg != "finish":
tic = time.time()
if msg.find("startRec") >= 0:
# message "startRec:/prefix_path/:at:signame:"
print("i: startRec received")
values = msg.split(":")
prefix = values[1]
at = float(values[2])
signame = values[3]
self.record(prefix, at, signame)
sc.send("ok")
elif msg.find("stopRec") >= 0:
print("i: stopRec received")
if self.app.stopApp():
print("i: stopRec successful")
sc.send("ok")
else:
print("i: stopRec failed")
sc.send("error")
else:
print("i: ending")
break
print("i: cmd duration: " + str(time.time() - tic) + " [seg]\n")
msg = sc.recv(1024)
else:
print("e: not start")
sc.send("error")
if msg == "finish":
print("i: finish cmd received")
sc.close()
print("i: end startup")
def __exit__(self):
'''
This method runs on the event dispatching thread.
'''
print "somebody call me!"
        self.Clearup()  # original called self.__exit__() here, recursing forever; clean up instead
if __name__ == '__main__':
    '''
    Entry point: read the receiver properties from the config file, build a
    SlaveRX instance with them and launch its control server.
    '''
# Reading the properties
confFile = "confRX.txt"
if(len(sys.argv) > 1):
arg = sys.argv[1]
confFile = arg if len(arg) > 0 else confFile
else:
print("working with default config file path")
properties = readProperties(confFile)
print("Properties:")
for p in properties:
print("\t" + p + " : " + properties[p])
path = properties["rxpath"]
path = path if (path.endswith("/")) else path+"/"
sync = True if properties["sync"] == "True" else False
app = SlaveRX(properties["rxip"],
int(properties["rxport"]),
path)
app.setRXProperties(int(properties["lo_off"]),
float(properties["fc"]),
int(properties["dec"]),
int(properties["gain"]),
sync)
app.launch()
exit() | apache-2.0 | -2,031,581,930,941,409,300 | 29.259615 | 113 | 0.482918 | false |
alanc10n/py-rau | pyrau/rau.py | 1 | 1747 | import argparse
from redis import StrictRedis
from pyrau.commands import Command
def delete(args, command):
""" Execute the delete command """
command.delete(args.pattern)
def keys(args, command):
""" Execute the keys command """
details = args.details | args.sorted
command.keys(args.pattern, details, args.sorted)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-H', '--host', default='localhost', help='Host')
parser.add_argument('-p', '--port', default=6379, type=int, help='Port')
parser.add_argument('-b', '--batch_size', default=20,
type=int, help='Batch size for pipeline operations')
subparsers = parser.add_subparsers(help='Commands')
del_parser = subparsers.add_parser('delete', help='Delete key(s)')
del_parser.add_argument('pattern', type=str, help='Key pattern')
del_parser.set_defaults(func=delete)
key_parser = subparsers.add_parser('keys', help="List keys")
key_parser.add_argument('-p', '--pattern', help='Key pattern',
default=None)
key_parser.add_argument('-d', '--details',
help='Include details for key(s)',
action='store_true')
key_parser.add_argument('-s', '--sorted',
help='Sort result by size, implies --details',
action='store_true')
key_parser.set_defaults(func=keys)
args = parser.parse_args()
return args
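# Example invocations (sketch; module path assumed from this repo layout):
#   python -m pyrau.rau -H localhost -p 6379 keys --pattern 'user:*' --sorted
#   python -m pyrau.rau delete 'cache:*'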
def main():
args = parse_args()
redis = StrictRedis(host=args.host, port=args.port)
command = Command(redis)
command.batch_size = args.batch_size
args.func(args, command)
if __name__ == '__main__':
main()
| mit | -40,468,994,944,548,810 | 31.962264 | 76 | 0.606754 | false |
akx/shoop | shoop_tests/admin/test_views.py | 1 | 2079 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import json
import pytest
from shoop.testing.factories import (
create_random_order, create_random_person, get_default_category,
get_default_product, get_default_shop
)
from shoop.testing.utils import apply_request_middleware
from shoop.utils.importing import load
@pytest.mark.parametrize("class_spec", [
"shoop.admin.modules.categories.views.list:CategoryListView",
"shoop.admin.modules.contacts.views:ContactListView",
"shoop.admin.modules.orders.views:OrderListView",
"shoop.admin.modules.products.views:ProductListView",
])
@pytest.mark.django_db
def test_list_view(rf, class_spec):
view = load(class_spec).as_view()
request = rf.get("/", {
"jq": json.dumps({"perPage": 100, "page": 1})
})
response = view(request)
assert 200 <= response.status_code < 300
def random_order():
# These are prerequisites for random orders
contact = create_random_person()
product = get_default_product()
return create_random_order(contact, [product])
@pytest.mark.parametrize("model_and_class", [
(get_default_category, "shoop.admin.modules.categories.views:CategoryEditView"),
(create_random_person, "shoop.admin.modules.contacts.views:ContactDetailView"),
(random_order, "shoop.admin.modules.orders.views:OrderDetailView"),
(get_default_product, "shoop.admin.modules.products.views:ProductEditView"),
])
@pytest.mark.django_db
def test_detail_view(rf, admin_user, model_and_class):
get_default_shop() # obvious prerequisite
model_func, class_spec = model_and_class
model = model_func()
view = load(class_spec).as_view()
request = apply_request_middleware(rf.get("/"), user=admin_user)
response = view(request, pk=model.pk)
if hasattr(response, "render"):
response.render()
assert 200 <= response.status_code < 300
| agpl-3.0 | -3,598,878,904,634,792,000 | 34.237288 | 84 | 0.7114 | false |
TerryRen/TrPython | NetLib/SuperCaptcha.py | 1 | 9743 | #python 2.7
#coding=utf-8
__author__ = "Terry.Ren"
#try:
# import Image
#except ImportError:
# from PIL import Image
from PIL import Image
from PIL import ImageDraw
import ImageEnhance
import os
import urllib
import StringIO
import uuid
import pytesseract #open source
class Captcha(object):
def __init__(self, isDebug = False):
self.__isDebug = isDebug
self.__currentStepId = 1
self.__tempFileList = []
def __BuildTempFileFullName(self, localDir, extName):
fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
fname = os.path.join(localDir,fname)
self.__currentStepId += 1
self.__tempFileList.append(fname)
return fname
'''
Store remote image to local dir
'''
def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
response = urllib.urlopen(imageUrl)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
with open(tempFileFullName, 'wb') as f:
f.write(response.read())
return tempFileFullName
def Clearup(self):
for filename in self.__tempFileList:
if os.path.isfile(filename):
os.remove(filename)
'''
image enhance
'''
def __imageEnhance(self, image):
enhancer = ImageEnhance.Contrast(image)
image_enhancer = enhancer.enhance(4)
return image_enhancer
'''
two value
'''
def __twoValue(self, image):
img = image.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
for y in range(img.size[1]): # two value: R=95,G=95,B=95
for x in range(img.size[0]):
if pix[x, y][0] < 95 or pix[x, y][1] < 95 or pix[x, y][2] < 95:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
return img
'''
Get Captcha Code from on-line web site
'''
def GetOnlineCaptchaCode(self, imageUrl, isStoreOriginalImage = False, localDir = '', extName = 'jpg'):
if isStoreOriginalImage == True:
if not os.path.isdir(localDir):
raise ValueError("please validate the argument GetOnlineCaptchaCode.localDir...")
localFileName = self.__StoreImage2LocalDir(imageUrl , localDir , extName)
img = Image.open(localFileName)
else:
imgBuf = StringIO.StringIO(urllib.urlopen(imageUrl).read())
img = Image.open(imgBuf)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
img = self.__twoValue(img)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use local file via tesseract-ocr
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
'''
Get Captcha Code from local
'''
def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
localDir = os.path.dirname(imagePath)
img = Image.open(imagePath)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
img = img.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
        for y in range(img.size[1]): # two value: R=90,G=90,B=90
for x in range(img.size[0]):
if pix[x, y][0] < 90 or pix[x, y][1] < 90 or pix[x, y][2] < 90:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use local file via tesseract-ocr
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
def TestImage(self):
data = [(1,0),(0,1)]
size = (2,2)
image = Image.new("1",size)
draw = ImageDraw.Draw(image)
for x in xrange(0,size[0]):
for y in xrange(0,size[1]):
draw.point((x,y),data[x][y])
image.save("D:\\GitHub\\TrPython\\NetLib\\Test\\1.gif")
class SmartCaptcha(object):
def __init__(self, isDebug = False):
self.__isDebug = isDebug
self.__currentStepId = 1
self.__tempFileList = []
def __BuildTempFileFullName(self, localDir, extName):
fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
fname = os.path.join(localDir,fname)
self.__currentStepId += 1
self.__tempFileList.append(fname)
return fname
'''
Store remote image to local dir
'''
def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
response = urllib.urlopen(imageUrl)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
with open(tempFileFullName, 'wb') as f:
f.write(response.read())
return tempFileFullName
def Clearup(self):
for filename in self.__tempFileList:
if os.path.isfile(filename):
os.remove(filename)
'''
image enhance
'''
def __imageEnhance(self, image):
enhancer = ImageEnhance.Contrast(image)
image_enhancer = enhancer.enhance(4)
return image_enhancer
'''
two value
'''
def __twoValue(self, image):
img = image.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
        for y in range(img.size[1]): # two value: R=100,G=100
for x in range(img.size[0]):
if pix[x, y][0] < 100 and pix[x, y][1] < 100:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
return img
def __getEffectivePoint(self, pix, x , y):
point, sx , sy = 0, x-1, y-1
#print sx+3 , sy +3 ,x , y
for i in xrange(3):
for j in xrange(3):
if sx+i == x and sy+j == y:
continue
if pix[sx+i,sy+j] == pix[x,y]:
point += 1
return point;
'''
1111111
1011101
1011101
1111111
'''
def __clearNoise(self, img, effectivePoint ,processCount):
for ct in xrange(0, processCount):
pix = img.load() #read pix
for x in xrange(1,img.size[0] - 1):
for y in xrange(1, img.size[1] - 1):
point = self.__getEffectivePoint(pix , x , y)
if point < effectivePoint:
pix[x, y] = (255, 255, 255, 255) # set to Noise
return img
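    # Sketch of the effect: a pixel survives only if at least `effectivePoint`
    # of its 8 neighbours share its colour, so isolated specks are repainted
    # white; `processCount` repeats the sweep for stubborn noise.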
'''
Get Captcha Code from local
'''
def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
localDir = os.path.dirname(imagePath)
img = Image.open(imagePath)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
# two value
img = self.__twoValue(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
# clear Noise
img = self.__clearNoise(img, 3 , 1)
        # ocr
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use a local file with tesseract-ocr
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
if __name__ == "__main__":
print '[unit test]'
#validate1 = Captcha()
#print validate1.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet")
#validate2 = Captcha(True)
#print validate2.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet",True,"D:\\GitHub\\TrPython\\NetLib\\Test")
#validate2.Clearup()
#validate3 = Captcha(True)
#print validate3.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\1400.gif","gif")
#validate3.TestImage()
validate4 = SmartCaptcha(True)
print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg","jpg")
#print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\queaa.jpg","jpg")
print pytesseract.image_to_string(Image.open("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg"))
| apache-2.0 | 4,605,390,416,347,193,000 | 29.600629 | 132 | 0.550098 | false |
UIA-CAIR/DeepRTS | coding/test.py | 1 | 3205 | import Scenarios
import Agents
import torch
import imageio
import pygame
import os
from datetime import datetime
import numpy
action_names = {
1: "Previous Unit",
2: "Next Unit",
3: "Move Left",
4: "Move Right",
5: "Move Up",
6: "Move Down",
7: "Move Up Left",
8: "Move Up Right",
9: "Move Down Left",
10: "Move Down Right",
11: "Attack",
12: "Harvest",
13: "Build 0",
14: "Build 1",
15: "Build 2",
16: "No Action"
}
if __name__ == "__main__":
now = datetime.now()
now_string = now.strftime("%d-%m-%Y %H-%M-%S")
directory = "Videos " + now_string
test_directory = "Tests"
test_path = os.path.join(os.getcwd(), test_directory)
files = ["NN_700"]
results_path = os.path.join(os.getcwd(), "Results")
recording_path = os.path.join(results_path, directory)
log_path = os.path.join(recording_path, "log.txt")
os.mkdir(recording_path)
log = open(log_path, "w+")
# environment
env = Scenarios.ImageToPyTorch(Scenarios.Scenario182({}))
env.game.set_max_fps(99999999)
env.game.set_max_ups(99999999)
TRIALS = 100
for file in files:
file_path = os.path.join(test_path, file + ".pt")
results_directory = file + "-Random"
results_path = os.path.join(test_path, results_directory)
os.mkdir(results_path)
outcomes_path = os.path.join(results_path, "outcomes.txt")
durations_path = os.path.join(results_path, "durations.txt")
outcomes_file = open(outcomes_path, "w+")
durations_file = open(durations_path, "w+")
# agents
state_size = env.observation_space.shape
action_size = env.action_space.n
agent_a = Agents.SmallAgent(4410, action_size)
agent_a.load(file_path)
agent_b = Agents.RandomAgent()
for trial in range(TRIALS):
state = env.reset()
flat_state = state.flatten()
# video stuff
filenames = []
terminal = False
changed = False
count = 0
# play game
while not terminal:
if trial == 0:
if changed:
# save the current window
window = pygame.display.get_surface()
image_name = "image_" + str(count) + ".jpeg"
image_path = os.path.join(results_path, image_name)
pygame.image.save(window, image_path)
filenames.append(image_path)
count += 1
# AI for player 1
env.game.set_player(env.game.players[0])
action = agent_a.get_action(flat_state, 0)
next_state, _, terminal, _ = env.step(action)
flat_next_state = next_state.flatten()
                # AI for player 2
env.game.set_player(env.game.players[1])
action = agent_b.get_action(state, 0)
next_state, _, terminal, _ = env.step(action)
changed = not numpy.array_equal(state, next_state)
state = next_state
flat_state = flat_next_state
if (env.game.players[0].is_defeated()):
outcomes_file.write("0,")
outcomes_file.flush()
else:
outcomes_file.write("1,")
outcomes_file.flush()
durations_file.write(str(env.game.get_episode_duration()) + ",")
durations_file.flush()
if trial == 0:
images = []
for filename in filenames:
images.append(imageio.imread(filename))
video_path = os.path.join(results_path, "video.gif")
imageio.mimsave(video_path, images) | mit | -957,784,358,521,181,200 | 20.231788 | 67 | 0.635881 | false |
tfiedor/perun | perun/fuzz/randomizer.py | 1 | 1994 | """Module that simply encapsulate all the random functions that are used in fuzzing,
with only one call of function from random package."""
__author__ = 'Matus Liscinsky'
import random
def random_repeats(repeats):
"""Decorator for random number of repeats of inner function
Note that the return value of the wrapped function is NOT checked or passed anywhere
:param int repeats: the upper bound of number of repeats
:return: decorator that takes function and repeats its call up to @p repeats times
"""
def inner_wrapper(func):
"""Inner wrapper
:param function func: wrapped function
:return: innermost wrapper
"""
def innermost_wrapper(*args, **kwargs):
"""Innermost wrapper
:param list args: list of arguments
:param dict kwargs: list of keyword arguments
"""
for _ in range(rand_from_range(1, repeats)):
func(*args, **kwargs)
innermost_wrapper.__doc__ = func.__doc__
return innermost_wrapper
return inner_wrapper
def rand_from_range(start, stop):
"""Basic function that randomly choose an integer from range bounded by `start` and `stop`
    parameters. Mathematically expressed as `start` <= random_number <= `stop`.
:param int start: lower bound of the interval
:param int stop: upper limit of the interval
:return int: random integer from given range
"""
return random.randint(start, stop)
def rand_index(lst_len):
"""Function that randomly choose an index from list.
:param int lst_len: length of the list
:return int: random integer that represents valid index of element in list
"""
return rand_from_range(0, lst_len-1)
def rand_choice(lst):
"""Return a randomly selected element of a list.
:param list lst: the list from which the element will be selected
:return: element of list on random index
"""
return lst[rand_from_range(0, len(lst)-1)]
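
# --- Illustrative usage sketch (not part of the original module); the demo
# function `greet` and the fixed seed are hypothetical examples. ---
if __name__ == "__main__":
    random.seed(42)  # reproducible demo output

    @random_repeats(3)
    def greet(name):
        """Will be invoked 1-3 times per call thanks to the decorator."""
        print("hello,", name)

    greet("fuzzer")
    print(rand_from_range(1, 10))     # integer in [1, 10]
    print(rand_index(5))              # valid index into a 5-element list
    print(rand_choice(["a", "b", "c"]))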
| gpl-3.0 | 1,950,198,294,091,948,500 | 31.16129 | 94 | 0.665998 | false |
weggert/calendar_sync | calendar_sync.py | 1 | 6753 | #!/usr/bin/python
import fileinput
import os
class CalendarManager:
def __init__(self, calendar_name, dry_run, include_descriptions):
self.calendar_name = calendar_name
self.dry_run = dry_run
self.include_descriptions = include_descriptions
def clear_calendar(self):
command = """
osascript -e 'tell application "Calendar" to tell calendar "%s"
set eventList to every event
repeat with e in eventList
delete e
end repeat
end tell'
"""
command = command % self.calendar_name
if not self.dry_run:
os.system(command)
print 'Calendar cleared'
def create_calendar_event(self, summary, start_date, end_date, all_day, location, description):
if not self.include_descriptions:
description = ''
properties = 'start date:theStartDate, end date:theEndDate, summary:"%s", description:"%s", location:"%s"'\
% (summary, description, location)
if all_day is True:
properties += ', allday event:true'
command = """
osascript -e 'set theStartDate to date "%s"
set theEndDate to date "%s"
tell application "Calendar" to tell calendar "%s"
set theEvent to make new event with properties {%s}
end tell'
"""
command = command % (start_date, end_date, self.calendar_name, properties)
if not self.dry_run:
os.system(command)
self.print_summary(summary, start_date, end_date, all_day, location, description)
@staticmethod
def print_summary(summary, start_date, end_date, all_day, location, description):
print 'Summary: ' + summary
print ' Start: ' + start_date
print ' End: ' + end_date
print ' All Day: ' + str(all_day)
print ' Location: ' + location
print ' Description: ' + description
print ''
class CalendarSummaryProcessor:
class LineType:
EventStart, Summary, Location, Date, Time, Where, Notes, Status, Other = range(9)
def __init__(self):
pass
def __init__(self, calendar_name, dry_run, include_descriptions):
self.calendar_manager = CalendarManager(
calendar_name=calendar_name,
dry_run=dry_run,
include_descriptions=include_descriptions)
self.reset()
self.processing_event = False
self.first_description_line = True
self.last_description_line_was_blank = False
self.summary = ''
self.date = ''
self.time = ''
self.location = ''
self.description = ''
def reset(self):
self.processing_event = False
self.first_description_line = True
self.last_description_line_was_blank = False
self.summary = ''
self.date = ''
self.time = ''
self.location = ''
self.description = ''
def process_summary(self):
self.calendar_manager.clear_calendar()
for input_line in fileinput.input():
line_type = self.get_line_type(input_line)
if line_type is self.LineType.EventStart:
if self.processing_event:
if self.summary != 'Remote'\
and self.summary != 'IP Video - Daily Scrum'\
and self.summary != 'Cloud Team Scrum':
start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
self.calendar_manager.create_calendar_event(
self.summary, start_date, end_date, all_day, self.location, self.description)
self.reset()
if line_type is self.LineType.Summary:
self.summary = self.sanitize_line(input_line.strip()[9:])
self.processing_event = True
if line_type is self.LineType.Date:
self.date = input_line.strip()[6:]
if line_type is self.LineType.Time:
self.time = input_line.strip()[6:]
if line_type is self.LineType.Location:
self.location = self.sanitize_line(input_line.strip()[10:])
self.processing_event = True
if line_type is self.LineType.Other:
description_line = self.sanitize_line(input_line.strip())
if len(description_line) > 0:
self.description = self.description + description_line + '\n'
self.last_description_line_was_blank = False
else:
if not self.first_description_line and not self.last_description_line_was_blank:
self.description += '\n'
self.last_description_line_was_blank = True
self.first_description_line = False
if self.processing_event:
start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
self.calendar_manager.create_calendar_event(
self.summary, start_date, end_date, all_day, self.location, self.description)
@staticmethod
def get_start_end_dates(date, time):
dates = date.split(" to ")
times = time.split(" to ")
start_date = dates[0] + ' ' + times[0]
end_date = dates[1] + ' ' + times[1]
all_day = False
if times[0] == '12:00:00 AM' and times[1] == "12:00:00 AM" and dates[0] != dates[1]:
all_day = True
return start_date, end_date, all_day
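    # Example (hypothetical strings): date "July 4, 2017 to July 5, 2017" with
    # time "12:00:00 AM to 12:00:00 AM" returns ("July 4, 2017 12:00:00 AM",
    # "July 5, 2017 12:00:00 AM", True): a midnight-to-midnight span over
    # different days is treated as an all-day event.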
def get_line_type(self, input_line):
if input_line.startswith('EVENT'):
return self.LineType.EventStart
if input_line.startswith('Summary:'):
return self.LineType.Summary
if input_line.startswith('Date:'):
return self.LineType.Date
if input_line.startswith('Time:'):
return self.LineType.Time
if input_line.startswith('Location:'):
return self.LineType.Location
if input_line.startswith('Where'):
return self.LineType.Where
if input_line.startswith('Notes'):
return self.LineType.Notes
if input_line.startswith('Status'):
return self.LineType.Status
return self.LineType.Other
def process_named_line(self, input_line):
colon_position = input_line.find(':')
return self.sanitize_line(input_line[colon_position+1:].strip())
@staticmethod
def sanitize_line(input_line):
return input_line.replace("'", "").replace('"', '').replace('*~*~*~*~*~*~*~*~*~*', '').strip()
CalendarSummaryProcessor(calendar_name='Work Calendar',
dry_run=False,
include_descriptions=True).process_summary() | apache-2.0 | -4,109,685,570,745,103,000 | 34.925532 | 115 | 0.577077 | false |
allenai/allennlp | allennlp/modules/text_field_embedders/basic_text_field_embedder.py | 1 | 5232 | from typing import Dict
import inspect
import torch
from overrides import overrides
from allennlp.common.checks import ConfigurationError
from allennlp.data import TextFieldTensors
from allennlp.modules.text_field_embedders.text_field_embedder import TextFieldEmbedder
from allennlp.modules.time_distributed import TimeDistributed
from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
from allennlp.modules.token_embedders import EmptyEmbedder
@TextFieldEmbedder.register("basic")
class BasicTextFieldEmbedder(TextFieldEmbedder):
"""
This is a `TextFieldEmbedder` that wraps a collection of
[`TokenEmbedder`](../token_embedders/token_embedder.md) objects. Each
`TokenEmbedder` embeds or encodes the representation output from one
[`allennlp.data.TokenIndexer`](../../data/token_indexers/token_indexer.md). As the data produced by a
[`allennlp.data.fields.TextField`](../../data/fields/text_field.md) is a dictionary mapping names to these
representations, we take `TokenEmbedders` with corresponding names. Each `TokenEmbedders`
embeds its input, and the result is concatenated in an arbitrary (but consistent) order.
Registered as a `TextFieldEmbedder` with name "basic", which is also the default.
# Parameters
token_embedders : `Dict[str, TokenEmbedder]`, required.
A dictionary mapping token embedder names to implementations.
These names should match the corresponding indexer used to generate
the tensor passed to the TokenEmbedder.
"""
def __init__(self, token_embedders: Dict[str, TokenEmbedder]) -> None:
super().__init__()
# NOTE(mattg): I'd prefer to just use ModuleDict(token_embedders) here, but that changes
# weight locations in torch state dictionaries and invalidates all prior models, just for a
# cosmetic change in the code.
self._token_embedders = token_embedders
for key, embedder in token_embedders.items():
name = "token_embedder_%s" % key
self.add_module(name, embedder)
self._ordered_embedder_keys = sorted(self._token_embedders.keys())
@overrides
def get_output_dim(self) -> int:
output_dim = 0
for embedder in self._token_embedders.values():
output_dim += embedder.get_output_dim()
return output_dim
def forward(
self, text_field_input: TextFieldTensors, num_wrapping_dims: int = 0, **kwargs
) -> torch.Tensor:
if sorted(self._token_embedders.keys()) != sorted(text_field_input.keys()):
message = "Mismatched token keys: %s and %s" % (
str(self._token_embedders.keys()),
str(text_field_input.keys()),
)
embedder_keys = set(self._token_embedders.keys())
input_keys = set(text_field_input.keys())
if embedder_keys > input_keys and all(
isinstance(embedder, EmptyEmbedder)
for name, embedder in self._token_embedders.items()
if name in embedder_keys - input_keys
):
# Allow extra embedders that are only in the token embedders (but not input) and are empty to pass
# config check
pass
else:
raise ConfigurationError(message)
embedded_representations = []
for key in self._ordered_embedder_keys:
# Note: need to use getattr here so that the pytorch voodoo
# with submodules works with multiple GPUs.
embedder = getattr(self, "token_embedder_{}".format(key))
if isinstance(embedder, EmptyEmbedder):
# Skip empty embedders
continue
forward_params = inspect.signature(embedder.forward).parameters
forward_params_values = {}
missing_tensor_args = set()
for param in forward_params.keys():
if param in kwargs:
forward_params_values[param] = kwargs[param]
else:
missing_tensor_args.add(param)
for _ in range(num_wrapping_dims):
embedder = TimeDistributed(embedder)
tensors: Dict[str, torch.Tensor] = text_field_input[key]
if len(tensors) == 1 and len(missing_tensor_args) == 1:
# If there's only one tensor argument to the embedder, and we just have one tensor to
# embed, we can just pass in that tensor, without requiring a name match.
token_vectors = embedder(list(tensors.values())[0], **forward_params_values)
else:
# If there are multiple tensor arguments, we have to require matching names from the
# TokenIndexer. I don't think there's an easy way around that.
token_vectors = embedder(**tensors, **forward_params_values)
if token_vectors is not None:
# To handle some very rare use cases, we allow the return value of the embedder to
# be None; we just skip it in that case.
embedded_representations.append(token_vectors)
return torch.cat(embedded_representations, dim=-1)
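
# Minimal usage sketch (illustrative only): `_ToyEmbedder` is a stand-in
# embedder defined here for the demo, not part of the AllenNLP API.
def _basic_text_field_embedder_demo() -> torch.Size:
    class _ToyEmbedder(TokenEmbedder):
        """Maps token ids in [0, dim) to one-hot vectors of size dim."""

        def __init__(self, dim: int) -> None:
            super().__init__()
            self._dim = dim

        def get_output_dim(self) -> int:
            return self._dim

        def forward(self, tokens: torch.Tensor) -> torch.Tensor:
            return torch.nn.functional.one_hot(tokens, self._dim).float()

    embedder = BasicTextFieldEmbedder({"tokens": _ToyEmbedder(8)})
    # One indexer ("tokens") produced one tensor of shape (batch, num_tokens),
    # so the single-tensor/single-argument fast path in forward() applies.
    inputs = {"tokens": {"tokens": torch.tensor([[1, 2, 3]])}}
    return embedder(inputs).shape  # torch.Size([1, 3, 8])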
| apache-2.0 | 8,035,300,947,094,428,000 | 47.444444 | 114 | 0.640673 | false |
perlygatekeeper/glowing-robot | google_test/free_the_bunny_prisoners/solution_5_fails.py | 1 | 1090 | import itertools
def solution(bunnies,keys_required):
answer = []
for i in range(bunnies):
answer.append([])
# if keys_required > bunnies:
# return None
if keys_required == 0:
return [[0]]
elif keys_required == 1:
key = 0
for group in range(bunnies):
answer[group].append(key)
elif bunnies == keys_required:
key = 0
for group in range(bunnies):
answer[group].append(key)
key += 1
else:
key = 0
for item in itertools.combinations(range(bunnies), keys_required):
for group in item:
answer[group].append(key)
key += 1
return answer
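# Worked example: solution(3, 2) assigns one key to each pair of bunnies,
#   bunny 0 -> [0, 1], bunny 1 -> [0, 2], bunny 2 -> [1, 2],
# so any two of the three bunnies together hold all keys {0, 1, 2}.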
for num_buns in range(1,10):
for num_required in range(10):
key_dist = solution(num_buns,num_required)
print("-" * 60)
print("Answer for {0:d} bunnies, requiring {1:d}".format(num_buns,num_required))
if ( len(key_dist[0]) * len(key_dist) ) < 25:
print(key_dist)
else:
for bun in key_dist:
print(bun)
| artistic-2.0 | -8,434,445,743,401,300,000 | 28.459459 | 88 | 0.538532 | false |
mrjmad/nagademon_2014 | nagademon2014/maingame/models/history_elements.py | 1 | 6460 | # -*- coding: utf-8 -*-
from __future__ import (print_function, division, absolute_import, unicode_literals)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.db import models
USER_MODEL = settings.AUTH_USER_MODEL
@python_2_unicode_compatible
class Character(models.Model):
short_name = models.CharField(_("NPC's short Name"), max_length=20, unique=True)
first_name = models.CharField("Firstname of Character", max_length=50)
last_name = models.CharField("Lastname of Character", max_length=50)
gender = models.PositiveSmallIntegerField(u"Gender of Character")
description = models.TextField("Description")
def __str__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Meta:
abstract = True
@python_2_unicode_compatible
class PlayerCharacter(Character):
def __str__(self):
return u"PC : %s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class NPCharacter(Character):
def __str__(self):
return u"NPC : %s %s" % (self.first_name, self.last_name)
class PlaceManager(models.Manager):
def get_by_natural_key(self, short_name):
return self.get(short_name=short_name)
@python_2_unicode_compatible
class Place(models.Model):
objects = PlaceManager()
begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
short_name = models.CharField(_("Place's short Name"), max_length=20, unique=True)
name = models.CharField("Scene's Name", max_length=200)
filename = models.CharField("Scene's Filename", max_length=80)
text = models.TextField("Scene's Text")
def __str__(self):
return self.name
def natural_key(self):
return self.short_name,
@python_2_unicode_compatible
class Scene(models.Model):
short_name = models.CharField(_("Scene's short Name"), max_length=20, unique=True)
name = models.CharField("Scene's Name", max_length=200)
filename = models.CharField("Scene's Filename", max_length=80)
begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
synopsis = models.TextField("Scene's synopsis, only for authors")
final = models.BooleanField("Final Round ?", default=False)
place = models.ForeignKey(Place, verbose_name="Scene's Place",
blank=True, null=True)
is_active = models.BooleanField(_("Is active ?"), default=True)
order = models.PositiveIntegerField(_("Scene's Order"), default=0)
need_a_trigger = models.BooleanField(_("Activable only by a trigger"), default=False)
def __str__(self):
return self.name
@python_2_unicode_compatible
class PartScene(models.Model):
text = models.CharField("Scene's Text", max_length=400)
for_scene = models.ForeignKey(Scene, verbose_name="Scene")
limited_to_player = models.ForeignKey(PlayerCharacter, blank=True, null=True)
parent = models.ForeignKey('self', blank=True, null=True)
active = models.BooleanField(default=True)
def __str__(self):
return "Text %s |for scene :%s" % (self.text, self.for_scene)
@python_2_unicode_compatible
class Choice1PartSceneto1Scene(models.Model):
text = models.CharField("Choice's Text", max_length=400)
for_part_scene = models.ForeignKey(PartScene, verbose_name="Current Part Scene",
related_name="current_choices_set")
next_scene = models.ForeignKey(Scene, verbose_name="Next Scene",
related_name="leading_choices_set",
null=True, blank=True)
next_part_scene = models.ForeignKey(PartScene, verbose_name="Next Part Scene",
related_name="leading_choices_set",
null=True, blank=True)
def __str__(self):
return "%s |for scene %s , part scene id :%s" % (self.text,
self.for_part_scene.for_scene,
self.for_part_scene.id)
@python_2_unicode_compatible
class Quest(models.Model):
short_name = models.CharField(_("Quest's short Name"), max_length=20, unique=True)
title = models.CharField("Quest's Title", max_length=140)
text = models.TextField("Quest's Text")
time_frame = models.PositiveIntegerField(_("Maximum Time (in minutes) for validate the Quest"), default=0)
given_by = models.ForeignKey(NPCharacter, verbose_name=_('Given by'))
    scene = models.ForeignKey(Scene, verbose_name=_("Scene where the Quest is activable"),
related_name=_("quests_for_scene"))
scene_after = models.ForeignKey(Scene, verbose_name=_("Scene after the End's Quest"),
related_name=_("finished_quests_for_scene"))
apparition_function = models.CharField(_("Name of Apparition's Function"), max_length=120, blank=True, null=True)
validation_function = models.CharField(_("Name of Validation's Function"), max_length=120)
def __str__(self):
return "%s | for scene :%s, by NPC %s in time %s" % (self.title, self.scene, self.given_by,
self.timedelta)
class ObjectType(models.Model):
name = models.CharField(u"Type Object Name", max_length=200)
description = models.TextField("Type's Description", blank=True, null=True)
short_name = models.CharField(_("Type Object's short Name"), max_length=20, unique=True)
class OneObject(models.Model):
name = models.CharField(_("Type Object Name"), max_length=200)
type = models.ForeignKey(ObjectType, verbose_name=_("Object's Type"))
description = models.TextField("Object's Description", blank=True, null=True)
initial_place = models.ForeignKey(Place, verbose_name=_("Object's Initial place"),
related_name=_("initial_objects_set"), blank=True, null=True)
stored_in = models.ForeignKey(Place, related_name=_("objects_stored_set"),
verbose_name=_("Where the object is stored"), blank=True, null=True)
| mit | 5,791,698,670,006,022,000 | 43.551724 | 117 | 0.645201 | false |
vitale232/ves | ves/VESinverse_vectorized.py | 1 | 12839 | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 28 16:32:48 2016
@author: jclark
this code uses the Ghosh method to determine the apparent resistivities
for a layered earth model. Either schlumberger or Wenner configurations
can be used
"""
import numpy as np
import random
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
plt.style.use('bmh')
import sys
# Schlumberger filter
fltr1 = [0., .00046256, -.0010907, .0017122, -.0020687,
.0043048, -.0021236, .015995, .017065, .098105, .21918, .64722,
1.1415, .47819, -3.515, 2.7743, -1.201, .4544, -.19427, .097364,
-.054099, .031729, -.019109, .011656, -.0071544, .0044042,
-.002715, .0016749, -.0010335, .00040124]
#Wenner Filter
fltr2 = [0., .000238935, .00011557, .00017034, .00024935,
.00036665, .00053753, .0007896, .0011584, .0017008, .0024959,
.003664, .0053773, .007893, .011583, .016998, .024934, .036558,
.053507, .078121, .11319, .16192, .22363, .28821, .30276, .15523,
-.32026, -.53557, .51787, -.196, .054394, -.015747, .0053941,
-.0021446, .000665125]
print(len(fltr1))
print(len(fltr2))
#I know there must be a better method to assign lists. And probably numpy
#arrays would be best. But my Python wasn't up to it. If the last letter
#is an 'l' that means it is a log10 of the value
# 65 is completely arbitrary
p = [0] * 20 # earth layer parameters?
r = [0] * 65 # apparent resistivty?
rl = [0] * 65 # np.log(r) ?
t = [0] * 50 #
b = [0] * 65 #
asav = [0] * 65 # voltage spacing in meters?
asavl = [0] * 65 # np.log(asav)
adatl = [0] * 65 # interpolated voltage spacing ( np.log(10) / 6 )?
rdatl = [0] * 65 # np.log()
# adat = [0] * 65 # voltage spacing input
# rdat = [0] * 65 # apparent res input
pkeep = [0] * 65 # earth parameters after applying equations?
rkeep = [0] * 65 # r after applying equations?
rkeepl = [0] * 65 # np.log()!
pltanswer = [0] * 65
pltanswerl = [0] * 65
pltanswerkeep = [0] * 65
pltanswerkeepl = [0] * 65
rl = [0] * 65
small = [0] * 65
xlarge = [0] * 65
x=[0] * 100
y = [0] * 100
y2 = [0] * 100
u = [0] * 5000
new_x = [0] * 1000
new_y = [0] * 1000
ndat = 13
#hard coded data input - spacing and apparent resistivities measured
#in teh field
adat = [0., 0.55, 0.95, 1.5, 2.5, 3., 4.5, 5.5, 9., 12., 20., 30., 70.]
rdat = [0., 125., 110., 95., 40., 24., 15., 10.5, 8., 6., 6.5, 11., 25.]
one30 = 1.e30 # What's the purpose of this and should it be user input?
rms = one30 # Just a starting value for rmserror?
errmin = 1.e10 # Should this be user input?
# INPUT
array_spacing = 'wenner' # either 'wenner' or 'schlumberger'
nLayers = 3 #number of layers
n = 2 * nLayers - 1 # What does n represent? number of parameters
spac = 0.2 # smallest electrode spacing - should this come from the input file?
m = 20 # number of points where resistivity is calculated
spac = np.log(spac)
delx = np.log(10.0) / 6. # I take it this is the sample interval on the log scale?
# this is where the range in parameters should be input from a GUI
# I'm hard coding this in for now
#enter thickenss range for each layer and then resistivity range.
#for 3 layers small[1] and small[2] are low end of thickness range
# small[3], small[4] and small[5] are the low end of resistivities
# I think I have it coded up that these are getting grabbed from the rectangles currently.
# Is that the best way to go?
small[1] = 1.
small[2] = 10.
small[3] = 20.
small[4] = 2.
small[5] = 500.
xlarge[1] = 5
xlarge[2] = 75.
xlarge[3] = 200.
xlarge[4] = 100
xlarge[5] = 3000.
iter_ = 10000 #number of iterations for the Monte Carlo guesses. to be input on GUI
# Is 10000 the most reasonable default, or should I play with it?
def readData(adat, rdat, ndat, return_indexed=False):
#normally this is where the data would be read from the csv file
# but now I'm just hard coding it in as global lists
for i in range(1, ndat):
adatl[i] = np.log10(adat[i])
rdatl[i] = np.log10(rdat[i])
if return_indexed:
return adatl[:ndat], rdatl[:ndat]
else:
return adatl, rdatl
def transf(y, i):
    """Evaluate the resistivity transform of the layer stack at log-spacing y
    and store it in r[i], via the standard bottom-up layer recurrence."""
    # these lines apparently find the computer precision ep
ep = 1.0
ep = ep / 2.0
fctr = ep + 1.
while fctr > 1.:
ep = ep / 2.0
fctr = ep + 1.
u = 1. / np.exp(y) # y = spac - 19. * delx - 0.13069
t[1] = p[n]
for j in range(2, nLayers + 1, 1):
pwr = -2. * u * p[nLayers + 1 - j]
if pwr < np.log(2. * ep):
pwr = np.log(2. * ep)
a = np.exp(pwr)
b = (1. - a) / (1. + a)
rs = p[n + 1 - j]
tpr = b * rs
t[j] = (tpr + t[j - 1]) / (1. + tpr * t[j - 1] / (rs * rs))
r[i] = t[nLayers]
return
def filters(b, k):
for i in range(1, m + 1):
re = 0.
for j in range(1, k + 1):
re = re + b[j] * r[i + k - j] # include ranges of thickness, res . push button for rmse error, observed data
# surf thicknes .2 - 100
# res 2-3000 # could use huge ranges at cost of time
r[i] = re
return
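# Note: filters() is the digital linear filter step of Ghosh's method: the
# apparent-resistivity samples are obtained by convolving the resistivity
# transform values in r (computed by transf) with fltr1/fltr2 above.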
def rmsfit():
if array_spacing.lower() == 'wenner':
y = spac - 19. * delx - 0.13069
mum1 = m + 28
for i in range(1, mum1 + 1):
transf(y, i)
y = y + delx
filters(fltr1, 29)
elif array_spacing.lower() == 'schlumberger':
s = np.log(2.)
y = spac - 10.8792495 * delx
mum2 = m + 33
for i in range(1, mum2 + 1):
transf(y, i)
a = r[i]
y1 = y + s
transf(y1, i)
r[i] = 2. * a - r[i]
y = y + delx
filters(fltr2, 34)
else:
print("\nType of survey not indicated.")
raise SystemExit('Exiting.\n\n Take better care next time.')
x = spac
#print("A-Spacing App. Resistivity")
for i in range(1, m + 1):
a = np.exp(x)
asav[i] = a
asavl[i] = np.log10(a)
rl[i] = np.log10(r[i])
x = x + delx
#print("%7.2f %9.3f " % ( asav[i], r[i]))
rms = error()
return rms
def error(): # simple rms error calc
sumerror = 0.
#pltanswer = [0]*64
spline(m, one30, one30, asavl, rl, y2) # So this calculates the predicted fit?
# and essentially operates on the list in place?
for i in range(1, ndat): # So you always skip the value 0? due to -inf returns?
ans = splint(m, adatl[i], asavl, rl, y2) # Then this calulates error?
sumerror = sumerror + (rdatl[i] - ans) * (rdatl[i] - ans)
#print(i,sum1,rdat[i],rdatl[i],ans)
pltanswerl[i] = ans
pltanswer[i] = np.power(10, ans)
rms = np.sqrt(sumerror / (ndat - 1))
# check the spline routine
# for i in range(1,m+1,1):
# anstest = splint(m, asavl[i],asavl,rl,y2)
# print( asavl[i], rl[i], anstest)
#print(' rms = ', rms)
# if you erally want to get a good idea of all perdictions from Montecarlo
# perform the following plot (caution - change iter to a smaller number)
#plt.loglog(adat[1:ndat],pltanswer[1:ndat])
return rms
# my code to do a spline fit to predicted data at the nice spacing of Ghosh
# use splint to determine the spline interpolated prediction at the
# spacing where the measured resistivity was taken - to compare observation
# to prediction
def spline(n, yp1, ypn, x=[] ,y=[] ,y2=[]):
"""Still struggling to understand the general operation of this function."""
u = [0] * 1000
one29 = 0.99e30
#print(x,y)
if yp1 > one29:
y2[0] = 0.
u[0] = 0.
else:
y2[0] = -0.5
u[0] = (3. / (x[1] - x[0])) * ((y[1] - y[0]) / (x[1] - x[0]) - yp1)
for i in range(1, n):
#print(i,x[i])
sig = (x[i] - x[i-1]) / (x[i+1] - x[i-1])
p=sig * y2[i - 1] + 2.
y2[i] = (sig-1.) / p
        u[i] = ((6. * ((y[i+1] - y[i]) / (x[i+1] - x[i]) - (y[i] - y[i-1]) /
                (x[i] - x[i-1])) / (x[i + 1] - x[i - 1]) - sig * u[i - 1]) / p)
if ypn > one29:
qn = 0.
un = 0.
else:
qn = 0.5
un = (3. / (x[n] - x[n - 1])) * (ypn - (y[n] - y[n - 1]) / (x[n] - x[n - 1]))
y2[n] = (un - qn * u[n - 1]) / (qn * y2[n - 1] + 1.)
for k in range(n-1, -1, -1):
y2[k] = y2[k] * y2[k + 1] + u[k]
return
def splint(n, x, xa=[], ya=[], y2a=[]):
    """Evaluate the cubic spline defined by (xa, ya, y2a) at x, locating the
    bracketing interval by bisection (cf. Numerical Recipes 'splint')."""
klo = 0
khi = n
while khi - klo > 1:
k = int((khi + klo) // 2)
if xa[k] > x:
khi = k
else:
klo = k
h = xa[khi] - xa[klo]
if abs(h) < 1e-20:
print(" bad xa input")
#print(x,xa[khi],xa[klo])
a = (xa[khi] - x) / h
b = (x - xa[klo]) / h
y = (a * ya[klo] + b * ya[khi] + ((a * a * a - a) * y2a[klo] +
(b * b * b - b) * y2a[khi]) * (h * h) /6.)
#print("x= ", x,"y= ", y, " ya= ", ya[khi]," y2a= ", y2a[khi], " h= ",h)
return y
#main here
if __name__ == '__main__':
adatl, rdatl = readData(adat, rdat, ndat, return_indexed=False)
print(adat[1:ndat],rdat[1:ndat])
print('log stufffff')
print(adatl[1:ndat], rdatl[1:ndat]) # is this to skip 0?
#enter thickenss range for each layer and then resistivity range.
#for 3 layers small[1] and small[2] are low end of thickness range
# small[3], small[4] and small[5] are the low end of resistivities
for iloop in range(1, int(iter_/2) + 1):
#print( ' iloop is ', iloop)
for i in range(1, n + 1): # number of parameters + 1
randNumber = random.random() # IS this just to add noise to the model?
# #print(randNumber, ' random')
# print(xlarge)
# print(small)
# s = input('')
# print('xlarge[i]: {}, small[i]: {}'.format(xlarge[i], small[i]))
p[i] = (xlarge[i] - small[i]) * randNumber + small[i]
# print(p)
print('\n')
print(p)
# s = input('')
rms = rmsfit()
if rms < errmin:
print('rms ', rms, ' errmin ', errmin)
for i in range(1, n + 1):
pkeep[i] = p[i]
for i in range(1, m + 1):
rkeep[i] = r[i]
rkeepl[i] = rl[i]
for i in range(1, ndat + 1):
pltanswerkeepl[i] = pltanswerl[i]
pltanswerkeep[i] = pltanswer[i]
errmin = rms
#output the best fitting earth model
print(' Layer ', ' Thickness ', ' Res_ohm-m ')
for i in range(1,nLayers,1):
print(i, pkeep[i], pkeep[nLayers+i-1])
print( nLayers, ' Infinite ', pkeep[n])
for i in range(1,m+1, 1):
asavl[i] = np.log10(asav[i])
#output the error of fit
print( ' RMS error ', errmin)
print( ' Spacing', ' Res_pred ', ' Log10_spacing ', ' Log10_Res_pred ')
for i in range(1,m+1,1):
#print(asav[i], rkeep[i], asavl[i], rkeepl[i])
print("%7.2f %9.3f %9.3f %9.3f" % ( asav[i], rkeep[i],
asavl[i], rkeepl[i]))
print('plot a lot')
plt.loglog(asav[1:m],rkeep[1:m],'-') # resistivity prediction curve
plt.loglog(adat[1:ndat],pltanswerkeep[1:ndat], 'ro') # predicted data red dots
s=7
plt.loglog(adat[1:ndat],rdat[1:ndat],'bo',markersize=s) #original data blue dots
plt.show()
plt.grid(True)
sys.exit(0)
| lgpl-3.0 | -1,006,991,285,408,766,500 | 31.442708 | 120 | 0.53283 | false |
mikoim/funstuff | null/crawler/tt2db.py | 1 | 1709 | # -*- coding: utf-8 -*-
import urllib.request
import time
import pymongo
import http.client
import re
def httpWrapper(url):
try:
data_raw = urllib.request.urlopen(url).read().decode('utf-8')
except:
return "NULL"
return data_raw
def getGirlName(data_raw):
matches = re.findall('名前[ ]+?/[ ]+?(.+?)(|\n)*( |)*(|\n)*( |)*(\(|<br />)', data_raw)
for match in matches[0]:
return match.replace(' ', '')
return
def getGrilPhotos(data_raw):
matches = re.findall('<span>(photos/.+?.jpg)</span>', data_raw)
if len(matches) == 0:
matches = re.findall('<a href="(photos/.+?.jpg)">', data_raw)
return matches
def getLastModTime(path):
conn = http.client.HTTPConnection("twintail-japan.com")
conn.request("HEAD", path)
res = conn.getresponse()
return int(time.mktime(time.strptime(res.getheaders()[2][1], '%a, %d %b %Y %H:%M:%S %Z')) * 1000)
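# Note: getLastModTime assumes Last-Modified is the third header returned
# (res.getheaders()[2]) and converts it with the local-time mktime(), so the
# millisecond epoch value it returns is timezone-naive.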
conn = pymongo.Connection()
db = conn.tw2db
col = db.tm
for x in range(1, 3):
baseUrl = "http://twintail-japan.com/sailor/contents/%d.html" % x
data_raw = httpWrapper(baseUrl)
if data_raw != "NULL":
name = getGirlName(data_raw)
for photo in getGrilPhotos(data_raw):
dbtml = {'author' : '', 'time' : '', 'title' : '', 'via' : '', 'src' : '', 'message' : ''}
dbtml['author'] = name
dbtml['title'] = name + " @ セーラ服とツインテール"
dbtml['via'] = baseUrl
dbtml['message'] = ""
dbtml['time'] = getLastModTime("/sailor/contents/%d.html" % x)
dbtml['src'] = 'http://twintail-japan.com/sailor/contents/%s' % (photo)
col.insert(dbtml)
print(x) | mit | -7,780,927,202,663,628,000 | 26.606557 | 102 | 0.562686 | false |
jdobes/cobbler | cobbler/item.py | 1 | 13896 | """
An Item is a serializable thing that can appear in a Collection
Copyright 2006-2009, Red Hat, Inc
Michael DeHaan <[email protected]>
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
import exceptions
import utils
from cexceptions import *
from utils import _
import pprint
import fnmatch
class Item:
TYPE_NAME = "generic"
def __init__(self,config,is_subobject=False):
"""
Constructor. Requires a back reference to the Config management object.
NOTE: is_subobject is used for objects that allow inheritance in their trees. This
inheritance refers to conceptual inheritance, not Python inheritance. Objects created
with is_subobject need to call their set_parent() method immediately after creation
and pass in a value of an object of the same type. Currently this is only supported
for profiles. Subobjects blend their data with their parent objects and only require
a valid parent name and a name for themselves, so other required options can be
gathered from items further up the cobbler tree.
Old cobbler: New cobbler:
distro distro
profile profile
system profile <-- created with is_subobject=True
system <-- created as normal
        For consistency, there is some code supporting this in all object types, though it is only usable
(and only should be used) for profiles at this time. Objects that are children of
objects of the same type (i.e. subprofiles) need to pass this in as True. Otherwise, just
use False for is_subobject and the parent object will (therefore) have a different type.
"""
self.config = config
self.settings = self.config._settings
self.clear(is_subobject) # reset behavior differs for inheritance cases
self.parent = '' # all objects by default are not subobjects
self.children = {} # caching for performance reasons, not serialized
self.log_func = self.config.api.log
self.ctime = 0 # to be filled in by collection class
self.mtime = 0 # to be filled in by collection class
self.uid = "" # to be filled in by collection class
self.last_cached_mtime = 0
self.cached_datastruct = ""
def clear(self,is_subobject=False):
"""
Reset this object.
"""
utils.clear_from_fields(self,self.get_fields(),is_subobject=is_subobject)
def make_clone(self):
raise exceptions.NotImplementedError
def from_datastruct(self,seed_data):
"""
Modify this object to take on values in seed_data
"""
return utils.from_datastruct_from_fields(self,seed_data,self.get_fields())
def to_datastruct(self):
return utils.to_datastruct_from_fields(self, self.get_fields())
def printable(self):
return utils.printable_from_fields(self,self.get_fields())
def remote_methods(self):
return utils.get_remote_methods_from_fields(self,self.get_fields())
def set_uid(self,uid):
self.uid = uid
def get_children(self,sorted=True):
"""
Get direct children of this object.
"""
keys = self.children.keys()
if sorted:
keys.sort()
results = []
for k in keys:
results.append(self.children[k])
return results
def get_descendants(self):
"""
Get objects that depend on this object, i.e. those that
would be affected by a cascading delete, etc.
"""
results = []
kids = self.get_children(sorted=False)
results.extend(kids)
for kid in kids:
grandkids = kid.get_descendants()
results.extend(grandkids)
return results
def get_parent(self):
"""
For objects with a tree relationship, what's the parent object?
"""
return None
def get_conceptual_parent(self):
"""
The parent may just be a superclass for something like a
subprofile. Get the first parent of a different type.
"""
# FIXME: this is a workaround to get the type of an instance var
# what's a more clean way to do this that's python 2.3 friendly?
# this returns something like: cobbler.item_system.System
mtype = str(self).split(" ")[0][1:]
parent = self.get_parent()
while parent is not None:
ptype = str(parent).split(" ")[0][1:]
if mtype != ptype:
self.conceptual_parent = parent
return parent
parent = parent.get_parent()
return None
def set_name(self,name):
"""
All objects have names, and with the exception of System
they aren't picky about it.
"""
if self.name not in ["",None] and self.parent not in ["",None] and self.name == self.parent:
raise CX(_("self parentage is weird"))
if not isinstance(name, basestring):
raise CX(_("name must be a string"))
for x in name:
if not x.isalnum() and not x in [ "_", "-", ".", ":", "+" ] :
raise CX(_("invalid characters in name: '%s'" % name))
self.name = name
return True
def set_comment(self, comment):
if comment is None:
comment = ""
self.comment = comment
return True
def set_owners(self,data):
"""
The owners field is a comment unless using an authz module that pays attention to it,
like authz_ownership, which ships with Cobbler but is off by default. Consult the Wiki
docs for more info on CustomizableAuthorization.
"""
owners = utils.input_string_or_list(data)
self.owners = owners
return True
def set_kernel_options(self,options,inplace=False):
"""
Kernel options are a space delimited list,
like 'a=b c=d e=f g h i=j' or a hash.
"""
(success, value) = utils.input_string_or_hash(options)
if not success:
raise CX(_("invalid kernel options"))
else:
if inplace:
for key in value.keys():
if key.startswith("~"):
del self.kernel_options[key[1:]]
else:
self.kernel_options[key] = value[key]
else:
self.kernel_options = value
return True
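    # Note: with inplace=True, a key prefixed with "~" (e.g. "~console")
    # removes that option from the stored hash instead of setting a value.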
def set_kernel_options_post(self,options,inplace=False):
"""
Post kernel options are a space delimited list,
like 'a=b c=d e=f g h i=j' or a hash.
"""
(success, value) = utils.input_string_or_hash(options)
if not success:
raise CX(_("invalid post kernel options"))
else:
if inplace:
for key in value.keys():
if key.startswith("~"):
                        del self.kernel_options_post[key[1:]]
else:
self.kernel_options_post[key] = value[key]
else:
self.kernel_options_post = value
return True
def set_ks_meta(self,options,inplace=False):
"""
A comma delimited list of key value pairs, like 'a=b,c=d,e=f' or a hash.
The meta tags are used as input to the templating system
to preprocess kickstart files
"""
(success, value) = utils.input_string_or_hash(options,allow_multiples=False)
if not success:
return False
else:
if inplace:
for key in value.keys():
if key.startswith("~"):
del self.ks_meta[key[1:]]
else:
self.ks_meta[key] = value[key]
else:
self.ks_meta = value
return True
def set_mgmt_classes(self,mgmt_classes):
"""
Assigns a list of configuration management classes that can be assigned
to any object, such as those used by Puppet's external_nodes feature.
"""
mgmt_classes_split = utils.input_string_or_list(mgmt_classes)
self.mgmt_classes = utils.input_string_or_list(mgmt_classes_split)
return True
def set_template_files(self,template_files,inplace=False):
"""
A comma seperated list of source=destination templates
that should be generated during a sync.
"""
(success, value) = utils.input_string_or_hash(template_files,allow_multiples=False)
if not success:
return False
else:
if inplace:
for key in value.keys():
if key.startswith("~"):
del self.template_files[key[1:]]
else:
self.template_files[key] = value[key]
else:
self.template_files = value
return True
def sort_key(self,sort_fields=[]):
data = self.to_datastruct()
return [data.get(x,"") for x in sort_fields]
def find_match(self,kwargs,no_errors=False):
# used by find() method in collection.py
data = self.to_datastruct()
for (key, value) in kwargs.iteritems():
# Allow ~ to negate the compare
if value is not None and value.startswith("~"):
res=not self.find_match_single_key(data,key,value[1:],no_errors)
else:
res=self.find_match_single_key(data,key,value,no_errors)
if not res:
return False
return True
def find_match_single_key(self,data,key,value,no_errors=False):
# special case for systems
key_found_already = False
if data.has_key("interfaces"):
if key in [ "mac_address", "ip_address", "subnet", "netmask", "virt_bridge", \
"dhcp_tag", "dns_name", "static_routes", "interface_type", \
"interface_master", "bonding_opts", "bridge_opts", "bonding", "bonding_master" ]:
if key == "bonding":
key = "interface_type" # bonding is deprecated
elif key == "bonding_master":
key = "interface_master" # bonding_master is deprecated
key_found_already = True
for (name, interface) in data["interfaces"].iteritems():
if value is not None and interface.has_key(key):
if self.__find_compare(interface[key], value):
return True
if not data.has_key(key):
if not key_found_already:
if not no_errors:
# FIXME: removed for 2.0 code, shouldn't cause any problems to not have an exception here?
# raise CX(_("searching for field that does not exist: %s" % key))
return False
else:
if value is not None: # FIXME: new?
return False
if value is None:
return True
else:
return self.__find_compare(value, data[key])
def __find_compare(self, from_search, from_obj):
if isinstance(from_obj, basestring):
# FIXME: fnmatch is only used for string to string comparisions
# which should cover most major usage, if not, this deserves fixing
if fnmatch.fnmatch(from_obj.lower(), from_search.lower()):
return True
else:
return False
else:
if isinstance(from_search, basestring):
if type(from_obj) == type([]):
from_search = utils.input_string_or_list(from_search)
for x in from_search:
if x not in from_obj:
return False
return True
if type(from_obj) == type({}):
(junk, from_search) = utils.input_string_or_hash(from_search,allow_multiples=True)
for x in from_search.keys():
y = from_search[x]
if not from_obj.has_key(x):
return False
if not (y == from_obj[x]):
return False
return True
if type(from_obj) == type(True):
if from_search.lower() in [ "true", "1", "y", "yes" ]:
inp = True
else:
inp = False
if inp == from_obj:
return True
return False
raise CX(_("find cannot compare type: %s") % type(from_obj))
def dump_vars(self,data,format=True):
raw = utils.blender(self.config.api, False, self)
if format:
return pprint.pformat(raw)
else:
return raw
def set_depth(self,depth):
self.depth = depth
def set_ctime(self,ctime):
self.ctime = ctime
def set_mtime(self,mtime):
self.mtime = mtime
def set_parent(self,parent):
self.parent = parent
def check_if_valid(self):
"""
Raise exceptions if the object state is inconsistent
"""
if self.name is None or self.name == "":
raise CX("Name is required")
| gpl-2.0 | -3,816,668,014,623,622,000 | 35.859416 | 109 | 0.549151 | false |
digifant/eMonitor | tools/update-osm-data.py | 1 | 10402 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import logging.handlers
import traceback
import os
import time
from optparse import OptionParser
import MySQLdb
import codecs
import requests
import sys
import pdb
import argparse
from pprint import pprint
def osmWebUrl (lat,lng):
return "http://www.openstreetmap.org/?&mlat=%s&mlon=%s&zoom=17" % (lat,lng)
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1', 'j', 'ja'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0', 'nein'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
def prompt(query):
sys.stdout.write('%s [y/n]: ' % query)
val = raw_input()
try:
ret = str2bool(val)
except ValueError:
sys.stdout.write('Please answer with a y/n\n')
return prompt(query)
return ret
# returns None if not found!
def queryOsmNominatin(street, streetno, city ):
url = 'http://nominatim.openstreetmap.org/search'
params = 'format=json&city={}&street={}'.format(city, street)
#params = 'format=json&city=%s&street=%s' % (city, address)
if streetno != '':
params += ' {}'.format(streetno)
params = params.replace (' ', '+')
params = params.replace ('<', '<')
params = params.replace ('>', '>')
logging.debug ("OSM nominatim query: %s?%s" % (url,params))
headers = {
'User-Agent': 'OSMSyncForFireFighterStreetDbOfOurTown',
'From': '[email protected]'
}
r = requests.get('{}?{}'.format(url, params), timeout=3, headers=headers)
#logging.debug("osm nomination result: %s" % pprint(r.json()))
#import pdb; pdb.set_trace()
_position = None
try:
_position = {'lat':r.json()[0]['lat'], 'lng':r.json()[0]['lon'], 'osm_id':r.json()[0]['osm_id'].decode('iso-8859-1').encode('utf8') }
except IndexError:
logging.error ("street %s not found! (housenumber=%s)" % (street, streetno))
#logging.debug (_position)
return _position
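# Example (illustrative; actual values depend on live Nominatim data):
#   queryOsmNominatin('Hauptstrasse', '', 'Kleinblittersdorf')
#   -> {'lat': '49.2...', 'lng': '7.0...', 'osm_id': '...'}, or None on miss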
def updateMysqlStreets (db, user, passwd, command):
# Open database connection
db = MySQLdb.connect("localhost",user,passwd,db )
# prepare a cursor object using cursor() method
cursor = db.cursor()
# execute SQL query using execute() method.
cursor.execute("SELECT VERSION()")
# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print "Database version : %s " % data
not_found = {}
if command == "update_position":
sql = "SELECT * FROM streets"
try:
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
print ("Street DB %s lat=%s lng=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6]) )
if ( row[0] > 0 ):
_position = queryOsmNominatin (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
#No heavy uses (an absolute maximum of 1 request per second).
#http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
time.sleep (1)
if _position != None:
if row[9] == int(_position['osm_id']):
sql = 'update streets set lat=%s, lng=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(row[0]))
logging.debug ("sql query %s" % sql)
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat and lng to (%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.fatal ("OSMID stimmt nicht überein! %s vs %s" % (row[9], _position['osm_id'] ))
else:
logging.fatal ("OSM nominatin Query failed!")
not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
except:
logging.error ("DB Error %s" % traceback.format_exc() )
# disconnect from server
db.close()
logging.info ("Sync finished")
if len(not_found) > 0:
logging.error ("didnt found %s streets:" % len(not_found))
for k in not_found.keys():
logging.error ("not found: id=%s streetname=%s" % (k, not_found[k]))
def verifyMysqlStreets (db, user, passwd, command, street=-1):
# Open database connection
db = MySQLdb.connect("localhost",user,passwd,db )
# prepare a cursor object using cursor() method
cursor = db.cursor()
# execute SQL query using execute() method.
cursor.execute("SELECT VERSION()")
# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print "Database version : %s " % data
not_found = {}
if command == "verify_streets":
sql = "SELECT * FROM streets"
if street > 0:
sql = sql + " where id=%i" % street
try:
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
print ("Street %s lat=%s lng=%s url=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6], osmWebUrl(row[5],row[6]) ) )
if ( row[0] > 0 ):
_position = queryOsmNominatin (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
if _position != None:
sql = 'update streets set lat=%s, lng=%s, osmid=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(_position['osm_id']), int(row[0]))
logging.debug ("sql query %s" % sql)
if row[9] == int(_position['osm_id']):
logging.info ("osmid=%s db lat=%s db lng=%s OsmNominatim lat=%s lng=%s new url=%s" % (row[9], row[5], row[6], float(_position['lat']), float(_position['lng']), osmWebUrl(float(_position['lat']),float(_position['lng'])) ) )
if round(float(row[5]),4) != round(float(_position['lat']),4) or round(float(row[6]),4) != round(float(_position['lng']),4):
logging.info ("%i NO MATCH" % row[9])
if options.ask_fix and prompt ("Fix?"):
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.info ("%i MATCH" % row[9])
else:
logging.fatal ("OSMID stimmt nicht überein! %s vs %s url=%s" % (row[9], _position['osm_id'], osmWebUrl(float(_position['lat']),float(_position['lng']))))
if options.ask_fix and prompt ("Fix?"):
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.fatal ("OSM nominatin Query failed!")
not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
#No heavy uses (an absolute maximum of 1 request per second).
#http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
time.sleep (1)
except:
logging.error ("DB Error %s" % traceback.format_exc() )
# disconnect from server
db.close()
logging.info ("verify finished")
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-d", "--database", dest="database", help="mysql database name", default="emonitor")
parser.add_option("-u", "--user", dest="user", help="mysql user", default='emonitor')
parser.add_option("-p", "--passwd", dest="passwd", help="mysql password", default='emonitor')
parser.add_option("--update-streets-position", dest="update_streets_position", help="update positions for all streets", action="store_true", default=False)
parser.add_option("--verify-street-position", dest="verify_street_position", help="verify positions for given street", type=int, default=-1)
parser.add_option("-v", "--verify-all-streets-position", dest="verify_all_streets_position", help="verify positions for given street", action="store_true", default=False)
parser.add_option("-a", "--ask-fix", dest="ask_fix", help="ask for fixing", action="store_true", default=False)
(options, args) = parser.parse_args()
#logging.basicConfig(filename='screenshot-and-telegram.log', level=logging.DEBUG)
logging.basicConfig(level=logging.DEBUG)
if options.update_streets_position:
updateMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="update_position")
if options.verify_street_position > 0:
verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets", street=int(options.verify_street_position))
if options.verify_all_streets_position:
verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets")
#queryOsmNominatin(street="Rexrothstraße", streetno='', city='Kleinblittersdorf')
| bsd-3-clause | -7,600,303,221,806,536,000 | 45.424107 | 250 | 0.544379 | false |
suma12/asterix | asterix/APDU.py | 1 | 31348 | """ asterix/APDU.py
__author__ = "Petr Tobiska"
Author: Petr Tobiska, mailto:[email protected]
This file is part of asterix, a framework for communication with smartcards
based on pyscard. This file implements handfull APDU commands.
asterix is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
asterix is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
References:
[GP CS] GlobalPlatform Card Specification, Version 2.2.1, Jan 2011
[GP AmD] Secure Channel Protocol 03, Card Specification v 2.2 - Amendment D,
Version 1.1, Sep 2009
"""
import re
import hashlib
import random
from struct import pack, unpack
from binascii import hexlify, unhexlify
# PyCrypto
from Crypto.Cipher import DES, DES3, AES
# ECSDA
from ecdsa import ecdsa, ellipticcurve
# pyscard
from smartcard.ATR import ATR
# asterix
from formutil import s2l, l2s, derLen, derLV, s2int, int2s, s2ECP, chunks,\
split2TLV, findTLValue, swapNibbles
from GAF import GAF
from applet import DESsign
from SCP03 import CMAC
from mycard import ISOException, resetCard
__all__ = ('calcKCV', 'putKey', 'storeDataPutKey',
'push2B_DGI', 'X963keyDerivation', 'Push3scenario',
'selectApplet', 'openLogCh', 'closeLogCh',
'getStatus', 'getExtCardRes', 'getData',
'selectFile', 'readBinary', 'readRecord',
'updateBinary', 'updateRecord',
'verifyPin', 'changePin', 'disablePin', 'enablePin', 'unblockPin',
'selectUSIM', 'cardInfo', 'KeyType')
INS_VERIFY_PIN = 0x20
INS_CHANGE_PIN = 0x24
INS_DISABLE_PIN = 0x26
INS_ENABLE_PIN = 0x28
INS_UNBLOCK_PIN = 0x2C
INS_MANAGE_LOGCH = 0x70
INS_SELECT = 0xA4
INS_READBIN = 0xB0
INS_READREC = 0xB2
INS_GETDATA = 0xCA
INS_UPDBIN = 0xD6
INS_UPDREC = 0xDC
INS_PUTKEY = 0xD8
INS_STOREDATA = 0xE2
INS_GETSTATUS = 0xF2
class KeyType:
"""Key types as defined in [GP CS] Tab 11.16"""
# subset of currently supported keys
DES_IMPLICIT = 0x80
TDES_CBC = 0x82
DES_ECB = 0x83
DES_CBC = 0x84
AES = 0x88
def calcKCV(keyValue, zAES=False):
"""Calculate KCV for symmetric key.
keyValue - key values as string (DES, 3DES2k, 3DES3k, AES)
zAES - True if key is AES (i.e. encrypt block of '01' instead of '00')
Return 3B-long string."""
if zAES:
assert len(keyValue) in (16, 24, 32), "Wrong length of AES key"
block = '\x01'*16
tkey = AES.new(keyValue, AES.MODE_ECB)
else:
assert len(keyValue) in (8, 16, 24), "Wrong length of (3)DES key"
block = '\x00'*8
if len(keyValue) == 8:
tkey = DES.new(keyValue, DES.MODE_ECB)
else:
tkey = DES3.new(keyValue, DES.MODE_ECB)
return tkey.encrypt(block)[:3]
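
# Illustrative check (well-known DES test vector, not a production key):
# the all-zero single-DES key has the KCV 8CA64D, i.e.
#   calcKCV(unhexlify('0000000000000000')) == unhexlify('8CA64D')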
def putKey(oldKeyVersion, newKeyVersion, keyId, keyComponents,
zMoreCmd=False, zMultiKey=False, keyDEK=None,
lenMAC=8):
"""Build APDU for PUT KEY command.
oldKeyVersion - key version to be replaced. If zero, new key is created.
newKeyVersion - key version of key being put
keyId - id of the 1st key being put
keyComponents - list of key components being put.
        Each component is a tuple of key type (u8) and value (string).
zMoreCmd - P1.b8, signals if there is more commands
zMultiKey - P2.b8, signals if more than one component being put
keyDEK - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
(including padding) key component value if not None.
If has attribute zAES and keyDEK.zAES evaluates as True, it is
considered as AES key and [GP AmD] 7.2 formatting is used.
lenMAC - length of CMAC for AES.
Applicable if AES key with key id=0x02 (KID) and
key version 0x01-0x0F or 0x11 is being put with AES keyDEK
(see ETSI 102.226 rel 9+, 8.2.1.5)
Returns APDU built (as list of u8).
See [GP CS] 11.8 and [GP AmD] 7.2 for reference.
See [GP CS] Tab 11.16 for coding of key type.
Currently only Format1 supported.
"""
# sanity check
assert 0 <= oldKeyVersion < 0x80
assert 0 < newKeyVersion < 0x80
assert 0 < keyId < 0x80
assert len(keyComponents) > 0
assert lenMAC in (4, 8)
P1 = (zMoreCmd and 0x80 or 0) | oldKeyVersion
P2 = (zMultiKey and 0x80 or 0) | keyId
data = chr(newKeyVersion)
for kc in keyComponents:
keyType, keyVal = kc[:2] # ignore eventual keyUsage and keyAccess
assert 0 <= keyType < 0xFF
if keyDEK:
encValue = keyDEK.encrypt(keyVal)
# for AES as keyDEK, prepend length of component
if 'zAES' in dir(keyDEK) and keyDEK.zAES:
encValue = derLen(keyVal) + encValue
# see ETSI 102.226 rel 9+, 8.2.1.5
if keyType == KeyType.AES and keyId == 2 and \
newKeyVersion in range(0x01, 0x10) + [0x11]:
encValue += chr(lenMAC)
else:
encValue = keyVal
# calculate KCV
if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
kcv = calcKCV(keyVal, keyType == KeyType.AES)
else:
kcv = ''
data += chr(keyType) + derLen(encValue) + encValue + derLen(kcv) + kcv
keyId += 1
apdu = [0x80, INS_PUTKEY, P1, P2, len(data)] + s2l(data)
return apdu
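
# Usage sketch for putKey (hypothetical key material; without keyDEK the
# components travel in clear, which is acceptable only on test cards):
#   kcs = [(KeyType.AES, unhexlify('00112233445566778899AABBCCDDEEFF'))] * 3
#   apdu = putKey(oldKeyVersion=0, newKeyVersion=0x20, keyId=1,
#                 keyComponents=kcs)
#   resp, sw1, sw2 = c.transmit(apdu)  # c: an open card connection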
def push2B_DGI(keyVer, keys, keyCASDenc):
""" Create DGI 00A6 and 8010 for Push2B scenario
keyVer - key verions (u8)
keys - ((keytype, keyvalue)); 1 or 3 sym. keys
keyCASDenc - a method to call for encryption 8010 content
Return DGIs built (as list of strings)."""
# DGI tag on 2B (GP Card Spec 2.2.1, 11.1.12)
# DGI length coding as in GP Systems Scripting Language Spec. v1.1.0, an. B
# i.e. on 1B for x < 255, FF<yyyy> for x >=255
KAT = GAF(""" -- Control Reference Template (KAT)
-- see GP 2.2.1 AmA 4.4
00A6 #[
A6 #(
90 #(04) -- scenario identifier: Push#2B
95 #($keyUsage)
80 #($keyType)
81 #($keyLen)
83 #($keyVer)
-- 45 #($SDIN) -- optional Security Domain Image Number
)] """)
assert len(keys) in (1, 3), "One or three sym. keys expected"
keyUsage = len(keys) == 1 and '\x5C' or '\x10' # Tab. 13
keyType = keys[0][0]
assert all([k[0] == keyType for k in keys]), "Key types differ"
# remap keyType to '80' as required by GP UICC config 10.3.1
if keyType in (KeyType.TDES_CBC, KeyType.DES_ECB, KeyType.DES_CBC):
keyType = KeyType.DES_IMPLICIT
lens = [len(k[1]) for k in keys]
l = max(lens)
assert l == min(lens), "Key lengths differ"
dgi00A6 = KAT.eval(keyUsage=keyUsage, keyType=chr(keyType),
keyLen=chr(l), keyVer=chr(keyVer))
data = keyCASDenc(''.join([k[1] for k in keys]))
dgi8010 = pack(">H", 0x8010) + chr(len(data)) + data
return (dgi00A6, dgi8010)
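
# Usage sketch for push2B_DGI (hypothetical key set; keyCASDenc would normally
# encrypt under the CASD public key - the identity lambda below is for
# illustration only, never for real cards):
#   keys = [(KeyType.DES_IMPLICIT,
#            unhexlify('00112233445566778899AABBCCDDEEFF'))] * 3
#   dgi00A6, dgi8010 = push2B_DGI(keyVer=0x20, keys=keys,
#                                 keyCASDenc=lambda data: data)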
def storeDataPutKeyDGI(keyVer, keyComponents, keyId=1, keyDEK=None):
"""Build DGI for Store Data for Put Key.
keyVer - key version of key being created
keyComponents - list of key components being put.
        Each component is a tuple of key type (u8), value (string)
and optionally Key Usage Qualifier and Key Access
(u8, defaults 0x18, 0x14 or 0x48 for key UQ, 0x00 for key ac.)
keyId - id of the 1st key being created (optional, u8, default 1)
keyDEK - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
(including padding) key component value if not None.
If has attribute zAES and keyDEK.zAES evaluates as True, it is
considered as AES key and [GP AmD] 7.2 formatting is used.
Returns DGIs built (as list of string).
See GP 2.2.1 AmA 4.10.2 for reference.
"""
# sanity check
assert 0 < keyVer and keyVer < 0x80
assert 0 < keyId and keyId < 0x80
assert len(keyComponents) > 0
KeyUQ = (None, 0x38, 0x34, 0xC8) # see GP 2.2.1, 11.1.9
templ = """ B9 #(95#($keyUQ) 96#($keyAc) 80#($keyType) 81#($keyLen)
82#($keyId) 83#($keyVer) 84#($KCV))"""
d = {'keyVer': chr(keyVer)}
B9 = ''
dgi8113 = []
for kc in keyComponents:
assert len(kc) in (2, 4), "wrong keyComponent" + kc.__str__()
if len(kc) == 2:
keyType, keyVal = kc
keyUQ = 1 <= keyId <= 3 and KeyUQ[keyId] or 0xFF
keyAc = 0x00
else:
keyType, keyVal, keyUQ, keyAc = kc
d['keyLen'] = chr(len(keyVal))
assert 0 <= keyType < 0xFF
if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
d['KCV'] = calcKCV(keyVal, keyType == KeyType.AES)
else:
d['KCV'] = ''
d['keyId'] = chr(keyId)
for k in ('keyType', 'keyUQ', 'keyAc', 'keyId'):
d[k] = chr(locals()[k])
tlv = GAF(templ).eval(**d)
if keyDEK:
encValue = keyDEK.encrypt(keyVal)
else:
encValue = keyVal
B9 += tlv
dgi8113.append(pack(">HB", 0x8113, len(encValue)) + encValue)
keyId += 1
return(pack(">HB", 0x00B9, len(B9)) + B9, dgi8113)
def storeDataPutKey(keyVer, keyComponents, keyId=1, keyDEK=None):
"""Build APDU for Store Data for Put Key.
keyVer, keyComponents, keyId and keyDEK as in storeDataPutKeyDGI.
Return APDU a u8 list."""
dgi00B9, dgi8113 = storeDataPutKeyDGI(keyVer, keyComponents,
keyId, keyDEK)
data = dgi00B9 + ''.join(dgi8113)
assert len(data) < 256, "Longer Put Key not implemented"
P1 = 0x88
P2 = 0
apdu = [0x80, INS_STOREDATA, P1, P2, len(data)] + s2l(data)
return apdu
# ###### Scenario 3 stuff
# Preloaded ECC Curve Parameters, GP 2.2.1 AmE 4.5
# N.B., all have cofactor = 1
ECC_Curves = {
0x00: ecdsa.generator_256, # NIST P-256
0x01: ecdsa.generator_384, # NIST P-384
0x02: ecdsa.generator_521, # NIST P-521
# 0x03: brainpoolP256r1,
# 0x04: brainpoolP256t1,
# 0x05: brainpoolP384r1,
# 0x06: brainpoolP384t1,
# 0x07: brainpoolP512r1,
# 0x08: brainpoolP512t1,
}
# tag definition
T_IIN = 0x42
T_SDIN = T_CIN = 0x45
T_keyType = 0x80
T_keyLen = 0x81
T_keyID = 0x82
T_keyVer = 0x83
T_DR = 0x85
T_HostID = 0x84
T_receipt = 0x86
T_scenarioID = 0x90
T_seqCounter = 0x91
T_keyUsage = 0x95
T_keyAcc = 0x96
T_CRT = 0xA6
def X963keyDerivation(sharedSecret, bytelen, sharedInfo='',
h = hashlib.sha256):
""" X9.63 Key Derivation Function as deifned in TR-03111 4.3.3
bytelen - expected length of Key Data
sharedSecret, sharedInfo - strings
h - function to create HASH object (default hashlib.sha256)
Return Key Data (string)
Reference: TR-03111: BSI TR-03111 Elliptic Curve Cryptography, Version 2.0
https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TR03111/BSI-TR-03111_pdf.html"""
keyData = ''
l = h().digest_size
j = (bytelen - 1)/l + 1
for i in xrange(1, 1+j):
keyData += h(sharedSecret + pack(">L", i) + sharedInfo).digest()
return keyData[:bytelen]
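
# Quick self-check (hypothetical shared secret): for bytelen <= digest_size a
# single round suffices, so the result equals one truncated hash:
#   ss = unhexlify('0102030405060708')
#   X963keyDerivation(ss, 16) == hashlib.sha256(ss + pack(">L", 1)).digest()[:16]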
def DESMAC(key, data):
""" Calculate MAC single DES with final 3DES"""
return DESsign(key).calc(data)
ktDES = KeyType.DES_IMPLICIT
ktAES = KeyType.AES
class Push3scenario:
""" Implementation of Global Platform Push #3 scenario (ECKA)"""
def __init__(self, keyParRef, pkCASD, **kw):
""" Constructor
keyParRef - Key Parameter Reference
pkCASD - PK.CASD.ECKA (tuple long x, long y)
optional **kw: IIN, CIN (as strings)"""
assert keyParRef in ECC_Curves, \
"Unknown Key param reference 0x%02X" % keyParRef
self.keyParRef = keyParRef
self.generator = ECC_Curves[keyParRef]
self.curve = self.generator.curve()
self.bytelen = len(int2s(self.curve.p()))
assert self.bytelen in (32, 48, 64, 66) # currently allowed keys
pkCASDxy = s2ECP(pkCASD)
assert self.curve.contains_point(*pkCASDxy),\
"PK.CASD.ECKA not on the curve"
self.pkCASD = ellipticcurve.Point(self.curve, *pkCASDxy)
for k in ('IIN', 'CIN'):
if k in kw:
assert isinstance(kw[k], str)
self.__dict__[k] = kw[k]
def makeDGI(self, keyVer, privkey=None,
keys=([(KeyType.AES, 16)]*3),
zDelete=False, zDR=False, zID=False, **kw):
""" Prepare data for Push #3 scenario and generate keys.
keyVer - key version to create
privkey - eSK.AP.ECKA (secret multiplier as string)
randomly generated if None
keys - [(keyType, keyLen)] to generate
zDelete, zDR, zID - bits 1-3 of Parameters of scenario, (GP AmE, Tab. 4-17)
optional **kw: keyId, seqCounter, SDIN, HostID
Return <data for StoreData>"""
if privkey is None:
secexp = random.randrange(2, self.generator.order())
else:
secexp = s2int(privkey)
assert 1 < secexp < self.generator.order(), "Wrong eSK.AP.ECKA"
print "eSK.AP.ECKA = %X" % secexp
pubkey = self.generator * secexp
dgi7F49 = pack(">HBB", 0x7F49, 2*self.bytelen+1, 4) + \
int2s(pubkey.x(), self.bytelen * 8) + \
int2s(pubkey.y(), self.bytelen * 8)
# calculate Shared Secret, suppose that cofactor is 1
S_AB = secexp * self.pkCASD
self.sharedSecret = int2s(S_AB.x(), self.bytelen * 8)
print "Shared Secret =", hexlify(self.sharedSecret).upper()
# build DGI 00A6
if zID:
assert hasattr(self, 'IIN'), "Missing IIN while CardId requested"
            assert hasattr(self, 'CIN'), "Missing CIN while CardId requested"
assert 'HostID' in kw and isinstance(kw['HostID'], str)
self.HostCardID = ''.join([derLV(v) for v in
(kw['HostID'], self.IIN, self.CIN)])
else:
self.HostCardID = ''
self.zDR = zDR
scenarioPar = (zDelete and 1 or 0) +\
(zDR and 2 or 0) +\
(zID and 4 or 0)
assert all([k[0] in (KeyType.DES_IMPLICIT, KeyType.AES) for k in keys])
ktl1 = keys[0]
zDifKey = any([keys[i] != ktl1 for i in xrange(1, len(keys))])
tA6value = pack("BBBB", T_scenarioID, 2, 3, scenarioPar)
if zDifKey:
self.receiptAlgo = CMAC
self.keyLens = [16] + [k[1] for k in keys]
self.keyDesc = ''
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
# default keyUsage from GP 2.2.1 AmE tab. 4-16 for ENC, MAC, DEK
for k, keyUsage in zip(keys, (0x38, 0x34, 0xC8)):
if len(k) > 2:
keyUsage = k[2]
tB9value = pack("BBB", T_keyUsage, 1, keyUsage)
if len(k) >= 4: # optional key Access as fourth elem. of key
tB9value += pack("BBB", T_keyAcc, 1, k[3])
tB9value += pack("BBB", T_keyType, 1, k[0])
tB9value += pack("BBB", T_keyLen, 1, k[1])
self.keyDesc += pack("BBB", keyUsage, *k[:2])
tA6value += '\xB9' + derLV(tB9value)
else:
assert len(keys) in (1, 3), \
"One or three secure ch. keys expected."
self.keyLens = [ktl1[1]] * (1 + len(keys))
self.receiptAlgo = ktl1[0] == KeyType.AES and CMAC or DESMAC
keyUsage = len(keys) == 1 and 0x5C or 0x10
self.keyDesc = pack("BBB", keyUsage, *ktl1[:2])
tA6value += pack("BBB", T_keyUsage, 1, keyUsage)
if len(ktl1) == 4:
tA6value += pack("BBB", T_keyAcc, 1, ktl1[3])
tA6value += pack("BBB", T_keyType, 1, ktl1[0])
tA6value += pack("BBB", T_keyLen, 1, ktl1[1])
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
if 'seqCounter' in kw:
tA6value += chr(T_seqCounter) + derLV(kw['seqCounter'])
if 'SDIN' in kw:
tA6value += chr(T_SDIN) + derLV(kw['SDIN'])
if zID:
tA6value += chr(T_HostID) + derLV(kw['HostID'])
self.tA6 = chr(T_CRT) + derLV(tA6value)
dgi00A6 = pack(">HB", 0x00A6, len(self.tA6)) + self.tA6
return (dgi00A6, dgi7F49)
def generKeys(self, respData):
""" Verify receipt and generate symmetric keys.
respData - response to Store Data (string)
Return generated keys (tuple of strings)"""
        try:
            data2rec = self.tA6
        except AttributeError:
            print "Run makeDGI first"
            return
respTLV = split2TLV(respData)
if self.zDR:
            lenDR = (self.bytelen // 32) * 16  # map bytelen to 16 or 32
DR = respTLV[0][1]
assert len(respTLV) == 2 and \
respTLV[0][0] == T_DR and len(DR) == lenDR
data2rec += pack("BB", T_DR, lenDR) + DR
else:
assert len(respTLV) == 1
assert respTLV[-1][0] == T_receipt
receipt = respTLV[-1][1]
sharedInfo = self.keyDesc
if self.zDR:
sharedInfo += DR
if hasattr(self, 'HostCardID'):
sharedInfo += self.HostCardID
print "Shared Info =", hexlify(sharedInfo).upper()
keyData = X963keyDerivation(self.sharedSecret, sum(self.keyLens),
sharedInfo)
keyDataIt = chunks(keyData, self.keyLens)
receiptKey = keyDataIt.next()
print "Receipt Key =", hexlify(receiptKey).upper()
expReceipt = self.receiptAlgo(receiptKey, data2rec)
assert receipt == expReceipt, "Receipt verification failed"
return [k for k in keyDataIt if k] # skip empty rest
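
# End-to-end sketch of the Push#3 flow (the PK.CASD.ECKA bytes and the
# transport are placeholders; a real script sends the DGIs via STORE DATA and
# feeds the card's response back):
#   sc3 = Push3scenario(keyParRef=0x00, pkCASD=pkcasd_bytes)
#   dgi00A6, dgi7F49 = sc3.makeDGI(keyVer=0x21)
#   ... transmit STORE DATA carrying dgi00A6 and dgi7F49, collect respData ...
#   enc, mac, dek = sc3.generKeys(respData)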
def selectApplet(c, AID, logCh=0):
""" Select applet on a given logical channel or
open new log. channel if logCh is None. """
if logCh is None:
logCh = openLogCh(c)
# select the Applet on the given logical channel
apdu = [logCh, INS_SELECT, 4, 0, len(AID)] + s2l(AID)
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C and len(AID) == 0:
apdu = [logCh, INS_SELECT, 4, 0, sw2]
resp, sw1, sw2 = c.transmit(apdu)
if(sw1 == 0x61):
apdu = [logCh, 0xC0, 0, 0, sw2]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
respdata = l2s(resp)
# close channel
return (respdata, logCh)
def openLogCh(c):
""" Manage channel to open logical channel. """
apdu = [0, INS_MANAGE_LOGCH, 0, 0, 1]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return resp[0]
def closeLogCh(c, logCh):
apdu = [0, INS_MANAGE_LOGCH, 0x80, logCh, 0]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
class GetStatusData:
"""Represent and interpret data from Get status for Packages and Modules"""
def __init__(self, respdataPM, respdataApp):
ind = 0
self.packages = []
while len(respdataPM) > ind:
length = respdataPM[ind]
pack_aid = l2s(respdataPM[ind+1: ind+1+length])
ind += length + 1
lcs = respdataPM[ind]
priv = respdataPM[ind+1]
nmod = respdataPM[ind+2]
ind += 3
mods = []
for i in xrange(nmod):
length = respdataPM[ind]
mods.append(l2s(respdataPM[ind+1: ind+1+length]))
ind += length + 1
self.packages.append({'pack_aid': pack_aid,
'lcs': lcs,
'priv': priv,
'modules': mods})
ind = 0
self.insts = []
while len(respdataApp) > ind:
length = respdataApp[ind]
app_aid = l2s(respdataApp[ind+1: ind+1+length])
ind += length + 1
lcs = respdataApp[ind]
priv = respdataApp[ind+1]
ind += 2
self.insts.append({'app_aid': app_aid,
'lcs': lcs,
'priv': priv})
def __str__(self):
res = ''
for p in self.packages:
res += "Package AID: %s %02X %02X\n" % \
(hexlify(p['pack_aid']).upper().ljust(32),
p['lcs'], p['priv'])
for m in p['modules']:
res += " module %s\n" % hexlify(m).upper().ljust(32)
for p in self.insts:
res += "Insts AID : %s %02X %02X\n" % \
(hexlify(p['app_aid']).upper().ljust(32),
p['lcs'], p['priv'])
return res
def getStatus(sc, AID_pref=''):
""" Issue GET STATUS apdu for packages and modules, and instances. """
res = {}
for P1 in (0x10, 0x40):
apdu = [0x80, INS_GETSTATUS, P1, 0, 2+len(AID_pref), 0x4F,
len(AID_pref)] + s2l(AID_pref)
respdata, sw1, sw2 = sc.transmit(apdu)
sw = (sw1 << 8) + sw2
while sw == 0x6310:
apdu = [0x80, INS_GETSTATUS, P1, 1, 2+len(AID_pref), 0x4F,
len(AID_pref)] + s2l(AID_pref)
resp, sw1, sw2 = sc.transmit(apdu)
respdata += resp
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
res[P1] = respdata
return GetStatusData(res[0x10], res[0x40])
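
# Usage sketch (c: an open card connection; a secure channel may be required
# by the card's policy):
#   print getStatus(c)  # GetStatusData.__str__ lists packages and instances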
def getData(c, tag):
P1 = tag >> 8
P2 = tag & 0xFF
apdu = [0x80, INS_GETDATA, P1, P2, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def getExtCardRes(c):
""" Issue GET DATA with tag FF21 in order to receive Extended
Card Resources (GP 2.2.1, 11.3 & ETSI TS 102.226, 8.2.1.7).
Returns [num. of install applets, free NVM, free RAM]"""
# CLA = 0x00: return only value
# CLA = 0x80: return TLV, i.e. 0xFF21 #(value)
apdu = [0x80, INS_GETDATA, 0xFF, 0x21, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
payload = l2s(resp)
result = [s2int(findTLValue(payload, (0xFF21, tag))) for
tag in (0x81, 0x82, 0x83)]
return result
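
# Usage sketch:
#   napps, free_nvm, free_ram = getExtCardRes(c)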
def selectFile(c, path, logCh=0):
""" Select file by path from MF or MF for empty path """
if len(path) > 0:
apdu = [logCh, INS_SELECT, 8, 4, len(path)] + s2l(path)
else:
apdu = [logCh, INS_SELECT, 0, 4, 2, 0x3F, 0x00]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x61:
resp, sw1, sw2 = c.transmit([0, 0xC0, 0, 0, sw2])
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def readBinary(c, le, logCh=0, offset=0):
"""Read Binary on currently selected EF"""
P1 = (offset >> 8) & 0x7F
P2 = offset & 0xFF
apdu = [logCh, INS_READBIN, P1, P2, le]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def readRecord(c, recNum, logCh=0):
""" Read record from currently selected EF"""
apdu = [logCh, INS_READREC, recNum, 4, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def updateBinary(c, data, logCh=0, offset=0):
"""Update binary on currently selected EF"""
assert len(data) < 0x100
P1 = (offset >> 8) & 0x7F
P2 = offset & 0xFF
apdu = [logCh, INS_UPDBIN, P1, P2, len(data)] + s2l(data)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def updateRecord(c, recNum, data, logCh=0):
""" Update record from currently selected EF"""
assert len(data) < 0x100
apdu = [logCh, INS_UPDREC, recNum, 4, len(data)] + s2l(data)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def verifyPin(c, pin=None, P2=0x01, logCh=0):
"""Verify PIN
    pin - value (str, 4-8 bytes). If None, just get number of tries.
    P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh - logical channel (default 0)
    Return number of remaining tries or True if verification is successful.
"""
lc = 0 if pin is None else 8
apdu = [logCh, INS_VERIFY_PIN, 0, P2, lc]
if pin is not None:
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu += s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw == 0x6983: # PIN blocked
return 0
if 0x63C0 <= sw <= 0x63CA: # remaining tries
return sw - 0x63C0
if sw != 0x9000:
raise ISOException(sw)
return True # pin verified
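
# Usage sketch (hypothetical PIN value):
#   left = verifyPin(c)       # pin=None only queries the remaining tries
#   if left:                  # 0 means the PIN is blocked
#       verifyPin(c, '1234')  # returns True on success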
def changePin(c, oldPin, newPin, P2=0x01, logCh=0):
"""Change PIN
oldPin - old PIN value (str, 4-8bytes)
newPin - new PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(oldPin) <= 8
oldPin += '\xFF' * (8 - len(oldPin))
assert 4 <= len(newPin) <= 8
newPin += '\xFF' * (8 - len(newPin))
apdu = [logCh, INS_CHANGE_PIN, 0, P2, 0x10] + s2l(oldPin) + s2l(newPin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def disablePin(c, pin, P2=0x01, logCh=0):
"""Disable PIN
pin - PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu = [logCh, INS_DISABLE_PIN, 0, P2, 8] + s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def enablePin(c, pin, P2=0x01, logCh=0):
"""Enable PIN
pin - PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu = [logCh, INS_ENABLE_PIN, 0, P2, 8] + s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def unblockPin(c, puk, newPin, P2=0x01, logCh=0):
"""unblock PIN
    puk - PUK value (str, 8 bytes)
    newPin - new PIN value (str, 4-8 bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert len(puk) == 8
assert 4 <= len(newPin) <= 8
newPin += '\xFF' * (8 - len(newPin))
apdu = [logCh, INS_UNBLOCK_PIN, 0, P2, 0x10] + s2l(puk) + s2l(newPin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def selectUSIM(c, logCh=0):
"""Select USIM, return AID
Read EF_DIR, USIM = first application with AID of USIM (3GPP TS 31.110)"""
# read EF_DIR
infoDIR = selectFile(c, unhexlify('2F00'), logCh)
# see ETSI 102.221 11.1.1.4.3 for coding
fileDesc = findTLValue(infoDIR, (0x62, 0x82))
assert len(fileDesc) == 5 and \
fileDesc[:2] == '\x42\x21' # linear EF
recLen, nRec = unpack(">HB", fileDesc[2:5])
aids = []
for recNum in xrange(1, nRec+1):
try:
r = readRecord(c, recNum)
if r == '\xFF' * len(r):
continue
aid = findTLValue(r, (0x61, 0x4F))
aids.append(aid)
except ISOException:
break
# search for USIM
for aid in aids:
if aid[:7] == unhexlify('A0000000871002'):
infoUSIM = selectApplet(c, aid, logCh)
return aid
return None
def cardInfo(c, USIMpin=None, logCh=0):
"""Deselect, read EF_DIR, EF_ICCID"""
resetCard(c)
histBytes = l2s(ATR(c.getATR()).getHistoricalBytes())
infoMF = selectFile(c, '', logCh)
# read EF_ICCID
infoICCID = selectFile(c, unhexlify('2FE2'), logCh)
fileSize = s2int(findTLValue(infoICCID, (0x62, 0x80)))
assert fileSize == 10, "Wrong size of EF_ICCID"
iccid = swapNibbles(readBinary(c, fileSize))
# read EF_DIR
infoDIR = selectFile(c, unhexlify('2F00'), logCh)
# see ETSI 102.221 11.1.1.4.3 for coding
fileDesc = findTLValue(infoDIR, (0x62, 0x82))
assert len(fileDesc) == 5 and \
fileDesc[:2] == '\x42\x21' # linear EF
recLen, nRec = unpack(">HB", fileDesc[2:5])
dirDO = []
for recNum in xrange(1, nRec+1):
try:
r = readRecord(c, recNum)
if r == '\xFF' * len(r):
continue
aid = findTLValue(r, (0x61, 0x4F))
label = findTLValue(r, (0x61, 0x50))
dirDO.append({'AID': aid, 'label': label})
except ISOException:
break
# select USIM and try to read IMSI
aids = [DO['AID'] for DO in dirDO
if DO['AID'][:7] == unhexlify('A0000000871002')]
if len(aids) >= 1:
aid_usim = aids[0] # choose the first AID found
else:
aid_usim = None
if aid_usim:
infoUSIM = selectApplet(c, aid_usim, logCh)
if USIMpin is not None:
verifyPin(c, USIMpin, logCh=logCh)
infoIMSI = selectFile(c, unhexlify('7FFF6F07'), logCh)
try:
bimsi = readBinary(c, 9, logCh)
digits = reduce(lambda d, n: d + [ord(n) & 0x0F, ord(n) >> 4],
bimsi[1:1+ord(bimsi[0])], [])
digits.pop(0) # remove first nibble 8 or 9
while digits[-1] == 0x0F:
digits.pop() # remove trailing F
imsi = ''.join([chr(ord('0')+i) for i in digits])
except ISOException:
imsi = None
else:
imsi = None
# select default applet and get tags 45 and 42
selectApplet(c, '', logCh)
try:
iin = findTLValue(getData(c, T_IIN), (T_IIN,))
except ISOException:
iin = None
try:
cin = findTLValue(getData(c, T_CIN), (T_CIN,))
except ISOException:
cin = None
return histBytes, iccid, dirDO, imsi, iin, cin
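
# Usage sketch (USIMpin is hypothetical; omit it to skip PIN verification):
#   histBytes, iccid, dirDO, imsi, iin, cin = cardInfo(c, USIMpin='1234')
#   print "ICCID:", hexlify(iccid).upper()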
| lgpl-2.1 | -262,022,843,162,776,060 | 35.621495 | 115 | 0.571711 | false |
xerond/lucia | ledEditor/cfilegen.py | 1 | 2560 | from effectgroup import EffectGroup
from effectdescriptions import EffectDescriptions
from myutils import Utils
def generateFile(fileName,ledCount,effectGroups):
f = open(fileName,'w')
f.write("#ifndef H_SONG_INSTRUCTIONS\n#define H_SONG_INSTRUCTIONS\n#include \"avr/pgmspace.h\"\n#include \"song_instructions.h\"\nconst char song_instructions[] PROGMEM = {")
lastTime = 0
for curEffectGroup in effectGroups:
writeBuffer = ""
newTime = curEffectGroup.getTimeAs10msCount()
tD = newTime - lastTime
lastTime = newTime
writeBuffer += "0xff,\n"
writeBuffer += Utils.short_to_hex(tD) + "\n"
for ledIndex in range (0,ledCount):
ledEffect = curEffectGroup.getLedEffect(ledIndex)
			tempLedBytes = generateLedEffectBytes(ledIndex, ledEffect)
			if tempLedBytes != "":
				writeBuffer += "\t" + tempLedBytes + "\n"
writeBuffer += "0xff,\n"
f.write(writeBuffer)
f.write("0x00,};\n#endif")
#generates a string for led effect
def generateLedEffectBytes(ledNumber,ledEffect):
	effectNumber = ledEffect[EffectGroup.INDEX_EFFECT_NUMBER]
	#if effect number is < 0, ignore it; check this before the lookup below,
	#which would fail for a negative id
	if effectNumber < 0:
		return ""
	#get the real effect number
	#TODO we are accessing a global here, eek!
	print "Effect num is: " + str(effectNumber)
	realEffectNumber = EffectDescriptions.quickEffectLookup[effectNumber]['realId']
	effectData = ledEffect[EffectGroup.INDEX_EFFECT_DATA]
returnStr = Utils.byte_to_hex(ledNumber) + Utils.byte_to_hex(realEffectNumber)
#get the effect description
effectDescr = EffectDescriptions.quickEffectLookup[effectNumber]
#Depending on the data, time to output the values accordingly
reqAttributes = effectDescr['reqAttributes']
attribCount = len(reqAttributes)
for i in range (0,attribCount):
curAttrib = reqAttributes[i]
attribType = curAttrib[EffectDescriptions.INDEX_TYPE]
curData = effectData[i]
if(attribType == EffectDescriptions.VAR_COLOR):
returnStr += Utils.short_to_hex(curData[0])
returnStr += Utils.short_to_hex(curData[1])
returnStr += Utils.short_to_hex(curData[2])
elif(attribType == EffectDescriptions.VAR_BYTE):
returnStr += Utils.byte_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_WORD):
returnStr += Utils.short_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_DWORD):
returnStr += Utils.dword_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_HIDDEN_BYTE):
returnStr += Utils.short_to_hex(int(curData))
else:
print "ERROR! COULD NOT DECODE EFFECT!"
return returnStr
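
# Usage sketch (hypothetical arguments; EffectGroup instances normally come
# from the editor UI):
#   generateFile("song_instructions.h", ledCount=16, effectGroups=groups)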
| mit | 6,627,170,382,357,033,000 | 34.068493 | 175 | 0.74375 | false |
notepadqq/NotepadqqApi_Python | notepadqq_api/notepadqq_api.py | 1 | 3531 | import asyncio
import sys
from notepadqq_api.message_channel import MessageChannel
from notepadqq_api.message_interpreter import MessageInterpreter
from notepadqq_api.stubs import Stubs
class NotepadqqApi():
"""Provides access to the Notepadqq Api."""
_NQQ_STUB_ID = 1
def __init__(self, socket_path=None, extension_id=None):
"""Construct a new Api object that can be used to invoke Notepadqq
methods and to receive its events.
If not provided, socket_path and extension_id are respectively
sys.argv[1] and sys.argv[2]
"""
if socket_path is None:
try:
socket_path = sys.argv[1]
except IndexError:
raise ValueError("Socket path not provided")
if extension_id is None:
try:
extension_id = sys.argv[2]
except IndexError:
raise ValueError("Extension id not provided")
self._socket_path = socket_path
self._extension_id = extension_id
self._message_channel = MessageChannel(self._socket_path)
self._message_interpreter = MessageInterpreter(self._message_channel)
self._nqq = Stubs.Notepadqq(self._message_interpreter, self._NQQ_STUB_ID)
def run_event_loop(self, started_callback=None):
"""Start the event loop. If started_callback is provided, it will
be called as soon as the connection with Notepadqq is ready.
"""
if started_callback is not None:
self.notepadqq.on('currentExtensionStarted', started_callback)
loop = asyncio.get_event_loop()
loop.run_until_complete(self._message_channel.start(loop, self._on_new_message))
@property
def extension_id(self):
"""The id assigned to this extension by Notepadqq"""
return self._extension_id
@property
def notepadqq(self):
"""Get an instance of the main Notepadqq object"""
return self._nqq
def on_window_created(self, callback):
"""Execute a callback for every new window.
This is preferable to the "newWindow" event of Notepadqq, because it
could happen that the extension isn't ready soon enough to receive
the "newWindow" event for the first window. This method, instead,
ensures that the passed callback will be called once and only once
for each current or future window.
"""
captured_windows = []
# Invoke the callback for every currently open window
for window in self.notepadqq.windows():
if window not in captured_windows:
captured_windows.append(window)
callback(window)
# Each time a new window gets opened, invoke the callback.
# When Notepadqq is starting and initializing all the extensions,
# we might not be fast enough to receive this event: this is why
# we manually invoked the callback for every currently open window.
def on_new_window(window):
if window not in captured_windows:
callback(window)
self.notepadqq.on('newWindow', on_new_window)
def for_each_window(self, f):
"""Decorator alternative for self.on_window_created(f)"""
self.on_window_created(f)
return f
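
    # Usage sketch (runs inside a Notepadqq extension; socket path and
    # extension id default to sys.argv[1] and sys.argv[2]):
    #
    #   api = NotepadqqApi()
    #
    #   @api.for_each_window
    #   def setup(window):
    #       pass  # interact with each current and future window here
    #
    #   api.run_event_loop()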
def _on_new_message(self, msg):
# Called whenever a new message is received from the channel
self._message_interpreter.process_message(msg)
| mit | -2,928,871,828,242,222,600 | 37.380435 | 88 | 0.632965 | false |
rackerlabs/deuce-valere | deucevalere/common/validation.py | 1 | 1337 | """
Deuce Valere - Common - Validation
"""
import datetime
from deuceclient.api import *
from deuceclient.auth.base import AuthenticationBase
from deuceclient.client.deuce import DeuceClient
from deuceclient.common.validation import *
from deuceclient.common.validation_instance import *
from stoplight import Rule, ValidationFailed, validation_function
@validation_function
def val_authenticator_instance(value):
if not isinstance(value, AuthenticationBase):
raise ValidationFailed('authenticator must be derived from '
'deuceclient.auth.base.AuthenticationBase')
@validation_function
def val_deuceclient_instance(value):
if not isinstance(value, DeuceClient):
raise ValidationFailed('invalid Deuce Client instance')
@validation_function
def val_expire_age(value):
if not isinstance(value, datetime.timedelta):
raise ValidationFailed('must be type datetime.timedelta')
def _abort(error_code):
abort_errors = {
100: TypeError
}
raise abort_errors[error_code]
AuthEngineRule = Rule(val_authenticator_instance(), lambda: _abort(100))
ClientRule = Rule(val_deuceclient_instance(), lambda: _abort(100))
ExpireAgeRule = Rule(val_expire_age(), lambda: _abort(100))
ExpireAgeRuleNoneOkay = Rule(val_expire_age(none_ok=True), lambda: _abort(100))
| apache-2.0 | -5,001,324,934,594,096,000 | 30.093023 | 79 | 0.743455 | false |
hemebond/kapua | courses/views.py | 1 | 4832 | # Copyright 2011 James O'Neill
#
# This file is part of Kapua.
#
# Kapua is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Kapua is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kapua. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, UpdateView, \
FormView, CreateView
from django.views.generic.detail import SingleObjectMixin
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from .models import Course, Page
from .forms import CourseForm, PageForm
class CourseList(ListView):
model = Course
class CourseAdd(CreateView):
template_name = "courses/course_edit.html"
form_class = CourseForm
context_object_name = "course"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(CourseAdd, self).dispatch(*args, **kwargs)
class CourseDetail(DetailView):
template_name = "courses/course_detail.html"
model = Course
context_object_name = "course"
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if self.object.pages.exists():
return redirect('kapua-page-detail', self.object.pages.get(level=0).pk)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class CourseEdit(UpdateView):
template_name = "courses/course_edit.html"
form_class = CourseForm
model = Course
class PageAdd(SingleObjectMixin, FormView):
model = Course
template_name = "courses/page_edit.html"
form_class = PageForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PageAdd, self).dispatch(*args, **kwargs)
def get_form(self, form_class):
self.object = self.get_object()
return super(PageAdd, self).get_form(form_class)
def get_form_kwargs(self):
"""
Returns the keyword arguments for instantiating the form.
"""
form_kwargs = super(PageAdd, self).get_form_kwargs()
form_kwargs.update({
'valid_targets': self.object.pages.filter(level__gt=0)
})
return form_kwargs
def form_valid(self, form):
position = form.cleaned_data.get('position', 'last-child')
target = form.cleaned_data.get('target', None)
course = self.object
page = form.save(commit=False)
page.course = course
if not target:
if course.pages.exists():
target = course.pages.get(level=0)
position = 'last-child'
if target:
page.insert_at(
target=target,
position=position,
save=True,
)
self.success_url = page.get_absolute_url()
else:
page.save()
self.success_url = course.get_absolute_url()
return super(PageAdd, self).form_valid(form)
def get_context_data(self, *args, **kwargs):
context = super(PageAdd, self).get_context_data(*args, **kwargs)
if context['form'].errors:
context['error_message'] = context['form'].errors
return context
class PageDetail(DetailView):
template_name = "courses/page_detail.html"
context_object_name = "page"
model = Page
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super(PageDetail, self).get_context_data(**kwargs)
context['course'] = self.object.course
pages = context['course'].pages.all()
for index, page in enumerate(pages):
if page.pk == self.object.pk:
if index > 0:
context['previous_page'] = pages[index - 1]
if index < (len(pages) - 1):
context['next_page'] = pages[index + 1]
break
# Remove the root page
context['pages'] = pages.filter(level__gt=0)
		# This gets the ancestors of the current page but excludes the
# root page
context['breadcrumbs'] = pages.filter(
lft__lt=self.object.lft,
rght__gt=self.object.rght
).exclude(
level=0
)
return context
class PageEdit(UpdateView):
template_name = "courses/page_edit.html"
form_class = PageForm
model = Page
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PageEdit, self).dispatch(*args, **kwargs)
def form_valid(self, form):
self.object = form.save()
target = form.cleaned_data.get('target')
if target:
position = form.cleaned_data.get('position')
self.object.move_to(
target=target,
position=position
)
return redirect('kapua-page-detail', self.object.pk)
| gpl-3.0 | 8,351,613,602,219,685,000 | 25.549451 | 74 | 0.710472 | false |
Pikl/PiklBot | cogs/pikl.py | 1 | 1281 | import discord
from discord.ext import commands
class Pikl:
"""Super pikly commands."""
def __init__(self, bot):
self.bot = bot
@commands.command(hidden=False)
async def helloworld(self):
"""Hello, world!"""
await self.bot.say("Hello, world!")
@commands.command(hidden=False)
async def postraidembed(self):
"""Posts an embedded message with a bunch of raid info"""
embed = discord.Embed(colour=discord.Colour(0x2ecc40), description="Some helpful information to aid and review [Dawn] raids.\n")
embed.set_image(url="https://cdn.discordapp.com/attachments/350137990959464459/354412417381433354/unknown.png")
embed.set_thumbnail(url="https://wiki.guildwars2.com/images/5/5e/Legendary_Insight.png")
embed.set_author(name="Dawn Raid Information", icon_url="http://raid.pikly.uk/images/dawn-logo.png")
embed.set_footer(text=": 'Stack on Pikl'", icon_url="http://raid.pikly.uk/images/dawn-logo.png")
embed.add_field(name="Raid Logs & Videos", value="https://raid.pikly.uk/", inline=True)
embed.add_field(name="Raid Class Spreadsheet", value="[Spreadsheet here](https://docs.google.com/spreadsheets/d/1zm46Jb8UBIoYP1_mewoOvLKopx_Sks9hYGm8OeWaQI8/edit?usp=sharing)", inline=True)
await self.bot.say(embed=embed)
def setup(bot):
bot.add_cog(Pikl(bot)) | gpl-3.0 | -9,634,640,449,993,438 | 40.354839 | 191 | 0.737705 | false |
jtwaleson/decrypt | decrypt/decrypt.py | 1 | 1745 | #!/usr/bin/env python
import curses
import time
import fileinput
import random
import string
screen = curses.initscr()
lines = []
chance = 0.1
confirmed_per_line = []
def main():
curses.noecho()
try:
curses.curs_set(0)
except:
pass
screen.keypad(1)
try:
for line in fileinput.input():
confirmed_per_line.append([])
lines.append(line.rstrip())
iterate()
fileinput.close()
while iterate(increase=True):
pass
time.sleep(2)
except KeyboardInterrupt:
pass
finally:
curses.endwin()
for line in lines:
print(line)
def iterate(increase=False):
global chance, confirmed_per_line, lines
still_random = 0
if increase:
chance += 0.01
screen.erase()
(y, x) = screen.getmaxyx()
final_line = len(lines)
if final_line > y:
first_line = final_line - y
else:
first_line = 0
for line_num in range(first_line, final_line):
line = lines[line_num]
for col in range(min(x, len(line))):
try:
if col not in confirmed_per_line[line_num]:
still_random += 1
if random.random() < chance:
confirmed_per_line[line_num].append(col)
screen.addch(line_num - first_line,
col,
random.choice(string.punctuation),
curses.A_REVERSE)
else:
screen.addstr(line_num - first_line, col, line[col])
except:
pass
screen.refresh()
time.sleep(0.1)
return still_random > 0
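
# Entry-point guard (assumption: the package may also expose main() through a
# console_scripts entry point; this guard lets the module run directly, which
# the shebang above suggests is intended):
if __name__ == '__main__':
    main()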
| mit | 3,421,878,440,241,431,600 | 23.928571 | 72 | 0.514613 | false |
vdrhtc/Measurement-automation | drivers/pyspcm.py | 1 | 7735 | import os
import platform
import sys
from ctypes import *
# load registers for easier access
from drivers.py_header.regs import *
# load registers for easier access
from drivers.py_header.spcerr import *
SPCM_DIR_PCTOCARD = 0
SPCM_DIR_CARDTOPC = 1
SPCM_BUF_DATA = 1000 # main data buffer for acquired or generated samples
SPCM_BUF_ABA = 2000 # buffer for ABA data, holds the A-DATA (slow samples)
SPCM_BUF_TIMESTAMP = 3000 # buffer for timestamps
# determine bit width of os
oPlatform = platform.architecture()
if (oPlatform[0] == '64bit'):
bIs64Bit = 1
else:
bIs64Bit = 0
# define pointer aliases
int8 = c_int8
int16 = c_int16
int32 = c_int32
int64 = c_int64
ptr8 = POINTER (int8)
ptr16 = POINTER (int16)
ptr32 = POINTER (int32)
ptr64 = POINTER (int64)
uint8 = c_uint8
uint16 = c_uint16
uint32 = c_uint32
uint64 = c_uint64
uptr8 = POINTER (uint8)
uptr16 = POINTER (uint16)
uptr32 = POINTER (uint32)
uptr64 = POINTER (uint64)
# Windows
if os.name == 'nt':
#sys.stdout.write("Python Version: {0} on Windows\n\n".format (
# platform.python_version()))
# define card handle type
if (bIs64Bit):
# for unknown reasons c_void_p gets messed up on Win7/64bit, but this works:
drv_handle = POINTER(c_uint64)
else:
drv_handle = c_void_p
# Load DLL into memory.
# use windll because all driver access functions use _stdcall calling convention under windows
if (bIs64Bit == 1):
spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win64.dll")
else:
spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win32.dll")
# load spcm_hOpen
if (bIs64Bit):
spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
else:
spcm_hOpen = getattr (spcmDll, "_spcm_hOpen@4")
spcm_hOpen.argtype = [c_char_p]
spcm_hOpen.restype = drv_handle
# load spcm_vClose
if (bIs64Bit):
spcm_vClose = getattr (spcmDll, "spcm_vClose")
else:
spcm_vClose = getattr (spcmDll, "_spcm_vClose@4")
spcm_vClose.argtype = [drv_handle]
spcm_vClose.restype = None
# load spcm_dwGetErrorInfo
if (bIs64Bit):
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
else:
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "_spcm_dwGetErrorInfo_i32@16")
spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
spcm_dwGetErrorInfo_i32.restype = uint32
# load spcm_dwGetParam_i32
if (bIs64Bit):
spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
else:
spcm_dwGetParam_i32 = getattr (spcmDll, "_spcm_dwGetParam_i32@12")
spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
spcm_dwGetParam_i32.restype = uint32
# load spcm_dwGetParam_i64
if (bIs64Bit):
spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
else:
spcm_dwGetParam_i64 = getattr (spcmDll, "_spcm_dwGetParam_i64@12")
spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
spcm_dwGetParam_i64.restype = uint32
# load spcm_dwSetParam_i32
if (bIs64Bit):
spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
else:
spcm_dwSetParam_i32 = getattr (spcmDll, "_spcm_dwSetParam_i32@12")
spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
spcm_dwSetParam_i32.restype = uint32
# load spcm_dwSetParam_i64
if (bIs64Bit):
spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
else:
spcm_dwSetParam_i64 = getattr (spcmDll, "_spcm_dwSetParam_i64@16")
spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
spcm_dwSetParam_i64.restype = uint32
# load spcm_dwSetParam_i64m
if (bIs64Bit):
spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
else:
spcm_dwSetParam_i64m = getattr (spcmDll, "_spcm_dwSetParam_i64m@16")
spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
spcm_dwSetParam_i64m.restype = uint32
# load spcm_dwDefTransfer_i64
if (bIs64Bit):
spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
else:
spcm_dwDefTransfer_i64 = getattr (spcmDll, "_spcm_dwDefTransfer_i64@36")
spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
spcm_dwDefTransfer_i64.restype = uint32
# load spcm_dwInvalidateBuf
if (bIs64Bit):
spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
else:
spcm_dwInvalidateBuf = getattr (spcmDll, "_spcm_dwInvalidateBuf@8")
spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
spcm_dwInvalidateBuf.restype = uint32
# load spcm_dwGetContBuf_i64
if (bIs64Bit):
spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
else:
spcm_dwGetContBuf_i64 = getattr (spcmDll, "_spcm_dwGetContBuf_i64@16")
spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
spcm_dwGetContBuf_i64.restype = uint32
elif os.name == 'posix':
sys.stdout.write("Python Version: {0} on Linux\n\n".format (platform.python_version()))
# define card handle type
if (bIs64Bit):
drv_handle = POINTER(c_uint64)
else:
drv_handle = c_void_p
# Load DLL into memory.
# use cdll because all driver access functions use cdecl calling convention under linux
spcmDll = cdll.LoadLibrary ("libspcm_linux.so")
# load spcm_hOpen
spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
spcm_hOpen.argtype = [c_char_p]
spcm_hOpen.restype = drv_handle
# load spcm_vClose
spcm_vClose = getattr (spcmDll, "spcm_vClose")
spcm_vClose.argtype = [drv_handle]
spcm_vClose.restype = None
# load spcm_dwGetErrorInfo
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
spcm_dwGetErrorInfo_i32.restype = uint32
# load spcm_dwGetParam_i32
spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
spcm_dwGetParam_i32.restype = uint32
# load spcm_dwGetParam_i64
spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
spcm_dwGetParam_i64.restype = uint32
# load spcm_dwSetParam_i32
spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
spcm_dwSetParam_i32.restype = uint32
# load spcm_dwSetParam_i64
spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
spcm_dwSetParam_i64.restype = uint32
# load spcm_dwSetParam_i64m
spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
spcm_dwSetParam_i64m.restype = uint32
# load spcm_dwDefTransfer_i64
spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
spcm_dwDefTransfer_i64.restype = uint32
# load spcm_dwInvalidateBuf
spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
spcm_dwInvalidateBuf.restype = uint32
# load spcm_dwGetContBuf_i64
spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
spcm_dwGetContBuf_i64.restype = uint32
else:
raise Exception ('Operating system not supported by pySpcm')
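
# Usage sketch (the device path is an assumption: '/dev/spcm0' addresses the
# first card on Linux, 'spcm0' on Windows):
#   hCard = spcm_hOpen(create_string_buffer(b"/dev/spcm0"))
#   if hCard:
#       lCardType = int32(0)
#       spcm_dwGetParam_i32(hCard, SPC_PCITYP, byref(lCardType))
#       spcm_vClose(hCard)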
| gpl-3.0 | 5,788,646,918,922,448,000 | 33.225664 | 99 | 0.68287 | false |
Dev-Cloud-Platform/Dev-Cloud | dev_cloud/web_service/urls/user/environment.py | 1 | 5340 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2015] Michał Szczygieł, M4GiK Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
from django.conf.urls import patterns, url, include
from core.utils.decorators import user_permission, vm_permission
from web_service.views.user.enviroment import wizard_setup, generate_dependencies, customize_environment, \
define_environment, summary, validation_process, validation_process_ip, validation_process_resources, \
validation_process_ip_pre, view_environment, environments_list, get_vm_status, destroy_vm, refresh_vm_tasks, \
show_vnc, get_cpu_load, get_ssh_key, view_predefined, customize_predefined_environment, \
define_predefined_environment
main_patterns = patterns('web_service.views.user.enviroment',
url(r'^app/create/environment/$', user_permission(wizard_setup),
name='personalized_environment'),
url(r'^app/create/environment/technology/(?P<technology>\w+)/$',
user_permission(generate_dependencies),
name='generate_dependencies'),
url(
r'^app/create/environment/customize/(?P<technology>\w+)/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
user_permission(customize_environment), name='customize_environment'),
url(r'^app/create/environment/define/(?P<technology>\w+)/(?P<exposed_ip>\w+)/$',
user_permission(define_environment), name='define_environment'),
url(r'^app/create/environment/summary/$', user_permission(summary), name='summary'),
url(r'^app/create/environment/validation_process/(?P<template>\w+)/(?P<exposed_ip>\w+)/$',
user_permission(validation_process), name='validation_process'),
url(r'^app/create/environment/validation_process_ip/(?P<exposed_ip>\w+)/$',
user_permission(validation_process_ip), name='validation_process_ip'),
url(r'^app/create/environment/validation_process_ip_pre/(?P<exposed_ip>\w+)/$',
user_permission(validation_process_ip_pre), name='validation_process_ip_pre'),
url(r'^app/create/environment/validation_process_resources/(?P<template_id>\w+)/$',
user_permission(validation_process_resources), name='validation_process_resources'),
url(r'^app/environments/$', user_permission(environments_list), name='environments_list'),
url(r'^app/environments/(?P<destroy_status>\w+)/$', user_permission(environments_list),
name='environments_list'),
url(r'^app/environments/show_vm/(?P<vm_id>\w+)/$', vm_permission(view_environment),
name='view_environment'),
url(r'^app/environments/vm_status/(?P<vm_id>\w+)/$', vm_permission(get_vm_status),
name='get_vm_status'),
url(r'^app/environments/destroy/(?P<vm_id>\w+)/$', vm_permission(destroy_vm),
name='destroy_vm'),
url(r'^app/environments/refresh_tasks/(?P<vm_id>\w+)/$', vm_permission(refresh_vm_tasks),
name='refresh_vm_tasks'),
url(r'^app/environments/show_vm/vnc/(?P<vm_id>\w+)/$', vm_permission(show_vnc),
name='show_vnc'),
url(r'^app/environments/show_vm/cpu_load/(?P<vm_id>\w+)/$', vm_permission(get_cpu_load),
name='get_cpu_load'),
url(r'^app/environments/show_vm/get_ssh_key/(?P<vm_id>\w+)/$', vm_permission(get_ssh_key),
name='get_ssh_key'),
url(r'^app/create/environment/predefined/$', user_permission(view_predefined),
name='predefined_environment'),
url(
r'^app/create/environment/predefined/customize/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
user_permission(customize_predefined_environment),
name='customize_predefined_environment'),
url(
r'^app/create/environment/predefined/define/(?P<application>[\w\-]+)/(?P<exposed_ip>\w+)/$',
user_permission(define_predefined_environment),
name='define_predefined_environment'))
urlpatterns = patterns('', url(r'^main/', include(main_patterns)))
| apache-2.0 | -6,435,118,396,431,205,000 | 70.173333 | 132 | 0.566317 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_peer_express_route_circuit_connections_operations.py | 1 | 9352 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PeerExpressRouteCircuitConnectionsOperations:
"""PeerExpressRouteCircuitConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
connection_name: str,
**kwargs
) -> "_models.PeerExpressRouteCircuitConnection":
"""Gets the specified Peer Express Route Circuit Connection from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param connection_name: The name of the peer express route circuit connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PeerExpressRouteCircuitConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_06_01.models.PeerExpressRouteCircuitConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PeerExpressRouteCircuitConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/peerConnections/{connectionName}'} # type: ignore
def list(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs
) -> AsyncIterable["_models.PeerExpressRouteCircuitConnectionListResult"]:
"""Gets all global reach peer connections associated with a private peering in an express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PeerExpressRouteCircuitConnectionListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.PeerExpressRouteCircuitConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PeerExpressRouteCircuitConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PeerExpressRouteCircuitConnectionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/peerConnections'} # type: ignore
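
# Usage sketch (async; the resource names are placeholders and `client` is an
# already-configured NetworkManagementClient):
#   conn = await client.peer_express_route_circuit_connections.get(
#       "rg", "circuit", "AzurePrivatePeering", "connection")
#   async for c in client.peer_express_route_circuit_connections.list(
#           "rg", "circuit", "AzurePrivatePeering"):
#       print(c.name)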
| mit | -2,033,514,880,389,216,000 | 48.744681 | 231 | 0.655368 | false |
googleapis/googleapis-gen | google/cloud/aiplatform/v1beta1/aiplatform-v1beta1-py/google/cloud/aiplatform_v1beta1/services/vizier_service/transports/__init__.py | 1 | 1185 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import VizierServiceTransport
from .grpc import VizierServiceGrpcTransport
from .grpc_asyncio import VizierServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[VizierServiceTransport]]
_transport_registry['grpc'] = VizierServiceGrpcTransport
_transport_registry['grpc_asyncio'] = VizierServiceGrpcAsyncIOTransport
__all__ = (
'VizierServiceTransport',
'VizierServiceGrpcTransport',
'VizierServiceGrpcAsyncIOTransport',
)
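# --- Illustrative usage sketch (added for clarity; not part of the generated file) ---
# The OrderedDict above is a simple name -> class registry, so callers can
# resolve a transport implementation from a configuration string:
def _example_resolve_transport(name='grpc'):
    # Fall back to the synchronous gRPC transport for unrecognized names
    # (a policy invented for this sketch).
    return _transport_registry.get(name, VizierServiceGrpcTransport)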
| apache-2.0 | -5,002,370,007,935,311,000 | 34.909091 | 84 | 0.775527 | false |
googleapis/googleapis-gen | google/cloud/translate/v3beta1/translation-v3beta1-py/google/cloud/translate/__init__.py | 1 | 5300 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.cloud.translate_v3beta1.services.translation_service.client import TranslationServiceClient
from google.cloud.translate_v3beta1.services.translation_service.async_client import TranslationServiceAsyncClient
from google.cloud.translate_v3beta1.types.translation_service import BatchDocumentInputConfig
from google.cloud.translate_v3beta1.types.translation_service import BatchDocumentOutputConfig
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateDocumentMetadata
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateDocumentRequest
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateDocumentResponse
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateMetadata
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateResponse
from google.cloud.translate_v3beta1.types.translation_service import BatchTranslateTextRequest
from google.cloud.translate_v3beta1.types.translation_service import CreateGlossaryMetadata
from google.cloud.translate_v3beta1.types.translation_service import CreateGlossaryRequest
from google.cloud.translate_v3beta1.types.translation_service import DeleteGlossaryMetadata
from google.cloud.translate_v3beta1.types.translation_service import DeleteGlossaryRequest
from google.cloud.translate_v3beta1.types.translation_service import DeleteGlossaryResponse
from google.cloud.translate_v3beta1.types.translation_service import DetectedLanguage
from google.cloud.translate_v3beta1.types.translation_service import DetectLanguageRequest
from google.cloud.translate_v3beta1.types.translation_service import DetectLanguageResponse
from google.cloud.translate_v3beta1.types.translation_service import DocumentInputConfig
from google.cloud.translate_v3beta1.types.translation_service import DocumentOutputConfig
from google.cloud.translate_v3beta1.types.translation_service import DocumentTranslation
from google.cloud.translate_v3beta1.types.translation_service import GcsDestination
from google.cloud.translate_v3beta1.types.translation_service import GcsSource
from google.cloud.translate_v3beta1.types.translation_service import GetGlossaryRequest
from google.cloud.translate_v3beta1.types.translation_service import GetSupportedLanguagesRequest
from google.cloud.translate_v3beta1.types.translation_service import Glossary
from google.cloud.translate_v3beta1.types.translation_service import GlossaryInputConfig
from google.cloud.translate_v3beta1.types.translation_service import InputConfig
from google.cloud.translate_v3beta1.types.translation_service import ListGlossariesRequest
from google.cloud.translate_v3beta1.types.translation_service import ListGlossariesResponse
from google.cloud.translate_v3beta1.types.translation_service import OutputConfig
from google.cloud.translate_v3beta1.types.translation_service import SupportedLanguage
from google.cloud.translate_v3beta1.types.translation_service import SupportedLanguages
from google.cloud.translate_v3beta1.types.translation_service import TranslateDocumentRequest
from google.cloud.translate_v3beta1.types.translation_service import TranslateDocumentResponse
from google.cloud.translate_v3beta1.types.translation_service import TranslateTextGlossaryConfig
from google.cloud.translate_v3beta1.types.translation_service import TranslateTextRequest
from google.cloud.translate_v3beta1.types.translation_service import TranslateTextResponse
from google.cloud.translate_v3beta1.types.translation_service import Translation
__all__ = ('TranslationServiceClient',
'TranslationServiceAsyncClient',
'BatchDocumentInputConfig',
'BatchDocumentOutputConfig',
'BatchTranslateDocumentMetadata',
'BatchTranslateDocumentRequest',
'BatchTranslateDocumentResponse',
'BatchTranslateMetadata',
'BatchTranslateResponse',
'BatchTranslateTextRequest',
'CreateGlossaryMetadata',
'CreateGlossaryRequest',
'DeleteGlossaryMetadata',
'DeleteGlossaryRequest',
'DeleteGlossaryResponse',
'DetectedLanguage',
'DetectLanguageRequest',
'DetectLanguageResponse',
'DocumentInputConfig',
'DocumentOutputConfig',
'DocumentTranslation',
'GcsDestination',
'GcsSource',
'GetGlossaryRequest',
'GetSupportedLanguagesRequest',
'Glossary',
'GlossaryInputConfig',
'InputConfig',
'ListGlossariesRequest',
'ListGlossariesResponse',
'OutputConfig',
'SupportedLanguage',
'SupportedLanguages',
'TranslateDocumentRequest',
'TranslateDocumentResponse',
'TranslateTextGlossaryConfig',
'TranslateTextRequest',
'TranslateTextResponse',
'Translation',
)
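# --- Illustrative usage sketch (added for clarity; not part of the generated package) ---
# A minimal, hypothetical call through the re-exported client. The project id
# and language code are placeholders; the request shape follows the usual
# GAPIC ``request=dict`` convention.
def _example_translate_text():
    client = TranslationServiceClient()
    return client.translate_text(request={
        'parent': 'projects/my-project/locations/global',  # hypothetical project
        'contents': ['Hello, world'],
        'mime_type': 'text/plain',
        'target_language_code': 'es',
    })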
| apache-2.0 | -4,159,577,467,614,093,000 | 53.639175 | 114 | 0.836604 | false |
vertexproject/synapse | synapse/tests/test_tools_csvtool.py | 1 | 6295 | import csv
from unittest import mock
import synapse.common as s_common
import synapse.telepath as s_telepath
import synapse.tests.utils as s_t_utils
import synapse.tools.csvtool as s_csvtool
csvfile = b'''ipv4,fqdn,notes
1.2.3.4,vertex.link,malware
8.8.8.8,google.com,whitelist
'''
csvstorm = b'''
for ($ipv4, $fqdn, $note) in $rows {
$lib.print("oh hai")
[ inet:dns:a=($fqdn,$ipv4) ]
}
'''
csvfile_missing = b'''fqdn,email,tag
vertex.link,,mytag
google.com,[email protected],
yahoo.com,[email protected],mytag
'''
csvstorm_missing = b'''
for ($fqdn, $email, $tag) in $rows {
$lib.print("hello hello")
[ inet:dns:soa=$lib.guid() :fqdn=$fqdn :email?=$email +?#$tag ]
}
'''
# count is used for test coverage.
csvstorm_export = b'''
test:int $lib.csv.emit($node, $node.props.loc) | count
'''
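# Context note (added for clarity): csvtool binds each CSV row to $rows, so
# the import snippets above consume rows with `for (...) in $rows`, while the
# export snippet emits one CSV row per node via $lib.csv.emit. A plausible
# command line for the import case (paths hypothetical) is:
#
#   python -m synapse.tools.csvtool --csv-header --cortex <url> csvtest.storm csvtest.csv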
class CsvToolTest(s_t_utils.SynTest):
def _getOldSynVers(self):
return (0, 0, 0)
async def test_csvtool(self):
async with self.getTestCore() as core:
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('oh hai')
outp.expect('2 nodes')
with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')
async def test_csvtool_missingvals(self):
async with self.getTestCore() as core:
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile_missing)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm_missing)
argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('hello hello')
outp.expect("'fqdn': 'google.com'")
outp.expect('3 nodes')
async def test_csvtool_local(self):
with self.getTestDir() as dirn:
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--test', '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('2 nodes')
async def test_csvtool_cli(self):
with self.getTestDir() as dirn:
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--cli', '--test', '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn',
EOFError(),
])
with self.withCliPromptMockExtendOutp(outp):
with self.withTestCmdr(cmdg):
await s_csvtool.main(argv, outp=outp)
outp.expect('inet:fqdn=google.com')
outp.expect('2 nodes')
async def test_csvtool_export(self):
async with self.getTestCore() as core:
await core.nodes('[ test:int=20 :loc=us ]')
await core.nodes('[ test:int=30 :loc=cn ]')
await core.nodes('[ test:int=40 ]')
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm_export)
# test a few no-no cases
argv = ['--test', '--export', stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('--export requires --cortex')
argv = ['--cortex', url, '--export', stormpath, csvpath, 'lol.csv']
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('--export requires exactly 1 csvfile')
argv = ['--cortex', url, '--export', stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('Counted 3 nodes.')
outp.expect('3 csv rows')
with open(csvpath, 'r') as fd:
rows = [row for row in csv.reader(fd)]
self.eq(rows, (['20', 'us'], ['30', 'cn'], ['40', '']))
with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
            outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')
| apache-2.0 | 5,403,740,842,168,833,000 | 31.448454 | 107 | 0.554408 | false |
google-research/social_cascades | news/google/launch_optimizer_main.py | 1 | 7283 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Launcher for main.py on PyTorch with GPUs using JobTrialGenerator.
Note that JobTrialGenerator uses CAIP Optimizer for automatic hyperparameter
tuning, which requires the training executable to report measurements via
setting up a CAIP Optimizer client.
"""
import os
from absl import app
from absl import flags
import termcolor
from google3.learning.brain.frameworks import xcloud as xm
from google3.learning.deepmind.xmanager import hyper
from google3.learning.vizier.service import automated_stopping_pb2
from google3.learning.vizier.service import vizier_pb2
GCS_PATH_PREFIX = 'gs://'
FLAGS = flags.FLAGS
flags.DEFINE_string('project_name', 'traceminer', 'name for the project')
flags.DEFINE_string('image_uri', None,
'A URI to a prebuilt Docker image, including tag.')
flags.DEFINE_string('base_image', None,
'A URI to a prebuilt Docker image, for option2.')
flags.DEFINE_boolean('use_gpu', True, 'use GPU')
flags.DEFINE_string('acc_type', 'v100', 'Accelerator type, v100 or t4')
flags.DEFINE_integer('num_gpus', 1, 'Number of GPUs.')
flags.DEFINE_string('gcs_path_in', None,
                    ('A GCS directory within a bucket to store input '
'in gs://bucket/directory format.'))
flags.DEFINE_string('gcs_path_out', None,
('A GCS directory within a bucket to store output '
'in gs://bucket/directory format.'))
flags.DEFINE_string('task', 'cat', ('task: sr(subreddit classification), '
'cat(url categorization), '
'fake(fake news detection)'))
flags.DEFINE_string('local_path_in', './fake_input/', 'local path for input')
flags.DEFINE_string('local_path_out', './fake_output/', 'local path for output')
flags.DEFINE_string('g_emb', '', 'graph embedding file')
flags.DEFINE_string('seq_file', '', 'post sequence file')
flags.DEFINE_string('balance_df', '', 'the balanced dataset with url ids')
# RNN, LSTM parameters
flags.DEFINE_string('model', 'rnn', 'rnn, lstm')
flags.DEFINE_float('train_ratio', 0.8, 'training data ratio')
flags.DEFINE_float('val_ratio', 0.1, 'validation data ratio')
flags.DEFINE_integer('batch_size', 64, 'bacth size for rnn')
flags.DEFINE_integer('hid_dim', 32, 'hidden dimension in RNN, LSTM')
flags.DEFINE_integer('num_layers', 2, 'number of layers in RNN, LSTM')
flags.DEFINE_boolean('bi', False, 'birectional')
flags.DEFINE_float('dropout', 0.8, 'dropout')
flags.DEFINE_integer('epochs', 40, 'epochs')
flags.DEFINE_float('lr', 0.002, 'learning rate')
flags.DEFINE_integer('print_step', 10, 'print step during training')
flags.DEFINE_boolean('save_model', False, 'save model')
flags.DEFINE_string('name', '', 'specify model name')
# Flag specifications
flags.mark_flag_as_required('gcs_path_in')
flags.mark_flag_as_required('gcs_path_out')
flags.register_validator('gcs_path_in', lambda value: GCS_PATH_PREFIX in value,
message=('--gcs_path_in must follow'
'gs://bucket/directory format'))
flags.register_validator('gcs_path_out', lambda value: GCS_PATH_PREFIX in value,
message=('--gcs_path_out must follow'
'gs://bucket/directory format'))
def main(_):
if FLAGS.use_gpu:
accelerator = xm.GPU('nvidia-tesla-' + FLAGS.acc_type.lower(),
FLAGS.num_gpus)
else:
accelerator = None
runtime = xm.CloudRuntime(
cpu=3,
memory=24,
accelerator=accelerator,
)
args = {
'task': FLAGS.task,
'gcs_path_in': FLAGS.gcs_path_in,
'gcs_path_out': FLAGS.gcs_path_out,
'local_path_in': FLAGS.local_path_in,
'local_path_out': FLAGS.local_path_out,
'g_emb': FLAGS.g_emb,
'seq_file': FLAGS.seq_file,
'balance_df': FLAGS.balance_df,
'train_ratio': FLAGS.train_ratio,
'val_ratio': FLAGS.val_ratio,
'bi': FLAGS.bi,
'dropout': FLAGS.dropout,
'print_step': FLAGS.print_step,
'save_model': FLAGS.save_model,
'name': FLAGS.name,
'use_optimizer': True
}
if FLAGS.image_uri:
# Option 1 This will use a user-defined docker image.
executable = xm.CloudDocker(
name=FLAGS.project_name,
runtime=runtime,
image_uri=FLAGS.image_uri,
args=args,
)
else:
# Option 2 This will build a docker image for the user. Set up environment.
executable = xm.CloudPython(
name=FLAGS.project_name,
runtime=runtime,
project_path=(
os.path.dirname(os.path.dirname(os.path.realpath(__file__)))),
module_name='gnns_for_news.main',
base_image=FLAGS.base_image,
args=args,
build_steps=(xm.steps.default_build_steps('gnns_for_news')),
)
# Set UNIT_LOG_SCALE to explore more values in the lower range
# Set UNIT_REVERSE_LOG_SCALE to explore more values in the higher range
parameters = [
hyper.get_vizier_parameter_config(
'model', hyper.categorical(['rnn', 'lstm'])),
hyper.get_vizier_parameter_config(
'batch_size', hyper.discrete([16 * k for k in range(1, 6)])),
hyper.get_vizier_parameter_config(
'hid_dim', hyper.discrete([16 * k for k in range(3, 10)])),
hyper.get_vizier_parameter_config(
'num_layers', hyper.discrete([1, 2])),
hyper.get_vizier_parameter_config(
'lr', hyper.interval(0.00001, 0.2), scaling='UNIT_LOG_SCALE'),
hyper.get_vizier_parameter_config(
'dropout', hyper.discrete([0.0, 0.15, 0.3, 0.5, 0.7])),
hyper.get_vizier_parameter_config(
'epochs', hyper.discrete([5, 10, 20, 30]))
]
vizier_study_config = vizier_pb2.StudyConfig()
for parameter in parameters:
vizier_study_config.parameter_configs.add().CopyFrom(parameter)
metric = vizier_study_config.metric_information.add()
metric.name = 'valf1'
metric.goal = vizier_pb2.StudyConfig.GoalType.Value('MAXIMIZE')
# None early stopping
early_stopping = automated_stopping_pb2.AutomatedStoppingConfig()
vizier_study_config.automated_stopping_config.CopyFrom(early_stopping)
exploration = xm.HyperparameterOptimizer(
executable=executable,
max_num_trials=128,
parallel_evaluations=8,
vizier_study_config=vizier_study_config
)
xm.launch(xm.ExperimentDescription(FLAGS.project_name), exploration)
  # str.lstrip() strips a set of characters, not a prefix, so slice instead.
  no_prefix = FLAGS.gcs_path_out[len(GCS_PATH_PREFIX):]
print()
print('When your job completes, you will see artifacts in ' +
termcolor.colored(
f'https://pantheon.corp.google.com/storage/browser/{no_prefix}',
color='blue'))
if __name__ == '__main__':
app.run(main)
| apache-2.0 | 2,330,041,257,538,481,700 | 39.461111 | 80 | 0.658932 | false |
Jumpscale/jumpscale_core8 | lib/JumpScale/tools/issuemanager/models/repoCollection.py | 1 | 2535 | from JumpScale import j
base = j.data.capnp.getModelBaseClassCollection()
class RepoCollection(base):
"""
    This class represents a collection of Repos
"""
def list(self, owner=0, name='', id=0, source="", returnIndex=False):
"""
List all keys of repo model with specified params.
        @param owner: int, id of the owner the repo belongs to.
        @param name: str, name of the repo.
        @param id: int, repo id in db.
        @param source: str, source of the remote database.
        @param returnIndex: bool, return the index used.
"""
        if owner == "" or owner == 0:
            owner = ".*"
if name == "":
name = ".*"
if id == "" or id == 0:
id = ".*"
if source == "":
source = ".*"
regex = "%s:%s:%s:%s" % (owner, name, id, source)
return self._index.list(regex, returnIndex=returnIndex)
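    # Example (added for clarity): keys are indexed as
    # "<owner>:<name>:<id>:<source>", and empty arguments widen the regex:
    #   collection.list(name='myrepo')   -> regex ".*:myrepo:.*:.*"
    #   collection.list(owner=42, id=7)  -> regex "42:.*:7:.*"
    # (`collection` is a hypothetical RepoCollection instance.)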
def find(self, owner='', name='', id=0, milestone=0, member=0, label='', source=""):
"""
List all instances of repo model with specified params.
        @param owner: int, id of the owner the repo belongs to.
        @param name: str, name of the repo.
        @param id: int, repo id in db.
        @param milestone: int, id of a milestone in the repo.
        @param member: int, id of a member in the repo.
        @param label: str, label in the repo.
        @param source: str, source of the remote database.
"""
res = []
for key in self.list(owner=owner, name=name, id=id, source=source):
res.append(self.get(key))
if milestone:
for model in res[::-1]:
for milestone_model in model.dictFiltered.get('milestones', []):
if milestone == milestone_model['id']:
break
else:
res.remove(model)
if member:
for model in res[::-1]:
for member_model in model.dictFiltered.get('members', []):
if member == member_model['userKey']:
break
else:
res.remove(model)
if label:
for model in res[::-1]:
if (label not in model.dictFiltered.get('labels', [])) or not model.dictFiltered.get('labels', False):
res.remove(model)
return res
def getFromId(self, id):
key = self._index.lookupGet("issue_id", id)
return self.get(key)
| apache-2.0 | 7,284,187,035,742,835,000 | 32.8 | 118 | 0.523077 | false |
fzza/rdio-sock | src/rdiosock/metadata.py | 1 | 3693 | from rdiosock.exceptions import RdioApiError
from rdiosock.objects.collection import RdioList
class SEARCH_TYPES:
"""Metadata search types"""
NONE = 0
ARTIST = 1
ALBUM = 2
TRACK = 4
PLAYLIST = 8
USER = 16
LABEL = 32
ALL = (
ARTIST |
ALBUM |
TRACK |
PLAYLIST |
USER |
LABEL
)
_MAP = {
ARTIST: 'Artist',
ALBUM: 'Album',
TRACK: 'Track',
PLAYLIST: 'Playlist',
USER: 'User',
LABEL: 'Label'
}
@classmethod
def parse(cls, value):
if type(value) is int:
value = cls._parse_bit(value)
items = []
for key in value:
items.append(cls._MAP[key])
return items
@classmethod
def _parse_bit(cls, value):
items = []
for key in cls._MAP:
if (value & key) == key:
items.append(key)
return items
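    # Example (added for clarity): an int mask is decomposed bit-by-bit while
    # a list is mapped directly, so both of these yield ['Artist', 'Album']
    # (element order for the mask form may vary, since _MAP is a plain dict):
    #   SEARCH_TYPES.parse(SEARCH_TYPES.ARTIST | SEARCH_TYPES.ALBUM)
    #   SEARCH_TYPES.parse([SEARCH_TYPES.ARTIST, SEARCH_TYPES.ALBUM])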
class SEARCH_EXTRAS:
"""Metadata search extras"""
NONE = 0
LOCATION = 1
USERNAME = 2
STATIONS = 4
DESCRIPTION = 8
FOLLOWER_COUNT = 16
FOLLOWING_COUNT = 32
FAVORITE_COUNT = 64
SET_COUNT = 128
ICON_250x375 = 256
ICON_500x750 = 512
ICON_250x333 = 1024
ICON_500x667 = 2048
ALL = (
LOCATION |
USERNAME |
STATIONS |
DESCRIPTION |
FOLLOWER_COUNT |
FOLLOWING_COUNT |
FAVORITE_COUNT |
SET_COUNT |
ICON_250x375 |
ICON_500x750 |
ICON_250x333 |
ICON_500x667
)
_MAP = {
LOCATION: 'location',
USERNAME: 'username',
STATIONS: 'stations',
DESCRIPTION: 'description',
FOLLOWER_COUNT: 'followerCount',
FOLLOWING_COUNT: 'followingCount',
FAVORITE_COUNT: 'favoriteCount',
SET_COUNT: 'setCount',
ICON_250x375: 'icon250x375',
ICON_500x750: 'icon500x750',
ICON_250x333: 'icon250x333',
ICON_500x667: 'icon500x667'
}
@classmethod
def parse(cls, value):
if type(value) is int:
value = cls._parse_bit(value)
items = []
for key in value:
items.append(cls._MAP[key])
return items
@classmethod
def _parse_bit(cls, value):
items = []
for key in cls._MAP:
if (value & key) == key:
items.append(key)
return items
class RdioMetadata(object):
def __init__(self, sock):
"""
:type sock: RdioSock
"""
self._sock = sock
def search(self, query, search_types=SEARCH_TYPES.ALL, search_extras=SEARCH_EXTRAS.ALL):
"""Search for media item.
:param query: Search query
:type query: str
:param search_types: Search type (:class:`rdiosock.metadata.SEARCH_TYPES` bitwise-OR or list)
:type search_types: int or list of int
:param search_extras: Search result extras to include (:class:`rdiosock.metadata.SEARCH_EXTRAS` bitwise-OR or list)
:type search_extras: int or list of int
"""
result = self._sock._api_post('search', {
'query': query,
'types[]': SEARCH_TYPES.parse(search_types)
}, secure=False, extras=SEARCH_EXTRAS.parse(search_extras))
if result['status'] == 'error':
raise RdioApiError(result)
result = result['result']
if result['type'] == 'list':
return RdioList.parse(result)
else:
raise NotImplementedError()
| gpl-3.0 | 9,067,494,512,009,808,000 | 22.08125 | 123 | 0.516382 | false |
ciarams87/PyU4V | PyU4V/tests/ci_tests/test_pyu4v_ci_snapshot_policy.py | 1 | 15453 | # Copyright (c) 2020 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test_pyu4v_ci_snapshot_policy.py."""
import testtools
import time
from PyU4V.tests.ci_tests import base
from PyU4V.utils import constants
class CITestSnapshotPolicy(base.TestBaseTestCase, testtools.TestCase):
"""Test Snapshot Policy Functions."""
def setUp(self):
"""SetUp."""
super(CITestSnapshotPolicy, self).setUp()
self.snapshot_policy = self.conn.snapshot_policy
self.provision = self.conn.provisioning
self.snapshot_policy_name_for_test = (
constants.SNAPSHOT_POLICY_NAME_FOR_TEST)
def test_get_snapshot_policy(self):
"""Test get_snapshot_policy."""
snapshot_policy_name = self.create_snapshot_policy()
snapshot_policy_info = self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name)
self.assertEqual(snapshot_policy_name,
snapshot_policy_info.get('snapshot_policy_name'))
def test_get_snapshot_policy_list(self):
"""Test get_snapshot_policy_list."""
snapshot_policy_name = self.create_snapshot_policy()
snapshot_policy_list = self.snapshot_policy.get_snapshot_policy_list()
self.assertIn(snapshot_policy_name, snapshot_policy_list)
def test_create_snapshot_policy_local_snapshot_policy_details(self):
"""Test create_snapshot_policy with local snapshot policy."""
snapshot_policy_name = self.generate_name(object_type='sp')
snapshot_policy_interval = '1 Day'
job = self.snapshot_policy.create_snapshot_policy(
snapshot_policy_name, snapshot_policy_interval,
local_snapshot_policy_snapshot_count=30,
offset_mins=5, compliance_count_warning=30,
compliance_count_critical=5,
_async=True)
self.conn.common.wait_for_job_complete(job)
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(snapshot_policy_name))
self.assertEqual(snapshot_policy_name,
snapshot_policy_info.get('snapshot_policy_name'))
self.assertEqual(1440, snapshot_policy_info.get('interval_minutes'))
self.assertEqual(30, snapshot_policy_info.get('snapshot_count'))
self.assertFalse(snapshot_policy_info.get('secure'))
self.snapshot_policy.delete_snapshot_policy(snapshot_policy_name)
def test_modify_snapshot_policy_name_change(self):
"""Test modify_snapshot_policy name change."""
original_snapshot_policy_name = self.create_snapshot_policy()
modified_snapshot_policy_name = self.generate_name(object_type='sp')
self.snapshot_policy.modify_snapshot_policy_properties(
original_snapshot_policy_name,
new_snapshot_policy_name=modified_snapshot_policy_name)
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
modified_snapshot_policy_name))
self.assertEqual(modified_snapshot_policy_name,
snapshot_policy_info.get('snapshot_policy_name'))
self.snapshot_policy.modify_snapshot_policy_properties(
modified_snapshot_policy_name,
new_snapshot_policy_name=original_snapshot_policy_name)
def test_associate_disassociate_snapshot_policy(self):
"""Test associate and disassociate to/from storage groups."""
snapshot_policy_name = self.create_snapshot_policy()
storage_group_name = self.create_empty_storage_group()
self.snapshot_policy.associate_to_storage_groups(
snapshot_policy_name, storage_group_names=[storage_group_name])
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name))
self.assertEqual(1, snapshot_policy_info.get('storage_group_count'))
self.snapshot_policy.disassociate_from_storage_groups(
snapshot_policy_name, storage_group_names=[storage_group_name])
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name))
self.assertIsNone(snapshot_policy_info.get('storage_group_count'))
def test_suspend_resume_snapshot_policy(self):
"""Test suspend_snapshot_policy and resume_snapshot_policy."""
snapshot_policy_name = self.create_snapshot_policy()
self.snapshot_policy.suspend_snapshot_policy(
snapshot_policy_name)
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name))
self.assertTrue(snapshot_policy_info.get('suspended'))
self.snapshot_policy.resume_snapshot_policy(
snapshot_policy_name)
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name))
self.assertFalse(snapshot_policy_info.get('suspended'))
def test_modify_snapshot_policy_properties_extra_settings(self):
"""Test modify_snapshot_policy_properties extra settings."""
snapshot_policy_name = self.create_snapshot_policy()
job = self.snapshot_policy.modify_snapshot_policy_properties(
snapshot_policy_name,
offset_mins=5, compliance_count_warning=30,
compliance_count_critical=5, interval='12 Minutes',
snapshot_count=40, _async=True)
self.conn.common.wait_for_job_complete(job)
snapshot_policy_info = (
self.snapshot_policy.get_snapshot_policy(
snapshot_policy_name))
self.assertEqual(5, snapshot_policy_info.get('offset_minutes'))
self.assertEqual(12, snapshot_policy_info.get('interval_minutes'))
self.assertEqual(
30, snapshot_policy_info.get('compliance_count_warning'))
self.assertEqual(
5, snapshot_policy_info.get('compliance_count_critical'))
def test_create_storage_group_with_snapshot_policy(self):
"""Test create_storage_group with snapshot policy."""
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
self.assertEqual(
[snapshot_policy_name],
storage_group_details.get('snapshot_policies'))
storage_group_name = storage_group_details.get('storageGroupId')
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def get_storage_group_and_associated_snapshot_policy(self):
snapshot_policy_name = self.create_snapshot_policy()
storage_group_name = self.generate_name(object_type='sg')
volume_name = self.generate_name()
self.provision.create_storage_group(
self.SRP, storage_group_name, self.SLO, None, False, 1, 1, 'GB',
False, False, volume_name,
snapshot_policy_ids=[snapshot_policy_name])
storage_group_details = self.provision.get_storage_group(
storage_group_name)
return snapshot_policy_name, storage_group_details
def cleanup_snapshot_policy_and_storage_group(
self, snapshot_policy_name, storage_group_name):
self.snapshot_policy.modify_snapshot_policy(
snapshot_policy_name, constants.DISASSOCIATE_FROM_STORAGE_GROUPS,
storage_group_names=[storage_group_name])
self.addCleanup(self.delete_storage_group, storage_group_name)
def test_get_snapshot_policy_compliance(self):
"""Test get_snapshot_policy_compliance."""
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance(
storage_group_name))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_epoch_time_seconds(self):
"""Test get_snapshot_policy_compliance epoch time."""
from_epoch = str(int(time.time()))
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
to_epoch = str(int(time.time()))
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance_epoch(
storage_group_name, from_epoch=from_epoch, to_epoch=to_epoch))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_epoch_time_milliseconds(self):
"""Test get_snapshot_policy_compliance epoch time."""
from_epoch = str(int(time.time() * 1000))
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
to_epoch = str(int(time.time() * 1000))
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance_epoch(
storage_group_name, from_epoch=from_epoch, to_epoch=to_epoch))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_human_readable_time(self):
"""Test get_snapshot_policy_compliance human readable time."""
ts = time.gmtime()
from_time_string = time.strftime("%Y-%m-%d %H:%M", ts)
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
ts = time.gmtime()
to_time_string = time.strftime("%Y-%m-%d %H:%M", ts)
sp = self.snapshot_policy
compliance_details = (
sp.get_snapshot_policy_compliance_human_readable_time(
storage_group_name, from_time_string=from_time_string,
to_time_string=to_time_string))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_mixed_time(self):
"""Test get_snapshot_policy_compliance human readable time."""
        self.skipTest(reason='from human-readable time with to_epoch bug')
ts = time.gmtime()
from_time_string = time.strftime("%Y-%m-%d %H:%M", ts)
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
to_epoch = str(int(time.time()))
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance(
storage_group_name, from_time_string=from_time_string,
to_epoch=to_epoch))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_mixed_time_2(self):
"""Test get_snapshot_policy_compliance human readable time."""
from_epoch = str(int(time.time()))
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
ts = time.gmtime()
to_time_string = time.strftime("%Y-%m-%d %H:%M", ts)
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance(
storage_group_name, from_epoch=from_epoch,
to_time_string=to_time_string))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_last_week(self):
"""Test get_snapshot_policy_compliance last week."""
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
compliance_details = (
self.snapshot_policy.get_snapshot_policy_compliance_last_week(
storage_group_name))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_compliance_last_four_weeks(self):
"""Test get_snapshot_policy_compliance last four weeks."""
snapshot_policy_name, storage_group_details = (
self.get_storage_group_and_associated_snapshot_policy())
storage_group_name = storage_group_details.get('storageGroupId')
sp = self.snapshot_policy
compliance_details = (
sp.get_snapshot_policy_compliance_last_four_weeks(
storage_group_name))
self.assertEqual(storage_group_name, compliance_details.get(
'storage_group_name'))
self.assertEqual('NONE', compliance_details.get('compliance'))
self.cleanup_snapshot_policy_and_storage_group(
snapshot_policy_name, storage_group_name)
def test_get_snapshot_policy_storage_group_list(self):
"""Test get_snapshot_policy_storage_group_list"""
sp = self.snapshot_policy
snapshot_policy_name = 'DailyDefault'
snap_list = (
sp.get_snapshot_policy_storage_group_list(
snapshot_policy_name=snapshot_policy_name))
self.assertIsInstance(snap_list, list)
| mit | 8,060,742,626,429,043,000 | 48.213376 | 78 | 0.658319 | false |
jeremiedecock/snippets | python/itertools/combinations.py | 1 | 1588 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import itertools
def main():
"""Main function"""
# All possible permutations
# combinations(iterable, r) --> combinations object
#
# Return successive r-length combinations of elements in the iterable.
# combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3)
for combination in itertools.combinations(range(4), 3):
print(combination)
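    # Expected output (added for clarity):
    #   (0, 1, 2)
    #   (0, 1, 3)
    #   (0, 2, 3)
    #   (1, 2, 3)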
if __name__ == '__main__':
main()
| mit | 4,929,401,236,573,387,000 | 39.666667 | 79 | 0.726356 | false |
Hiestaa/3D-Lsystem | lsystem/Tree7.py | 1 | 1145 | from lsystem.LSystem import LSystem
import math
class Tree7(LSystem):
"""Fractale en forme d'arbre v7"""
def defineParams(self):
self.LSName = "Tree7"
self.LSAngle = math.pi / 4
self.LSSegment = 100
self.LSSteps = 9
self.LSStartingString = "T(x)"
self.LSStochastic = False
self.LSStochRange = 0.2
def createVars(self):
self.LSVars = {
'F': self.turtle.forward,
'T': self.turtle.forward,
'+': self.turtle.rotZ,
'-': self.turtle.irotZ,
'^': self.turtle.rotY,
'&': self.turtle.irotY,
'<': self.turtle.rotX,
'>': self.turtle.irotX,
'|': self.turtle.rotX,
'[': self.turtle.push,
']': self.turtle.pop,
'I': self.turtle.setColor,
'Y': self.turtle.setColor
}
self.LSParams = {
'x': self.LSSegment,
'+': self.LSAngle,
'-': self.LSAngle,
'&': self.LSAngle,
'^': self.LSAngle,
'<': self.LSAngle,
'>': self.LSAngle,
'|': self.LSAngle * 2,
'[': None,
']': None,
'I': (0.5,0.25,0),
'Y': (0, 0.5, 0)
}
def createRules(self):
self.LSRules = {
"T(x)": "IT(x*0.3)F(x*0.3)",
"F(x)": "IF(x)[+YF(x*0.5)][-YF(x*0.5)][<YF(x*0.5)][>YF(x*0.5)]"
}
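		# Example (added for clarity): one derivation step rewrites the axiom
		# "T(x)" into "IT(x*0.3)F(x*0.3)"; the next expands the "F(x)" part
		# into the bracketed branching rule, where I/Y set branch colors and
		# [ / ] push/pop the turtle state.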
| mit | -4,130,226,436,172,089,000 | 20.603774 | 69 | 0.558952 | false |
aquamatt/Peloton | src/peloton/exceptions.py | 1 | 1915 | # $Id: exceptions.py 117 2008-04-09 16:55:41Z mp $
#
# Copyright (c) 2007-2008 ReThought Limited and Peloton Contributors
# All Rights Reserved
# See LICENSE for details
""" All Peloton exceptions """
class PelotonError(Exception):
""" Base for all Peloton exceptions; can be used on its own
if no other exception is suitable. """
def __init__(self, msg='', rootException=None):
""" Initialise with an optional message and, optionally,
a root exception object - likely the underlying exception that resulted
in this exception being raised."""
Exception.__init__(self, msg)
self.rootException = rootException
def __str__(self):
""" Use base str but if there is a rootException add that message
to it. """
if self.rootException:
return "%s : Root exception: %s" % \
(Exception.__str__(self), str(self.rootException))
else:
return Exception.__str__(self)
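# Illustrative sketch (added for clarity; not part of the original module):
# chaining a low-level error into a Peloton exception so that both messages
# surface through __str__:
def _example_wrap_error():
    try:
        raise IOError("disk unavailable")
    except IOError as e:
        raise PelotonError("could not load profile", rootException=e)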
class ConfigurationError(PelotonError):
"""To be raised when an error occurs reading a configuration file,
a profile file or similar; also any other configuration-related errors."""
pass
class PluginError(PelotonError):
""" To be raised on the whole if a general plugin error occurs; specific
plugins may wish to provide a little more specific exceptions. """
pass
class ServiceNotFoundError(PelotonError):
pass
class ServiceConfigurationError(PelotonError):
pass
class ServiceError(PelotonError):
pass
class MessagingError(PelotonError):
pass
class WorkerError(PelotonError):
""" Generic error raised if something untoward occurs in a worker
process. """
pass
class PelotonConnectionError(PelotonError):
pass
class NoWorkersError(PelotonError):
pass
class DeadPeerError(PelotonError):
pass
class DeadProxyError(PelotonError):
pass | bsd-3-clause | -5,744,289,821,403,928,000 | 27.597015 | 76 | 0.699217 | false |
wasade/qiime | scripts/compare_distance_matrices.py | 1 | 11439 | #!/usr/bin/env python
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2012, The QIIME project"
__credits__ = ["Jai Ram Rideout", "Michael Dwan", "Logan Knecht",
"Damien Coy", "Levi McCracken", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.8.0-dev"
__maintainer__ = "Jai Ram Rideout"
__email__ = "[email protected]"
from os import path
from skbio.util import create_dir
from qiime.parse import fields_to_dict, parse_distmat
from qiime.util import (parse_command_line_parameters,
get_options_lookup,
make_option)
from qiime.compare_distance_matrices import (run_mantel_correlogram,
run_mantel_test)
options_lookup = get_options_lookup()
script_info = {}
script_info[
'brief_description'] = """Computes Mantel correlation tests between sets of distance matrices"""
script_info['script_description'] = """
This script compares two or more distance/dissimilarity matrices for \
correlation by providing the Mantel, partial Mantel, and Mantel correlogram \
matrix correlation tests.
The Mantel test will test the correlation between two matrices. The data \
often represents the "distance" between objects or samples.
The partial Mantel test is a first-order correlation analysis that utilizes \
three distance (dissimilarity) matrices. This test builds on the traditional \
Mantel test which is a procedure that tests the hypothesis that distances \
between the objects within a given matrix are linearly independent of the \
distances within those same objects in a separate matrix. It builds on the \
traditional Mantel test by adding a third "control" matrix.
Mantel correlogram produces a plot of distance classes versus Mantel \
statistics. Briefly, an ecological distance matrix (e.g. UniFrac distance \
matrix) and a second distance matrix (e.g. spatial distances, pH distances, \
etc.) are provided. The second distance matrix has its distances split into a \
number of distance classes (the number of classes is determined by Sturge's \
rule). A Mantel test is run over these distance classes versus the ecological \
distance matrix. The Mantel statistics obtained from each of these tests are \
then plotted in a correlogram. A filled-in point on the plot indicates that \
the Mantel statistic was statistically significant (you may provide what \
alpha to use).
For more information and examples pertaining to this script, please refer to \
the accompanying tutorial, which can be found at \
http://qiime.org/tutorials/distance_matrix_comparison.html.
"""
script_info['script_usage'] = []
script_info['script_usage'].append(("Partial Mantel",
"Performs a partial Mantel test on two distance matrices, "
"using a third matrix as a control. Runs 99 permutations to calculate the "
"p-value.",
"%prog --method partial_mantel -i "
"weighted_unifrac_dm.txt,unweighted_unifrac_dm.txt -c PH_dm.txt "
"-o partial_mantel_out -n 99"))
script_info['script_usage'].append(("Mantel",
"Performs a Mantel test on all pairs of four distance matrices, "
"including 999 permutations for each test.",
"%prog --method mantel "
"-i weighted_unifrac_dm.txt,unweighted_unifrac_dm.txt,"
"weighted_unifrac_even100_dm.txt,unweighted_unifrac_even100_dm.txt "
"-o mantel_out -n 999"))
script_info['script_usage'].append(("Mantel Correlogram",
"This example computes a Mantel correlogram on two distance matrices "
"using 999 permutations in each Mantel test. Output is written to the "
"mantel_correlogram_out directory.",
"%prog --method mantel_corr -i unweighted_unifrac_dm.txt,PH_dm.txt -o "
"mantel_correlogram_out -n 999"))
script_info['output_description'] = """
Mantel: One file is created containing the Mantel 'r' statistic and p-value.
Partial Mantel: One file is created in the output directory, which contains \
the partial Mantel statistic and p-value.
Mantel Correlogram: Two files are created in the output directory: a text \
file containing information about the distance classes, their associated \
Mantel statistics and p-values, etc. and an image of the correlogram plot.
"""
script_info['required_options'] = [
# All methods use these
make_option('--method',
help='matrix correlation method to use. Valid options: '
'[mantel, partial_mantel, mantel_corr]',
type='choice',
choices=['mantel', 'partial_mantel', 'mantel_corr']),
make_option('-i', '--input_dms', type='existing_filepaths',
help='the input distance matrices, comma-separated. WARNING: Only '
'symmetric, hollow distance matrices may be used as input. Asymmetric '
'distance matrices, such as those obtained by the UniFrac Gain metric '
'(i.e. beta_diversity.py -m unifrac_g), should not be used as input'),
options_lookup['output_dir']
]
script_info['optional_options'] = [
# All methods use these
make_option('-n', '--num_permutations',
help='the number of permutations to perform when calculating the '
'p-value [default: %default]', default=100, type='int'),
make_option('-s', '--sample_id_map_fp', type='existing_filepath',
help='Map of original sample ids to new sample ids [default: '
'%default]', default=None),
# Standard Mantel specific, i.e., method == mantel
make_option('-t', '--tail_type',
help='the type of tail test to perform when calculating the p-value. '
'Valid options: [two-sided, less, greater]. "two-sided" is a two-tailed '
'test, while "less" tests for r statistics less than the observed r '
'statistic, and "greater" tests for r statistics greater than the '
'observed r statistic. Only applies when method is mantel [default: '
'%default]', default='two-sided', type='choice',
choices=['two-sided', 'greater', 'less']),
# Mantel Correlogram specific, i.e., method == mantel_corr
make_option('-a', '--alpha',
help='the value of alpha to use when denoting significance in the '
'correlogram plot. Only applies when method is mantel_corr',
default=0.05, type='float'),
make_option('-g', '--image_type',
help='the type of image to produce. Valid options: [png, svg, pdf]. '
'Only applies when method is mantel_corr [default: %default]',
default='pdf', type='choice', choices=['pdf', 'png', 'svg']),
make_option('--variable_size_distance_classes', action='store_true',
help='if this option is supplied, each distance class will have an '
'equal number of distances (i.e. pairwise comparisons), which may '
'result in variable sizes of distance classes (i.e. each distance '
'class may span a different range of distances). If this option is '
'not supplied, each distance class will have the same width, but may '
'contain varying numbers of pairwise distances in each class. This '
'option can help maintain statistical power if there are large '
'differences in the number of distances in each class. See '
'Darcy et al. 2011 (PLoS ONE) for an example of this type of '
'correlogram. Only applies when method is mantel_corr '
'[default: %default]', default=False),
# Partial Mantel specific, i.e., method == partial_mantel
make_option('-c', '--control_dm',
help='the control matrix. Only applies (and is *required*) when '
'method is partial_mantel. [default: %default]', default=None,
type='existing_filepath')
]
script_info['version'] = __version__
comment_mantel_pmantel = """\
# Number of entries refers to the number of rows (or cols) retained in each
# distance matrix after filtering the distance matrices to include only those
# samples that were in both distance matrices. p-value contains the correct
# number of significant digits.
"""
comment_corr = comment_mantel_pmantel[:-1] + \
"""
# Distance classes with values of None were in the second half of the distance
# classes and not all samples could be included in the distance class, so
# calculations were not performed.
"""
def main():
option_parser, opts, args = parse_command_line_parameters(**script_info)
if opts.num_permutations < 1:
option_parser.error(
"--num_permutations must be greater than or equal to 1.")
# Create the output dir if it doesn't already exist.
try:
if not path.exists(opts.output_dir):
create_dir(opts.output_dir)
except:
option_parser.error("Could not create or access output directory "
"specified with the -o option.")
sample_id_map = None
if opts.sample_id_map_fp:
sample_id_map = dict([(k, v[0])
for k, v in fields_to_dict(open(opts.sample_id_map_fp, "U")).items()])
input_dm_fps = opts.input_dms
distmats = [parse_distmat(open(dm_fp, 'U')) for dm_fp in input_dm_fps]
if opts.method == 'mantel':
output_f = open(path.join(opts.output_dir, 'mantel_results.txt'), 'w')
output_f.write(run_mantel_test('mantel', input_dm_fps, distmats,
opts.num_permutations, opts.tail_type,
comment_mantel_pmantel, sample_id_map=sample_id_map))
elif opts.method == 'partial_mantel':
output_f = open(path.join(opts.output_dir,
'partial_mantel_results.txt'), 'w')
output_f.write(run_mantel_test('partial_mantel', input_dm_fps,
distmats, opts.num_permutations, opts.tail_type,
comment_mantel_pmantel, control_dm_fp=opts.control_dm,
control_dm=parse_distmat(open(opts.control_dm, 'U')),
sample_id_map=sample_id_map))
elif opts.method == 'mantel_corr':
output_f = open(path.join(opts.output_dir,
'mantel_correlogram_results.txt'), 'w')
result_str, correlogram_fps, correlograms = run_mantel_correlogram(
input_dm_fps, distmats, opts.num_permutations, comment_corr,
opts.alpha, sample_id_map=sample_id_map,
variable_size_distance_classes=opts.variable_size_distance_classes)
output_f.write(result_str)
for corr_fp, corr in zip(correlogram_fps, correlograms):
corr.savefig(path.join(opts.output_dir, corr_fp + opts.image_type),
format=opts.image_type)
output_f.close()
if __name__ == "__main__":
main()
| gpl-2.0 | 8,385,442,998,499,712,000 | 52.453271 | 111 | 0.621033 | false |
sagiss/sardana | src/sardana/taurus/qt/qtgui/extra_hkl/hklscan.py | 1 | 15114 | #!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
__docformat__ = 'restructuredtext'
import sys
import sardana
from taurus.external.qt import Qt
from taurus.qt.qtgui.container import TaurusWidget
from taurus.qt.qtgui.display import TaurusLabel
from taurus.qt.qtgui.base import TaurusBaseWidget
from taurus.external.qt import QtCore, QtGui
import taurus.core
from taurus.qt.qtcore.communication import SharedDataManager
from taurus.qt.qtgui.input import TaurusValueLineEdit
from displayscanangles import DisplayScanAngles
import taurus.core.util.argparse
import taurus.qt.qtgui.application
from taurus.qt.qtgui.util.ui import UILoadable
from PyTango import *
from sardana.taurus.qt.qtgui.extra_macroexecutor import TaurusMacroExecutorWidget, TaurusSequencerWidget, \
TaurusMacroConfigurationDialog, \
TaurusMacroDescriptionViewer, DoorOutput, DoorDebug, DoorResult
class EngineModesComboBox(Qt.QComboBox, TaurusBaseWidget):
"""ComboBox representing list of engine modes"""
def __init__(self, parent=None):
name = self.__class__.__name__
self.call__init__wo_kw(Qt.QComboBox, parent)
self.call__init__(TaurusBaseWidget, name)
self.setSizeAdjustPolicy(Qt.QComboBox.AdjustToContentsOnFirstShow)
self.setToolTip("Choose a engine mode ...")
QtCore.QMetaObject.connectSlotsByName(self)
def loadEngineModeNames(self, enginemodes):
self.clear()
self.addItems(enginemodes)
@UILoadable(with_ui="_ui")
class HKLScan(TaurusWidget):
def __init__(self, parent=None, designMode=False):
TaurusWidget.__init__(self, parent, designMode=designMode)
self.loadUi(filename="hklscan.ui")
self.connect(self._ui.hklStartScanButton,
Qt.SIGNAL("clicked()"), self.start_hklscan)
self.connect(self._ui.hklStopScanButton,
Qt.SIGNAL("clicked()"), self.stop_hklscan)
self.connect(self._ui.hklDisplayAnglesButton,
Qt.SIGNAL("clicked()"), self.display_angles)
self.connect(self._ui.MacroServerConnectionButton, Qt.SIGNAL(
"clicked()"), self.open_macroserver_connection_panel)
# Create a global SharedDataManager
Qt.qApp.SDM = SharedDataManager(self)
@classmethod
def getQtDesignerPluginInfo(cls):
ret = TaurusWidget.getQtDesignerPluginInfo()
ret['module'] = 'hklscan'
ret['group'] = 'Taurus Containers'
ret['container'] = ':/designer/frame.png'
ret['container'] = True
return ret
def setModel(self, model):
if model != None:
self.device = taurus.Device(model)
self.pseudo_motor_names = []
for motor in self.device.hklpseudomotorlist:
self.pseudo_motor_names.append(motor.split(' ')[0])
self.h_device_name = self.pseudo_motor_names[0]
self.h_device = taurus.Device(self.h_device_name)
self.k_device_name = self.pseudo_motor_names[1]
self.k_device = taurus.Device(self.k_device_name)
self.l_device_name = self.pseudo_motor_names[2]
self.l_device = taurus.Device(self.l_device_name)
# Add dynamically the angle widgets
motor_list = self.device.motorlist
motor_names = []
for motor in self.device.motorlist:
motor_names.append(motor.split(' ')[0])
self.nb_motors = len(motor_list)
angles_labels = []
angles_names = []
angles_taurus_label = []
gap_x = 800 / self.nb_motors
try:
angles_names = self.device.motorroles
except: # Only for compatibility
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
for i in range(0, self.nb_motors):
angles_labels.append(QtGui.QLabel(self))
angles_labels[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 290, 51, 17))
alname = "angleslabel" + str(i)
angles_labels[i].setObjectName(alname)
angles_labels[i].setText(QtGui.QApplication.translate(
"HKLScan", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
angles_taurus_label.append(TaurusLabel(self))
angles_taurus_label[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 320, 81, 19))
atlname = "anglestauruslabel" + str(i)
angles_taurus_label[i].setObjectName(atlname)
angles_taurus_label[i].setModel(motor_names[i] + "/Position")
# Set model to hkl display
hmodel = self.h_device_name + "/Position"
self._ui.taurusValueLineH.setModel(hmodel)
self._ui.taurusLabelValueH.setModel(hmodel)
kmodel = self.k_device_name + "/Position"
self._ui.taurusValueLineK.setModel(kmodel)
self._ui.taurusLabelValueK.setModel(kmodel)
lmodel = self.l_device_name + "/Position"
self._ui.taurusValueLineL.setModel(lmodel)
self._ui.taurusLabelValueL.setModel(lmodel)
# Set model to engine and modes
enginemodel = model + '/engine'
self._ui.taurusLabelEngine.setModel(enginemodel)
enginemodemodel = model + '/enginemode'
self._ui.taurusLabelEngineMode.setModel(enginemodemodel)
self.enginemodescombobox = EngineModesComboBox(self)
self.enginemodescombobox.setGeometry(QtCore.QRect(150, 445, 221, 27))
self.enginemodescombobox.setObjectName("enginemodeslist")
self.enginemodescombobox.loadEngineModeNames(self.device.hklmodelist)
self.connect(self.enginemodescombobox, Qt.SIGNAL(
"currentIndexChanged(QString)"), self.onModeChanged)
def onModeChanged(self, modename):
if self.device.engine != "hkl":
self.device.write_attribute("engine", "hkl")
self.device.write_attribute("enginemode", str(modename))
def start_hklscan(self):
start_hkl = []
stop_hkl = []
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
sample_time = float(self._ui.LineEditSampleTime.text())
dim = 0
macro_name = ["ascan", "a2scan", "a3scan"]
macro_command = []
index_to_scan = []
        if self.door_device is not None:
for i in range(0, 3):
if start_hkl[i] != stop_hkl[i]:
dim = dim + 1
index_to_scan.append(i)
if dim > 0:
macro_command.append(macro_name[dim - 1])
for i in range(len(index_to_scan)):
macro_command.append(
str(self.pseudo_motor_names[index_to_scan[i]]))
macro_command.append(str(start_hkl[index_to_scan[i]]))
macro_command.append(str(stop_hkl[index_to_scan[i]]))
macro_command.append(str(nb_points))
macro_command.append(str(sample_time))
self.door_device.RunMacro(macro_command)
def stop_hklscan(self):
self.door_device.StopMacro()
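    # Simulate the selected trajectory for every scan point (via the
    # computetrajectoriessim attribute) and show the resulting motor angles
    # in a separate window.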
def display_angles(self):
xangle = []
for i in range(0, 6):
xangle.append(40 + i * 100)
yhkl = 50
tr = self.device.selectedtrajectory
w = DisplayScanAngles()
angles_labels = []
angles_names = []
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
dsa_label = []
for i in range(0, self.nb_motors):
dsa_label.append(QtGui.QLabel(w))
dsa_label[i].setGeometry(QtCore.QRect(xangle[i], yhkl, 51, 20))
label_name = "dsa_label_" + str(i)
dsa_label[i].setObjectName(label_name)
dsa_label[i].setText(QtGui.QApplication.translate(
"Form", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
start_hkl = []
stop_hkl = []
missed_values = 0
        # TODO: This code raises an exception if any of the line edits is empty,
        # but not all dimensions (H, K and L) are obligatory: one should still be
        # able to display the angles of a 1- or 2-dimensional scan.
try:
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
except:
nb_points = -1
missed_values = 1
increment_hkl = []
if nb_points > 0:
for i in range(0, 3):
increment_hkl.append((stop_hkl[i] - start_hkl[i]) / nb_points)
taurusValueAngle = []
for i in range(0, nb_points + 1):
hkl_temp = []
for j in range(0, 3):
hkl_temp.append(start_hkl[j] + i * increment_hkl[j])
no_trajectories = 0
try:
self.device.write_attribute("computetrajectoriessim", hkl_temp)
except:
no_trajectories = 1
if not no_trajectories:
angles_list = self.device.trajectorylist[tr]
taurusValueAngle.append([])
for iangle in range(0, self.nb_motors):
taurusValueAngle[i].append(TaurusValueLineEdit(w))
taurusValueAngle[i][iangle].setGeometry(
QtCore.QRect(xangle[iangle], yhkl + 30 * (i + 1), 80, 27))
taurusValueAngle[i][iangle].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i) + "_" + str(iangle)
taurusValueAngle[i][iangle].setObjectName(tva_name)
taurusValueAngle[i][iangle].setValue(
"%10.4f" % angles_list[iangle])
else:
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[i].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30 * (i + 1), self.nb_motors * 120, 27))
taurusValueAngle[i].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i)
taurusValueAngle[i].setObjectName(tva_name)
taurusValueAngle[i].setValue(
"... No angle solution for hkl values ...")
        # TODO: not all dimensions (H, K and L) are obligatory. One should still
        # be able to display the angles of a 1- or 2-dimensional scan.
if nb_points == -1:
nb_points = 0
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[0].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30, self.nb_motors * 120, 27))
taurusValueAngle[0].setReadOnly(True)
tva_name = "taurusValueAngle"
taurusValueAngle[0].setObjectName(tva_name)
taurusValueAngle[0].setValue(
"... No scan parameters filled. Fill them in the main window ...")
w.resize(self.nb_motors * 140, 120 + nb_points * 40)
w.show()
def open_macroserver_connection_panel(self):
w = TaurusMacroConfigurationDialog(self)
Qt.qApp.SDM.connectReader("macroserverName", w.selectMacroServer)
Qt.qApp.SDM.connectReader("doorName", w.selectDoor)
Qt.qApp.SDM.connectReader("doorName", self.onDoorChanged)
Qt.qApp.SDM.connectWriter(
"macroserverName", w, 'macroserverNameChanged')
Qt.qApp.SDM.connectWriter("doorName", w, 'doorNameChanged')
w.show()
def onDoorChanged(self, doorName):
if doorName != self.door_device_name:
self.door_device_name = doorName
self.door_device = taurus.Device(doorName)
def main():
parser = taurus.core.util.argparse.get_taurus_parser()
parser.usage = "%prog <model> [door_name]"
parser.set_description("a taurus application for performing hkl scans")
app = taurus.qt.qtgui.application.TaurusApplication(cmd_line_parser=parser,
app_version=sardana.Release.version)
app.setApplicationName("hklscan")
args = app.get_command_line_args()
if len(args) < 1:
msg = "model not set (requires diffractometer controller)"
parser.error(msg)
w = HKLScan()
w.model = args[0]
w.setModel(w.model)
w.door_device = None
w.door_device_name = None
if len(args) > 1:
w.onDoorChanged(args[1])
else:
print "WARNING: Not door name supplied. Connection to MacroServer/Door not automatically done"
w.show()
sys.exit(app.exec_())
# if len(sys.argv)>1: model=sys.argv[1]
# else: model = None
# app = Qt.QApplication(sys.argv)
# w = HKLScan()
# w.setModel(model)
# w.show()
# sys.exit(app.exec_())
if __name__ == "__main__":
main()
| lgpl-3.0 | 5,986,230,292,049,450,000 | 37.070529 | 119 | 0.596335 | false |
ByteInternet/libcloud | libcloud/test/compute/test_dimensiondata_v2_4.py | 1 | 163733 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from types import GeneratorType
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import ET
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataAPIException, NetworkDomainServicePlan
from libcloud.common.dimensiondata import DimensionDataServerCpuSpecification, DimensionDataServerDisk, DimensionDataServerVMWareTools
from libcloud.common.dimensiondata import DimensionDataTag, DimensionDataTagKey
from libcloud.common.dimensiondata import DimensionDataIpAddress, \
DimensionDataIpAddressList, DimensionDataChildIpAddressList, \
DimensionDataPortList, DimensionDataPort, DimensionDataChildPortList
from libcloud.common.dimensiondata import TYPES_URN
# NOTE: assumed import path for InvalidRequestError (used in tests below);
# adjust if this libcloud fork defines it elsewhere.
from libcloud.common.dimensiondata import InvalidRequestError
from libcloud.compute.drivers.dimensiondata import DimensionDataNodeDriver as DimensionData
from libcloud.compute.drivers.dimensiondata import DimensionDataNic
from libcloud.compute.base import Node, NodeAuthPassword, NodeLocation
from libcloud.test import MockHttp, unittest
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
from libcloud.utils.xml import fixxpath, findtext, findall
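# These tests never touch the network: DimensionDataMockHttp serves canned XML
# fixtures, and setting DimensionDataMockHttp.type (e.g. 'PAGINATED',
# 'INPROGRESS', 'ALLFILTERS') selects which fixture variant a request is
# answered with.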
class DimensionData_v2_4_Tests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.active_api_version = '2.4'
DimensionData.connectionCls.conn_class = DimensionDataMockHttp
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
def test_invalid_region(self):
with self.assertRaises(ValueError):
DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
with self.assertRaises(InvalidCredsError):
self.driver.list_nodes()
def test_get_account_details(self):
DimensionDataMockHttp.type = None
ret = self.driver.connection.get_account_details()
self.assertEqual(ret.full_name, 'Test User')
self.assertEqual(ret.first_name, 'Test')
self.assertEqual(ret.email, '[email protected]')
def test_list_locations_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_locations()
self.assertEqual(len(ret), 5)
first_loc = ret[0]
self.assertEqual(first_loc.id, 'NA3')
self.assertEqual(first_loc.name, 'US - West')
self.assertEqual(first_loc.country, 'US')
def test_list_nodes_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertEqual(len(ret), 7)
def test_node_extras(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertTrue(isinstance(ret[0].extra['vmWareTools'], DimensionDataServerVMWareTools))
self.assertTrue(isinstance(ret[0].extra['cpu'], DimensionDataServerCpuSpecification))
self.assertTrue(isinstance(ret[0].extra['disks'], list))
self.assertTrue(isinstance(ret[0].extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(ret[0].extra['disks'][0].size_gb, 10)
self.assertTrue(isinstance(ret[1].extra['disks'], list))
self.assertTrue(isinstance(ret[1].extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(ret[1].extra['disks'][0].size_gb, 10)
def test_server_states(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertTrue(ret[0].state == 'running')
self.assertTrue(ret[1].state == 'starting')
self.assertTrue(ret[2].state == 'stopping')
self.assertTrue(ret[3].state == 'reconfiguring')
self.assertTrue(ret[4].state == 'running')
self.assertTrue(ret[5].state == 'terminated')
self.assertTrue(ret[6].state == 'stopped')
self.assertEqual(len(ret), 7)
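    # Pagination: paginated_request_with_orgId_api_2 returns a generator
    # yielding one list of items per result page.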
def test_list_nodes_response_PAGINATED(self):
DimensionDataMockHttp.type = 'PAGINATED'
ret = self.driver.list_nodes()
self.assertEqual(len(ret), 9)
def test_paginated_mcp2_call_EMPTY(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'EMPTY'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server')
empty_node_list = []
for node_list in node_list_generator:
empty_node_list.extend(node_list)
self.assertTrue(len(empty_node_list) == 0)
def test_paginated_mcp2_call_PAGED_THEN_EMPTY(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'PAGED_THEN_EMPTY'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server')
final_node_list = []
for node_list in node_list_generator:
final_node_list.extend(node_list)
self.assertTrue(len(final_node_list) == 2)
def test_paginated_mcp2_call_with_page_size(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'PAGESIZE50'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server', page_size=50)
self.assertTrue(isinstance(node_list_generator, GeneratorType))
# We're making sure here the filters make it to the URL
# See _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS for asserts
def test_list_nodes_response_strings_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
ret = self.driver.list_nodes(ex_location='fake_loc', ex_name='fake_name',
ex_ipv6='fake_ipv6', ex_ipv4='fake_ipv4', ex_vlan='fake_vlan',
ex_image='fake_image', ex_deployed=True,
ex_started=True, ex_state='fake_state',
ex_network='fake_network', ex_network_domain='fake_network_domain')
self.assertTrue(isinstance(ret, list))
self.assertEqual(len(ret), 7)
node = ret[3]
self.assertTrue(isinstance(node.extra['disks'], list))
self.assertTrue(isinstance(node.extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(node.size.id, '1')
self.assertEqual(node.image.id, '3ebf3c0f-90fe-4a8b-8585-6e65b316592c')
self.assertEqual(node.image.name, 'WIN2008S/32')
disk = node.extra['disks'][0]
self.assertEqual(disk.id, "c2e1f199-116e-4dbc-9960-68720b832b0a")
self.assertEqual(disk.scsi_id, 0)
self.assertEqual(disk.size_gb, 50)
self.assertEqual(disk.speed, "STANDARD")
self.assertEqual(disk.state, "NORMAL")
def test_list_nodes_response_LOCATION(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_locations()
first_loc = ret[0]
ret = self.driver.list_nodes(ex_location=first_loc)
for node in ret:
self.assertEqual(node.extra['datacenterId'], 'NA3')
def test_list_nodes_response_LOCATION_STR(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes(ex_location='NA3')
for node in ret:
self.assertEqual(node.extra['datacenterId'], 'NA3')
def test_list_sizes_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_sizes()
self.assertEqual(len(ret), 1)
size = ret[0]
self.assertEqual(size.name, 'default')
def test_reboot_node_response(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = node.reboot()
self.assertTrue(ret is True)
def test_reboot_node_response_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
node.reboot()
def test_destroy_node_response(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = node.destroy()
self.assertTrue(ret is True)
def test_destroy_node_response_RESOURCE_BUSY(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
node.destroy()
def test_list_images(self):
images = self.driver.list_images()
self.assertEqual(len(images), 3)
self.assertEqual(images[0].name, 'RedHat 6 64-bit 2 CPU')
self.assertEqual(images[0].id, 'c14b1a46-2428-44c1-9c1a-b20e6418d08c')
self.assertEqual(images[0].extra['location'].id, 'NA9')
self.assertEqual(images[0].extra['cpu'].cpu_count, 2)
self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')
def test_clean_failed_deployment_response_with_node(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_clean_failed_deployment(node)
self.assertTrue(ret is True)
def test_clean_failed_deployment_response_with_node_id(self):
node = 'e75ead52-692f-4314-8725-c8a4f4d13a87'
ret = self.driver.ex_clean_failed_deployment(node)
self.assertTrue(ret is True)
def test_ex_list_customer_images(self):
images = self.driver.ex_list_customer_images()
self.assertEqual(len(images), 3)
self.assertEqual(images[0].name, 'ImportedCustomerImage')
self.assertEqual(images[0].id, '5234e5c7-01de-4411-8b6e-baeb8d91cf5d')
self.assertEqual(images[0].extra['location'].id, 'NA9')
self.assertEqual(images[0].extra['cpu'].cpu_count, 4)
self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')
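    # create_node is exercised in both deployment styles: MCP 1.0 takes
    # ex_network, MCP 2.0 takes ex_network_domain plus a VLAN or a private
    # IPv4 address; mixing the two styles, or supplying neither, is an error.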
def test_create_mcp1_node_optional_param(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
network = self.driver.ex_list_networks()[0]
cpu_spec = DimensionDataServerCpuSpecification(cpu_count='4',
cores_per_socket='2',
performance='STANDARD')
disks = [DimensionDataServerDisk(scsi_id='0', speed='HIGHPERFORMANCE')]
node = self.driver.create_node(name='test2', image=image, auth=root_pw,
ex_description='test2 node',
ex_network=network,
ex_is_started=False,
ex_memory_gb=8,
ex_disks=disks,
ex_cpu_specification=cpu_spec,
ex_primary_dns='10.0.0.5',
ex_secondary_dns='10.0.0.6'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_mcp1_node_response_no_pass_random_gen(self):
image = self.driver.list_images()[0]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node',
ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_windows(self):
image = self.driver.ex_list_customer_images()[1]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_windows_STR(self):
image = self.driver.ex_list_customer_images()[1].id
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_linux(self):
image = self.driver.ex_list_customer_images()[0]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' not in node.extra)
def test_create_mcp1_node_response_no_pass_customer_linux_STR(self):
image = self.driver.ex_list_customer_images()[0].id
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' not in node.extra)
def test_create_mcp1_node_response_STR(self):
rootPw = 'pass123'
image = self.driver.list_images()[0].id
network = self.driver.ex_list_networks()[0].id
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_response_network_domain(self):
rootPw = NodeAuthPassword('pass123')
location = self.driver.ex_get_location_by_id('NA9')
image = self.driver.list_images(location=location)[0]
network_domain = self.driver.ex_list_network_domains(location=location)[0]
vlan = self.driver.ex_list_vlans(location=location)[0]
cpu = DimensionDataServerCpuSpecification(
cpu_count=4,
cores_per_socket=1,
performance='HIGHPERFORMANCE'
)
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node',
ex_network_domain=network_domain,
ex_vlan=vlan,
ex_is_started=False, ex_cpu_specification=cpu,
ex_memory_gb=4)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_response_network_domain_STR(self):
rootPw = NodeAuthPassword('pass123')
location = self.driver.ex_get_location_by_id('NA9')
image = self.driver.list_images(location=location)[0]
network_domain = self.driver.ex_list_network_domains(location=location)[0].id
vlan = self.driver.ex_list_vlans(location=location)[0].id
cpu = DimensionDataServerCpuSpecification(
cpu_count=4,
cores_per_socket=1,
performance='HIGHPERFORMANCE'
)
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node',
ex_network_domain=network_domain,
ex_vlan=vlan,
ex_is_started=False, ex_cpu_specification=cpu,
ex_memory_gb=4)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_mcp1_node_no_network(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(InvalidRequestError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network=None,
ex_is_started=False)
def test_create_node_mcp1_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network='fakenetwork',
ex_primary_ipv4='10.0.0.1',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp1_network(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network='fakenetwork',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp2_vlan(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_vlan='fakevlan',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp2_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_network_domain_no_vlan_or_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_is_started=False)
def test_create_node_response(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_ms_time_zone(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan',
ex_microsoft_time_zone='040'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
    def test_create_node_ambiguous_mcps_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_network='fakenetwork',
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_no_network_domain_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_no_primary_nic_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain'
)
def test_create_node_primary_vlan_nic(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan',
ex_primary_nic_network_adapter='v1000'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_primary_ipv4(self):
rootPw = 'pass123'
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_both_primary_nic_and_vlan_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_cpu_specification(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
cpu_spec = DimensionDataServerCpuSpecification(cpu_count='4',
cores_per_socket='2',
performance='STANDARD')
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_cpu_specification=cpu_spec)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_memory(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_memory_gb=8)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_disks(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
disks = [DimensionDataServerDisk(scsi_id='0', speed='HIGHPERFORMANCE')]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_disks=disks)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_disks_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
disks = 'blah'
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_disks=disks)
def test_create_node_ipv4_gateway(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_ipv4_gateway='10.2.2.2')
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_network_domain_no_vlan_no_ipv4_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_is_started=False)
def test_create_node_mcp2_additional_nics_legacy(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
additional_vlans = ['fakevlan1', 'fakevlan2']
additional_ipv4 = ['10.0.0.2', '10.0.0.3']
node = self.driver.create_node(
name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_additional_nics_vlan=additional_vlans,
ex_additional_nics_ipv4=additional_ipv4,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_bad_additional_nics_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_vlan='fake_vlan',
ex_additional_nics_ipv4='badstring',
ex_is_started=False)
def test_create_node_additional_nics(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(vlan='fake_vlan',
network_adapter_name='v1000')
nic2 = DimensionDataNic(private_ip_v4='10.1.1.2',
network_adapter_name='v1000')
additional_nics = [nic1, nic2]
node = self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_additional_nics_vlan_ipv4_coexist_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(private_ip_v4='10.1.1.1', vlan='fake_vlan',
network_adapter_name='v1000')
nic2 = DimensionDataNic(private_ip_v4='10.1.1.2', vlan='fake_vlan2',
network_adapter_name='v1000')
additional_nics = [nic1, nic2]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False
)
def test_create_node_additional_nics_invalid_input_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
additional_nics = 'blah'
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False
)
def test_create_node_additional_nics_vlan_ipv4_not_exist_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(network_adapter_name='v1000')
nic2 = DimensionDataNic(network_adapter_name='v1000')
additional_nics = [nic1, nic2]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False)
def test_create_node_bad_additional_nics_vlan(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_vlan='fake_vlan',
ex_additional_nics_vlan='badstring',
ex_is_started=False)
def test_create_node_mcp2_indicate_dns(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test node dns',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_primary_dns='8.8.8.8',
ex_secondary_dns='8.8.4.4',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
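    # Power operations: the 'INPROGRESS' mock type simulates a busy resource,
    # which the driver surfaces as a DimensionDataAPIException.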
def test_ex_shutdown_graceful(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_shutdown_graceful(node)
self.assertTrue(ret is True)
def test_ex_shutdown_graceful_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_shutdown_graceful(node)
def test_ex_start_node(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_start_node(node)
self.assertTrue(ret is True)
def test_ex_start_node_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_start_node(node)
def test_ex_power_off(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_power_off(node)
self.assertTrue(ret is True)
def test_ex_update_vm_tools(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_update_vm_tools(node)
self.assertTrue(ret is True)
def test_ex_power_off_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state='STOPPING',
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_power_off(node)
def test_ex_reset(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_reset(node)
self.assertTrue(ret is True)
def test_ex_attach_node_to_vlan(self):
node = self.driver.ex_get_node_by_id('e75ead52-692f-4314-8725-c8a4f4d13a87')
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
ret = self.driver.ex_attach_node_to_vlan(node, vlan)
self.assertTrue(ret is True)
def test_ex_destroy_nic(self):
node = self.driver.ex_destroy_nic('a202e51b-41c0-4cfc-add0-b1c62fc0ecf6')
self.assertTrue(node)
def test_list_networks(self):
nets = self.driver.list_networks()
self.assertEqual(nets[0].name, 'test-net1')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_create_network(self):
location = self.driver.ex_get_location_by_id('NA9')
net = self.driver.ex_create_network(location, "Test Network", "test")
self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
self.assertEqual(net.name, "Test Network")
def test_ex_create_network_NO_DESCRIPTION(self):
location = self.driver.ex_get_location_by_id('NA9')
net = self.driver.ex_create_network(location, "Test Network")
self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
self.assertEqual(net.name, "Test Network")
def test_ex_delete_network(self):
net = self.driver.ex_list_networks()[0]
result = self.driver.ex_delete_network(net)
self.assertTrue(result)
def test_ex_rename_network(self):
net = self.driver.ex_list_networks()[0]
result = self.driver.ex_rename_network(net, "barry")
self.assertTrue(result)
def test_ex_create_network_domain(self):
location = self.driver.ex_get_location_by_id('NA9')
plan = NetworkDomainServicePlan.ADVANCED
net = self.driver.ex_create_network_domain(location=location,
name='test',
description='test',
service_plan=plan)
self.assertEqual(net.name, 'test')
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')
def test_ex_create_network_domain_NO_DESCRIPTION(self):
location = self.driver.ex_get_location_by_id('NA9')
plan = NetworkDomainServicePlan.ADVANCED
net = self.driver.ex_create_network_domain(location=location,
name='test',
service_plan=plan)
self.assertEqual(net.name, 'test')
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')
def test_ex_get_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(net.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(net.description, 'test2')
self.assertEqual(net.name, 'test')
def test_ex_update_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
net.name = 'new name'
net2 = self.driver.ex_update_network_domain(net)
self.assertEqual(net2.name, 'new name')
def test_ex_delete_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
result = self.driver.ex_delete_network_domain(net)
self.assertTrue(result)
def test_ex_list_networks(self):
nets = self.driver.ex_list_networks()
self.assertEqual(nets[0].name, 'test-net1')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_network_domains(self):
nets = self.driver.ex_list_network_domains()
self.assertEqual(nets[0].name, 'Aurora')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_network_domains_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
nets = self.driver.ex_list_network_domains(location='fake_location', name='fake_name',
service_plan='fake_plan', state='fake_state')
self.assertEqual(nets[0].name, 'Aurora')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_vlans(self):
vlans = self.driver.ex_list_vlans()
self.assertEqual(vlans[0].name, "Primary")
def test_ex_list_vlans_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
vlans = self.driver.ex_list_vlans(location='fake_location', network_domain='fake_network_domain',
name='fake_name', ipv4_address='fake_ipv4', ipv6_address='fake_ipv6', state='fake_state')
self.assertEqual(vlans[0].name, "Primary")
    def test_ex_create_vlan(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
vlan = self.driver.ex_create_vlan(network_domain=net,
name='test',
private_ipv4_base_address='10.3.4.0',
private_ipv4_prefix_size='24',
description='test vlan')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
    def test_ex_create_vlan_NO_DESCRIPTION(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
vlan = self.driver.ex_create_vlan(network_domain=net,
name='test',
private_ipv4_base_address='10.3.4.0',
private_ipv4_prefix_size='24')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
def test_ex_get_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
self.assertEqual(vlan.description, 'test2')
self.assertEqual(vlan.status, 'NORMAL')
self.assertEqual(vlan.name, 'Production VLAN')
self.assertEqual(vlan.private_ipv4_range_address, '10.0.3.0')
self.assertEqual(vlan.private_ipv4_range_size, 24)
self.assertEqual(vlan.ipv6_range_size, 64)
self.assertEqual(vlan.ipv6_range_address, '2607:f480:1111:1153:0:0:0:0')
self.assertEqual(vlan.ipv4_gateway, '10.0.3.1')
self.assertEqual(vlan.ipv6_gateway, '2607:f480:1111:1153:0:0:0:1')
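    # ex_wait_for_state polls the given getter until the object reaches the
    # requested state, raising DimensionDataAPIException once the timeout
    # elapses.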
def test_ex_wait_for_state(self):
self.driver.ex_wait_for_state('NORMAL',
self.driver.ex_get_vlan,
vlan_id='0e56433f-d808-4669-821d-812769517ff8',
poll_interval=0.1)
def test_ex_wait_for_state_NODE(self):
self.driver.ex_wait_for_state('running',
self.driver.ex_get_node_by_id,
id='e75ead52-692f-4314-8725-c8a4f4d13a87',
poll_interval=0.1)
def test_ex_wait_for_state_FAIL(self):
with self.assertRaises(DimensionDataAPIException) as context:
self.driver.ex_wait_for_state('starting',
self.driver.ex_get_node_by_id,
id='e75ead52-692f-4314-8725-c8a4f4d13a87',
poll_interval=0.1,
timeout=0.1
)
self.assertEqual(context.exception.code, 'running')
self.assertTrue('timed out' in context.exception.msg)
def test_ex_update_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
vlan.name = 'new name'
vlan2 = self.driver.ex_update_vlan(vlan)
self.assertEqual(vlan2.name, 'new name')
def test_ex_delete_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
result = self.driver.ex_delete_vlan(vlan)
self.assertTrue(result)
def test_ex_expand_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
vlan.private_ipv4_range_size = '23'
vlan = self.driver.ex_expand_vlan(vlan)
self.assertEqual(vlan.private_ipv4_range_size, '23')
def test_ex_add_public_ip_block_to_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
block = self.driver.ex_add_public_ip_block_to_network_domain(net)
self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
def test_ex_list_public_ip_blocks(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
blocks = self.driver.ex_list_public_ip_blocks(net)
self.assertEqual(blocks[0].base_ip, '168.128.4.18')
self.assertEqual(blocks[0].size, '2')
self.assertEqual(blocks[0].id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(blocks[0].location.id, 'NA9')
self.assertEqual(blocks[0].network_domain.id, net.id)
def test_ex_get_public_ip_block(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(block.base_ip, '168.128.4.18')
self.assertEqual(block.size, '2')
self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(block.location.id, 'NA9')
self.assertEqual(block.network_domain.id, net.id)
def test_ex_delete_public_ip_block(self):
block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
result = self.driver.ex_delete_public_ip_block(block)
self.assertTrue(result)
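    # Firewall rules: source and destination can each be ANY, an address with
    # a prefix size, an address list, a port range or a port list; new rules
    # are positioned FIRST/LAST or BEFORE/AFTER a relative rule.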
def test_ex_list_firewall_rules(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
self.assertEqual(rules[0].id, '756cba02-b0bc-48f4-aea5-9445870b6148')
self.assertEqual(rules[0].network_domain.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(rules[0].name, 'CCDEFAULT.BlockOutboundMailIPv4')
self.assertEqual(rules[0].action, 'DROP')
self.assertEqual(rules[0].ip_version, 'IPV4')
self.assertEqual(rules[0].protocol, 'TCP')
self.assertEqual(rules[0].source.ip_address, 'ANY')
self.assertTrue(rules[0].source.any_ip)
self.assertTrue(rules[0].destination.any_ip)
def test_ex_create_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[0], 'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_specific_source_ip(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
rule = self.driver.ex_create_firewall_rule(net, specific_source_ip_rule, 'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_source_ip(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = \
list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
specific_source_ip_rule.source.any_ip = False
specific_source_ip_rule.source.ip_address = '10.0.0.1'
specific_source_ip_rule.source.ip_prefix_size = '15'
rule = self.driver.ex_create_firewall_rule(net,
specific_source_ip_rule,
'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_any_ip(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = \
list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
specific_source_ip_rule.source.any_ip = True
rule = self.driver.ex_create_firewall_rule(net,
specific_source_ip_rule,
'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_ip_prefix_size(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.address_list_id = None
rule.source.any_ip = False
rule.source.ip_address = '10.2.1.1'
rule.source.ip_prefix_size = '10'
rule.destination.address_list_id = None
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = '20'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_address_list(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.address_list_id = '12345'
rule.destination.address_list_id = '12345'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_port_list(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.port_list_id = '12345'
rule.destination.port_list_id = '12345'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.port_list_id = None
rule.source.port_begin = '8000'
rule.source.port_end = '8005'
rule.destination.port_list_id = None
rule.destination.port_begin = '7000'
rule.destination.port_end = '7005'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_ALL_VALUES(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
for rule in rules:
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_WITH_POSITION_RULE(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[-2], 'BEFORE', rules[-1])
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_WITH_POSITION_RULE_STR(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[-2], 'BEFORE', 'RULE_WITH_SOURCE_AND_DEST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_FAIL_POSITION(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
with self.assertRaises(ValueError):
self.driver.ex_create_firewall_rule(net, rules[0], 'BEFORE')
def test_ex_create_firewall_rule_FAIL_POSITION_WITH_RULE(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
with self.assertRaises(ValueError):
self.driver.ex_create_firewall_rule(net, rules[0], 'LAST', 'RULE_WITH_SOURCE_AND_DEST')
def test_ex_get_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_set_firewall_rule_state(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
result = self.driver.ex_set_firewall_rule_state(rule, False)
self.assertTrue(result)
def test_ex_delete_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
result = self.driver.ex_delete_firewall_rule(rule)
self.assertTrue(result)
def test_ex_edit_firewall_rule(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.any_ip = True
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_ipaddresslist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.address_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
rule.source.any_ip = False
rule.source.ip_address = '10.0.0.1'
rule.source.ip_prefix_size = 10
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_ipaddresslist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.address_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = 10
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_ipaddress(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.address_list_id = None
rule.source.any_ip = False
rule.source.ip_address = '10.0.0.1'
rule.source.ip_prefix_size = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_ipaddress(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.address_list_id = None
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_with_relative_rule(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
placement_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
result = self.driver.ex_edit_firewall_rule(
rule=rule, position='BEFORE',
relative_rule_for_position=placement_rule)
self.assertTrue(result)
def test_ex_edit_firewall_rule_with_relative_rule_by_name(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
placement_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
result = self.driver.ex_edit_firewall_rule(
rule=rule, position='BEFORE',
relative_rule_for_position=placement_rule.name)
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_portlist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.port_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.port_list_id = None
rule.source.port_begin = '3'
rule.source.port_end = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_portlist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.port_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.port_list_id = None
rule.destination.port_begin = '3'
rule.destination.port_end = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_invalid_position_fail(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
with self.assertRaises(ValueError):
self.driver.ex_edit_firewall_rule(rule=rule, position='BEFORE')
def test_ex_edit_firewall_rule_invalid_position_relative_rule_fail(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
relative_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
with self.assertRaises(ValueError):
self.driver.ex_edit_firewall_rule(rule=rule, position='FIRST',
relative_rule_for_position=relative_rule)
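    # NAT rules map an internal IPv4 address to an external one within a
    # network domain.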
def test_ex_create_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_create_nat_rule(net, '1.2.3.4', '4.3.2.1')
self.assertEqual(rule.id, 'd31c2db0-be6b-4d50-8744-9a7a534b5fba')
def test_ex_list_nat_rules(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_nat_rules(net)
self.assertEqual(rules[0].id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rules[0].internal_ip, '10.0.0.15')
self.assertEqual(rules[0].external_ip, '165.180.12.18')
def test_ex_get_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rule.id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rule.internal_ip, '10.0.0.16')
self.assertEqual(rule.external_ip, '165.180.12.19')
def test_ex_delete_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
result = self.driver.ex_delete_nat_rule(rule)
self.assertTrue(result)
def test_ex_enable_monitoring(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_enable_monitoring(node, "ADVANCED")
self.assertTrue(result)
def test_ex_disable_monitoring(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_disable_monitoring(node)
self.assertTrue(result)
def test_ex_change_monitoring_plan(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_update_monitoring_plan(node, "ESSENTIALS")
self.assertTrue(result)
def test_ex_add_storage_to_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_add_storage_to_node(node, 30, 'PERFORMANCE')
self.assertTrue(result)
def test_ex_remove_storage_from_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_remove_storage_from_node(node, 0)
self.assertTrue(result)
def test_ex_change_storage_speed(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_change_storage_speed(node, 1, 'PERFORMANCE')
self.assertTrue(result)
def test_ex_change_storage_size(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_change_storage_size(node, 1, 100)
self.assertTrue(result)
def test_ex_clone_node_to_image(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_clone_node_to_image(node, 'my image', 'a description')
self.assertTrue(result)
def test_ex_update_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_update_node(node, 'my new name', 'a description', 2, 4048)
self.assertTrue(result)
def test_ex_reconfigure_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_reconfigure_node(node, 4, 4, 1, 'HIGHPERFORMANCE')
self.assertTrue(result)
def test_ex_get_location_by_id(self):
location = self.driver.ex_get_location_by_id('NA9')
        self.assertEqual(location.id, 'NA9')
def test_ex_get_location_by_id_NO_LOCATION(self):
location = self.driver.ex_get_location_by_id(None)
self.assertIsNone(location)
def test_ex_get_base_image_by_id(self):
image_id = self.driver.list_images()[0].id
image = self.driver.ex_get_base_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'UNIX')
def test_ex_get_customer_image_by_id(self):
image_id = self.driver.ex_list_customer_images()[1].id
image = self.driver.ex_get_customer_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'WINDOWS')
def test_ex_get_image_by_id_base_img(self):
image_id = self.driver.list_images()[1].id
image = self.driver.ex_get_base_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'WINDOWS')
def test_ex_get_image_by_id_customer_img(self):
image_id = self.driver.ex_list_customer_images()[0].id
image = self.driver.ex_get_customer_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'UNIX')
def test_ex_get_image_by_id_customer_FAIL(self):
image_id = 'FAKE_IMAGE_ID'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_get_base_image_by_id(image_id)
def test_ex_create_anti_affinity_rule(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule([node_list[0], node_list[1]])
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_TUPLE(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule((node_list[0], node_list[1]))
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_TUPLE_STR(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule((node_list[0].id, node_list[1].id))
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_FAIL_STR(self):
node_list = 'string'
with self.assertRaises(TypeError):
self.driver.ex_create_anti_affinity_rule(node_list)
def test_ex_create_anti_affinity_rule_FAIL_EXISTING(self):
node_list = self.driver.list_nodes()
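        # Setting the class-level ``type`` attribute reroutes subsequent
        # requests to the mock handler whose name carries the matching suffix
        # (here ``_FAIL_EXISTING``), yielding the canned failure body.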
DimensionDataMockHttp.type = 'FAIL_EXISTING'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_create_anti_affinity_rule((node_list[0], node_list[1]))
def test_ex_delete_anti_affinity_rule(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
success = self.driver.ex_delete_anti_affinity_rule(rule)
self.assertTrue(success)
def test_ex_delete_anti_affinity_rule_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
success = self.driver.ex_delete_anti_affinity_rule(rule.id)
self.assertTrue(success)
def test_ex_delete_anti_affinity_rule_FAIL(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
DimensionDataMockHttp.type = 'FAIL'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_delete_anti_affinity_rule(rule)
def test_ex_list_anti_affinity_rules_NETWORK_DOMAIN(self):
net_domain = self.driver.ex_list_network_domains()[0]
rules = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_NETWORK(self):
network = self.driver.list_networks()[0]
rules = self.driver.ex_list_anti_affinity_rules(network=network)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_NODE(self):
node = self.driver.list_nodes()[0]
rules = self.driver.ex_list_anti_affinity_rules(node=node)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_PAGINATED(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'PAGINATED'
rules = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 4)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_ALLFILTERS(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'ALLFILTERS'
        rules = self.driver.ex_list_anti_affinity_rules(
            network_domain=net_domain, filter_id='FAKE_ID',
            filter_state='FAKE_STATE')
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_BAD_ARGS(self):
with self.assertRaises(ValueError):
self.driver.ex_list_anti_affinity_rules(network='fake_network', network_domain='fake_network_domain')
def test_ex_create_tag_key(self):
success = self.driver.ex_create_tag_key('MyTestKey')
self.assertTrue(success)
def test_ex_create_tag_key_ALLPARAMS(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLPARAMS'
        success = self.driver.ex_create_tag_key(
            'MyTestKey', description="Test Key Desc.",
            value_required=False, display_on_report=False)
self.assertTrue(success)
def test_ex_create_tag_key_BADREQUEST(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'BADREQUEST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_create_tag_key('MyTestKey')
def test_ex_list_tag_keys(self):
tag_keys = self.driver.ex_list_tag_keys()
self.assertTrue(isinstance(tag_keys, list))
self.assertTrue(isinstance(tag_keys[0], DimensionDataTagKey))
self.assertTrue(isinstance(tag_keys[0].id, str))
def test_ex_list_tag_keys_ALLFILTERS(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLFILTERS'
        self.driver.ex_list_tag_keys(
            id='fake_id', name='fake_name',
            value_required=False, display_on_report=False)
def test_ex_get_tag_by_id(self):
tag = self.driver.ex_get_tag_key_by_id('d047c609-93d7-4bc5-8fc9-732c85840075')
self.assertTrue(isinstance(tag, DimensionDataTagKey))
def test_ex_get_tag_by_id_NOEXIST(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'NOEXIST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_get_tag_key_by_id('d047c609-93d7-4bc5-8fc9-732c85840075')
def test_ex_get_tag_by_name(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'SINGLE'
tag = self.driver.ex_get_tag_key_by_name('LibcloudTest')
self.assertTrue(isinstance(tag, DimensionDataTagKey))
def test_ex_get_tag_by_name_NOEXIST(self):
with self.assertRaises(ValueError):
self.driver.ex_get_tag_key_by_name('LibcloudTest')
def test_ex_modify_tag_key_NAME(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NAME'
success = self.driver.ex_modify_tag_key(tag_key, name='NewName')
self.assertTrue(success)
def test_ex_modify_tag_key_NOTNAME(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOTNAME'
        success = self.driver.ex_modify_tag_key(
            tag_key, description='NewDesc',
            value_required=False, display_on_report=True)
self.assertTrue(success)
def test_ex_modify_tag_key_NOCHANGE(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOCHANGE'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_modify_tag_key(tag_key)
def test_ex_remove_tag_key(self):
tag_key = self.driver.ex_list_tag_keys()[0]
success = self.driver.ex_remove_tag_key(tag_key)
self.assertTrue(success)
def test_ex_remove_tag_key_NOEXIST(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOEXIST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_remove_tag_key(tag_key)
def test_ex_apply_tag_to_asset(self):
node = self.driver.list_nodes()[0]
success = self.driver.ex_apply_tag_to_asset(node, 'TagKeyName', 'FakeValue')
self.assertTrue(success)
def test_ex_apply_tag_to_asset_NOVALUE(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOVALUE'
success = self.driver.ex_apply_tag_to_asset(node, 'TagKeyName')
self.assertTrue(success)
def test_ex_apply_tag_to_asset_NOTAGKEY(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOTAGKEY'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_apply_tag_to_asset(node, 'TagKeyNam')
def test_ex_apply_tag_to_asset_BADASSETTYPE(self):
network = self.driver.list_networks()[0]
DimensionDataMockHttp.type = 'NOTAGKEY'
with self.assertRaises(TypeError):
self.driver.ex_apply_tag_to_asset(network, 'TagKeyNam')
def test_ex_remove_tag_from_asset(self):
node = self.driver.list_nodes()[0]
success = self.driver.ex_remove_tag_from_asset(node, 'TagKeyName')
self.assertTrue(success)
def test_ex_remove_tag_from_asset_NOTAG(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOTAG'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_remove_tag_from_asset(node, 'TagKeyNam')
def test_ex_list_tags(self):
tags = self.driver.ex_list_tags()
self.assertTrue(isinstance(tags, list))
self.assertTrue(isinstance(tags[0], DimensionDataTag))
        self.assertEqual(len(tags), 3)
def test_ex_list_tags_ALLPARAMS(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLPARAMS'
tags = self.driver.ex_list_tags(asset_id='fake_asset_id', asset_type='fake_asset_type',
location='fake_location', tag_key_name='fake_tag_key_name',
tag_key_id='fake_tag_key_id', value='fake_value',
value_required=False, display_on_report=False)
self.assertTrue(isinstance(tags, list))
self.assertTrue(isinstance(tags[0], DimensionDataTag))
        self.assertEqual(len(tags), 3)
def test_priv_location_to_location_id(self):
location = self.driver.ex_get_location_by_id('NA9')
self.assertEqual(
self.driver._location_to_location_id(location),
'NA9'
)
def test_priv_location_to_location_id_STR(self):
self.assertEqual(
self.driver._location_to_location_id('NA9'),
'NA9'
)
def test_priv_location_to_location_id_TYPEERROR(self):
with self.assertRaises(TypeError):
self.driver._location_to_location_id([1, 2, 3])
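    # _location_to_location_id accepts either a NodeLocation object or its
    # plain string id; any other type (a list here) is rejected with
    # TypeError, as the three tests above show.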
def test_priv_image_needs_auth_os_img(self):
image = self.driver.list_images()[1]
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_os_img_STR(self):
image = self.driver.list_images()[1].id
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_windows(self):
image = self.driver.ex_list_customer_images()[1]
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_windows_STR(self):
image = self.driver.ex_list_customer_images()[1].id
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_linux(self):
image = self.driver.ex_list_customer_images()[0]
self.assertTrue(not self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_linux_STR(self):
image = self.driver.ex_list_customer_images()[0].id
self.assertTrue(not self.driver._image_needs_auth(image))
def test_summary_usage_report(self):
report = self.driver.ex_summary_usage_report('2016-06-01', '2016-06-30')
report_content = report
self.assertEqual(len(report_content), 13)
self.assertEqual(len(report_content[0]), 6)
def test_detailed_usage_report(self):
report = self.driver.ex_detailed_usage_report('2016-06-01', '2016-06-30')
report_content = report
self.assertEqual(len(report_content), 42)
self.assertEqual(len(report_content[0]), 4)
def test_audit_log_report(self):
report = self.driver.ex_audit_log_report('2016-06-01', '2016-06-30')
report_content = report
self.assertEqual(len(report_content), 25)
self.assertEqual(report_content[2][2], 'OEC_SYSTEM')
def test_ex_list_ip_address_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
ip_list = self.driver.ex_list_ip_address_list(
ex_network_domain=net_domain)
self.assertTrue(isinstance(ip_list, list))
self.assertEqual(len(ip_list), 4)
self.assertTrue(isinstance(ip_list[0].name, str))
self.assertTrue(isinstance(ip_list[0].description, str))
self.assertTrue(isinstance(ip_list[0].ip_version, str))
self.assertTrue(isinstance(ip_list[0].state, str))
self.assertTrue(isinstance(ip_list[0].create_time, str))
self.assertTrue(isinstance(ip_list[0].child_ip_address_lists, list))
self.assertEqual(len(ip_list[1].child_ip_address_lists), 1)
self.assertTrue(isinstance(ip_list[1].child_ip_address_lists[0].name,
str))
def test_ex_get_ip_address_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'FILTERBYNAME'
ip_list = self.driver.ex_get_ip_address_list(
ex_network_domain=net_domain.id,
ex_ip_address_list_name='Test_IP_Address_List_3')
self.assertTrue(isinstance(ip_list, list))
self.assertEqual(len(ip_list), 1)
self.assertTrue(isinstance(ip_list[0].name, str))
self.assertTrue(isinstance(ip_list[0].description, str))
self.assertTrue(isinstance(ip_list[0].ip_version, str))
self.assertTrue(isinstance(ip_list[0].state, str))
self.assertTrue(isinstance(ip_list[0].create_time, str))
ips = ip_list[0].ip_address_collection
self.assertEqual(len(ips), 3)
self.assertTrue(isinstance(ips[0].begin, str))
self.assertTrue(isinstance(ips[0].prefix_size, str))
self.assertTrue(isinstance(ips[2].end, str))
def test_ex_create_ip_address_list_FAIL(self):
net_domain = self.driver.ex_list_network_domains()[0]
with self.assertRaises(TypeError):
self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain.id)
def test_ex_create_ip_address_list(self):
name = "Test_IP_Address_List_3"
description = "Test Description"
ip_version = "IPV4"
child_ip_address_list_id = '0291ef78-4059-4bc1-b433-3f6ad698dc41'
child_ip_address_list = DimensionDataChildIpAddressList(
id=child_ip_address_list_id,
name="test_child_ip_addr_list")
net_domain = self.driver.ex_list_network_domains()[0]
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.100')
ip_address_2 = DimensionDataIpAddress(begin='190.2.2.106',
end='190.2.2.108')
ip_address_3 = DimensionDataIpAddress(begin='190.2.2.0',
prefix_size='24')
ip_address_collection = [ip_address_1, ip_address_2,
ip_address_3]
# Create IP Address List
success = self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain, name=name,
ip_version=ip_version, description=description,
ip_address_collection=ip_address_collection,
child_ip_address_list=child_ip_address_list)
self.assertTrue(success)
def test_ex_create_ip_address_list_STR(self):
name = "Test_IP_Address_List_3"
description = "Test Description"
ip_version = "IPV4"
child_ip_address_list_id = '0291ef78-4059-4bc1-b433-3f6ad698dc41'
net_domain = self.driver.ex_list_network_domains()[0]
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.100')
ip_address_2 = DimensionDataIpAddress(begin='190.2.2.106',
end='190.2.2.108')
ip_address_3 = DimensionDataIpAddress(begin='190.2.2.0',
prefix_size='24')
ip_address_collection = [ip_address_1, ip_address_2,
ip_address_3]
# Create IP Address List
success = self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain.id, name=name,
ip_version=ip_version, description=description,
ip_address_collection=ip_address_collection,
child_ip_address_list=child_ip_address_list_id)
self.assertTrue(success)
def test_ex_edit_ip_address_list(self):
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.111')
ip_address_collection = [ip_address_1]
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
ip_address_list = DimensionDataIpAddressList(
id='1111ef78-4059-4bc1-b433-3f6ad698d111',
name="test ip address list edited",
ip_version="IPv4", description="test",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list,
state="NORMAL",
create_time='2015-09-29T02:49:45'
)
success = self.driver.ex_edit_ip_address_list(
ex_ip_address_list=ip_address_list,
description="test ip address list",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list
)
self.assertTrue(success)
def test_ex_edit_ip_address_list_STR(self):
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.111')
ip_address_collection = [ip_address_1]
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
success = self.driver.ex_edit_ip_address_list(
            ex_ip_address_list='84e34850-595d-436e-a885-7cd37edb24a4',
description="test ip address list",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list
)
self.assertTrue(success)
def test_ex_delete_ip_address_list(self):
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
ip_address_list = DimensionDataIpAddressList(
id='1111ef78-4059-4bc1-b433-3f6ad698d111',
name="test ip address list edited",
ip_version="IPv4", description="test",
ip_address_collection=None,
child_ip_address_lists=child_ip_address_list,
state="NORMAL",
create_time='2015-09-29T02:49:45'
)
success = self.driver.ex_delete_ip_address_list(
ex_ip_address_list=ip_address_list)
self.assertTrue(success)
def test_ex_delete_ip_address_list_STR(self):
success = self.driver.ex_delete_ip_address_list(
ex_ip_address_list='111ef78-4059-4bc1-b433-3f6ad698d111')
self.assertTrue(success)
def test_ex_list_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(
ex_network_domain=net_domain)
self.assertTrue(isinstance(portlist, list))
self.assertEqual(len(portlist), 3)
self.assertTrue(isinstance(portlist[0].name, str))
self.assertTrue(isinstance(portlist[0].description, str))
self.assertTrue(isinstance(portlist[0].state, str))
self.assertTrue(isinstance(portlist[0].port_collection, list))
self.assertTrue(isinstance(portlist[0].port_collection[0].begin, str))
self.assertTrue(isinstance(portlist[0].port_collection[0].end, str))
self.assertTrue(isinstance(portlist[0].child_portlist_list, list))
self.assertTrue(isinstance(portlist[0].child_portlist_list[0].id,
str))
self.assertTrue(isinstance(portlist[0].child_portlist_list[0].name,
str))
self.assertTrue(isinstance(portlist[0].create_time, str))
def test_ex_get_port_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist_id = self.driver.ex_list_portlist(
ex_network_domain=net_domain)[0].id
portlist = self.driver.ex_get_portlist(
ex_portlist_id=portlist_id)
self.assertTrue(isinstance(portlist, DimensionDataPortList))
self.assertTrue(isinstance(portlist.name, str))
self.assertTrue(isinstance(portlist.description, str))
self.assertTrue(isinstance(portlist.state, str))
self.assertTrue(isinstance(portlist.port_collection, list))
self.assertTrue(isinstance(portlist.port_collection[0].begin, str))
self.assertTrue(isinstance(portlist.port_collection[0].end, str))
self.assertTrue(isinstance(portlist.child_portlist_list, list))
self.assertTrue(isinstance(portlist.child_portlist_list[0].id,
str))
self.assertTrue(isinstance(portlist.child_portlist_list[0].name,
str))
self.assertTrue(isinstance(portlist.create_time, str))
def test_ex_get_portlist_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(
ex_network_domain=net_domain)[0]
port_list = self.driver.ex_get_portlist(
ex_portlist_id=portlist.id)
self.assertTrue(isinstance(port_list, DimensionDataPortList))
self.assertTrue(isinstance(port_list.name, str))
self.assertTrue(isinstance(port_list.description, str))
self.assertTrue(isinstance(port_list.state, str))
self.assertTrue(isinstance(port_list.port_collection, list))
self.assertTrue(isinstance(port_list.port_collection[0].begin, str))
self.assertTrue(isinstance(port_list.port_collection[0].end, str))
self.assertTrue(isinstance(port_list.child_portlist_list, list))
self.assertTrue(isinstance(port_list.child_portlist_list[0].id,
str))
self.assertTrue(isinstance(port_list.child_portlist_list[0].name,
str))
self.assertTrue(isinstance(port_list.create_time, str))
def test_ex_create_portlist_NOCHILDPORTLIST(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
port_2 = DimensionDataIpAddress(begin='8899',
end='9023')
port_collection = [port_1, port_2]
# Create IP Address List
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain, name=name,
description=description,
port_collection=port_collection
)
self.assertTrue(success)
def test_ex_create_portlist(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
port_2 = DimensionDataIpAddress(begin='8899',
end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports = [child_port_1, child_port_2]
# Create IP Address List
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain, name=name,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports
)
self.assertTrue(success)
def test_ex_create_portlist_STR(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
port_2 = DimensionDataIpAddress(begin='8899',
end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports_ids = [child_port_1.id, child_port_2.id]
# Create IP Address List
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain.id, name=name,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports_ids
)
self.assertTrue(success)
def test_ex_edit_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
description = "Test Description"
port_1 = DimensionDataPort(begin='8080')
port_2 = DimensionDataIpAddress(begin='8899',
end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports = [child_port_1.id, child_port_2.id]
# Create IP Address List
success = self.driver.ex_edit_portlist(
ex_portlist=portlist,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports
)
self.assertTrue(success)
def test_ex_edit_portlist_STR(self):
portlist_id = "484174a2-ae74-4658-9e56-50fc90e086cf"
description = "Test Description"
port_1 = DimensionDataPort(begin='8080')
port_2 = DimensionDataIpAddress(begin='8899',
end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports_ids = [child_port_1.id, child_port_2.id]
# Create IP Address List
success = self.driver.ex_edit_portlist(
ex_portlist=portlist_id,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports_ids
)
self.assertTrue(success)
def test_ex_delete_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
success = self.driver.ex_delete_portlist(
ex_portlist=portlist)
self.assertTrue(success)
def test_ex_delete_portlist_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
success = self.driver.ex_delete_portlist(
ex_portlist=portlist.id)
self.assertTrue(success)
def test_import_image(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
success = self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id='QA1_N2_VMWARE_1-01',
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
self.assertTrue(success)
def test_import_image_error_too_many_choice(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
with self.assertRaises(ValueError):
self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id='QA1_N2_VMWARE_1-01',
datacenter_id='QA1_N1_VMWARE_1',
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
def test_import_image_error_missing_choice(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
with self.assertRaises(ValueError):
self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id=None,
datacenter_id=None,
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
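    # import_image requires exactly one of cluster_id or datacenter_id:
    # supplying both (too_many_choice) or neither (missing_choice) raises
    # ValueError, as the two tests above verify.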
def test_exchange_nic_vlans(self):
success = self.driver.ex_exchange_nic_vlans(
nic_id_1='a4b4b42b-ccb5-416f-b052-ce7cb7fdff12',
nic_id_2='b39d09b8-ea65-424a-8fa6-c6f5a98afc69')
self.assertTrue(success)
def test_change_nic_network_adapter(self):
success = self.driver.ex_change_nic_network_adapter(
nic_id='0c55c269-20a5-4fec-8054-22a245a48fe4',
network_adapter_name='E1000')
self.assertTrue(success)
def test_ex_create_node_uncustomized_mcp2_using_vlan(self):
# Get VLAN
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
# Create node using vlan instead of private IPv4
node = self.driver.ex_create_node_uncustomized(
name='test_server_05',
image='fake_customer_image',
ex_network_domain='fakenetworkdomain',
ex_is_started=False,
ex_description=None,
ex_cluster_id=None,
ex_cpu_specification=None,
ex_memory_gb=None,
ex_primary_nic_private_ipv4=None,
ex_primary_nic_vlan=vlan,
ex_primary_nic_network_adapter=None,
ex_additional_nics=None,
ex_disks=None,
ex_tagid_value_pairs=None,
ex_tagname_value_pairs=None)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
def test_ex_create_node_uncustomized_mcp2_using_ipv4(self):
node = self.driver.ex_create_node_uncustomized(
name='test_server_05',
image='fake_customer_image',
ex_network_domain='fakenetworkdomain',
ex_is_started=False,
ex_description=None,
ex_cluster_id=None,
ex_cpu_specification=None,
ex_memory_gb=None,
ex_primary_nic_private_ipv4='10.0.0.1',
ex_primary_nic_vlan=None,
ex_primary_nic_network_adapter=None,
ex_additional_nics=None,
ex_disks=None,
ex_tagid_value_pairs=None,
ex_tagname_value_pairs=None)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
class InvalidRequestError(Exception):
def __init__(self, tag):
super(InvalidRequestError, self).__init__("Invalid Request - %s" % tag)
class DimensionDataMockHttp(MockHttp):
fixtures = ComputeFileFixtures('dimensiondata')
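    # MockHttp dispatches each request by rewriting the URL path into a
    # handler name: non-alphanumeric characters become underscores and, when
    # the class attribute ``type`` is set, its value is appended as a suffix.
    # A rough sketch of the assumed mapping:
    #   name = re.sub(r'[^A-Za-z0-9]', '_', urlparse(url).path)
    #   if self.type:
    #       name += '_' + self.type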
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_report_usage(self, method, url, body, headers):
body = self.fixtures.load(
'summary_usage_report.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_report_usageDetailed(self, method, url, body, headers):
body = self.fixtures.load(
'detailed_usage_report.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_auditlog(self, method, url, body, headers):
body = self.fixtures.load(
'audit_log.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
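    # /oec/0.9/myaccount is the first endpoint the driver calls in order to
    # resolve the caller's org id; the UNAUTHORIZED variant below simulates
    # rejected credentials.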
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_PAGINATED(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_ALLFILTERS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_image(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_image.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_imageWithDiskSpeed(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_imageWithDiskSpeed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11(self, method, url, body, headers):
body = None
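        # MCP 1.0 encodes the server action as a bare query string
        # (e.g. POST .../server/11?restart), so the action name is whatever
        # follows the '?'.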
action = url.split('?')[-1]
if action == 'restart':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart.xml')
elif action == 'shutdown':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown.xml')
elif action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete.xml')
elif action == 'start':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start.xml')
elif action == 'poweroff':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_INPROGRESS(self, method, url, body, headers):
body = None
action = url.split('?')[-1]
if action == 'restart':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart_INPROGRESS.xml')
elif action == 'shutdown':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown_INPROGRESS.xml')
elif action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete_INPROGRESS.xml')
elif action == 'start':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start_INPROGRESS.xml')
elif action == 'poweroff':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
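    # The *_INPROGRESS variants answer HTTP 400 with a "resource busy" style
    # body, modelling an operation that is still pending on the server side.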
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
body = self.fixtures.load(
'_oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation(self, method, url, body, headers):
if method is "POST":
request = ET.fromstring(body)
if request.tag != "{http://oec.api.opsource.net/schemas/network}NewNetworkWithLocation":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation_NA9(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e(self, method,
url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1(self, method, url, body, headers):
action = url.split('?')[-1]
if action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if method == 'POST':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_POST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_create.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_FAIL_EXISTING(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_create_FAIL.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_07e3621a_a920_4a9a_943c_d8021f27f418(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_delete.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_07e3621a_a920_4a9a_943c_d8021f27f418_FAIL(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_delete_FAIL.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
body = self.fixtures.load(
'server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_deleteServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_deleteServer_RESOURCEBUSY.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_rebootServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_rebootServer_RESOURCEBUSY.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server(self, method, url, body, headers):
if url.endswith('datacenterId=NA3'):
body = self.fixtures.load(
'2.4/server_server_NA3.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGESIZE50(self, method, url, body, headers):
if not url.endswith('pageSize=50'):
raise ValueError("pageSize is not set as expected")
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_EMPTY(self, method, url, body, headers):
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGED_THEN_EMPTY(self, method, url, body, headers):
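        # Serve a full page on the first request so the driver asks for page
        # 2, then return the empty fixture to terminate pagination.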
if 'pageNumber=2' in url:
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'2.4/server_server_paginated.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGINATED(self, method, url, body, headers):
if 'pageNumber=2' in url:
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'2.4/server_server_paginated.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGINATEDEMPTY(self, method, url, body, headers):
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS(self, method, url, body, headers):
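        # Assert that every query parameter generated by the driver carries
        # the fake filter value supplied by the calling test.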
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_loc'
elif key == 'networkId':
assert value == 'fake_network'
elif key == 'networkDomainId':
assert value == 'fake_network_domain'
elif key == 'vlanId':
assert value == 'fake_vlan'
elif key == 'ipv6':
assert value == 'fake_ipv6'
elif key == 'privateIpv4':
assert value == 'fake_ipv4'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
elif key == 'started':
assert value == 'True'
elif key == 'deployed':
assert value == 'True'
elif key == 'sourceImageId':
assert value == 'fake_image'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule(self, method, url, body, headers):
body = self.fixtures.load(
'server_antiAffinityRule_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'id':
assert value == 'FAKE_ID'
elif key == 'state':
assert value == 'FAKE_STATE'
elif key == 'pageSize':
assert value == '250'
elif key == 'networkDomainId':
pass
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'server_antiAffinityRule_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule_PAGINATED(self, method, url, body, headers):
if 'pageNumber=2' in url:
body = self.fixtures.load(
'server_antiAffinityRule_list.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'server_antiAffinityRule_list_PAGINATED.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter(self, method, url, body, headers):
if url.endswith('id=NA9'):
body = self.fixtures.load(
'infrastructure_datacenter_NA9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'infrastructure_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter_ALLFILTERS(self, method, url, body, headers):
if url.endswith('id=NA9'):
body = self.fixtures.load(
'infrastructure_datacenter_NA9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'infrastructure_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_updateVmwareTools(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}updateVmwareTools":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_updateVmwareTools.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}startServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_startServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}startServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_startServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_shutdownServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_shutdownServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_resetServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}resetServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_resetServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_powerOffServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_powerOffServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_11_INPROGRESS(
self, method, url, body, headers):
body = self.fixtures.load('2.4/server_GetServer.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_location'
elif key == 'type':
assert value == 'fake_plan'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'network_networkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan(self, method, url, body, headers):
body = self.fixtures.load(
'network_vlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_location'
elif key == 'networkDomainId':
assert value == 'fake_network_domain'
elif key == 'ipv6Address':
assert value == 'fake_ipv6'
elif key == 'privateIpv4Address':
assert value == 'fake_ipv4'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'network_vlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployServer":
raise InvalidRequestError(request.tag)
        # Make sure we either have a network tag with an IP or networkId,
        # or networkInfo with a primary NIC that has a privateIpv4 or vlanId
network = request.find(fixxpath('network', TYPES_URN))
network_info = request.find(fixxpath('networkInfo', TYPES_URN))
if network is not None:
if network_info is not None:
raise InvalidRequestError("Request has both MCP1 and MCP2 values")
ipv4 = findtext(network, 'privateIpv4', TYPES_URN)
networkId = findtext(network, 'networkId', TYPES_URN)
if ipv4 is None and networkId is None:
                raise InvalidRequestError('Invalid request: MCP1 requests need privateIpv4 or networkId')
elif network_info is not None:
if network is not None:
raise InvalidRequestError("Request has both MCP1 and MCP2 values")
primary_nic = network_info.find(fixxpath('primaryNic', TYPES_URN))
ipv4 = findtext(primary_nic, 'privateIpv4', TYPES_URN)
vlanId = findtext(primary_nic, 'vlanId', TYPES_URN)
if ipv4 is None and vlanId is None:
                raise InvalidRequestError('Invalid request: MCP2 requests need privateIpv4 or vlanId')
else:
raise InvalidRequestError('Invalid request, does not have network or network_info in XML')
body = self.fixtures.load(
'server_deployServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/server_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deployNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be_ALLFILTERS(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deployVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_0e56433f_d808_4669_821d_812769517ff8(self, method, url, body, headers):
body = self.fixtures.load(
'network_vlan_0e56433f_d808_4669_821d_812769517ff8.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_expandVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}expandVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_expandVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_addPublicIpBlock(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}addPublicIpBlock":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_addPublicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_removePublicIpBlock(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removePublicIpBlock":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_removePublicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule(self, method, url, body, headers):
body = self.fixtures.load(
'network_firewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_createFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c(self, method, url, body, headers):
body = self.fixtures.load(
'network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createNatRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createNatRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_createNatRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule(self, method, url, body, headers):
body = self.fixtures.load(
'network_natRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce(self, method, url, body, headers):
body = self.fixtures.load(
'network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNatRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteNatRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteNatRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addNic(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}addNic":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_addNic.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeNic(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removeNic":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_removeNic.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_disableServerMonitoring(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}disableServerMonitoring":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_disableServerMonitoring.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_enableServerMonitoring(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}enableServerMonitoring":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_enableServerMonitoring.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeServerMonitoringPlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}changeServerMonitoringPlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_changeServerMonitoringPlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
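    # Image endpoints: known ids return 2.4 API fixtures, while unknown or
    # malformed ids return a BAD_REQUEST fixture.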
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_c14b1a46_2428_44c1_9c1a_b20e6418d08c(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage_c14b1a46_2428_44c1_9c1a_b20e6418d08c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_6b4fb0c7_a57b_4f58_b59c_9958f94f971a(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage_6b4fb0c7_a57b_4f58_b59c_9958f94f971a.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_FAKE_IMAGE_ID(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_FAKE_IMAGE_ID(self, method, url, body, headers):
body = self.fixtures.load(
'image_customerImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_reconfigureServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}reconfigureServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_reconfigureServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_cleanServer(self, method, url, body, headers):
body = self.fixtures.load(
'server_cleanServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addDisk(self, method, url, body, headers):
body = self.fixtures.load(
'server_addDisk.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeDisk(self, method, url, body, headers):
body = self.fixtures.load(
'server_removeDisk.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is not None:
raise ValueError("Default description for a tag should be blank")
if value_required is None or value_required != 'true':
raise ValueError("Default valueRequired should be true")
if display_on_report is None or display_on_report != 'true':
raise ValueError("Default displayOnReport should be true")
body = self.fixtures.load(
'tag_createTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey_ALLPARAMS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is None:
raise ValueError("Description should have a value")
if value_required is None or value_required != 'false':
raise ValueError("valueRequired should be false")
if display_on_report is None or display_on_report != 'false':
raise ValueError("displayOnReport should be false")
body = self.fixtures.load(
'tag_createTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey_BADREQUEST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_createTagKey_BADREQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_SINGLE(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_list_SINGLE.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
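    # The _ALLFILTERS variant asserts that every supported query-string
    # filter was passed through to the API request.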
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'id':
assert value == 'fake_id'
elif key == 'name':
assert value == 'fake_name'
elif key == 'valueRequired':
assert value == 'false'
elif key == 'displayOnReport':
assert value == 'false'
elif key == 'pageSize':
assert value == '250'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'tag_tagKey_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_d047c609_93d7_4bc5_8fc9_732c85840075(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_5ab77f5f_5aa9_426f_8459_4eab34e03d54.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_d047c609_93d7_4bc5_8fc9_732c85840075_NOEXIST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_5ab77f5f_5aa9_426f_8459_4eab34e03d54_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NAME(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is not None:
raise ValueError("Description should be empty")
if value_required is not None:
raise ValueError("valueRequired should be empty")
if display_on_report is not None:
raise ValueError("displayOnReport should be empty")
body = self.fixtures.load(
'tag_editTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NOTNAME(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is not None:
raise ValueError("Name should be empty")
if description is None:
raise ValueError("Description should not be empty")
if value_required is None:
raise ValueError("valueRequired should not be empty")
if display_on_report is None:
raise ValueError("displayOnReport should not be empty")
body = self.fixtures.load(
'tag_editTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NOCHANGE(self, method, url, body, headers):
body = self.fixtures.load(
'tag_editTagKey_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_deleteTagKey(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteTagKey":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'tag_deleteTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_deleteTagKey_NOEXIST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_deleteTagKey_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}applyTags":
raise InvalidRequestError(request.tag)
asset_type = findtext(request, 'assetType', TYPES_URN)
asset_id = findtext(request, 'assetId', TYPES_URN)
tag = request.find(fixxpath('tag', TYPES_URN))
tag_key_name = findtext(tag, 'tagKeyName', TYPES_URN)
value = findtext(tag, 'value', TYPES_URN)
if asset_type is None:
raise ValueError("assetType should not be empty")
if asset_id is None:
raise ValueError("assetId should not be empty")
if tag_key_name is None:
raise ValueError("tagKeyName should not be empty")
if value is None:
raise ValueError("value should not be empty")
body = self.fixtures.load(
'tag_applyTags.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags_NOVALUE(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}applyTags":
raise InvalidRequestError(request.tag)
asset_type = findtext(request, 'assetType', TYPES_URN)
asset_id = findtext(request, 'assetId', TYPES_URN)
tag = request.find(fixxpath('tag', TYPES_URN))
tag_key_name = findtext(tag, 'tagKeyName', TYPES_URN)
value = findtext(tag, 'value', TYPES_URN)
if asset_type is None:
raise ValueError("assetType should not be empty")
if asset_id is None:
raise ValueError("assetId should not be empty")
if tag_key_name is None:
raise ValueError("tagKeyName should not be empty")
if value is not None:
raise ValueError("value should be empty")
body = self.fixtures.load(
'tag_applyTags.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags_NOTAGKEY(self, method, url, body, headers):
body = self.fixtures.load(
'tag_applyTags_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_removeTags(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removeTags":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'tag_removeTag.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_removeTags_NOTAG(self, method, url, body, headers):
body = self.fixtures.load(
'tag_removeTag_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tag(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tag_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tag_ALLPARAMS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'assetId':
assert value == 'fake_asset_id'
elif key == 'assetType':
assert value == 'fake_asset_type'
elif key == 'valueRequired':
assert value == 'false'
elif key == 'displayOnReport':
assert value == 'false'
elif key == 'pageSize':
assert value == '250'
elif key == 'datacenterId':
assert value == 'fake_location'
elif key == 'value':
assert value == 'fake_value'
elif key == 'tagKeyName':
assert value == 'fake_tag_key_name'
elif key == 'tagKeyId':
assert value == 'fake_tag_key_id'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'tag_tag_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
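    # IP-address-list and port-list handlers: the create/edit mocks enforce
    # that at least one ipAddress/port or child-list element is supplied.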
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_ipAddressList(
self, method, url, body, headers):
body = self.fixtures.load('ip_address_lists.xml')
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_ipAddressList_FILTERBYNAME(
self, method, url, body, headers):
body = self.fixtures.load('ip_address_lists_FILTERBYNAME.xml')
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"createIpAddressList":
raise InvalidRequestError(request.tag)
net_domain = findtext(request, 'networkDomainId', TYPES_URN)
if net_domain is None:
raise ValueError("Network Domain should not be empty")
name = findtext(request, 'name', TYPES_URN)
if name is None:
raise ValueError("Name should not be empty")
ip_version = findtext(request, 'ipVersion', TYPES_URN)
if ip_version is None:
raise ValueError("IP Version should not be empty")
ip_address_col_required = findall(request, 'ipAddress', TYPES_URN)
child_ip_address_required = findall(request, 'childIpAddressListId',
TYPES_URN)
if 0 == len(ip_address_col_required) and \
0 == len(child_ip_address_required):
raise ValueError("At least one ipAddress element or "
"one childIpAddressListId element must be "
"provided.")
if ip_address_col_required[0].get('begin') is None:
raise ValueError("IP Address should not be empty")
body = self.fixtures.load(
'ip_address_list_create.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"editIpAddressList":
raise InvalidRequestError(request.tag)
ip_address_list = request.get('id')
if ip_address_list is None:
raise ValueError("IpAddressList ID should not be empty")
name = findtext(request, 'name', TYPES_URN)
if name is not None:
raise ValueError("Name should not exists in request")
ip_version = findtext(request, 'ipVersion', TYPES_URN)
if ip_version is not None:
raise ValueError("IP Version should not exists in request")
ip_address_col_required = findall(request, 'ipAddress', TYPES_URN)
child_ip_address_required = findall(request, 'childIpAddressListId',
TYPES_URN)
if 0 == len(ip_address_col_required) and \
0 == len(child_ip_address_required):
raise ValueError("At least one ipAddress element or "
"one childIpAddressListId element must be "
"provided.")
if ip_address_col_required[0].get('begin') is None:
raise ValueError("IP Address should not be empty")
body = self.fixtures.load(
'ip_address_list_edit.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"deleteIpAddressList":
raise InvalidRequestError(request.tag)
ip_address_list = request.get('id')
if ip_address_list is None:
raise ValueError("IpAddressList ID should not be empty")
body = self.fixtures.load(
'ip_address_list_delete.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_portList(
self, method, url, body, headers):
body = self.fixtures.load(
'port_list_lists.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_portList_c8c92ea3_2da8_4d51_8153_f39bec794d69(
self, method, url, body, headers):
body = self.fixtures.load(
'port_list_get.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createPortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"createPortList":
raise InvalidRequestError(request.tag)
net_domain = findtext(request, 'networkDomainId', TYPES_URN)
if net_domain is None:
raise ValueError("Network Domain should not be empty")
ports_required = findall(request, 'port', TYPES_URN)
child_port_list_required = findall(request, 'childPortListId',
TYPES_URN)
if 0 == len(ports_required) and \
0 == len(child_port_list_required):
raise ValueError("At least one port element or one "
"childPortListId element must be provided")
if ports_required[0].get('begin') is None:
raise ValueError("PORT begin value should not be empty")
body = self.fixtures.load(
'port_list_create.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editPortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"editPortList":
raise InvalidRequestError(request.tag)
ports_required = findall(request, 'port', TYPES_URN)
child_port_list_required = findall(request, 'childPortListId',
TYPES_URN)
if 0 == len(ports_required) and \
0 == len(child_port_list_required):
raise ValueError("At least one port element or one "
"childPortListId element must be provided")
if ports_required[0].get('begin') is None:
raise ValueError("PORT begin value should not be empty")
body = self.fixtures.load(
'port_list_edit.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deletePortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"deletePortList":
raise InvalidRequestError(request.tag)
port_list = request.get('id')
if port_list is None:
raise ValueError("Port List ID should not be empty")
body = self.fixtures.load(
'ip_address_list_delete.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_cloneServer(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/server_clone_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_importImage(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/import_image_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_exchangeNicVlans(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/exchange_nic_vlans_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeNetworkAdapter(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/change_nic_networkadapter_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployUncustomizedServer(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/deploy_customised_server.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
if __name__ == '__main__':
sys.exit(unittest.main())
| apache-2.0 | -2,990,969,940,556,288,500 | 47.170933 | 159 | 0.612821 | false |
josephxsxn/alchemists_notepad | Tests.py | 1 | 6304 | #List all ENUMS
from Object.Ingredient import Ingredient
for i in Ingredient:
print(i)
from Object.PotionColor import PotionColor
for r in PotionColor:
print(r)
from Object.PotionSign import PotionSign
for r in PotionSign:
print(r)
#TODO: add the alchemical enum listings here
#Make a Potion and Fetch its values
from Object.Potion import Potion
from Object.PotionColor import PotionColor
from Object.PotionSign import PotionSign
flowertoad = Potion(Ingredient.TOAD, Ingredient.FLOWER, PotionColor.RED, PotionSign.POSITIVE)
print(flowertoad.get_ingredients())
print(flowertoad.get_color())
print(flowertoad.get_sign())
###Put some Potions in the List and Get back
from Object.PotionList import PotionList
polist = PotionList()
polist.add_potion(flowertoad)
pores = polist.get_potions()
for po in pores:
print(po.get_ingredients())
print(po.get_color())
print(po.get_sign())
#Get an exact one from the list
pores = polist.get_potion(0)
print(pores.get_ingredients())
print(pores.get_color())
print(pores.get_sign())
#fetch one that doesn't exist from the list
pores = polist.get_potion(1)
print(pores)
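#presumably None, since only index 0 exists at this point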
#make a few Alchemicals
from Object.Alchemical import Alchemical
from Object.AlchemicalColor import AlchemicalColor
from Object.AlchemicalSign import AlchemicalSign
from Object.AlchemicalSize import AlchemicalSize
#triplet one
redposlarge = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.LARGE)
bluenegsmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
greennegsmall = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
#triplet two
redpossmall = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
bluepossmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
greenposlarge = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.POSITIVE, AlchemicalSize.LARGE)
print('T1 ' + str(redposlarge.get_color()) + ' ' + str(redposlarge.get_sign()) + ' ' + str(redposlarge.get_size()))
print('T1 ' + str(bluenegsmall.get_color()) + ' ' + str(bluenegsmall.get_sign()) + ' ' + str(bluenegsmall.get_size()))
print('T1 ' + str(greennegsmall.get_color()) + ' ' + str(greennegsmall.get_sign()) + ' ' + str(greennegsmall.get_size()))
print('T2 ' + str(redpossmall.get_color()) + ' ' + str(redpossmall.get_sign()) + ' ' + str(redpossmall.get_size()))
print('T2 ' + str(bluepossmall.get_color()) + ' ' + str(bluepossmall.get_sign()) + ' ' + str(bluepossmall.get_size()))
print('T2 ' + str(greenposlarge.get_color()) + ' ' + str(greenposlarge.get_sign()) + ' ' + str(greenposlarge.get_size()))
#make a Triplet
from Object.AlchemicalTriplet import AlchemicalTriplet
triplet_one = AlchemicalTriplet([redposlarge, bluenegsmall, greennegsmall])
triplet_one_list = triplet_one.get_alchemicals()
for a in triplet_one_list:
print('Triplet_ONE ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
triplet_two = AlchemicalTriplet([redpossmall, bluepossmall, greenposlarge])
triplet_two_list = triplet_two.get_alchemicals()
print(triplet_two_list)
for b in triplet_two_list:
print('Triplet_TWO ' + str(b.get_color()) + ' ' + str(b.get_sign()) + ' ' + str(b.get_size()))
#make some ingredients and properties
from Object.IngredientProperties import IngredientProperties
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options([triplet_one])
ip_triplet_list = ip.get_alchemical_options()
#for a given ingredient, list all triplet properties
for l in ip_triplet_list:
for a in l.get_alchemicals():
print('IngredientProps ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
#Alchemical Combinations Test
from Routine.AlchemicalCombinations import AlchemicalCombinations
ingredient_dic = {Ingredient.TOAD : ip}
print(ingredient_dic.keys())
triplet_list = ingredient_dic[Ingredient.TOAD].get_alchemical_options()
for triplet in triplet_list:
for a in triplet.get_alchemicals():
print('AC Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
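#reduce the candidate triplets for each ingredient using the observed potion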
ac = AlchemicalCombinations()
res = ac.reduce_potion_alchemicals(polist.get_potion(0), ingredient_dic)
print(polist.get_potion(0).get_ingredients())
print(polist.get_potion(0).get_sign())
print(polist.get_potion(0).get_color())
print(res.keys())
triplet_list = res[Ingredient.TOAD]
for triplet in triplet_list:
for a in triplet.get_alchemicals():
print('Filtered Toad Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
print(len(res[Ingredient.TOAD]))
print(len(res[Ingredient.FLOWER]))
#triplet_list = res[Ingredient.FLOWER]
#for triplet in triplet_list:
# for a in triplet.get_alchemicals():
# print('Filtered Flower Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
ip = IngredientProperties(Ingredient.FLOWER)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options(res[Ingredient.FLOWER])
ingredient_dic[Ingredient.FLOWER] = ip
print('TOAD LEN ' + str(len(ingredient_dic[Ingredient.TOAD].get_alchemical_options())))
print('FLOWER LEN ' + str(len(ingredient_dic[Ingredient.FLOWER].get_alchemical_options())))
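#enumerate every possible alchemical triplet, then filter by the first potion's color and sign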
initalTriplets = ac.inital_alchemical_options()
print(len(initalTriplets))
print(len(ac.potion_only_filter(initalTriplets, polist.get_potion(0).get_color(), polist.get_potion(0).get_sign())))
#################
###NEUTRAL POTION
#################
herbtoad = Potion(Ingredient.TOAD, Ingredient.HERB, PotionColor.NEUTRAL, PotionSign.NEUTRAL)
polist.add_potion(herbtoad)
#ac2 = AlchemicalCombinations()
res = ac.reduce_potion_alchemicals(herbtoad, ingredient_dic)
print(polist.get_potion(1).get_ingredients())
print(polist.get_potion(1).get_sign())
print(polist.get_potion(1).get_color())
print(res.keys())
print('TOAD LEN RES: ' + str(len(res[Ingredient.TOAD])))
print('HERB LEN RES: ' + str(len(res[Ingredient.HERB])))
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.TOAD])
ingredient_dic[Ingredient.TOAD] = ip
ip = IngredientProperties(Ingredient.HERB)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.HERB])
ingredient_dic[Ingredient.HERB] = ip
print(ingredient_dic.keys())
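#A hedged sketch (assumption, not part of the original script): the same
#smoke checks could be phrased as assertions; potion 0 is the red/positive
#flower+toad potion and potion 1 is the neutral herb+toad potion added above.
assert polist.get_potion(0).get_color() == PotionColor.RED
assert polist.get_potion(0).get_sign() == PotionSign.POSITIVE
assert polist.get_potion(1).get_color() == PotionColor.NEUTRAL
assert polist.get_potion(1).get_sign() == PotionSign.NEUTRAL
print('smoke assertions passed')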
| apache-2.0 | -8,223,180,517,559,525,000 | 39.410256 | 121 | 0.740641 | false |