repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
stringlengths 5 to 92 | stringlengths 4 to 232 | stringclasses 19 values | stringlengths 4 to 7 | stringlengths 721 to 1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51 to 99.9 | int64 15 to 997 | float64 0.25 to 0.97 | bool 1 class
---|---|---|---|---|---|---|---|---|---|---|
PLyczkowski/Sticky-Keymap | 2.74/scripts/addons/io_anim_bvh/__init__.py | 1 | 8032 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
bl_info = {
"name": "BioVision Motion Capture (BVH) format",
"author": "Campbell Barton",
"blender": (2, 74, 0),
"location": "File > Import-Export",
"description": "Import-Export BVH from armature objects",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/MotionCapture_BVH",
"support": 'OFFICIAL',
"category": "Import-Export"}
if "bpy" in locals():
import importlib
if "import_bvh" in locals():
importlib.reload(import_bvh)
if "export_bvh" in locals():
importlib.reload(export_bvh)
import bpy
from bpy.props import (StringProperty,
FloatProperty,
IntProperty,
BoolProperty,
EnumProperty,
)
from bpy_extras.io_utils import (ImportHelper,
ExportHelper,
orientation_helper_factory,
axis_conversion,
)
ImportBVHOrientationHelper = orientation_helper_factory("ImportBVHOrientationHelper", axis_forward='-Z', axis_up='Y')
class ImportBVH(bpy.types.Operator, ImportHelper, ImportBVHOrientationHelper):
"""Load a BVH motion capture file"""
bl_idname = "import_anim.bvh"
bl_label = "Import BVH"
bl_options = {'REGISTER', 'UNDO'}
filename_ext = ".bvh"
filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})
target = EnumProperty(items=(
('ARMATURE', "Armature", ""),
('OBJECT', "Object", ""),
),
name="Target",
description="Import target type",
default='ARMATURE')
global_scale = FloatProperty(
name="Scale",
description="Scale the BVH by this value",
min=0.0001, max=1000000.0,
soft_min=0.001, soft_max=100.0,
default=1.0,
)
frame_start = IntProperty(
name="Start Frame",
description="Starting frame for the animation",
default=1,
)
use_fps_scale = BoolProperty(
name="Scale FPS",
description=("Scale the framerate from the BVH to "
"the current scenes, otherwise each "
"BVH frame maps directly to a Blender frame"),
default=False,
)
use_cyclic = BoolProperty(
name="Loop",
description="Loop the animation playback",
default=False,
)
rotate_mode = EnumProperty(
name="Rotation",
description="Rotation conversion",
items=(('QUATERNION', "Quaternion",
"Convert rotations to quaternions"),
('NATIVE', "Euler (Native)", ("Use the rotation order "
"defined in the BVH file")),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
),
default='NATIVE',
)
def execute(self, context):
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"filter_glob",
))
global_matrix = axis_conversion(from_forward=self.axis_forward,
from_up=self.axis_up,
).to_4x4()
keywords["global_matrix"] = global_matrix
from . import import_bvh
return import_bvh.load(self, context, **keywords)
class ExportBVH(bpy.types.Operator, ExportHelper):
"""Save a BVH motion capture file from an armature"""
bl_idname = "export_anim.bvh"
bl_label = "Export BVH"
filename_ext = ".bvh"
filter_glob = StringProperty(
default="*.bvh",
options={'HIDDEN'},
)
global_scale = FloatProperty(
name="Scale",
description="Scale the BVH by this value",
min=0.0001, max=1000000.0,
soft_min=0.001, soft_max=100.0,
default=1.0,
)
frame_start = IntProperty(
name="Start Frame",
description="Starting frame to export",
default=0,
)
frame_end = IntProperty(
name="End Frame",
description="End frame to export",
default=0,
)
rotate_mode = EnumProperty(
name="Rotation",
description="Rotation conversion",
items=(('NATIVE', "Euler (Native)",
"Use the rotation order defined in the BVH file"),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
),
default='NATIVE',
)
root_transform_only = BoolProperty(
name="Root Translation Only",
description="Only write out translation channels for the root bone",
default=False,
)
@classmethod
def poll(cls, context):
obj = context.object
return obj and obj.type == 'ARMATURE'
def invoke(self, context, event):
self.frame_start = context.scene.frame_start
self.frame_end = context.scene.frame_end
return super().invoke(context, event)
def execute(self, context):
if self.frame_start == 0 and self.frame_end == 0:
self.frame_start = context.scene.frame_start
self.frame_end = context.scene.frame_end
keywords = self.as_keywords(ignore=("check_existing", "filter_glob"))
from . import export_bvh
return export_bvh.save(self, context, **keywords)
def menu_func_import(self, context):
self.layout.operator(ImportBVH.bl_idname, text="Motion Capture (.bvh)")
def menu_func_export(self, context):
self.layout.operator(ExportBVH.bl_idname, text="Motion Capture (.bvh)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
if __name__ == "__main__":
register()
| gpl-2.0 | 8,315,018,521,340,759,000 | 35.017937 | 117 | 0.54868 | false |
desihub/desimodel | py/desimodel/weather.py | 1 | 15591 | # See LICENSE.rst for BSD 3-clause license info
# -*- coding: utf-8 -*-
"""
desimodel.weather
=================
Model of the expected weather conditions at KPNO during the DESI survey.
To generate a random time series of expected FWHM seeing in arcsecs and
atmospheric transparency, use, for example::
n = 10000
dt = 300 # seconds
t = np.arange(n) * dt
gen = np.random.RandomState(seed=123)
seeing = sample_seeing(n, dt_sec=dt, gen=gen)
transp = sample_transp(n, dt_sec=dt, gen=gen)
The resulting arrays are randomly sampled from models of the 1D probability
density and 2-point power spectral density derived from MzLS observations.
See `DESI-doc-3087
<https://desi.lbl.gov/DocDB/cgi-bin/private/ShowDocument?docid=3087>`__
for details.
Used by :mod:`surveysim.weather` for simulations of DESI observing and
survey strategy studies.
"""
from __future__ import print_function, division
import os
import datetime
import calendar
import numpy as np
import scipy.interpolate
import scipy.special
import astropy.table
def whiten_transforms_from_cdf(x, cdf):
"""
Calculate a pair of transforms to whiten and unwhiten a distribution.
The whitening transform is monotonic and invertible.
Parameters
----------
x : array
1D array of non-decreasing values giving bin edges for the distribution
to whiten and unwhiten.
cdf : array
        1D array of non-decreasing values giving the cumulative probability
density associated with each bin edge. Does not need to be normalized.
Must have the same length as x.
Returns
-------
tuple
Tuple (F,G) of callable objects that whiten y=F(x) and unwhiten x=G(y)
samples x of the input distribution, so that y has a Gaussian
distribution with zero mean and unit variance.
"""
x = np.asarray(x)
cdf = np.asarray(cdf)
if x.shape != cdf.shape:
raise ValueError('Input arrays must have same shape.')
if len(x.shape) != 1:
raise ValueError('Input arrays must be 1D.')
if not np.all(np.diff(x) >= 0):
raise ValueError('Values of x must be non-decreasing.')
if not np.all(np.diff(cdf) >= 0):
raise ValueError('Values of cdf must be non-decreasing.')
# Normalize.
cdf /= cdf[-1]
# Use linear interpolation for the forward and inverse transforms between
# the input range and Gaussian CDF values.
args = dict(
kind='linear', assume_sorted=True, copy=False, bounds_error=True)
forward = scipy.interpolate.interp1d(x, cdf, **args)
backward = scipy.interpolate.interp1d(cdf, x, **args)
# Add wrappers to convert between CDF and PDF samples.
root2 = np.sqrt(2)
forward_transform = (
lambda x: root2 * scipy.special.erfinv(2 * forward(x) - 1))
inverse_transform = (
lambda y: backward(0.5 * (1 + scipy.special.erf(y / root2))))
return forward_transform, inverse_transform
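# Illustrative sketch (not part of the original module): round-tripping samples
# through the whitening transforms built from a tabulated CDF. The grid and the
# exponential CDF used here are arbitrary example choices.
def _example_whiten_roundtrip():
    x_grid = np.linspace(0., 5., 101)
    cdf_grid = 1. - np.exp(-x_grid)                 # CDF of a unit exponential
    forward, inverse = whiten_transforms_from_cdf(x_grid, cdf_grid)
    samples = np.random.RandomState(0).uniform(0.1, 4.9, size=1000)
    whitened = forward(samples)                     # approximately unit Gaussian
    recovered = inverse(whitened)                   # back to the original values
    return np.allclose(samples, recovered)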
def whiten_transforms(data, data_min=None, data_max=None):
"""Calculate a pair of transforms to whiten and unwhiten a distribution.
Uses :func:`desimodel.weather.whiten_transforms_from_cdf`.
Parameters
----------
data : array
1D array of samples from the distribution to whiten.
data_min : float or None
Clip the distribution to this minimum value, or at min(data) if None.
Must be <= min(data).
data_max : float or None
Clip the distribution to this maximum value, or at max(data) if None.
Must be >= max(data).
Returns
-------
tuple
See :func:`desimodel.weather.whiten_transforms_from_cdf`.
"""
n_data = len(data)
# Sort the input data with padding at each end for the min/max values.
sorted_data = np.empty(shape=n_data + 2, dtype=data.dtype)
sorted_data[1:-1] = np.sort(data)
if data_min is None:
sorted_data[0] = sorted_data[1]
else:
if data_min > sorted_data[1]:
raise ValueError('data_min > min(data)')
sorted_data[0] = data_min
if data_max is None:
sorted_data[-1] = sorted_data[-2]
else:
if data_max < sorted_data[-2]:
raise ValueError('data_max < max(data)')
sorted_data[-1] = data_max
# Calculate the Gaussian CDF value associated with each input value in
# sorted order. The pad values are associated with CDF = 0, 1 respectively.
cdf = np.arange(n_data + 2) / (n_data + 1.)
return whiten_transforms_from_cdf(sorted_data, cdf)
def _seeing_fit_model(x):
"""Evalute the fit to MzLS seeing described in DESI-doc-3087.
"""
p = np.array([ 0.07511146, 0.44276671, 23.02442192, 38.07691498])
y = (1 + ((x - p[0]) / p[1]) ** 2) ** (-p[2]) * x ** p[3]
return y / (y.sum() * np.gradient(x))
def get_seeing_pdf(median_seeing=1.1, max_seeing=2.5, n=250):
"""Return PDF of FWHM seeing for specified clipped median value.
Note that this is atmospheric seeing, not delivered image quality.
The reference wavelength for seeing values is 6355A, in the r band,
and the observed wavelength dependence in Dey & Valdes is closer to
``lambda ** (-1/15)`` than the ``lambda ** (-1/5)`` predicted by
Kolmogorov theory. See DESI-doc-3087 for details.
Scales the clipped MzLS seeing PDF in order to achieve the requested
median value. Note that clipping is applied before scaling, so
the output PDF is clipped at scale * max_seeing.
Parameters
----------
median_seeing : float
Target FWHM seeing value in arcsec. Must be in the range [0.95, 1.30].
max_seeing : float
Calculate scaled median using unscaled values below this value.
n : int
Size of grid to use for tabulating the returned arrays.
Returns
-------
tuple
Tuple (fwhm, pdf) that tabulates pdf[fwhm]. Normalized so that
``np.sum(pdf * np.gradient(fwhm)) = 1``.
"""
# Tabulate the nominal (scale=1) seeing PDF.
fwhm = np.linspace(0., max_seeing, n)
pdf = _seeing_fit_model(fwhm)
pdf /= (pdf.sum() * np.gradient(fwhm))
cdf = np.cumsum(pdf)
cdf /= cdf[-1]
# Tabulate the median as a function of FWHM scale.
scale = np.linspace(0.9, 1.4, 11)
median = np.empty_like(scale)
for i, s in enumerate(scale):
median[i] = np.interp(0.5, cdf, s * fwhm)
if median_seeing < median[0] or median_seeing > median[-1]:
raise ValueError('Requested median is outside allowed range.')
# Interpolate to find the scale factor that gives the requested median.
s = np.interp(median_seeing, median, scale)
return fwhm * s, pdf / s
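# Illustrative sketch (not part of the original module): the returned arrays are
# tabulated on a grid, so the normalization and the clipped median can be checked
# numerically. The target median of 1.0 arcsec is an arbitrary example value.
def _example_check_seeing_pdf(median_seeing=1.0):
    fwhm, pdf = get_seeing_pdf(median_seeing=median_seeing)
    norm = np.sum(pdf * np.gradient(fwhm))            # should be ~1
    cdf = np.cumsum(pdf * np.gradient(fwhm))
    median = np.interp(0.5, cdf / cdf[-1], fwhm)      # should be ~median_seeing
    return norm, median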
def sample_timeseries(x_grid, pdf_grid, psd, n_sample, dt_sec=180., gen=None):
"""Sample a time series specified by a power spectrum and 1D PDF.
The PSD should describe the temporal correlations of whitened samples.
Generated samples will then be unwhitened to recover the input 1D PDF.
See DESI-doc-3087 for details.
Uses :func:`whiten_transforms_from_cdf`.
Parameters
----------
x_grid : array
1D array of N increasing grid values covering the parameter range
to sample from.
pdf_grid : array
1D array of N increasing PDF values corresponding to each x_grid.
Does not need to be normalized.
psd : callable
Function of frequency in 1/days that returns the power-spectral
        density of whitened temporal fluctuations to sample from. Will only be
called for positive frequencies. Normalization does not matter.
n_sample : int
Number of equally spaced samples to generate.
dt_sec : float
Time interval between samples in seconds.
gen : np.random.RandomState or None
Provide an existing RandomState for full control of reproducible random
numbers, or None for non-reproducible random numbers.
"""
x_grid = np.array(x_grid)
pdf_grid = np.array(pdf_grid)
if not np.all(np.diff(x_grid) > 0):
raise ValueError('x_grid values are not increasing.')
if x_grid.shape != pdf_grid.shape:
raise ValueError('x_grid and pdf_grid arrays have different shapes.')
# Initialize random numbers if necessary.
if gen is None:
gen = np.random.RandomState()
# Calculate the CDF.
cdf_grid = np.cumsum(pdf_grid)
cdf_grid /= cdf_grid[-1]
# Calculate whitening / unwhitening transforms.
whiten, unwhiten = whiten_transforms_from_cdf(x_grid, cdf_grid)
# Build a linear grid of frequencies present in the Fourier transform
# of the requested time series. Frequency units are 1/day.
dt_day = dt_sec / (24. * 3600.)
df_day = 1. / (n_sample * dt_day)
f_grid = np.arange(1 + (n_sample // 2)) * df_day
# Tabulate the power spectral density at each frequency. The PSD
# describes seeing fluctuations that have been "whitened", i.e., mapped
# via a non-linear monotonic transform to have unit Gaussian probability
# density.
psd_grid = np.empty_like(f_grid)
psd_grid[1:] = psd(f_grid[1:])
# Force the mean to zero.
psd_grid[0] = 0.
# Force the variance to one.
psd_grid[1:] /= psd_grid[1:].sum() * df_day ** 2
# Generate random whitened samples.
n_psd = len(psd_grid)
x_fft = np.ones(n_psd, dtype=complex)
x_fft[1:-1].real = gen.normal(size=n_psd - 2)
x_fft[1:-1].imag = gen.normal(size=n_psd - 2)
x_fft *= np.sqrt(psd_grid) / (2 * dt_day)
x_fft[0] *= np.sqrt(2)
x = np.fft.irfft(x_fft, n_sample)
# Un-whiten the samples to recover the desired 1D PDF.
x_cdf = 0.5 * (1 + scipy.special.erf(x / np.sqrt(2)))
return np.interp(x_cdf, cdf_grid, x_grid)
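# Illustrative sketch (not part of the original module): sampling a time series
# from a user-supplied 1D PDF and power spectrum. The Gaussian-bump PDF and the
# 1/f power spectrum below are arbitrary stand-ins for demonstration.
def _example_custom_timeseries(n_sample=1024, dt_sec=300.):
    x_grid = np.linspace(0., 2., 101)
    pdf_grid = np.exp(-(x_grid - 1.) ** 2 / 0.125)    # smooth bump centered at 1
    def psd(freq):
        return 1. / freq                              # simple 1/f spectrum
    gen = np.random.RandomState(seed=1)
    return sample_timeseries(x_grid, pdf_grid, psd, n_sample, dt_sec, gen=gen)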
def _seeing_psd(freq):
"""Evaluate the 'chi-by-eye' fit of the seeing PSD described in
DESI-doc-3087.
"""
N, f0, a0, a1 = 8000, 0.10, 2.8, -1.1
return (N * (freq/f0)**a0 / (1 + (freq/f0)**a0) *
(freq/f0) ** a1 / (10 + (freq/f0) ** a1))
def sample_seeing(n_sample, dt_sec=180., median_seeing=1.1, max_seeing=2.5,
gen=None):
"""Generate a random time series of FWHM seeing values.
See DESI-doc-3087 for details. Uses :func:`get_seeing_pdf`,
:func:`_seeing_psd` and :func:`sample_timeseries`.
Parameters
----------
n_sample : int
Number of equally spaced samples to generate.
dt_sec : float
Time interval between samples in seconds.
median_seeing : float
See :func:`get_seeing_pdf`.
    max_seeing : float
See :func:`get_seeing_pdf`.
gen : np.random.RandomState or None
Provide an existing RandomState for full control of reproducible random
numbers, or None for non-reproducible random numbers.
Returns
-------
array
1D array of randomly generated samples.
"""
fwhm_grid, pdf_grid = get_seeing_pdf(median_seeing, max_seeing)
return sample_timeseries(
fwhm_grid, pdf_grid, _seeing_psd, n_sample, dt_sec, gen)
_transp_pdf_cum = np.array([0.06,0.11,1.0])
_transp_pdf_powers = np.array([0., 2.5, 35.])
def get_transp_pdf(n=250):
"""Return PDF of atmospheric transparency.
Derived from MzLS observations, but corrected for dust accumulation and
measurement error. See DESI-doc-3087 for details.
Parameters
----------
n : int
Size of grid to use for tabulating the returned arrays.
Returns
-------
tuple
Tuple (transp, pdf) that tabulates pdf[transp]. Normalized so that
``np.sum(pdf * np.gradient(transp)) = 1``.
"""
transp = np.linspace(0., 1., n)
pdf = np.zeros_like(transp)
last_c = 0.
for c, p in zip(_transp_pdf_cum, _transp_pdf_powers):
pdf += (c - last_c) * np.power(transp, p) * (p + 1)
last_c = c
pdf /= pdf.sum() * np.gradient(transp)
return transp, pdf
def _transp_psd(freq):
"""Evaluate the 'chi-by-eye' fit of the transparency PSD described in
DESI-doc-3087.
"""
N, f0, a0, a1 = 500, 1.5, 0.0, -1.5
return (N * (freq/f0)**a0 / (1 + (freq/f0)**a0) *
(freq/f0) ** a1 / (1 + (freq/f0) ** a1))
def sample_transp(n_sample, dt_sec=180., gen=None):
"""Generate a random time series of atmospheric transparency values.
See DESI-doc-3087 for details. Uses :func:`get_transp_pdf`,
:func:`_transp_psd` and :func:`sample_timeseries`.
Parameters
----------
n_sample : int
Number of equally spaced samples to generate.
dt_sec : float
Time interval between samples in seconds.
gen : np.random.RandomState or None
Provide an existing RandomState for full control of reproducible random
numbers, or None for non-reproducible random numbers.
Returns
-------
array
1D array of randomly generated samples.
"""
transp_grid, pdf_grid = get_transp_pdf()
return sample_timeseries(
transp_grid, pdf_grid, _transp_psd, n_sample, dt_sec, gen)
def dome_closed_fractions(start_date, stop_date,
replay='Y2007,Y2008,Y2009,Y2010,Y2011,Y2012,Y2013,Y2014'):
"""Return dome-closed fractions for each night of the survey.
Years can be replayed in any order. If the number of years to replay is less
than the survey duration, they are repeated.
Parameters
----------
start_date : datetime.date or None
Survey starts on the evening of this date. Use the ``first_day``
config parameter if None (the default).
stop_date : datetime.date or None
Survey stops on the morning of this date. Use the ``last_day``
config parameter if None (the default).
replay : str
Comma-separated list of years to replay, identified by arbitrary strings
that must match column names in the DESIMODEL weather history.
Returns
-------
numpy array
1D array of N probabilities between 0-1, where N is the number of nights
spanned by the start and stop dates.
"""
# Check the inputs.
num_nights = (stop_date - start_date).days
if num_nights <= 0:
raise ValueError('Expected start_date < stop_date.')
replay = replay.split(',')
# Load tabulated daily weather history.
DESIMODEL = os.getenv('DESIMODEL')
path = os.path.join(DESIMODEL, 'data', 'weather', 'daily-2007-2017.csv')
t = astropy.table.Table.read(path)
if not len(t) == 365:
raise ValueError('Invalid weather history length (expected 365).')
years = t.colnames
lostfracs = []
for yr in replay:
if yr not in years:
raise ValueError('Replay year "{}" not in weather history.'.format(yr))
lostfrac = t[yr].data
if not np.all((lostfrac >= 0) & (lostfrac <= 1)):
raise ValueError('Invalid weather history for replay year "{}".'.format(yr))
lostfracs.append(lostfrac)
# Replay the specified years (with wrap-around if necessary),
# overlaid on the actual survey dates.
probs = np.zeros(num_nights)
start = start_date
for year_num, year in enumerate(range(start_date.year, stop_date.year + 1)):
first = datetime.date(year=year, month=1, day=1)
stop = datetime.date(year=year + 1, month=1, day=1)
if stop > stop_date:
stop = stop_date
n = (stop - start).days
if n == 0:
break
if calendar.isleap(year):
n -= 1
idx = (start - start_date).days
jdx = (start - first).days
lostfrac = lostfracs[year_num % len(replay)]
probs[idx:idx + n] = lostfrac[jdx:jdx + n]
start = stop
return probs
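# Illustrative sketch (not part of the original module): dome-closed fractions for
# a hypothetical two-month survey. Requires the DESIMODEL environment variable to
# point at a checkout containing data/weather/daily-2007-2017.csv.
def _example_dome_closed():
    start = datetime.date(2020, 1, 1)
    stop = datetime.date(2020, 3, 1)
    probs = dome_closed_fractions(start, stop, replay='Y2007,Y2008')
    return probs.mean()    # mean nightly dome-closed fraction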
| bsd-3-clause | -3,878,208,059,081,789,000 | 35.174014 | 88 | 0.635944 | false |
taoliu/taolib | Scripts/kmeans2image.py | 1 | 1598 | #!/usr/bin/env python
# Time-stamp: <2009-04-14 14:07:21 Tao Liu>
import os
import sys
import re
from PIL import Image, ImageDraw
# ------------------------------------
# Main function
# ------------------------------------
help_message = """
Draw the K-means clustering result.
needs 6 parameters: %s <kmeans_file> <lim> <x_points> <y_points> <x_ext> <y_ext>
kmeans_file : tab-delimited plain text file. First column is cluster number by k-means, and following columns are data columns.
lim : data value limit
x_points : number of data value columns
y_points : number of rows
x_ext : pixels extended in x-axis
y_ext : pixels extended in y-axis
""" % sys.argv[0]
def main():
if len(sys.argv) < 7:
sys.stderr.write(help_message)
sys.exit(1)
fhd = open (sys.argv[1])
lim = int(sys.argv[2])
x_points = int(sys.argv[3])
y_points = int(sys.argv[4])
x_ext = int(sys.argv[5])
y_ext = int(sys.argv[6])
a = Image.new("RGB",(x_points*x_ext,y_points*y_ext),"white")
d = ImageDraw.Draw(a)
y = 0
for i in fhd:
y += 1
        i = i.strip()
if not re.search("^\d+",i):
continue
values = map(float,i.split())
x = 0
cl = values[0]
for v in values[1:]:
x += 1
c = "hsl(%d,100%%,%d%%)" % (cl*70,min(1,v/lim)*90.0)
d.rectangle([(int(x*x_ext),int(y*y_ext)),(int((x+1)*x_ext),int((y+1)*y_ext))],outline=c,fill=c)
a.save(sys.argv[1]+".png")
print "check %s!" % (sys.argv[1]+".png")
if __name__ == '__main__':
main()
| bsd-3-clause | -4,875,349,459,735,880,000 | 25.196721 | 127 | 0.530663 | false |
TripleSnail/blender-zombie | python/text.py | 1 | 1754 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bge
import bgl
import blf
DPI = 72
class TextObject(object):
def __init__(self, text, px, py, size, time=0):
self.text = text
self.px = px
self.py = py
self.size = size
self.time = time
text_objects = []
def init(controller):
font_path = bge.logic.expandPath('//fonts/DejaVuSans.ttf')
bge.logic.font_id = blf.load(font_path)
scene = bge.logic.getCurrentScene()
scene.post_draw = [write]
def write():
width = bge.render.getWindowWidth()
height = bge.render.getWindowHeight()
bgl.glMatrixMode(bgl.GL_PROJECTION)
bgl.glLoadIdentity()
bgl.gluOrtho2D(0, width, 0, height)
bgl.glMatrixMode(bgl.GL_MODELVIEW)
bgl.glLoadIdentity()
font_id = bge.logic.font_id
for text_obj in text_objects:
blf.position(font_id, width * text_obj.px , height * text_obj.py, 0)
blf.size(font_id, text_obj.size, DPI)
blf.draw(font_id, text_obj.text)
| gpl-2.0 | 8,312,866,228,532,016,000 | 28.728814 | 76 | 0.676739 | false |
renyi/drum | drum/links/templatetags/drum_tags.py | 1 | 1472 | from __future__ import unicode_literals
from collections import defaultdict
from django.template.defaultfilters import timesince
from mezzanine import template
from mezzanine.generic.models import ThreadedComment
from drum.links.utils import order_by_score
from drum.links.models import LinkCategory
from drum.links.views import CommentList, USER_PROFILE_RELATED_NAME
register = template.Library()
@register.filter
def get_profile(user):
"""
Returns the profile object associated with the given user.
"""
return getattr(user, USER_PROFILE_RELATED_NAME)
@register.simple_tag(takes_context=True)
def order_comments_by_score_for(context, link):
"""
Preloads threaded comments in the same way Mezzanine initially does,
but here we order them by score.
"""
comments = defaultdict(list)
qs = link.comments.visible().select_related(
"user",
"user__%s" % (USER_PROFILE_RELATED_NAME)
)
for comment in order_by_score(qs, CommentList.score_fields, "submit_date"):
comments[comment.replied_to_id].append(comment)
context["all_comments"] = comments
return ""
@register.filter
def short_timesince(date):
return timesince(date).split(",")[0]
@register.as_tag
def link_category_list(*args):
return LinkCategory.objects.all()
@register.as_tag
def latest_comments(limit=5, *args):
qs = ThreadedComment.objects.filter(is_removed=False, is_public=True)
return qs.reverse()[:limit]
| bsd-2-clause | -4,074,832,187,691,731,000 | 25.763636 | 79 | 0.724185 | false |
gdsfactory/gdsfactory | pp/layers.py | 1 | 9564 | """A GDS layer is a tuple of two integers.
You can:
- Define your layers in a dataclass
- Load it from Klayout XML file (.lyp)
LayerSet adapted from phidl.device_layout
load_lyp, name_to_description, name_to_short_name adapted from phidl.utilities
preview_layerset adapted from phidl.geometry
"""
import pathlib
from pathlib import Path
from typing import Optional, Tuple
import xmltodict
from phidl.device_layout import Layer as LayerPhidl
from phidl.device_layout import LayerSet as LayerSetPhidl
from pp.component import Component
from pp.name import clean_name
from pp.tech import TECH
from pp.types import PathType
LAYER = TECH.layer
class LayerSet(LayerSetPhidl):
def add_layer(
self,
name: str = "unnamed",
gds_layer: int = 0,
gds_datatype: int = 0,
description: Optional[str] = None,
color: Optional[str] = None,
inverted: bool = False,
alpha: float = 0.6,
dither: bool = None,
):
"""Adds a layer to an existing LayerSet object for nice colors.
Args:
name: Name of the Layer.
gds_layer: GDSII Layer number.
gds_datatype: GDSII datatype.
description: Layer description.
color: Hex code of color for the Layer.
inverted: If true, inverts the Layer.
alpha: layer opacity between 0 and 1.
dither: KLayout dither style, only used in phidl.utilities.write_lyp().
"""
new_layer = LayerPhidl(
gds_layer=gds_layer,
gds_datatype=gds_datatype,
name=name,
description=description,
inverted=inverted,
color=color,
alpha=alpha,
dither=dither,
)
if name in self._layers:
raise ValueError(
f"Adding {name} already defined {list(self._layers.keys())}"
)
else:
self._layers[name] = new_layer
# def __getitem__(self, val: str) -> Tuple[int, int]:
# """Returns gds layer tuple."""
# if val not in self._layers:
# raise ValueError(f"Layer {val} not in {list(self._layers.keys())}")
# else:
# layer = self._layers[val]
# return layer.gds_layer, layer.gds_datatype
def __repr__(self):
"""Prints the number of Layers in the LayerSet object."""
return (
f"LayerSet ({len(self._layers)} layers total) \n"
+ f"{list(self._layers.keys())}"
)
def get(self, name: str) -> LayerPhidl:
"""Returns Layer from name."""
if name not in self._layers:
raise ValueError(f"Layer {name} not in {list(self._layers.keys())}")
else:
return self._layers[name]
def get_from_tuple(self, layer_tuple: Tuple[int, int]) -> LayerPhidl:
"""Returns Layer from layer tuple (gds_layer, gds_datatype)."""
tuple_to_name = {
(v.gds_layer, v.gds_datatype): k for k, v in self._layers.items()
}
if layer_tuple not in tuple_to_name:
raise ValueError(f"Layer {layer_tuple} not in {list(tuple_to_name.keys())}")
name = tuple_to_name[layer_tuple]
return self._layers[name]
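# Illustrative sketch (not part of the original module): building a small LayerSet
# and looking layers up by name or by (gds_layer, gds_datatype) tuple. The layer
# numbers and colors are arbitrary example values.
def _example_layerset():
    ls = LayerSet()
    ls.add_layer("WG", gds_layer=1, gds_datatype=0, color="gray", alpha=1.0)
    ls.add_layer("M1", gds_layer=41, gds_datatype=0, color="green")
    wg = ls.get("WG")                       # lookup by name -> LayerPhidl object
    m1 = ls.get_from_tuple((41, 0))         # lookup by GDS tuple
    return wg.name, m1.name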
LAYER_COLORS = LayerSet() # Layerset makes plotgds look good
LAYER_COLORS.add_layer("WG", LAYER.WG[0], 0, "wg", color="gray", alpha=1)
LAYER_COLORS.add_layer("WGCLAD", LAYER.WGCLAD[0], 0, "", color="gray", alpha=0)
LAYER_COLORS.add_layer("SLAB150", LAYER.SLAB150[0], 0, "", color="lightblue", alpha=0.6)
LAYER_COLORS.add_layer("SLAB90", LAYER.SLAB90[0], 0, "", color="lightblue", alpha=0.2)
LAYER_COLORS.add_layer("WGN", LAYER.WGN[0], 0, "", color="orange", alpha=1)
LAYER_COLORS.add_layer("WGN_CLAD", LAYER.WGN_CLAD[0], 0, "", color="gray", alpha=0)
LAYER_COLORS.add_layer("DEVREC", LAYER.DEVREC[0], 0, "", color="gray", alpha=0.1)
PORT_LAYER_TO_TYPE = {
LAYER.PORT: "optical",
LAYER.PORTE: "dc",
LAYER.PORTH: "heater",
LAYER.TE: "vertical_te",
LAYER.TM: "vertical_tm",
}
PORT_TYPE_TO_LAYER = {v: k for k, v in PORT_LAYER_TO_TYPE.items()}
def preview_layerset(
ls: LayerSet = LAYER_COLORS, size: float = 100.0, spacing: float = 100.0
) -> Component:
"""Generates a preview Device with representations of all the layers,
used for previewing LayerSet color schemes in quickplot or saved .gds
files
"""
import numpy as np
import pp
D = Component(name="layerset")
scale = size / 100
num_layers = len(ls._layers)
matrix_size = int(np.ceil(np.sqrt(num_layers)))
sorted_layers = sorted(
ls._layers.values(), key=lambda x: (x.gds_layer, x.gds_datatype)
)
for n, layer in enumerate(sorted_layers):
R = pp.components.rectangle(size=(100 * scale, 100 * scale), layer=layer)
T = pp.components.text(
text="%s\n%s / %s" % (layer.name, layer.gds_layer, layer.gds_datatype),
size=20 * scale,
position=(50 * scale, -20 * scale),
justify="center",
layer=layer,
)
xloc = n % matrix_size
yloc = int(n // matrix_size)
D.add_ref(R).movex((100 + spacing) * xloc * scale).movey(
-(100 + spacing) * yloc * scale
)
D.add_ref(T).movex((100 + spacing) * xloc * scale).movey(
-(100 + spacing) * yloc * scale
)
return D
def _name_to_short_name(name_str: str) -> str:
"""Maps the name entry of the lyp element to a name of the layer,
i.e. the dictionary key used to access it.
Default format of the lyp name is
key - layer/datatype - description
or
key - description
"""
if name_str is None:
raise IOError(f"layer {name_str} has no name")
fields = name_str.split("-")
name = fields[0].split()[0].strip()
return clean_name(name)
def _name_to_description(name_str) -> str:
"""Gets the description of the layer contained in the lyp name field.
It is not strictly necessary to have a description. If none there, it returns ''.
Default format of the lyp name is
key - layer/datatype - description
or
key - description
"""
if name_str is None:
raise IOError(f"layer {name_str} has no name")
fields = name_str.split()
description = ""
if len(fields) > 1:
description = " ".join(fields[1:])
return description
def _add_layer(entry, lys: LayerSet) -> LayerSet:
"""Entry is a dict of one element of 'properties'.
No return value. It adds it to the lys variable directly
"""
info = entry["source"].split("@")[0]
# skip layers without name or with */*
if "'" in info or "*" in info:
return
name = entry.get("name") or entry.get("source")
if not name:
return
gds_layer, gds_datatype = info.split("/")
gds_layer = gds_layer.split()[-1]
gds_datatype = gds_datatype.split()[-1]
settings = dict()
settings["gds_layer"] = int(gds_layer)
settings["gds_datatype"] = int(gds_datatype)
settings["color"] = entry["fill-color"]
settings["dither"] = entry["dither-pattern"]
settings["name"] = _name_to_short_name(name)
settings["description"] = _name_to_description(name)
lys.add_layer(**settings)
return lys
def load_lyp(filepath: Path) -> LayerSet:
"""Returns a LayerSet object from a Klayout lyp file in XML format."""
with open(filepath, "r") as fx:
lyp_dict = xmltodict.parse(fx.read(), process_namespaces=True)
# lyp files have a top level that just has one dict: layer-properties
# That has multiple children 'properties', each for a layer. So it gives a list
lyp_list = lyp_dict["layer-properties"]["properties"]
if not isinstance(lyp_list, list):
lyp_list = [lyp_list]
lys = LayerSet()
for entry in lyp_list:
try:
group_members = entry["group-members"]
except KeyError: # it is a real layer
_add_layer(entry, lys)
else: # it is a group of other entries
if not isinstance(group_members, list):
group_members = [group_members]
for member in group_members:
_add_layer(member, lys)
return lys
# For port labelling purpose
# LAYERS_OPTICAL = [LAYER.WG]
# LAYERS_ELECTRICAL = [LAYER.M1, LAYER.M2, LAYER.M3]
# LAYERS_HEATER = [LAYER.HEATER]
def lyp_to_dataclass(lyp_filepath: PathType, overwrite: bool = True) -> str:
filepathin = pathlib.Path(lyp_filepath)
filepathout = filepathin.with_suffix(".py")
if filepathout.exists() and not overwrite:
raise FileExistsError(f"You can delete {filepathout}")
script = """
import dataclasses
@dataclasses.dataclass
class LayerMap():
"""
lys = load_lyp(filepathin)
for layer_name, layer in sorted(lys._layers.items()):
script += (
f" {layer_name}: Layer = ({layer.gds_layer}, {layer.gds_datatype})\n"
)
filepathout.write_text(script)
return script
def test_load_lyp():
from pp.config import layer_path
lys = load_lyp(layer_path)
assert len(lys._layers) == 82
return lys
if __name__ == "__main__":
pass
# print(LAYER_STACK.get_from_tuple((1, 0)))
# print(LAYER_STACK.get_layer_to_material())
# lys = test_load_lyp()
# c = preview_layerset(ls)
# c.show()
# print(LAYERS_OPTICAL)
# print(layer("wgcore"))
# print(layer("wgclad"))
# print(layer("padding"))
# print(layer("TEXT"))
# print(type(layer("wgcore")))
| mit | 7,863,857,862,018,327,000 | 30.564356 | 88 | 0.603199 | false |
alissonperez/django-onmydesk | onmydesk/utils.py | 1 | 1501 | """Module with common utilities to this package"""
import re
from datetime import timedelta
import importlib
def my_import(class_name):
"""
Usage example::
Report = my_import('myclass.models.Report')
model_instance = Report()
model_instance.name = 'Test'
model_instance.save()
:param str class_name: Class name
:returns: Class object
"""
*packs, class_name = class_name.split('.')
try:
module = importlib.import_module('.'.join(packs))
klass = getattr(module, class_name)
return klass
except (ImportError, AttributeError) as e:
msg = 'Could not import "{}" from {}: {}.'.format(
class_name, e.__class__.__name__, e)
raise ImportError(msg)
def str_to_date(value, reference_date):
'''
Convert a string like 'D-1' to a "reference_date - timedelta(days=1)"
:param str value: String like 'D-1', 'D+1', 'D'...
:param date reference_date: Date to be used as 'D'
:returns: Result date
:rtype: date
'''
n_value = value.strip(' ').replace(' ', '').upper()
if not re.match('^D[\-+][0-9]+$|^D$', n_value):
raise ValueError('Wrong value "{}"'.format(value))
if n_value == 'D':
return reference_date
elif n_value[:2] == 'D-':
days = int(n_value[2:])
return reference_date - timedelta(days=days)
elif n_value[:2] == 'D+':
days = int(n_value[2:])
return reference_date + timedelta(days=days)
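# Illustrative sketch (not part of the original module): resolving relative date
# strings against a reference date. The dates below are arbitrary example values.
def _example_str_to_date():
    from datetime import date
    ref = date(2016, 5, 10)
    return (str_to_date('D', ref),       # date(2016, 5, 10)
            str_to_date('D-1', ref),     # date(2016, 5, 9)
            str_to_date('d + 2', ref))   # date(2016, 5, 12); spaces and case are normalized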
| mit | -7,885,954,554,795,849,000 | 26.290909 | 73 | 0.578281 | false |
bradallred/gemrb | gemrb/GUIScripts/iwd2/Abilities.py | 1 | 7433 | # GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, ability (GUICG4)
import GemRB
from GUIDefines import *
import CharOverview
import CommonTables
from ie_stats import IE_STR, IE_DEX, IE_CON, IE_INT, IE_WIS, IE_CHR
AbilityWindow = 0
TextAreaControl = 0
DoneButton = 0
AbilityTable = 0
PointsLeft = 0
Minimum = 0
Maximum = 0
Add = 0
KitIndex = 0
CharGen = 0
Stats = [ IE_STR, IE_DEX, IE_CON, IE_INT, IE_WIS, IE_CHR ]
def CalcLimits(Abidx):
global Minimum, Maximum, Add
if not CharGen:
pc = GemRB.GameGetSelectedPCSingle ()
Minimum = GemRB.GetPlayerStat (pc, Stats[Abidx], 1)
Maximum = 25
return
Abracead = GemRB.LoadTable("ABRACEAD")
RaceID = GemRB.GetVar("Race")
RowIndex = CommonTables.Races.FindValue(3, RaceID)
RaceName = CommonTables.Races.GetRowName(RowIndex)
Minimum = 3
Maximum = 18
Abclasrq = GemRB.LoadTable("ABCLASRQ")
tmp = Abclasrq.GetValue(KitIndex, Abidx)
if tmp!=0 and tmp>Minimum:
Minimum = tmp
Abracerq = GemRB.LoadTable("ABRACERQ")
Race = Abracerq.GetRowIndex(RaceName)
tmp = Abracerq.GetValue(Race, Abidx*2)
if tmp!=0 and tmp>Minimum:
Minimum = tmp
tmp = Abracerq.GetValue(Race, Abidx*2+1)
if tmp!=0 and tmp>Maximum:
Maximum = tmp
Race = Abracead.GetRowIndex(RaceName)
Add = Abracead.GetValue(Race, Abidx)
Maximum = Maximum + Add
Minimum = Minimum + Add
if Minimum<1:
Minimum=1
return
def GetModColor(mod):
if mod < 0:
return {'r' : 255, 'g' : 0, 'b' : 0}
elif mod > 0:
return {'r' : 0, 'g' : 255, 'b' : 0}
else:
return {'r' : 255, 'g' : 255, 'b' : 255}
def RollPress():
global Add
GemRB.SetVar("Ability",0)
SumLabel = AbilityWindow.GetControl(0x10000002)
SumLabel.SetTextColor ({'r' : 255, 'g' : 255, 'b' : 0})
SumLabel.SetUseRGB(1)
SumLabel.SetText(str(PointsLeft))
for i in range(0,6):
CalcLimits(i)
v = 10+Add
if not CharGen:
v = Minimum
b = v//2-5
GemRB.SetVar("Ability "+str(i), v )
Label = AbilityWindow.GetControl(0x10000003+i)
Label.SetText(str(v) )
Label = AbilityWindow.GetControl(0x10000024+i)
Label.SetUseRGB(1)
Label.SetTextColor (GetModColor (b))
Label.SetText("%+d"%(b))
return
def OnLoad():
OpenAbilitiesWindow (1, 16)
def OpenAbilitiesWindow(chargen, points):
global AbilityWindow, TextAreaControl, DoneButton
global CharGen, PointsLeft
global AbilityTable
global KitIndex, Minimum, Maximum
CharGen = chargen
PointsLeft = points
AbilityTable = GemRB.LoadTable ("ability")
if chargen:
Kit = GemRB.GetVar("Class Kit")
Class = GemRB.GetVar("Class")-1
if Kit == 0:
KitName = CommonTables.Classes.GetRowName(Class)
else:
#rowname is just a number, first value row what we need here
KitName = CommonTables.KitList.GetValue(Kit, 0)
Abclasrq = GemRB.LoadTable("ABCLASRQ")
KitIndex = Abclasrq.GetRowIndex(KitName)
# in a fit of clarity, they used the same ids in both windowpacks
if chargen:
AbilityWindow = GemRB.LoadWindow (4, "GUICG")
else:
AbilityWindow = GemRB.LoadWindow (7, "GUIREC")
CharOverview.PositionCharGenWin(AbilityWindow)
RollPress ()
for i in range(0,6):
Button = AbilityWindow.GetControl(i+30)
Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, JustPress)
Button.SetVarAssoc("Ability", i)
Button = AbilityWindow.GetControl(i*2+16)
Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, LeftPress)
Button.SetVarAssoc("Ability", i )
Button.SetActionInterval (200)
Button = AbilityWindow.GetControl(i*2+17)
Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, RightPress)
Button.SetVarAssoc("Ability", i )
Button.SetActionInterval (200)
if chargen:
BackButton = AbilityWindow.GetControl (36)
BackButton.SetText (15416)
BackButton.MakeEscape()
BackButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BackPress)
else:
AbilityWindow.DeleteControl (36)
DoneButton = AbilityWindow.GetControl(0)
DoneButton.SetText(36789)
DoneButton.MakeDefault()
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NextPress)
TextAreaControl = AbilityWindow.GetControl(29)
TextAreaControl.SetText(17247)
if not chargen:
AbilityWindow.ShowModal (MODAL_SHADOW_GRAY)
else:
AbilityWindow.Focus()
return
def RightPress(btn, Abidx):
global PointsLeft
Ability = GemRB.GetVar("Ability "+str(Abidx) )
#should be more elaborate
CalcLimits(Abidx)
GemRB.SetToken("MINIMUM",str(Minimum) )
GemRB.SetToken("MAXIMUM",str(Maximum) )
TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
if Ability<=Minimum:
return
Ability -= 1
GemRB.SetVar("Ability "+str(Abidx), Ability)
PointsLeft = PointsLeft + 1
SumLabel = AbilityWindow.GetControl(0x10000002)
SumLabel.SetText(str(PointsLeft) )
SumLabel.SetTextColor ({'r' : 255, 'g' : 255, 'b' : 0})
Label = AbilityWindow.GetControl(0x10000003+Abidx)
Label.SetText(str(Ability) )
Label = AbilityWindow.GetControl(0x10000024+Abidx)
b = Ability // 2 - 5
Label.SetTextColor (GetModColor (b))
Label.SetText("%+d"%(b))
DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
return
def JustPress(btn, Abidx):
Ability = GemRB.GetVar("Ability "+str(Abidx) )
#should be more elaborate
CalcLimits(Abidx)
GemRB.SetToken("MINIMUM",str(Minimum) )
GemRB.SetToken("MAXIMUM",str(Maximum) )
TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
return
def LeftPress(btn, Abidx):
global PointsLeft
CalcLimits(Abidx)
GemRB.SetToken("MINIMUM",str(Minimum) )
GemRB.SetToken("MAXIMUM",str(Maximum) )
Ability = GemRB.GetVar("Ability "+str(Abidx) )
TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
if PointsLeft == 0:
return
if Ability>=Maximum: #should be more elaborate
return
Ability += 1
GemRB.SetVar("Ability "+str(Abidx), Ability)
PointsLeft = PointsLeft - 1
SumLabel = AbilityWindow.GetControl(0x10000002)
if PointsLeft == 0:
SumLabel.SetTextColor({'r' : 255, 'g' : 255, 'b' : 255})
SumLabel.SetText(str(PointsLeft) )
Label = AbilityWindow.GetControl(0x10000003+Abidx)
Label.SetText(str(Ability) )
Label = AbilityWindow.GetControl(0x10000024+Abidx)
b = Ability // 2 - 5
Label.SetTextColor (GetModColor (b))
Label.SetText("%+d"%(b))
if PointsLeft == 0:
DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
return
def BackPress():
if AbilityWindow:
AbilityWindow.Unload()
GemRB.SetNextScript("CharGen5")
for i in range(6):
GemRB.SetVar("Ability "+str(i),0) #scrapping the abilities
return
def NextPress():
if AbilityWindow:
AbilityWindow.Unload()
if CharGen:
GemRB.SetNextScript("CharGen6") #skills
else:
# set the upgraded stats
pc = GemRB.GameGetSelectedPCSingle ()
for i in range (len(Stats)):
newValue = GemRB.GetVar ("Ability "+str(i))
GemRB.SetPlayerStat (pc, Stats[i], newValue)
# open up the next levelup window
import Enemy
Enemy.OpenEnemyWindow ()
return
| gpl-2.0 | 7,460,413,711,233,489,000 | 26.428044 | 81 | 0.727297 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py | 1 | 8295 | import hierlist, string, regutil, os
import win32con, win32ui, win32api
import commctrl
from pywin.mfc import dialog
import glob
import pyclbr
import pywin.framework.scriptutils
import afxres
class HLIErrorItem(hierlist.HierListItem):
def __init__(self, text):
self.text = text
hierlist.HierListItem.__init__(self)
def GetText(self):
return self.text
class HLICLBRItem(hierlist.HierListItem):
def __init__(self, name, file, lineno, suffix = ""):
# If the 'name' object itself has a .name, use it. Not sure
# how this happens, but seems pyclbr related.
# See PyWin32 bug 817035
self.name = getattr(name, "name", name)
self.file = file
self.lineno = lineno
self.suffix = suffix
def __cmp__(self, other):
return cmp(self.name, other.name)
def GetText(self):
return self.name + self.suffix
def TakeDefaultAction(self):
if self.file:
pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop=1)
else:
win32ui.SetStatusText("The source of this object is unknown")
def PerformItemSelected(self):
if self.file is None:
msg = "%s - source can not be located." % (self.name, )
else:
msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file)
win32ui.SetStatusText(msg)
class HLICLBRClass(HLICLBRItem):
def __init__(self, clbrclass, suffix = ""):
try:
name = clbrclass.name
file = clbrclass.file
lineno = clbrclass.lineno
self.super = clbrclass.super
self.methods = clbrclass.methods
except AttributeError:
name = clbrclass
file = lineno = None
self.super = []; self.methods = {}
HLICLBRItem.__init__(self, name, file, lineno, suffix)
def GetSubList(self):
ret = []
for c in self.super:
ret.append(HLICLBRClass(c, " (Parent class)"))
for meth, lineno in self.methods.items():
ret.append(HLICLBRMethod(meth, self.file, lineno, " (method)"))
return ret
def IsExpandable(self):
return len(self.methods) + len(self.super)
def GetBitmapColumn(self):
return 21
class HLICLBRFunction(HLICLBRClass):
def GetBitmapColumn(self):
return 22
class HLICLBRMethod(HLICLBRItem):
def GetBitmapColumn(self):
return 22
class HLIModuleItem(hierlist.HierListItem):
def __init__(self, path):
hierlist.HierListItem.__init__(self)
self.path = path
def GetText(self):
return os.path.split(self.path)[1] + " (module)"
def IsExpandable(self):
return 1
def TakeDefaultAction(self):
win32ui.GetApp().OpenDocumentFile( self.path )
def GetBitmapColumn(self):
col = 4 # Default
try:
if win32api.GetFileAttributes(self.path) & win32con.FILE_ATTRIBUTE_READONLY:
col = 5
except win32api.error:
pass
return col
def GetSubList(self):
mod, path = pywin.framework.scriptutils.GetPackageModuleName(self.path)
win32ui.SetStatusText("Building class list - please wait...", 1)
win32ui.DoWaitCursor(1)
try:
try:
reader = pyclbr.readmodule_ex # Post 1.5.2 interface.
extra_msg = " or functions"
except AttributeError:
reader = pyclbr.readmodule
extra_msg = ""
data = reader(mod, [path])
if data:
ret = []
for item in data.values():
if item.__class__ != pyclbr.Class: # ie, it is a pyclbr Function instance (only introduced post 1.5.2)
ret.append(HLICLBRFunction( item, " (function)" ) )
else:
ret.append(HLICLBRClass( item, " (class)") )
ret.sort()
return ret
else:
return [HLIErrorItem("No Python classes%s in module." % (extra_msg,))]
finally:
win32ui.DoWaitCursor(0)
win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
def MakePathSubList(path):
ret = []
for filename in glob.glob(os.path.join(path,'*')):
if os.path.isdir(filename) and os.path.isfile(os.path.join(filename, "__init__.py")):
ret.append(HLIDirectoryItem(filename, os.path.split(filename)[1]))
else:
if string.lower(os.path.splitext(filename)[1]) in ['.py', '.pyw']:
ret.append(HLIModuleItem(filename))
return ret
class HLIDirectoryItem(hierlist.HierListItem):
def __init__(self, path, displayName = None, bSubDirs = 0):
hierlist.HierListItem.__init__(self)
self.path = path
self.bSubDirs = bSubDirs
if displayName:
self.displayName = displayName
else:
self.displayName = path
def IsExpandable(self):
return 1
def GetText(self):
return self.displayName
def GetSubList(self):
ret = MakePathSubList(self.path)
if os.path.split(self.path)[1] == "win32com": # Complete and utter hack for win32com.
try:
path = win32api.GetFullPathName(os.path.join(self.path, "..\\win32comext"))
ret = ret + MakePathSubList(path)
except win32ui.error:
pass
return ret
class HLIProjectRoot(hierlist.HierListItem):
def __init__(self, projectName, displayName = None):
hierlist.HierListItem.__init__(self)
self.projectName = projectName
self.displayName = displayName or projectName
def GetText(self):
return self.displayName
def IsExpandable(self):
return 1
def GetSubList(self):
paths = regutil.GetRegisteredNamedPath(self.projectName)
pathList = string.split(paths,";")
if len(pathList)==1: # Single dir - dont bother putting the dir in
ret = MakePathSubList(pathList[0])
else:
ret = map( HLIDirectoryItem, pathList )
return ret
class HLIRoot(hierlist.HierListItem):
def __init__(self):
hierlist.HierListItem.__init__(self)
def IsExpandable(self):
return 1
def GetSubList(self):
keyStr = regutil.BuildDefaultPythonKey() + "\\PythonPath"
hKey = win32api.RegOpenKey(regutil.GetRootKey(), keyStr)
try:
ret = []
ret.append(HLIProjectRoot("", "Standard Python Library")) # The core path.
index = 0
while 1:
try:
ret.append(HLIProjectRoot(win32api.RegEnumKey(hKey, index)))
index = index + 1
except win32api.error:
break
return ret
finally:
win32api.RegCloseKey(hKey)
class dynamic_browser (dialog.Dialog):
style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE
cs = (
win32con.WS_CHILD |
win32con.WS_VISIBLE |
commctrl.TVS_HASLINES |
commctrl.TVS_LINESATROOT |
commctrl.TVS_HASBUTTONS
)
dt = [
["Python Projects", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")],
["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs]
]
def __init__ (self, hli_root):
dialog.Dialog.__init__ (self, self.dt)
self.hier_list = hierlist.HierListWithItems (
hli_root,
win32ui.IDB_BROWSER_HIER
)
self.HookMessage (self.on_size, win32con.WM_SIZE)
def OnInitDialog (self):
self.hier_list.HierInit (self)
return dialog.Dialog.OnInitDialog (self)
def on_size (self, params):
lparam = params[3]
w = win32api.LOWORD(lparam)
h = win32api.HIWORD(lparam)
self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h))
def BrowseDialog():
root = HLIRoot()
if not root.IsExpandable():
raise TypeError, "Browse() argument must have __dict__ attribute, or be a Browser supported type"
dlg = dynamic_browser (root)
dlg.CreateWindow()
def DockableBrowserCreator(parent):
root = HLIRoot()
hl = hierlist.HierListWithItems (
root,
win32ui.IDB_BROWSER_HIER
)
style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS
control = win32ui.CreateTreeCtrl()
control.CreateWindow(style, (0, 0, 150, 300), parent, win32ui.IDC_LIST1)
list = hl.HierInit (parent, control)
return control
def DockablePathBrowser():
import pywin.docking.DockingBar
bar = pywin.docking.DockingBar.DockingBar()
bar.CreateWindow(win32ui.GetMainFrame(), DockableBrowserCreator, "Path Browser", 0x8e0a)
bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
win32ui.GetMainFrame().DockControlBar(bar)
# The "default" entry point
Browse = DockablePathBrowser
| epl-1.0 | 5,805,571,004,573,647,000 | 30.276265 | 146 | 0.66522 | false |
Commonists/SurfaceImageContentGap | surfaceimagecontentgap/rc.py | 1 | 2812 | from argparse import ArgumentParser
import datetime
import time
from surfaceimagecontentgap.imagegap import isthereanimage
from surfaceimagecontentgap.bot import SurfaceContentGapBot
def last_rc_time(site):
"""Datetime of last change."""
rc = site.recentchanges()
last_rev = rc.next()
return datetime.datetime \
.utcfromtimestamp(time.mktime(last_rev['timestamp']))
def previoushour(dt):
"""One hour previous given datetime."""
delta = datetime.timedelta(hours=1)
return dt - delta
def previousday(dt):
"""One day before given datetime."""
delta = datetime.timedelta(days=1)
return dt - delta
def rc_from(site, dt):
"""Recent changes from a given datetime."""
kwargs = {
'end': dt.strftime('%Y%m%d%H%M%S'),
'namespace': 0
}
rc = site.recentchanges(**kwargs)
# revisions
changes = []
# page titles
pages = []
for rev in rc:
changes.append(rev)
title = rev['title'].encode('utf-8')
if title not in pages:
pages.append(title)
return {
'list_revisions': changes,
'list_pages': pages
}
def articles_from_titles(site, titles):
"""Articles object from list of titles"""
return [site.Pages[title.decode('utf-8')] for title in titles]
def list_articles(bot):
# site
site = bot.site
# last hours rc
end_dt = previoushour(last_rc_time(site))
recent_changes = rc_from(site, end_dt)
pages = recent_changes['list_pages']
return articles_from_titles(site, pages)
def main():
description = 'Analyzing Wikipedia to surface image content gap (rc).'
parser = ArgumentParser(description=description)
parser.add_argument('-w', '--wikipedia',
type=str,
dest='lang',
required=False,
default='fr',
help='Language code for Wikipedia')
parser.add_argument('-r', '--report',
type=str,
dest='report',
required=True,
help='Page name to write a report.')
parser.add_argument('-f', '--configfile',
type=str,
dest='config',
required=True,
help='Config file with login and password.')
args = parser.parse_args()
kwargs = {
'config_file': args.config,
'lang': args.lang,
'report': args.report,
'list_fun': list_articles,
'filter_fun': lambda bot, x: not isthereanimage(x),
'rank_fun': lambda bot, x: 0,
'frequency': 60
}
rc_bot = SurfaceContentGapBot(**kwargs)
rc_bot.run()
if __name__ == '__main__':
main()
| mit | 8,370,749,980,757,185,000 | 26.568627 | 74 | 0.558677 | false |
vitalyvolkov/fontbakery | bakery/project/views.py | 1 | 16523 | # coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License.
# pylint:disable-msg=E1101
from flask import (Blueprint, render_template, g, flash, request,
url_for, redirect, json, Markup, current_app, abort, make_response)
from flask.ext.babel import gettext as _
from ..decorators import login_required
from ..utils import project_fontaine
from .models import Project, ProjectBuild
from functools import wraps
import itsdangerous
project = Blueprint('project', __name__, url_prefix='/project')
DEFAULT_SUBSET_LIST = [
'menu', 'latin', 'latin-ext+latin', 'cyrillic+latin', 'cyrillic-ext+latin',
'greek+latin', 'greek-ext+latin', 'vietnamese+latin']
def chkhash(hashstr):
try:
int(hashstr, 16)
except ValueError:
flash(_('Error in provided data'))
abort(500)
@project.before_request
def before_request():
if g.user:
g.projects = Project.query.filter_by(login=g.user.login).all()
# project resolve decorator
def project_required(f):
""" Decorator reads project_id from arguments list and resolve it into project object.
In parallel it check if project object is ready Usage:
@project.route('/test', methods=['GET'])
@project_required
def test(p):
# p is Project model instance
return "Project is available"
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if 'project_id' in kwargs:
project_id = kwargs.pop('project_id')
else:
project_id = args.pop(0)
args = list(args)
p = Project.query.filter_by(
login=g.user.login, id=project_id).first_or_404()
        # Ownership/access checks could be added here in the future.
if p.is_ready:
args.insert(0, p)
return f(*args, **kwargs)
else:
flash(_('Project is being synchronized, wait until it is done'))
return redirect(url_for('frontend.splash'))
return decorated_function
# API methods
@project.route('/api/<int:project_id>/build', methods=['GET'])
@login_required
@project_required
def build(p):
""" Revision id is dangerous parameter, because it added to command line to
git call. That is why it always should be signed with hash.
"""
if not p.config['local'].get('setup'):
flash(_("Complete setup first"))
return redirect(url_for('project.setup', project_id=p.id))
if request.args.get('revision'):
signer = itsdangerous.Signer(current_app.secret_key)
revision = signer.unsign(request.args.get('revision'))
build = ProjectBuild.make_build(p, revision)
else:
build = ProjectBuild.make_build(p, 'HEAD')
flash(Markup(_("Updated repository (<a href='%(repo)s'>see files</a>) Next step: <a href='%(step)s'>set it up</a>",
repo=url_for('project.ufiles', project_id=p.id), step=url_for('project.setup', project_id=p.id))))
return redirect(url_for('project.log', project_id=p.id, build_id=build.id))
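# Illustrative sketch (not part of the original views): how a signed 'revision'
# query parameter accepted by the build view above could be produced. The revision
# string and secret key below are arbitrary example values.
def _example_sign_revision(revision, secret_key):
    signer = itsdangerous.Signer(secret_key)
    return signer.sign(revision)
# e.g. url_for('project.build', project_id=1, revision=_example_sign_revision('a1b2c3', 'secret'))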
@project.route('/api/<int:project_id>/pull', methods=['GET'])
@login_required
# this is the only exception where the @project_required decorator is not needed
def pull(project_id):
p = Project.query.filter_by(
login=g.user.login, id=project_id).first_or_404()
p.sync()
flash(_("Changes will be pulled from upstream in a moment"))
return redirect(url_for('project.git', project_id=p.id))
# Setup views
@project.route('/<int:project_id>/setup', methods=['GET', 'POST'])
@login_required
@project_required
def setup(p):
config = p.config
originalConfig = p.config
error = False
if request.method == 'GET':
return render_template('project/setup.html', project=p)
if not request.form.get('license_file') in config['local']['txt_files']:
error = True
flash(_("Please select the license file"))
config['state']['license_file'] = request.form.get('license_file')
if request.form.get('familyname'):
if len(request.form.get('familyname')) > 0:
config['state']['familyname'] = request.form.get('familyname')
else:
if 'familyname' in config['state']:
config['state'].pop('familyname')
if config['local']['ufo_dirs'] and config['local']['ttx_files']:
if request.form.get('source_files_type'):
if request.form.get('source_files_type') in ['ttx', 'ufo']:
config['state']['source_files_type'] = request.form.get('source_files_type')
else:
config['state'].pop('source_files_type')
else:
error = True
flash(_('Select UFO or TTX as primary source'))
txt_files_to_copy = request.form.getlist('txt_files')
config['state']['txt_files_copied'] = txt_files_to_copy
# XXX: unsure should it be local or state property
process_files = request.form.getlist('process_files')
config['state']['process_files'] = process_files
subset_list = request.form.getlist('subset')
for i in subset_list:
if i not in dict(p.get_subsets()).keys():
error = True
flash(_('Subset value is wrong'))
if len(subset_list) == 0:
error = True
flash(_("Select at least one subset from list"))
config['state']['subset'] = subset_list
if request.form.get('ttfautohint'):
if len(request.form.get('ttfautohint')) > 0:
config['state']['ttfautohint'] = request.form.get('ttfautohint')
else:
if 'ttfautohint' in config['state']:
config['state'].pop('ttfautohint')
if error:
return render_template('project/setup.html', project=p)
if originalConfig != config:
flash(_("Setup updated"))
config['local']['setup'] = True
p.save_state()
if request.form.get('bake'):
p.save_state()
return redirect(url_for('project.build', project_id=p.id))
else:
flash(_("Setup saved"))
return redirect(url_for('project.setup', project_id=p.id))
@project.route('/<int:project_id>/setup/dashboard_save', methods=['POST'])
@login_required
@project_required
def dashboard_save(p):
if not p.is_ready:
return redirect(url_for('project.log', project_id=p.id))
for item in request.form:
if request.form.get(item):
if len(request.form.get(item)) > 0:
p.config['state'][item] = request.form.get(item)
flash(_('Set %(item)s', item=item))
else:
if item in p.config['state']:
del p.config['state'][item]
flash(_('Unset %(item)s', item=item))
p.save_state()
return redirect(url_for('project.setup', project_id=p.id))
# File browser views
@project.route('/<int:project_id>/files/', methods=['GET'])
@project.route('/<int:project_id>/files/<revision>/', methods=['GET'])
@login_required
@project_required
def ufiles(p, revision=None, name=None):
# this page can be visible by others, not only by owner
# TODO consider all pages for that
if revision and revision != 'HEAD':
chkhash(revision)
else:
revision = 'HEAD'
return render_template('project/ufiles.html', project=p,
revision=revision)
@project.route('/<int:project_id>/files/<revision>/<path:name>', methods=['GET'])
@login_required
@project_required
def ufile(p, revision=None, name=None):
# this page can be visible by others, not only by owner
# TODO consider all pages for that
if revision and revision != 'HEAD':
chkhash(revision)
else:
revision = 'HEAD'
mime, data = p.revision_file(revision, name)
return render_template('project/ufile.html', project=p,
revision=revision, name=name, mime=mime, data=data)
@project.route('/<int:project_id>/files/<revision>/blob', methods=['GET'])
@login_required
@project_required
def ufileblob(p, revision=None):
""" Mandatory parameter is `name` signed by cypher hash on server side.
This view is pretty much "heavy", each request spawn additional process and
read its output.
"""
if revision and revision != 'HEAD':
chkhash(revision)
else:
revision = 'HEAD'
signer = itsdangerous.Signer(current_app.secret_key)
name = signer.unsign(request.args.get('name'))
mime, data = p.revision_file(revision, name)
if mime.startswith('image'):
response = make_response(data)
response.headers['Content-Type'] = mime
response.headers['Content-Disposition'] = 'attachment; filename=%s' % name
return response
else:
abort(500)
# Builds views
@project.route('/<int:project_id>/build', methods=['GET'])
@login_required
@project_required
def history(p):
""" Results of processing tests, for ttf files """
b = ProjectBuild.query.filter_by(project=p).order_by("id desc").all()
return render_template('project/history.html', project=p, builds=b)
@project.route('/<int:project_id>/build/<int:build_id>/log', methods=['GET'])
@login_required
@project_required
def log(p, build_id):
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
param = {'login': p.login, 'id': p.id, 'revision': b.revision, 'build': b.id}
log_file = "%(login)s/%(id)s.out/%(build)s.%(revision)s.process.log" % param
return render_template('project/log.html', project=p, build=b, log_file=log_file)
@project.route('/<int:project_id>/build/<int:build_id>/rfiles', methods=['GET'])
@login_required
@project_required
def rfiles(p, build_id):
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
if not b.is_done:
return redirect(url_for('project.log', project_id=p.id, build_id=b.id))
yaml = p.read_asset('yaml')
f = project_fontaine(p, b)
tree = b.files()
return render_template('project/rfiles.html', project=p, yaml=yaml,
fontaineFonts=f, build=b, tree=tree)
@project.route('/<int:project_id>/build/<int:build_id>/tests', methods=['GET'])
@login_required
@project_required
def rtests(p, build_id):
""" Results of processing tests, for ttf files """
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
if not p.is_ready:
return redirect(url_for('project.log', project_id=p.id))
test_result = b.result_tests()
summary = {
'all_tests': sum([int(y.get('sum', 0)) for x, y in test_result.items()]),
'fonts': test_result.keys(),
'all_error': sum([len(x.get('error', [])) for x in test_result.values()]),
'all_failure': sum([len(x.get('failure', [])) for x in test_result.values()]),
'all_fixed': sum([len(x.get('fixed', [])) for x in test_result.values()]),
'all_success': sum([len(x.get('success', [])) for x in test_result.values()]),
'fix_asap': [dict(font=y, **t) for y, x in test_result.items() for t in x.get('failure', []) if 'required' in t['tags']],
}
return render_template('project/rtests.html', project=p,
tests=test_result, build=b, summary=summary)
@project.route('/<int:project_id>/build/<int:build_id>/', methods=['GET'])
@login_required
@project_required
def summary(p, build_id):
""" Results of processing tests, for ttf files """
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
if not p.is_ready:
return redirect(url_for('project.log', project_id=p.id))
test_result = b.result_tests()
summary = {
'all_tests': sum([int(y.get('sum', 0)) for x, y in test_result.items()]),
'fonts': test_result.keys(),
'all_error': sum([len(x.get('error', [])) for x in test_result.values()]),
'all_failure': sum([len(x.get('failure', [])) for x in test_result.values()]),
'all_fixed': sum([len(x.get('fixed', [])) for x in test_result.values()]),
'all_success': sum([len(x.get('success', [])) for x in test_result.values()]),
'fix_asap': [dict(font=y, **t) for y, x in test_result.items() for t in x.get('failure', []) if 'required' in t['tags']],
}
return render_template('project/summary.html', project=p,
tests=test_result, build=b, summary=summary)
@project.route('/<int:project_id>/build/<int:build_id>/description', methods=['GET', 'POST'])
@login_required
@project_required
def description(p, build_id):
""" Description file management """
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
if request.method == 'GET':
data = b.read_asset('description')
return render_template('project/description.html', project=p, build=b,
description=data)
# POST
b.save_asset('description', request.form.get('description'))
flash(_('Description saved'))
return redirect(url_for('project.description', build_id=b.id, project_id=p.id))
@project.route('/<int:project_id>/build/<int:build_id>/metadatajson', methods=['GET', 'POST'])
@login_required
@project_required
def metadatajson(p, build_id):
b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
if request.method == 'GET':
metadata = b.read_asset('metadata')
metadata_new = b.read_asset('metadata_new')
return render_template('project/metadatajson.html', project=p, build=b,
metadata=metadata, metadata_new=metadata_new)
# POST
try:
# this line trying to parse json
json.loads(request.form.get('metadata'))
b.save_asset('metadata', request.form.get('metadata'),
del_new=request.form.get('delete', None))
flash(_('METADATA.json saved'))
return redirect(url_for('project.metadatajson', project_id=p.id, build_id=b.id))
except ValueError:
flash(_('Wrong format for METADATA.json file'))
metadata_new = b.read_asset('metadata_new')
return render_template('project/metadatajson.html', project=p, build=b,
metadata=request.form.get('metadata'),
metadata_new=metadata_new)
# Base views
@project.route('/<int:project_id>/tests/<revision>', methods=['GET'])
@login_required
@project_required
def utests(p, revision):
""" Results of processing tests, for ufo files """
if not p.is_ready:
return redirect(url_for('project.log', project_id=p.id))
test_result = p.revision_tests(revision)
return render_template('project/utests.html', project=p, revision=revision,
tests=test_result)
@project.route('/<int:project_id>/git', methods=['GET'])
@login_required
@project_required
def git(p):
""" Results of processing tests, for ttf files """
gitlog = p.gitlog()
return render_template('project/gitlog.html', project=p, log=gitlog)
@project.route('/<int:project_id>/diff', methods=['GET'])
@login_required
@project_required
def diff(p):
""" Show diff between different revisions, since we want to make this view
more user friendly we can't signify left and right revision. And this mean
that we should check input data"""
if not all([request.args.get('left'), request.args.get('right')]):
flash(_("Left and right hash for comparsion should be provided"))
try:
left = request.args.get('left')
right = request.args.get('right')
# let python try to parse strings, if it fails then there can be
# something evil
int(left, 16)
int(right, 16)
except ValueError:
flash(_('Error in provided data'))
return redirect(url_for('project.git', project_id=p.id))
diffdata = p.diff_files(left, right)
return render_template('project/diff.html', project=p,
diff=diffdata, left=left, right=right)
| apache-2.0 | 2,932,717,679,689,174,000 | 34.230277 | 129 | 0.629486 | false |
Purg/SMQTK | python/smqtk/bin/classifyFiles.py | 1 | 5843 | """
Based on an input, trained classifier configuration, classify a number of media
files, whose descriptor is computed by the configured descriptor generator.
Input files that classify as the given label are then output to standard out.
Thus, this script acts like a filter.
"""
import glob
import json
import logging
import os
from smqtk.algorithms import get_classifier_impls
from smqtk.algorithms import get_descriptor_generator_impls
from smqtk.representation import ClassificationElementFactory
from smqtk.representation import DescriptorElementFactory
from smqtk.representation.data_element.file_element import DataFileElement
from smqtk.utils import plugin
from smqtk.utils.bin_utils import (
initialize_logging,
output_config,
basic_cli_parser,
)
__author__ = "[email protected]"
def get_cli_parser():
parser = basic_cli_parser(__doc__)
g_classifier = parser.add_argument_group("Classification")
g_classifier.add_argument('--overwrite',
action='store_true', default=False,
help='When generating a configuration file, '
'overwrite an existing file.')
g_classifier.add_argument('-l', '--label',
type=str, default=None,
help='The class to filter by. This is based on '
'the classifier configuration/model used. '
'If this is not provided, we will list the '
'available labels in the provided '
'classifier configuration.')
# Positional
parser.add_argument("file_globs",
nargs='*',
metavar='GLOB',
help='Series of shell globs specifying the files to '
'classify.')
return parser
def get_default_config():
return {
"descriptor_factory":
DescriptorElementFactory.get_default_config(),
"descriptor_generator":
plugin.make_config(get_descriptor_generator_impls()),
"classification_factory":
ClassificationElementFactory.get_default_config(),
"classifier":
plugin.make_config(get_classifier_impls()),
}
def main():
log = logging.getLogger(__name__)
parser = get_cli_parser()
args = parser.parse_args()
config_path = args.config
generate_config = args.generate_config
config_overwrite = args.overwrite
is_debug = args.verbose
label = args.label
file_globs = args.file_globs
initialize_logging(logging.getLogger(__name__),
is_debug and logging.DEBUG or logging.INFO)
initialize_logging(logging.getLogger('smqtk'),
is_debug and logging.DEBUG or logging.INFO)
log.debug("Showing debug messages.")
config = get_default_config()
config_loaded = False
if config_path and os.path.isfile(config_path):
with open(config_path) as f:
log.info("Loading configuration: %s", config_path)
config.update(
json.load(f)
)
config_loaded = True
output_config(generate_config, config, log, config_overwrite, 100)
if not config_loaded:
log.error("No configuration provided")
exit(101)
classify_files(config, label, file_globs)
def classify_files(config, label, file_globs):
log = logging.getLogger(__name__)
#: :type: smqtk.algorithms.Classifier
classifier = \
plugin.from_plugin_config(config['classifier'],
get_classifier_impls())
def log_available_labels():
log.info("Available classifier labels:")
for l in classifier.get_labels():
log.info("- %s", l)
if label is None:
log_available_labels()
return
elif label not in classifier.get_labels():
log.error("Invalid classification label provided to compute and filter "
"on: '%s'", label)
log_available_labels()
return
log.info("Collecting files from globs")
#: :type: list[DataFileElement]
data_elements = []
uuid2filepath = {}
for g in file_globs:
if os.path.isfile(g):
d = DataFileElement(g)
data_elements.append(d)
uuid2filepath[d.uuid()] = g
else:
log.debug("expanding glob: %s", g)
for fp in glob.iglob(g):
d = DataFileElement(fp)
data_elements.append(d)
uuid2filepath[d.uuid()] = fp
if not data_elements:
raise RuntimeError("No files provided for classification.")
log.info("Computing descriptors")
descriptor_factory = \
DescriptorElementFactory.from_config(config['descriptor_factory'])
#: :type: smqtk.algorithms.DescriptorGenerator
descriptor_generator = \
plugin.from_plugin_config(config['descriptor_generator'],
get_descriptor_generator_impls())
descr_map = descriptor_generator\
.compute_descriptor_async(data_elements, descriptor_factory)
log.info("Classifying descriptors")
classification_factory = ClassificationElementFactory \
.from_config(config['classification_factory'])
classification_map = classifier\
.classify_async(descr_map.values(), classification_factory)
log.info("Printing input file paths that classified as the given label.")
# map of UUID to filepath:
uuid2c = dict((c.uuid, c) for c in classification_map.itervalues())
for data in data_elements:
if uuid2c[data.uuid()].max_label() == label:
print uuid2filepath[data.uuid()]
if __name__ == '__main__':
main()
| bsd-3-clause | -9,202,389,211,392,059,000 | 33.169591 | 80 | 0.610303 | false |
bgris/ODL_bgris | lib/python3.5/site-packages/odl/util/graphics.py | 1 | 15419 | # Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Functions for graphical output."""
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
import numpy as np
from odl.util.testutils import run_doctests
from odl.util.utility import is_real_dtype
__all__ = ('show_discrete_data',)
def _safe_minmax(values):
"""Calculate min and max of array with guards for nan and inf."""
# Nan and inf guarded min and max
minval = np.min(values[np.isfinite(values)])
maxval = np.max(values[np.isfinite(values)])
return minval, maxval
def _colorbar_ticks(minval, maxval):
"""Return the ticks (values show) in the colorbar."""
return [minval, (maxval + minval) / 2., maxval]
def _digits(minval, maxval):
"""Digits needed to comforatbly display values in [minval, maxval]"""
if minval == maxval:
return 3
else:
return min(10, max(2, int(1 + abs(np.log10(maxval - minval)))))
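# Worked example: a value range of 1e-3 gives int(1 + |log10(1e-3)|) = 4 digits,
# clamped to the interval [2, 10].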
def _colorbar_format(minval, maxval):
"""Return the format string for the colorbar."""
return '%.{}f'.format(_digits(minval, maxval))
def _axes_info(grid, npoints=5):
"""Return a list of (tick_points, tick_labels) pairs, one per axis of ``grid``."""
result = []
min_pt = grid.min()
max_pt = grid.max()
for axis in range(grid.ndim):
xmin = min_pt[axis]
xmax = max_pt[axis]
points = np.linspace(xmin, xmax, npoints)
indices = np.linspace(0, grid.shape[axis] - 1, npoints, dtype=int)
tick_values = grid.coord_vectors[axis][indices]
# Do not use corner point in case of a partition, use outer corner
tick_values[[0, -1]] = xmin, xmax
format_str = '{:.' + str(_digits(xmin, xmax)) + 'f}'
tick_labels = [format_str.format(f) for f in tick_values]
result += [(points, tick_labels)]
return result
def show_discrete_data(values, grid, title=None, method='',
force_show=False, fig=None, **kwargs):
"""Display a discrete 1d or 2d function.
Parameters
----------
values : `numpy.ndarray`
The values to visualize
grid : `TensorGrid` or `RectPartition`
Grid of the values
title : string, optional
Set the title of the figure
method : string, optional
1d methods:
'plot' : graph plot
'scatter' : scattered 2d points
(2nd axis <-> value)
2d methods:
'imshow' : image plot with coloring according to value,
including a colorbar.
'scatter' : cloud of scattered 3d points
(3rd axis <-> value)
'wireframe', 'plot_wireframe' : surface plot
force_show : bool, optional
Whether the plot should be forced to be shown now or deferred until
later. Note that some backends always display the plot, regardless
of this value.
fig : `matplotlib.figure.Figure`, optional
The figure to show in. Expected to be of the same "style" as a figure
given by this function. The most common use case is that fig is the
return value from an earlier call to this function.
Default: New figure
interp : {'nearest', 'linear'}, optional
Interpolation method to use.
Default: 'nearest'
axis_labels : string, optional
Axis labels, default: ['x', 'y']
update_in_place : bool, optional
Update the content of the figure in place. Intended for faster real
time plotting, typically ~5 times faster.
This is only performed for ``method == 'imshow'`` with real data and
``fig != None``. Otherwise this parameter is treated as False.
Default: False
axis_fontsize : int, optional
Fontsize for the axes. Default: 16
kwargs : {'figsize', 'saveto', ...}
Extra keyword arguments passed on to display method
See the Matplotlib functions for documentation of extra
options.
Returns
-------
fig : `matplotlib.figure.Figure`
The resulting figure. It is also shown to the user.
See Also
--------
matplotlib.pyplot.plot : Show graph plot
matplotlib.pyplot.imshow : Show data as image
matplotlib.pyplot.scatter : Show scattered 3d points
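Examples
--------
Minimal sketch (kept out of the doctest run; ``grid`` is assumed to be an
existing one-dimensional `TensorGrid` instance):
>>> values = np.sin(2 * np.pi * grid.coord_vectors[0])  # doctest: +SKIP
>>> fig = show_discrete_data(values, grid, method='plot')  # doctest: +SKIP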
"""
# Importing pyplot takes ~2 sec, only import when needed.
import matplotlib.pyplot as plt
args_re = []
args_im = []
dsp_kwargs = {}
sub_kwargs = {}
arrange_subplots = (121, 122)  # horizontal arrangement
# Create axis labels which remember their original meaning
axis_labels = kwargs.pop('axis_labels', ['x', 'y'])
values_are_complex = not is_real_dtype(values.dtype)
figsize = kwargs.pop('figsize', None)
saveto = kwargs.pop('saveto', None)
interp = kwargs.pop('interp', 'nearest')
axis_fontsize = kwargs.pop('axis_fontsize', 16)
# Check if we should and can update the plot in place
update_in_place = kwargs.pop('update_in_place', False)
if (update_in_place and
(fig is None or values_are_complex or values.ndim != 2 or
(values.ndim == 2 and method not in ('', 'imshow')))):
update_in_place = False
if values.ndim == 1: # TODO: maybe a plotter class would be better
if not method:
if interp == 'nearest':
method = 'step'
dsp_kwargs['where'] = 'mid'
elif interp == 'linear':
method = 'plot'
else:
method = 'plot'
if method == 'plot' or method == 'step' or method == 'scatter':
args_re += [grid.coord_vectors[0], values.real]
args_im += [grid.coord_vectors[0], values.imag]
else:
raise ValueError('`method` {!r} not supported'
''.format(method))
elif values.ndim == 2:
if not method:
method = 'imshow'
if method == 'imshow':
args_re = [np.rot90(values.real)]
args_im = [np.rot90(values.imag)] if values_are_complex else []
extent = [grid.min()[0], grid.max()[0],
grid.min()[1], grid.max()[1]]
if interp == 'nearest':
interpolation = 'nearest'
elif interp == 'linear':
interpolation = 'bilinear'
else:
interpolation = 'none'
dsp_kwargs.update({'interpolation': interpolation,
'cmap': 'bone',
'extent': extent,
'aspect': 'auto'})
elif method == 'scatter':
pts = grid.points()
args_re = [pts[:, 0], pts[:, 1], values.ravel().real]
args_im = ([pts[:, 0], pts[:, 1], values.ravel().imag]
if values_are_complex else [])
sub_kwargs.update({'projection': '3d'})
elif method in ('wireframe', 'plot_wireframe'):
method = 'plot_wireframe'
x, y = grid.meshgrid
args_re = [x, y, np.rot90(values.real)]
args_im = ([x, y, np.rot90(values.imag)] if values_are_complex
else [])
sub_kwargs.update({'projection': '3d'})
else:
raise ValueError('`method` {!r} not supported'
''.format(method))
else:
raise NotImplementedError('no method for {}d display implemented'
''.format(values.ndim))
# Additional keyword args are passed on to the display method
dsp_kwargs.update(**kwargs)
if fig is not None:
# Reuse figure if given as input
if not isinstance(fig, plt.Figure):
raise TypeError('`fig` {} not a matplotlib figure'.format(fig))
if not plt.fignum_exists(fig.number):
# If figure does not exist, user either closed the figure or
# is using IPython, in this case we need a new figure.
fig = plt.figure(figsize=figsize)
updatefig = False
else:
# Set current figure to given input
fig = plt.figure(fig.number)
updatefig = True
if values.ndim > 1 and not update_in_place:
# If the figure is larger than 1d, we can clear it since we
# don't reuse anything. Keeping it causes performance problems.
fig.clf()
else:
fig = plt.figure(figsize=figsize)
updatefig = False
if values_are_complex:
# Real
if len(fig.axes) == 0:
# Create new axis if needed
sub_re = plt.subplot(arrange_subplots[0], **sub_kwargs)
sub_re.set_title('Real part')
sub_re.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
if values.ndim == 2:
sub_re.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
else:
sub_re.set_ylabel('value')
else:
sub_re = fig.axes[0]
display_re = getattr(sub_re, method)
csub_re = display_re(*args_re, **dsp_kwargs)
# Axis ticks
if method == 'imshow' and not grid.is_uniform:
(xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
plt.xticks(xpts, xlabels)
plt.yticks(ypts, ylabels)
if method == 'imshow' and len(fig.axes) < 2:
# Create colorbar if none seems to exist
# Use clim from kwargs if given
if 'clim' not in kwargs:
minval_re, maxval_re = _safe_minmax(values.real)
else:
minval_re, maxval_re = kwargs['clim']
ticks_re = _colorbar_ticks(minval_re, maxval_re)
format_re = _colorbar_format(minval_re, maxval_re)
plt.colorbar(csub_re, orientation='horizontal',
ticks=ticks_re, format=format_re)
# Imaginary
if len(fig.axes) < 3:
sub_im = plt.subplot(arrange_subplots[1], **sub_kwargs)
sub_im.set_title('Imaginary part')
sub_im.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
if values.ndim == 2:
sub_im.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
else:
sub_im.set_ylabel('value')
else:
sub_im = fig.axes[2]
display_im = getattr(sub_im, method)
csub_im = display_im(*args_im, **dsp_kwargs)
# Axis ticks
if method == 'imshow' and not grid.is_uniform:
(xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
plt.xticks(xpts, xlabels)
plt.yticks(ypts, ylabels)
if method == 'imshow' and len(fig.axes) < 4:
# Create colorbar if none seems to exist
# Use clim from kwargs if given
if 'clim' not in kwargs:
minval_im, maxval_im = _safe_minmax(values.imag)
else:
minval_im, maxval_im = kwargs['clim']
ticks_im = _colorbar_ticks(minval_im, maxval_im)
format_im = _colorbar_format(minval_im, maxval_im)
plt.colorbar(csub_im, orientation='horizontal',
ticks=ticks_im, format=format_im)
else:
if len(fig.axes) == 0:
# Create new axis object if needed
sub = plt.subplot(111, **sub_kwargs)
sub.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
if values.ndim == 2:
sub.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
else:
sub.set_ylabel('value')
try:
# For 3d plots
sub.set_zlabel('z')
except AttributeError:
pass
else:
sub = fig.axes[0]
if update_in_place:
import matplotlib as mpl
imgs = [obj for obj in sub.get_children()
if isinstance(obj, mpl.image.AxesImage)]
if len(imgs) > 0 and updatefig:
imgs[0].set_data(args_re[0])
csub = imgs[0]
# Update min-max
if 'clim' not in kwargs:
minval, maxval = _safe_minmax(values)
else:
minval, maxval = kwargs['clim']
csub.set_clim(minval, maxval)
else:
display = getattr(sub, method)
csub = display(*args_re, **dsp_kwargs)
else:
display = getattr(sub, method)
csub = display(*args_re, **dsp_kwargs)
# Axis ticks
if method == 'imshow' and not grid.is_uniform:
(xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
plt.xticks(xpts, xlabels)
plt.yticks(ypts, ylabels)
if method == 'imshow':
# Add colorbar
# Use clim from kwargs if given
if 'clim' not in kwargs:
minval, maxval = _safe_minmax(values)
else:
minval, maxval = kwargs['clim']
ticks = _colorbar_ticks(minval, maxval)
format = _colorbar_format(minval, maxval)
if len(fig.axes) < 2:
# Create colorbar if none seems to exist
plt.colorbar(mappable=csub, ticks=ticks, format=format)
elif update_in_place:
# If it exists and we should update it
csub.colorbar.set_clim(minval, maxval)
csub.colorbar.set_ticks(ticks)
csub.colorbar.set_ticklabels([format % tick for tick in ticks])
csub.colorbar.draw_all()
# Fixes overlapping stuff at the expense of potentially squashed subplots
if not update_in_place:
fig.tight_layout()
if title is not None:
if not values_are_complex:
# Do not overwrite title for complex values
plt.title(title)
fig.canvas.manager.set_window_title(title)
if updatefig or plt.isinteractive():
# If we are running in interactive mode, we can always show the fig
# This causes an artifact, where users of `CallbackShow` without
# interactive mode only shows the figure after the second iteration.
plt.show(block=False)
if not update_in_place:
plt.draw()
plt.pause(0.0001)
else:
try:
sub.draw_artist(csub)
fig.canvas.blit(fig.bbox)
fig.canvas.update()
fig.canvas.flush_events()
except AttributeError:
plt.draw()
plt.pause(0.0001)
if force_show:
plt.show()
if saveto is not None:
fig.savefig(saveto)
return fig
if __name__ == '__main__':
run_doctests()
| gpl-3.0 | -3,538,194,548,052,486,700 | 33.037528 | 79 | 0.560672 | false |
kylejusticemagnuson/pyti | tests/test_stochastic.py | 1 | 15612 | from __future__ import absolute_import
import unittest
import numpy as np
from tests.sample_data import SampleData
from pyti import stochastic
class TestStochastic(unittest.TestCase):
def setUp(self):
"""Create data to use for testing."""
self.data = SampleData().get_sample_close_data()
self.percent_k_period_6_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, 0.9856979405034324, 1.0, 1.0, 0.63513513513513342,
0.27567567567568274, 1.0, 1.0, 0.68322981366460012, 0.0,
0.15515515515516184, 0.0, 0.0, 0.0, 0.06131650135257203, 0.0, 0.0,
0.4255711127487089, 1.0, 0.85463958582237798, 0.63201911589008342,
0.58422939068100166, 0.67256637168141331, 0.55555555555554825, 0.0, 1.0,
0.39352306182532032, 0.0, 0.0, 0.56253794778384958, 0.82179720704310821,
1.0, 1.0, 0.83066712049012859, 0.23241362167536711,
0.059955822025878437, 0.23704663212435031, 0.78950777202072531, 1.0,
1.0, 0.94086165373294273, 1.0, 1.0, 1.0, 0.36487221315932178,
0.23273518216421837, 0.38695960311835798, 0.0, 0.0, 0.0, 0.0,
0.33420252064319617, 0.31533601378518206, 1.0, 0.0, 0.17607726597325543,
0.038632986627041961, 0.15453194650816784, 0.0, 1.0,
0.61413043478261453, 1.0, 1.0, 0.21932367149758231, 1.0, 1.0,
0.17894736842105138, 0.0, 0.0, 0.12548638132295883, 0.2840466926070046,
0.0, 0.0, 0.80735411670663715, 0.0, 1.0, 1.0, 1.0, 0.42937563971340847,
0.14943705220061232, 0.0, 0.11392405063290814, 0.32856356631810901,
0.48005698005698194, 0.24288107202678813, 0.62814070351758511, 1.0, 1.0,
1.0, 1.0, 1.0, 0.52095130237826281, 1.0, 1.0, 1.0, 1.0,
0.86164383561643876, 0.0, 0.52147239263801737, 0.0, 0.14857651245551226,
0.28054740957966762, 0.3811983471074456, 0.0, 0.0, 0.0, 0.0, 0.0,
0.052040212891779666, 0.0, 0.35317460317461002, 0.0, 0.0, 0.0,
0.0079254079254060007, 0.0, 0.12661930631007018, 0.0, 0.0, 0.0,
0.067722772277229157, 0.0, 0.24025100851636036]
self.percent_k_period_8_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, 1.0, 0.78084415584415301, 0.49576669802445755,
1.0, 1.0, 0.68940316686967806, 0.0, 0.15515515515516184, 0.0, 0.0, 0.0,
0.048909134500121687, 0.0, 0.0, 0.25598404255319046,
0.81420233463035285, 0.79071481208548022, 0.63201911589008342,
0.58422939068100166, 0.82317801672640178, 0.81521306252488657,
0.0066371681415952387, 0.75649591685225837, 0.39352306182532032, 0.0,
0.0, 0.56253794778384958, 0.82179720704310821, 1.0, 1.0,
0.83066712049012859, 0.47447243022464258, 0.49302246426140284,
0.41436738752174873, 0.79488797727989935, 0.93264248704663077, 1.0,
0.94253770150806226, 1.0, 1.0, 1.0, 0.61401189689358671,
0.45394736842105277, 0.52963567156063163, 0.22512234910277268, 0.0, 0.0,
0.0, 0.33420252064319617, 0.23859191655801873, 0.43850499782702834, 0.0,
0.17607726597325543, 0.038632986627041961, 0.15453194650816784, 0.0,
0.26686004350978676, 0.16388687454677281, 1.0, 1.0, 0.21932367149758231,
1.0, 1.0, 0.17956423741547525, 0.0, 0.0, 0.12548638132295883,
0.2840466926070046, 0.0, 0.0, 0.61925199264255404, 0.0, 1.0, 1.0, 1.0,
0.42937563971340847, 0.14943705220061232, 0.070112589559877536,
0.17604912998976188, 0.32856356631810901, 0.18547055586131053,
0.079801871216287013, 0.53418803418803562, 1.0, 1.0, 1.0, 1.0, 1.0,
0.7004249291784771, 1.0, 1.0, 1.0, 1.0, 0.86164383561643876,
0.55342465753424508, 0.78630136986300425, 0.0, 0.14857651245551226,
0.25533807829181515, 0.32829181494662379, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040534315983417502, 0.0, 0.07229894394801159, 0.0, 0.0, 0.0,
0.0071881606765310463, 0.0, 0.1097826086956511, 0.0, 0.0, 0.0,
0.059915907498249425, 0.0, 0.19406227371469995]
self.percent_k_period_10_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, 0.76439560439560383, 1.0, 1.0,
0.74727452923687354, 0.009910802775026999, 0.15515515515516184, 0.0,
0.0, 0.0, 0.048909134500121687, 0.0, 0.0, 0.22642619094295152,
0.55651595744680871, 0.47562056737588476, 0.51459143968871746,
0.54053058216654259, 0.82317801672640178, 0.81521306252488657,
0.46356033452807566, 0.86937475109517781, 0.30235988200590008, 0.0, 0.0,
0.56253794778384958, 0.82179720704310821, 1.0, 1.0, 0.83066712049012859,
0.47447243022464258, 0.49302246426140284, 0.59904697072838564,
0.88938053097345127, 0.94829729057916878, 1.0, 0.94253770150806226, 1.0,
1.0, 1.0, 0.78188608776843938, 0.70181741335587489, 0.7141440846001329,
0.44852941176470656, 0.0, 0.0, 0.0, 0.24289324068224727,
0.17340492735312743, 0.43850499782702834, 0.0, 0.089840788476118455,
0.025024061597689246, 0.15453194650816784, 0.0, 0.26686004350978676,
0.16388687454677281, 0.70195794053661897, 0.75054387237128717,
0.21932367149758231, 1.0, 1.0, 0.2986512524084754, 0.0, 0.0,
0.12548638132295883, 0.2840466926070046, 0.0, 0.0, 0.3709144326110913,
0.0, 0.86767371601208776, 1.0, 1.0, 0.42937563971340847,
0.14943705220061232, 0.070112589559877536, 0.17604912998976188,
0.37563971340839536, 0.24257932446264166, 0.079801871216287013,
0.2063841496973037, 0.37094111172262106, 1.0, 1.0, 1.0, 1.0,
0.7004249291784771, 1.0, 1.0, 1.0, 1.0, 0.9124783362218376,
0.63122171945701588, 0.78630136986300425, 0.0, 0.14857651245551226,
0.25533807829181515, 0.32829181494662379, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040534315983417502, 0.0, 0.057382333978080118, 0.0, 0.0, 0.0,
0.0064540622627167372, 0.0, 0.10167785234899253, 0.0, 0.0, 0.0,
0.037053087757313918, 0.0, 0.17340666450986797]
self.percent_d_period_6_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, 0.99523264683447754, 0.87837837837837773,
0.63693693693693865, 0.63693693693693876, 0.75855855855856091,
0.8944099378882, 0.56107660455486663, 0.27946165627325398,
0.051718385051720613, 0.051718385051720613, 0.0, 0.020438833784190678,
0.020438833784190678, 0.020438833784190678, 0.14185703758290297,
0.47519037091623634, 0.76007023285702902, 0.82888623390415372,
0.69029603079782087, 0.62960495941749939, 0.60411710597265433,
0.40937397574565387, 0.51851851851851605, 0.46450768727510677,
0.46450768727510677, 0.13117435394177343, 0.18751264926128319,
0.46144505160898591, 0.79477838494231923, 0.9405990690143694,
0.94355570683004286, 0.68769358072183184, 0.37434552139712474,
0.17647202527519865, 0.36217007539031804, 0.6755181347150252,
0.9298359240069084, 0.98028721791098095, 0.98028721791098095,
0.98028721791098095, 1.0, 0.78829073771977398, 0.53253579844118004,
0.32818899948063268, 0.20656492842752547, 0.12898653437278598, 0.0, 0.0,
0.11140084021439872, 0.2165128448094594, 0.54984617814279269,
0.43844533792839407, 0.39202575532441847, 0.071570084200099124,
0.12308073303615508, 0.064388311045069938, 0.38484398216938925,
0.53804347826087151, 0.87137681159420488, 0.87137681159420488,
0.73977455716586071, 0.73977455716586071, 0.73977455716586071,
0.7263157894736838, 0.39298245614035049, 0.059649122807017126,
0.041828793774319611, 0.13651102464332113, 0.13651102464332113,
0.09468223086900153, 0.26911803890221236, 0.26911803890221236,
0.60245137223554568, 0.66666666666666663, 1.0, 0.80979187990446944,
0.52627089730467358, 0.19293756397134029, 0.08778703427784014,
0.1474958723170057, 0.307514865669333, 0.35050053946729304,
0.45035958520045166, 0.6236739251814577, 0.87604690117252837, 1.0, 1.0,
1.0, 0.84031710079275435, 0.84031710079275435, 0.84031710079275435, 1.0,
1.0, 0.95388127853881288, 0.62054794520547951, 0.46103874275148532,
0.17382413087933912, 0.22334963503117655, 0.14304130734505996,
0.27010742304754182, 0.22058191889570442, 0.12706611570248186, 0.0, 0.0,
0.0, 0.017346737630593221, 0.017346737630593221, 0.13507160535546323,
0.11772486772487001, 0.11772486772487001, 0.0, 0.0026418026418020004,
0.0026418026418020004, 0.044848238078492059, 0.04220643543669006,
0.04220643543669006, 0.0, 0.022574257425743052, 0.022574257425743052,
0.10265792693119651]
self.percent_d_period_8_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, 0.75887028462287021,
0.75887028462287009, 0.83192223267481913, 0.89646772228989269,
0.56313438895655932, 0.28151944067494666, 0.051718385051720613,
0.051718385051720613, 0.0, 0.016303044833373897, 0.016303044833373897,
0.016303044833373897, 0.085328014184396825, 0.35672879239451444,
0.62030039642300794, 0.74564542086863883, 0.66898777288552169,
0.67980884109916229, 0.74087348997742997, 0.54834274913096126,
0.52611538250624668, 0.38555204893972461, 0.38333965955919291,
0.13117435394177343, 0.18751264926128319, 0.46144505160898591,
0.79477838494231923, 0.9405990690143694, 0.94355570683004286,
0.76837985023825706, 0.59938733832539137, 0.46062076066926472,
0.56742594302101701, 0.71396595061609303, 0.9091768214421766,
0.95839339618489772, 0.98084590050268738, 0.98084590050268738, 1.0,
0.87133729896452883, 0.68931975510487975, 0.53253164562509037,
0.40290179636148565, 0.25158600688780147, 0.075040783034257555, 0.0,
0.11140084021439872, 0.19093147906707164, 0.33709981167608111,
0.22569897146168236, 0.20486075460009459, 0.071570084200099124,
0.12308073303615508, 0.064388311045069938, 0.14046399667265153,
0.14358230601885319, 0.4769156393521865, 0.72129562484892418,
0.73977455716586071, 0.73977455716586071, 0.73977455716586071,
0.72652141247182511, 0.3931880791384918, 0.05985474580515842,
0.041828793774319611, 0.13651102464332113, 0.13651102464332113,
0.09468223086900153, 0.20641733088085135, 0.20641733088085135,
0.53975066421418472, 0.66666666666666663, 1.0, 0.80979187990446944,
0.52627089730467358, 0.21630842715796614, 0.13186625725008391,
0.19157509528924946, 0.23002775072306048, 0.19794533113190219,
0.26648682042187771, 0.53799663513477425, 0.84472934472934524, 1.0, 1.0,
1.0, 0.9001416430594924, 0.9001416430594924, 0.9001416430594924, 1.0,
1.0, 0.95388127853881288, 0.80502283105022787, 0.73378995433789607,
0.44657534246574976, 0.31162596077283883, 0.1346381969157758,
0.24406880189798374, 0.19454329774614632, 0.10943060498220793, 0.0, 0.0,
0.0, 0.013511438661139167, 0.013511438661139167, 0.037611086643809695,
0.02409964798267053, 0.02409964798267053, 0.0, 0.0023960535588436823,
0.0023960535588436823, 0.038990256457394047, 0.036594202898550365,
0.036594202898550365, 0.0, 0.019971969166083143, 0.019971969166083143,
0.084659393737649788]
self.percent_d_period_10_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan,
0.92146520146520128, 0.91575817641229118, 0.58572844400396684,
0.30411349572235413, 0.055021985976729616, 0.051718385051720613, 0.0,
0.016303044833373897, 0.016303044833373897, 0.016303044833373897,
0.075475396980983836, 0.26098071612992008, 0.41952090525521496,
0.51557598817047035, 0.5102475297437149, 0.62610001286055394,
0.72630722047261032, 0.70065047125978808, 0.71604938271604668,
0.54509832254305113, 0.39057821103369261, 0.10078662733530003,
0.18751264926128319, 0.46144505160898591, 0.79477838494231923,
0.9405990690143694, 0.94355570683004286, 0.76837985023825706,
0.59938733832539137, 0.52218062173814372, 0.66048332198774662,
0.81224159742700186, 0.94589260718420665, 0.96361166402907694,
0.98084590050268738, 0.98084590050268738, 1.0, 0.92729536258947975,
0.82790116704143812, 0.73261586190814898, 0.62149696990690473,
0.38755783212161315, 0.14950980392156885, 0.0, 0.080964413560749085,
0.13876605601179157, 0.284934388620801, 0.20396997506005191,
0.17611526210104891, 0.038288283357935902, 0.089798932193991862,
0.059852002701952366, 0.14046399667265153, 0.14358230601885319,
0.37756828619772614, 0.53879622915155967, 0.55727516146849621,
0.65662251462295651, 0.73977455716586071, 0.76621708413615852,
0.43288375080282515, 0.099550417469491795, 0.041828793774319611,
0.13651102464332113, 0.13651102464332113, 0.09468223086900153,
0.12363814420369711, 0.12363814420369711, 0.41286271620772635,
0.62255790533736255, 0.95589123867069592, 0.80979187990446944,
0.52627089730467358, 0.21630842715796614, 0.13186625725008391,
0.20726714431934493, 0.26475605595359963, 0.23267363636244134,
0.17625511512541078, 0.21904237754540393, 0.52577508713997501,
0.79031370390754041, 1.0, 1.0, 0.9001416430594924, 0.9001416430594924,
0.9001416430594924, 1.0, 1.0, 0.97082611207394587, 0.84790001855961783,
0.7766671418472858, 0.47250769644000673, 0.31162596077283883,
0.1346381969157758, 0.24406880189798374, 0.19454329774614632,
0.10943060498220793, 0.0, 0.0, 0.0, 0.013511438661139167,
0.013511438661139167, 0.032638883320499211, 0.019127444659360039,
0.019127444659360039, 0.0, 0.0021513540875722457, 0.0021513540875722457,
0.036043971537236423, 0.033892617449664174, 0.033892617449664174, 0.0,
0.012351029252437973, 0.012351029252437973, 0.070153250755727301]
def test_percent_k_period_6(self):
period = 6
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_6_expected)
def test_percent_k_period_8(self):
period = 8
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_8_expected)
def test_percent_k_period_10(self):
period = 10
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_10_expected)
def test_percent_k_invalid_period(self):
period = 128
with self.assertRaises(Exception):
stochastic.percent_k(self.data, period)
def test_percent_d_period_6(self):
period = 6
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_6_expected)
def test_percent_d_period_8(self):
period = 8
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_8_expected)
def test_percent_d_period_10(self):
period = 10
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_10_expected)
def test_percent_d_invalid_period(self):
period = 128
with self.assertRaises(Exception) as cm:
stochastic.percent_d(self.data, period)
expected = "Error: data_len < period"
self.assertEqual(str(cm.exception), expected)
| mit | 5,619,512,950,334,885,000 | 61.951613 | 83 | 0.711248 | false |
edcast-inc/edx-platform-edcast | common/djangoapps/student/tests/test_login.py | 1 | 25194 | '''
Tests for student activation and login
'''
import json
import unittest
from unittest import skip
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import HttpResponseBadRequest, HttpResponse
import httpretty
from mock import patch
from social.apps.django_app.default.models import UserSocialAuth
from external_auth.models import ExternalAuthMap
from student.tests.factories import UserFactory, RegistrationFactory, UserProfileFactory
from student.views import login_oauth_token
from third_party_auth.tests.utils import (
ThirdPartyOAuthTestMixin,
ThirdPartyOAuthTestMixinFacebook,
ThirdPartyOAuthTestMixinGoogle
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
class LoginTest(TestCase):
'''
Test student.views.login_user() view
'''
def setUp(self):
super(LoginTest, self).setUp()
# Create one user and save it to the database
self.user = UserFactory.build(username='test', email='[email protected]')
self.user.set_password('test_password')
self.user.save()
# Create a registration for the user
RegistrationFactory(user=self.user)
# Create a profile for the user
UserProfileFactory(user=self.user)
# Create the test client
self.client = Client()
cache.clear()
# Store the login url
try:
self.url = reverse('login_post')
except NoReverseMatch:
self.url = reverse('login')
def test_login_success(self):
response, mock_audit_log = self._login_response('[email protected]', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success', u'[email protected]'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_success_no_pii(self):
response, mock_audit_log = self._login_response('[email protected]', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success'])
self._assert_not_in_audit_log(mock_audit_log, 'info', [u'[email protected]'])
def test_login_success_unicode_email(self):
unicode_email = u'test' + unichr(40960) + u'@edx.org'
self.user.email = unicode_email
self.user.save()
response, mock_audit_log = self._login_response(unicode_email, 'test_password', patched_audit_log='student.models.AUDIT_LOG')
self._assert_response(response, success=True)
self._assert_audit_log(mock_audit_log, 'info', [u'Login success', unicode_email])
def test_login_fail_no_user_exists(self):
nonexistent_email = u'[email protected]'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
@patch.dict("django.conf.settings.FEATURES", {'ADVANCED_SECURITY': True})
def test_login_fail_incorrect_email_with_advanced_security(self):
nonexistent_email = u'[email protected]'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_fail_no_user_exists_no_pii(self):
nonexistent_email = u'[email protected]'
response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [nonexistent_email])
def test_login_fail_wrong_password(self):
response, mock_audit_log = self._login_response('[email protected]', 'wrong_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'[email protected]', u'invalid'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_fail_wrong_password_no_pii(self):
response, mock_audit_log = self._login_response('[email protected]', 'wrong_password')
self._assert_response(response, success=False,
value='Email or password is incorrect')
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'invalid'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'[email protected]'])
def test_login_not_activated(self):
# De-activate the user
self.user.is_active = False
self.user.save()
# Should now be unable to login
response, mock_audit_log = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=False,
value="This account has not been activated")
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_login_not_activated_no_pii(self):
# De-activate the user
self.user.is_active = False
self.user.save()
# Should now be unable to login
response, mock_audit_log = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=False,
value="This account has not been activated")
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'test'])
def test_login_unicode_email(self):
unicode_email = u'[email protected]' + unichr(40960)
response, mock_audit_log = self._login_response(unicode_email, 'test_password')
self._assert_response(response, success=False)
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', unicode_email])
def test_login_unicode_password(self):
unicode_password = u'test_password' + unichr(1972)
response, mock_audit_log = self._login_response('[email protected]', unicode_password)
self._assert_response(response, success=False)
self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'[email protected]', u'invalid'])
def test_logout_logging(self):
response, _ = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
logout_url = reverse('logout')
with patch('student.models.AUDIT_LOG') as mock_audit_log:
response = self.client.post(logout_url)
self.assertEqual(response.status_code, 302)
self._assert_audit_log(mock_audit_log, 'info', [u'Logout', u'test'])
def test_login_user_info_cookie(self):
response, _ = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
# Verify the format of the "user info" cookie set on login
cookie = self.client.cookies[settings.EDXMKTG_USER_INFO_COOKIE_NAME]
user_info = json.loads(cookie.value)
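# The parsed payload looks roughly like the following (values illustrative,
# matching the assertions below):
#   {"version": ..., "username": "test", "email": "[email protected]", "header_urls": {...}}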
# Check that the version is set
self.assertEqual(user_info["version"], settings.EDXMKTG_USER_INFO_COOKIE_VERSION)
# Check that the username and email are set
self.assertEqual(user_info["username"], self.user.username)
self.assertEqual(user_info["email"], self.user.email)
# Check that the URLs are absolute
for url in user_info["header_urls"].values():
self.assertIn("http://testserver/", url)
@skip('we skip in edcast')
def test_logout_deletes_mktg_cookies(self):
response, _ = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
# Check that the marketing site cookies have been set
self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)
# Log out
logout_url = reverse('logout')
response = self.client.post(logout_url)
# Check that the marketing site cookies have been deleted
# (cookies are deleted by setting an expiration date in 1970)
for cookie_name in [settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, settings.EDXMKTG_USER_INFO_COOKIE_NAME]:
cookie = self.client.cookies[cookie_name]
self.assertIn("01-Jan-1970", cookie.get('expires'))
@override_settings(
EDXMKTG_LOGGED_IN_COOKIE_NAME=u"unicode-logged-in",
EDXMKTG_USER_INFO_COOKIE_NAME=u"unicode-user-info",
)
@skip('we skip in edcast')
def test_unicode_mktg_cookie_names(self):
# When logged in cookie names are loaded from JSON files, they may
# have type `unicode` instead of `str`, which can cause errors
# when calling Django cookie manipulation functions.
response, _ = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
response = self.client.post(reverse('logout'))
self.assertRedirects(response, "/")
@patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
def test_logout_logging_no_pii(self):
response, _ = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
logout_url = reverse('logout')
with patch('student.models.AUDIT_LOG') as mock_audit_log:
response = self.client.post(logout_url)
self.assertEqual(response.status_code, 302)
self._assert_audit_log(mock_audit_log, 'info', [u'Logout'])
self._assert_not_in_audit_log(mock_audit_log, 'info', [u'test'])
def test_login_ratelimited_success(self):
# Try (and fail) logging in with fewer attempts than the limit of 30
# and verify that you can still successfully log in afterwards.
for i in xrange(20):
password = u'test_password{0}'.format(i)
response, _audit_log = self._login_response('[email protected]', password)
self._assert_response(response, success=False)
# now try logging in with a valid password
response, _audit_log = self._login_response('[email protected]', 'test_password')
self._assert_response(response, success=True)
def test_login_ratelimited(self):
# try logging in 30 times, the default limit on the number of failed
# login attempts allowed in one 5-minute period before the rate gets limited
for i in xrange(30):
password = u'test_password{0}'.format(i)
self._login_response('[email protected]', password)
# check to see if this response indicates that this was ratelimited
response, _audit_log = self._login_response('[email protected]', 'wrong_password')
self._assert_response(response, success=False, value='Too many failed login attempts')
@patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
def test_single_session(self):
creds = {'email': '[email protected]', 'password': 'test_password'}
client1 = Client()
client2 = Client()
response = client1.post(self.url, creds)
self._assert_response(response, success=True)
# Reload the user from the database
self.user = UserFactory.FACTORY_FOR.objects.get(pk=self.user.pk)
self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
# second login should log out the first
response = client2.post(self.url, creds)
self._assert_response(response, success=True)
try:
# this test can be run with either lms or studio settings
# since studio does not have a dashboard url, we should
# look for another url that is login_required, in that case
url = reverse('dashboard')
except NoReverseMatch:
url = reverse('upload_transcripts')
response = client1.get(url)
# client1 will be logged out
self.assertEqual(response.status_code, 302)
@patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
def test_single_session_with_url_not_having_login_required_decorator(self):
# Access the logout url; since it does not have the login_required decorator,
# it avoids the redirect and goes through enforce_single_login.
creds = {'email': '[email protected]', 'password': 'test_password'}
client1 = Client()
client2 = Client()
response = client1.post(self.url, creds)
self._assert_response(response, success=True)
self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
# second login should log out the first
response = client2.post(self.url, creds)
self._assert_response(response, success=True)
url = reverse('logout')
response = client1.get(url)
self.assertEqual(response.status_code, 302)
def test_change_enrollment_400(self):
"""
Tests that a 400 in change_enrollment doesn't lead to a 404
and in fact just logs in the user without incident
"""
# add this post param to trigger a call to change_enrollment
extra_post_params = {"enrollment_action": "enroll"}
with patch('student.views.change_enrollment') as mock_change_enrollment:
mock_change_enrollment.return_value = HttpResponseBadRequest("I am a 400")
response, _ = self._login_response(
'[email protected]',
'test_password',
extra_post_params=extra_post_params,
)
response_content = json.loads(response.content)
self.assertIsNone(response_content["redirect_url"])
self._assert_response(response, success=True)
def test_change_enrollment_200_no_redirect(self):
"""
Tests "redirect_url" is None if change_enrollment returns a HttpResponse
with no content
"""
# add this post param to trigger a call to change_enrollment
extra_post_params = {"enrollment_action": "enroll"}
with patch('student.views.change_enrollment') as mock_change_enrollment:
mock_change_enrollment.return_value = HttpResponse()
response, _ = self._login_response(
'[email protected]',
'test_password',
extra_post_params=extra_post_params,
)
response_content = json.loads(response.content)
self.assertIsNone(response_content["redirect_url"])
self._assert_response(response, success=True)
def _login_response(self, email, password, patched_audit_log='student.views.AUDIT_LOG', extra_post_params=None):
''' Post the login info '''
post_params = {'email': email, 'password': password}
if extra_post_params is not None:
post_params.update(extra_post_params)
with patch(patched_audit_log) as mock_audit_log:
result = self.client.post(self.url, post_params)
return result, mock_audit_log
def _assert_response(self, response, success=None, value=None):
'''
Assert that the response had status 200 and returned a valid
JSON-parseable dict.
If success is provided, assert that the response had that
value for 'success' in the JSON dict.
If value is provided, assert that the response contained that
value for 'value' in the JSON dict.
'''
self.assertEqual(response.status_code, 200)
try:
response_dict = json.loads(response.content)
except ValueError:
self.fail("Could not parse response content as JSON: %s"
% str(response.content))
if success is not None:
self.assertEqual(response_dict['success'], success)
if value is not None:
msg = ("'%s' did not contain '%s'" %
(str(response_dict['value']), str(value)))
self.assertTrue(value in response_dict['value'], msg)
def _assert_audit_log(self, mock_audit_log, level, log_strings):
"""
Check that the audit log has received the expected call as its last call.
"""
method_calls = mock_audit_log.method_calls
name, args, _kwargs = method_calls[-1]
self.assertEquals(name, level)
self.assertEquals(len(args), 1)
format_string = args[0]
for log_string in log_strings:
self.assertIn(log_string, format_string)
def _assert_not_in_audit_log(self, mock_audit_log, level, log_strings):
"""
Check that the audit log has received the expected call as its last call.
"""
method_calls = mock_audit_log.method_calls
name, args, _kwargs = method_calls[-1]
self.assertEquals(name, level)
self.assertEquals(len(args), 1)
format_string = args[0]
for log_string in log_strings:
self.assertNotIn(log_string, format_string)
class ExternalAuthShibTest(ModuleStoreTestCase):
"""
Tests how login_user() interacts with ExternalAuth, in particular Shib
"""
def setUp(self):
super(ExternalAuthShibTest, self).setUp()
self.course = CourseFactory.create(
org='Stanford',
number='456',
display_name='NO SHIB',
user_id=self.user.id,
)
self.shib_course = CourseFactory.create(
org='Stanford',
number='123',
display_name='Shib Only',
enrollment_domain='shib:https://idp.stanford.edu/',
user_id=self.user.id,
)
self.user_w_map = UserFactory.create(email='[email protected]')
self.extauth = ExternalAuthMap(external_id='[email protected]',
external_email='[email protected]',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
user=self.user_w_map)
self.user_w_map.save()
self.extauth.save()
self.user_wo_map = UserFactory.create(email='[email protected]')
self.user_wo_map.save()
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_login_page_redirect(self):
"""
Tests that when a shib user types their email address into the login page, they get redirected
to the shib login.
"""
response = self.client.post(reverse('login'), {'email': self.user_w_map.email, 'password': ''})
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
self.assertEqual(obj, {
'success': False,
'redirect': reverse('shib-login'),
})
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_login_required_dashboard(self):
"""
        Tests the redirect when @login_required is triggered on the dashboard, which should always be the normal login,
since there is no course context
"""
response = self.client.get(reverse('dashboard'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], 'http://testserver/login?next=/dashboard')
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_externalauth_login_required_course_context(self):
"""
Tests the redirects when visiting course-specific URL with @login_required.
Should vary by course depending on its enrollment_domain
"""
TARGET_URL = reverse('courseware', args=[self.course.id.to_deprecated_string()]) # pylint: disable=invalid-name
noshib_response = self.client.get(TARGET_URL, follow=True)
self.assertEqual(noshib_response.redirect_chain[-1],
('http://testserver/login?next={url}'.format(url=TARGET_URL), 302))
self.assertContains(noshib_response, ("Sign in or Register | {platform_name}"
.format(platform_name=settings.PLATFORM_NAME)))
self.assertEqual(noshib_response.status_code, 200)
TARGET_URL_SHIB = reverse('courseware', args=[self.shib_course.id.to_deprecated_string()]) # pylint: disable=invalid-name
shib_response = self.client.get(**{'path': TARGET_URL_SHIB,
'follow': True,
'REMOTE_USER': self.extauth.external_id,
'Shib-Identity-Provider': 'https://idp.stanford.edu/'})
# Test that the shib-login redirect page with ?next= and the desired page are part of the redirect chain
# The 'courseware' page actually causes a redirect itself, so it's not the end of the chain and we
# won't test its contents
self.assertEqual(shib_response.redirect_chain[-3],
('http://testserver/shib-login/?next={url}'.format(url=TARGET_URL_SHIB), 302))
self.assertEqual(shib_response.redirect_chain[-2],
('http://testserver{url}'.format(url=TARGET_URL_SHIB), 302))
self.assertEqual(shib_response.status_code, 200)
@httpretty.activate
class LoginOAuthTokenMixin(ThirdPartyOAuthTestMixin):
"""
Mixin with tests for the login_oauth_token view. A TestCase that includes
this must define the following:
BACKEND: The name of the backend from python-social-auth
USER_URL: The URL of the endpoint that the backend retrieves user data from
UID_FIELD: The field in the user data that the backend uses as the user id
"""
def setUp(self):
super(LoginOAuthTokenMixin, self).setUp()
self.url = reverse(login_oauth_token, kwargs={"backend": self.BACKEND})
def _assert_error(self, response, status_code, error):
"""Assert that the given response was a 400 with the given error code"""
self.assertEqual(response.status_code, status_code)
self.assertEqual(json.loads(response.content), {"error": error})
self.assertNotIn("partial_pipeline", self.client.session)
def test_success(self):
self._setup_provider_response(success=True)
response = self.client.post(self.url, {"access_token": "dummy"})
self.assertEqual(response.status_code, 204)
self.assertEqual(self.client.session['_auth_user_id'], self.user.id) # pylint: disable=no-member
def test_invalid_token(self):
self._setup_provider_response(success=False)
response = self.client.post(self.url, {"access_token": "dummy"})
self._assert_error(response, 401, "invalid_token")
def test_missing_token(self):
response = self.client.post(self.url)
self._assert_error(response, 400, "invalid_request")
def test_unlinked_user(self):
UserSocialAuth.objects.all().delete()
self._setup_provider_response(success=True)
response = self.client.post(self.url, {"access_token": "dummy"})
self._assert_error(response, 401, "invalid_token")
def test_get_method(self):
response = self.client.get(self.url, {"access_token": "dummy"})
self.assertEqual(response.status_code, 405)
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestFacebook(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinFacebook, TestCase):
"""Tests login_oauth_token with the Facebook backend"""
pass
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestGoogle(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinGoogle, TestCase):
"""Tests login_oauth_token with the Google backend"""
pass
| agpl-3.0 | 888,919,011,720,206,700 | 45.828996 | 134 | 0.645114 | false |
yantrabuddhi/blocos | tabs/UploadTab.py | 1 | 13455 | # -*- coding: utf-8 -*-
# This file is part of the Monitor program
# Monitor is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation (FSF); either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Centro de Tecnologia da Informação Renato Archer, Campinas-SP, Brazil
# Project funded by the Conselho Nacional de Desenvolvimento Científico e Tecnológico (CNPQ)
# This code is part of the BR-Gogo project, available at http://sourceforge.net/projects/br-gogo/
import os
if os.name=='nt':
import win32api
import win32con
from gettext import gettext as _
try:
import gtk
except ImportError:
    print _('GTK+ Runtime Environment needs to be installed:')
print "http://downloads.sourceforge.net/gladewin32/gtk-2.12.9-win32-1.exe?modtime=1208401479&big_mirror=0"
raw_input()
from Tab import Tab
from pyLogoCompiler.Exceptions import ConnectionProblem
import pango
import math
from cairoplot import plots
from cairoplot.series import Series
# >>>>>>>>>>>>>>>>> temp
# For non-dev machines, quick hack at attempt to show traceback in in a msg dialog
import sys
import traceback
def logexception(type, value, tb):
text = ' '.join(t for t in traceback.format_exception(type, value, tb))
print text
try:
dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, \
gtk.MESSAGE_INFO, \
gtk.BUTTONS_OK, \
text)
dialog.run()
dialog.destroy()
except:
pass
sys.excepthook = logexception
# <<<<<<<<<<<<<<<<<<< temp
class UploadTab(Tab):
LAST_DATA_FILENAME = '.last_data.txt'
defaultTab = 9
def __init__(self, gui, GoGo, liststoreSensorsTypes, sensorTypes):
self.gui = gui
self.GoGo = GoGo
self.sensorTypes = sensorTypes
self.dataFilename = ""
self.data = []
self.colDataRaw = []
self.colDataMapped = []
self.textviewData = self.gui.get_widget('textviewData')
self.textviewData.modify_font(pango.FontDescription('monospace'))
self.textviewBuffer = gtk.TextBuffer()
self.textviewData.set_buffer(self.textviewBuffer)
self.spinbuttonColumns = self.gui.get_widget('spinbuttonColumns')
self.checkbuttonShowHeaders = self.gui.get_widget('checkbuttonShowHeaders')
self.checkbuttonTwoLineHeader = self.gui.get_widget('checkbuttonTwoLineHeader')
self.radiobuttonUploadAuto = self.gui.get_widget("radiobuttonUploadAuto")
self.uploadCount = self.gui.get_widget("spinbuttonUploadCount")
self.progressbar = self.gui.get_widget('progressbarUpload')
self.lblProgress = self.gui.get_widget('labelValuesUploaded')
self.colSpec = []
for c in range(8):
w = self.gui.get_widget('comboboxC%i' % c)
w.set_active(0)
w.set_sensitive(c == 0)
w.set_model(liststoreSensorsTypes)
self.colSpec.append(w)
try:
f=open(self.LAST_DATA_FILENAME,'r')
self.textviewBuffer.set_text(f.read())
f.close()
except:
pass
self.graphContainer = None
self.graphWidth = 50
self.graphHeight = 50
self.graphData = None
self.graph = None
self.graphVisible = False
self.graphUpdateRequired = False
self.notebookDataView = self.gui.get_widget('notebookDataView')
#self.notebookDataView.set_current_page(0)
def buttonStartUpload_clicked_cb(self,widget):
try:
self.progressbar.set_fraction(0.0)
self.lblProgress.set_text(_("%i Values Uploaded") % 0)
while gtk.events_pending():
gtk.main_iteration(False)
if self.radiobuttonUploadAuto.get_active():
self.data = self.GoGo.autoUpload(None, self.uploadProgress_cb)
else:
count = self.uploadCount.get_value_as_int()
self.data = self.GoGo.autoUpload(count, self.uploadProgress_cb)
except ConnectionProblem:
self.showWarning(_("Check GoGo plugged in, turned on and connected"))
return
except:
self.showError(_("Error communicating"))
return
else:
self.lblProgress.set_text(_("%i Values Uploaded") % len(self.data))
if self.refreshTextView():
self.showInfo(_("Data successfully uploaded."), self.gui.get_widget('mainWindow'))
def buttonSaveData_clicked_cb(self,widget):
if len(self.data) == 0:
return
dialog = gtk.FileChooserDialog(_("Save As.."), None, gtk.FILE_CHOOSER_ACTION_SAVE,
(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE, gtk.RESPONSE_OK))
dialog.set_default_response(gtk.RESPONSE_OK)
response = dialog.run()
if response == gtk.RESPONSE_OK:
self.dataFilename = dialog.get_filename()
try:
FILE = open(self.dataFilename,"w")
FILE.write(self.dataFormattedForSaving())
FILE.close()
except:
self.showError(Exception.__str__())
dialog.destroy()
def buttonClearData_clicked_cb(self,widget):
self.data = []
self.colDataRaw = []
self.colDataMapped = []
self.dataFilename = ""
self.progressbar.set_fraction(0.0)
self.lblProgress.set_text(_("%i Values Uploaded") % 0)
self.refreshTextView()
def spinbuttonColumns_changed_cb(self,widget):
cc = self.spinbuttonColumns.get_value_as_int()
for c in range(8):
self.colSpec[c].set_sensitive(c < cc)
self.refreshTextView()
def colSpec_changed_cb(self,widget):
self.refreshTextView()
def checkbuttonShowHeaders_toggled_cb(self,widget):
self.checkbuttonTwoLineHeader.set_sensitive(widget.get_active())
self.refreshTextView()
def checkbuttonTwoLineHeader_toggled_cb(self,widget):
self.refreshTextView()
def notebookDataView_switch_page_cb(self,widget,page,page_num):
self.graphVisible = page_num == 1
if self.graphVisible:
self.refreshGraph()
def getSelectedSensors(self):
sensorIndexes = [w.get_active() for w in self.colSpec[:self.spinbuttonColumns.get_value_as_int()]]
for i in [i for i,v in enumerate(sensorIndexes) if v == -1]:
sensorIndexes[i] = 0
try:
return [self.sensorTypes[n] for n in sensorIndexes]
except:
return None
def calibrateData(self):
self.colDataMapped = []
maxRows = max([len(c) for c in self.colDataRaw])
sensors = self.getSelectedSensors()
for c,data in enumerate(self.colDataRaw):
m = [round(sensors[c].get_new_value(v),3) for v in data]
if len(m) < maxRows:
m += [''] * (maxRows - len(m))
self.colDataMapped += [m]
def getSensorHeaders(self):
self.useHdrs = False
self.hdrs = []
if not self.checkbuttonShowHeaders.get_active():
return False
sensors = self.getSelectedSensors()
if not sensors:
return False
self.hdrs = [[s.name,s.unit] for s in sensors]
for i in [i for i,h in enumerate(self.hdrs) if h[1] == None or h[1] == '']:
self.hdrs[i][1] = 'None'
self.useHdrs = True
return True
def csvHeaders(self):
if not self.useHdrs:
return ''
if not self.checkbuttonTwoLineHeader.get_active():
t = ','.join([('%s (%s)' % (h[0],h[1])) for h in self.hdrs]) + '\n'
return t
t = ','.join([h[0] for h in self.hdrs]) + '\n'
t += ','.join([h[1] for h in self.hdrs]) + '\n'
return t
def displayHeaders(self):
if not self.useHdrs:
return ''
t = ''
if not self.checkbuttonTwoLineHeader.get_active():
hdrs = [('%s (%s)' % (h[0],h[1])) for h in self.hdrs]
hdrs = [h.rjust(max(len(h),self.defaultTab), ' ') for h in hdrs]
self.hdrTabs = []
for h in hdrs:
t += h + ' '
self.hdrTabs.extend([len(h)])
return t + '\n' + ('-' * len(t)) + '\n'
hdrs0 = []
hdrs1 = []
for h in self.hdrs:
w = max(len(h[0]), len(h[1]), self.defaultTab)
hdrs0 += [h[0].rjust(w, ' ')]
hdrs1 += [h[1].rjust(w, ' ')]
self.hdrTabs = []
for h in hdrs0:
t += h + ' '
self.hdrTabs.extend([len(h)])
w = len(t)
t += '\n'
for h in hdrs1:
t += h + ' '
return t + '\n' + ('-' * w) + '\n'
def dataFormattedForSaving(self):
t = self.csvHeaders()
for line in self.colDataMapped:
t = t + ','.join(map(str, line)) + '\n'
return t
def dataFormattedForDisplay(self):
t = self.displayHeaders()
if len(self.colDataMapped) == 1:
d = zip(self.colDataMapped[0])
else:
d = zip(*self.colDataMapped)
for r,rowData in enumerate(d):
for c,v in enumerate(rowData):
if self.useHdrs:
t = t + str(v).rjust(self.hdrTabs[c], ' ') + ' '
else:
t = t + str(v).rjust(self.defaultTab, ' ') + ' '
t = t + '\n'
return t
def refreshTextView(self):
if len(self.data) == 0:
self.textviewBuffer.set_text("")
return False
if self.getSensorHeaders():
nCols = self.spinbuttonColumns.get_value_as_int()
if nCols == 1:
self.colDataRaw = [self.data]
else:
self.colDataRaw = list(self.data[i::nCols] for i in range(nCols))
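            # e.g. 7 values split into 3 columns gives [[v0,v3,v6], [v1,v4], [v2,v5]];
            # the loop below trims the longer leading columns so all columns end up the same length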
            for i in range(nCols-2, -1, -1): # iterate downward so surplus values cascade off the longer leading columns
                if len(self.colDataRaw[i]) > len(self.colDataRaw[i+1]):
                    self.colDataRaw[i].pop()
self.calibrateData()
self.textviewBuffer.set_text(self.dataFormattedForDisplay())
self.graphUpdateRequired = True
self.refreshGraph()
return True
else:
self.showWarning(_("Please, add at least one sensor in Sensors Tab"))
return False
def refreshGraph(self):
if not (self.graphVisible and self.graphUpdateRequired): return
if self.graphContainer == None:
self.graphContainer = self.gui.get_widget("dataGraphContainer")
if self.graphContainer == None: return
r = self.graphContainer.get_allocation()
self.graphWidth, self.graphHeight = (r.width,r.height)
self.graph = None
data = {}
for c,t in enumerate(self.colDataMapped):
lbl = '%(colNum)i-%(name)s (%(units)s)' % \
{'colNum': c+1, 'name': self.hdrs[c][0], 'units': self.hdrs[c][1]}
data[lbl] = t
#if len(self.data) % self.spinbuttonColumns.get_value_as_int() > 0:
# self.showWarning(_("The graph can not be generated with this configuration.\nPlease check the number of columns."))
#else:
self.drawGraph(data,[str(x) for x in range(len(self.colDataMapped[0]))])
self.graphUpdateRequired = False
def drawGraph(self, data=[], xLabels=[]):
if data == {}: return
if self.graph != None:
self.graphContainer.remove(self.graph.handler)
self.graph = plots.DotLinePlot('gtk', data=data, x_labels=xLabels,
width=self.graphWidth, height=self.graphHeight, background="white",
border=5, axis=True, grid=True, series_legend = True)
self.graphContainer.add(self.graph.handler)
self.graph.handler.show()
def uploadProgress_cb(self, count, total):
self.progressbar.set_fraction(float(count) / total)
self.lblProgress.set_text(_('%i Values Uploaded' % count))
while gtk.events_pending():
gtk.main_iteration(False)
| gpl-3.0 | -6,296,304,755,429,825,000 | 33.242347 | 127 | 0.554943 | false |
AnumSheraz/IP-Controlled-Robotic-Car | Manual-IP-Controlled-Robotic-Car/Code.py | 1 | 1696 |
import sys
from PyQt4 import QtGui, QtCore
import time, socket, json
from main import Ui_MainWindow
s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
IP = "localhost"
PORT = 8001
class main_menu(QtGui.QMainWindow):
def __init__(self):
super(main_menu, self).__init__()
self.ui=Ui_MainWindow()
self.ui.setupUi(self)
self.show()
def keyPressEvent(self, event1):
verbose = {"FB":"", "LR":""}
if event1.key() == QtCore.Qt.Key_W:
#print "Up pressed"
verbose["FB"] = "F"
if event1.key() == QtCore.Qt.Key_S:
#print "D pressed"
verbose["FB"] = "B"
if event1.key() == QtCore.Qt.Key_A:
#print "L pressed"
verbose["LR"] = "L"
if event1.key() == QtCore.Qt.Key_D:
#print "R pressed"
verbose["LR"] = "R"
print verbose
json_data=json.dumps(verbose)
s.sendto((json_data), (IP, PORT))
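    # Example of the wire format (assuming a receiver on the car listens on UDP localhost:8001):
    # holding W sends the JSON payload {"FB": "F", "LR": ""}; releasing it sends {"FB": "S", "LR": ""}.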
def keyReleaseEvent(self, event):
verbose = {"FB":"", "LR":""}
if event.key() == QtCore.Qt.Key_W:
#print "Up rel"
verbose["FB"] = "S"
if event.key() == QtCore.Qt.Key_S:
#print "D rel"
verbose["FB"] = "S"
if event.key() == QtCore.Qt.Key_A:
#print "L pressed"
verbose["LR"] = "S"
if event.key() == QtCore.Qt.Key_D:
#print "R pressed"
verbose["LR"] = "S"
print verbose
json_data=json.dumps(verbose)
s.sendto((json_data), (IP, PORT))
def main():
app = QtGui.QApplication(sys.argv)
ex = main_menu()
app.exec_()
if __name__ == '__main__':
main()
| gpl-2.0 | 5,014,913,625,355,572,000 | 23.228571 | 49 | 0.504717 | false |
be-cloud-be/horizon-addons | horizon/school_evaluations/wizard/evaluation_summary.py | 1 | 3973 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2015 be-cloud.be
# Jerome Sonnet <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp import api, fields, models, _
from openerp.exceptions import UserError
from openerp.tools.safe_eval import safe_eval
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from dateutil.relativedelta import relativedelta
from datetime import datetime,date
import openerp.addons.decimal_precision as dp
_logger = logging.getLogger(__name__)
class EvaluationSummaryWizard(models.TransientModel):
_name = "school.evaluation.summary.wizard"
_description = "School Evaluation Summary Wizard"
year_id = fields.Many2one('school.year', string='Year', default=lambda self: self.env.user.current_year_id, ondelete='cascade')
domain_id = fields.Many2one('school.domain', string='Domain', ondelete='cascade')
session = fields.Selection([
('first','First Session'),
('second','Second Session'),
], string="Session")
@api.multi
def generate_summary(self):
self.ensure_one()
data = {}
data['year_id'] = self.year_id.id
data['domain_id'] = self.domain_id.id
data['session'] = self.session
return self.env['report'].get_action(self, 'school_evaluations.evaluation_summary_content', data=data)
class ReportEvaluationSummary(models.AbstractModel):
_name = 'report.school_evaluations.evaluation_summary_content'
@api.multi
def render_html(self, data):
_logger.info('render_html')
year_id = data['year_id']
session = data['session']
domain_id = data['domain_id']
if session == 'first':
states = ['postponed','awarded_first_session']
else:
states = ['awarded_second_session','failed']
if domain_id:
records = self.env['school.individual_bloc'].search([('year_id','=',year_id),('source_bloc_domain_id','=',domain_id),('state','in',states)],order="source_bloc_level, name")
else:
records = self.env['school.individual_bloc'].search([('year_id','=',year_id),('state','in',states)],order="source_bloc_level, name")
docs = [
{
"name" : 'Bac 1',
'blocs' : [],
},
{
"name" : 'Bac 2',
'blocs' : [],
},
{
"name" : 'Bac 3',
'blocs' : [],
},
{
"name" : 'Master 1',
'blocs' : [],
},
{
"name" : 'Master 2',
'blocs' : [],
},
]
for record in records:
docs[int(record.source_bloc_level)-1]['blocs'].append(record)
docargs = {
'doc_model': 'school.individual_bloc',
'docs': docs,
'year' : self.env['school.year'].browse(year_id).name,
}
return self.env['report'].render('school_evaluations.evaluation_summary_content', docargs) | agpl-3.0 | 5,508,467,179,672,360,000 | 36.490566 | 184 | 0.558017 | false |
chen2aaron/SnirteneCodes | JustForFUN/v2ex_mission.py | 1 | 1598 | # -*- coding: utf-8 -*-
import re
USE_SOCKS_PROXY = 0
if USE_SOCKS_PROXY:
import requesocks as requests
else:
import requests
# import socks, socket
# socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
# socket.socket = socks.socksocket
username = '[email protected]'
password = 'xyz'
host = 'http://www.v2ex.com'
signin_url = host + '/signin'
mission_url = host + '/mission/daily'
coin_url = mission_url + '/redeem'
headers = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0',
'Origin': 'http://v2ex.com',
'Referer': 'http://v2ex.com/signin',
}
proxies = {
'http': 'socks5://127.0.0.1:1080',
'https': 'socks5://127.0.0.1:1080'
}
params = {
'u': username,
'p': password,
'next': '/',
}
# create a session
s = requests.session()
# s.proxies = proxies if USE_SOCKS_PROXY else ''
# request the sign-in page to obtain the 'once' token
signin_resp = s.get(signin_url)
signin_once = re.findall(r'value="(\d+)" name="once"', signin_resp.text)[0]
# add the 'once' token to the POST form and submit the login
params['once'] = signin_once
r = s.post(url=signin_url, data=params, headers=headers, verify=True)
if r.url != host + '/':
print('FAIL: %s' % r.url)
else:
    # login succeeded; request the /mission/daily page to obtain the 'once' token
daily_once = re.findall(r'once=(\d+)', s.get(mission_url).text)[0]
if daily_once:
        # with the 'once' token, request /mission/daily/redeem
resp = s.get(url=coin_url, data={'once': daily_once}, headers=headers, verify=True)
print('SUCCESS: %s' % resp.url)
else:
print('BOOM: %s' % daily_once)
| gpl-2.0 | -2,073,786,518,579,206,100 | 24.433333 | 103 | 0.625819 | false |
migihajami/memin | memin/frontend.py | 1 | 10715 | __author__ = 'algol'
import cherrypy
from jinja2 import Environment, PackageLoader
import memin.core as mc
from configparser import ConfigParser
class Menu:
def __init__(self):
self.menu = [
{'name': 'Главная', 'link': '/'},
{'name': 'Персоны', 'link': '/persons'},
{'name': 'Залы', 'link': '/classrooms'},
{'name': 'Занятия', 'link': '/lessons'},
{'name': 'Типы платежей', 'link': '/payment_types'}
]
class FrontendBase:
def __init__(self):
self.env = Environment(loader=PackageLoader('memin', 'templates'))
self.menu = Menu()
def get_template(self, template_name='index.html'):
return self.env.get_template(template_name)
class Main(FrontendBase):
def __init__(self):
super().__init__()
@staticmethod
def checkPassword(realm, username, password):
c = ConfigParser()
c.read('config.ini')
users = {k: c['users'][k].strip("'") for k in c['users']}
if password == users.get(username, None):
return True
return False
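    # Expected config.ini layout, inferred from the parsing above (the entry shown is hypothetical):
    #   [users]
    #   admin = 'secret'
    # i.e. one "username = 'password'" entry per user; the surrounding quotes are stripped.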
@cherrypy.expose
def index(self, name=''):
return self.get_template('index.html').render(nick=name,
title='Main page',
h1='Главная страница',
menu=self.menu.menu
)
@cherrypy.expose
def halt(self):
cherrypy.engine.exit()
@cherrypy.expose
def persons(self):
return self.get_template('persons.html').render(
title='Main page',
h1='Персоны',
menu=self.menu.menu,
table_title='Персоны',
url_prefix='person',
ptypes=str({a.pk_value: a.name for a in mc.PaymentType.get_all()}),
classrooms=str({a.pk_value: a.name for a in mc.Classroom.get_all()}),
lessons=str({a.pk_value: a.name for a in mc.Lesson.get_all()})
)
@cherrypy.expose
def payment_types(self):
return self.get_template('payment_types.html').render(
title='Типы платежей',
h1='Типы платежей',
menu=self.menu.menu,
table_title='Типы платежей',
url_prefix='ptype'
)
@cherrypy.expose
def classrooms(self):
return self.get_template('classrooms.html').render(
title='Залы для занятий',
h1='Залы для занятий',
menu=self.menu.menu,
table_title='список залов',
url_prefix='classroom'
)
@cherrypy.expose
def lessons(self):
return self.get_template('lessons.html').render(
title='Занятия',
h1='Занятия',
menu=self.menu.menu,
table_title='Список занятий',
url_prefix='lesson'
)
class MeminCrud(FrontendBase):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
raise Exception("Not implemented yet")
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
raise Exception("Not implemented yet")
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
raise Exception("Not implemented yet")
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
class Person(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
prs = mc.Person.get_all()
persons = [{'PersonID': p.pk_value,
'Fname': p.fname,
'Lname': p.lname,
'Phone': p.phone,
'Email': p.email,
'InsertDate': p.insert_date
} for p in prs]
res = {'Result': 'OK' if prs else 'ERROR', 'Records': persons, 'Args': args}
return res
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
p = mc.Person(args['Fname'], args['Lname'], args['Phone'], args['Email'])
args['PersonID'] = p.save()
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
p = mc.Person.load(args['PersonID'])
p.fname = args['Fname']
p.lname = args['Lname']
p.phone = args['Phone']
p.email = args['Email']
p.save()
return {'Result': 'OK'}
class PaymentType(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
ptypes = mc.PaymentType.get_all()
res = [{'Name': p.name,
'Comment': p.comment,
'PaymentTypeID': p.pk_value} for p in ptypes]
return {'Result': 'OK' if ptypes else 'ERROR', 'Records': res}
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
pt = mc.PaymentType(args['Name'], args['Comment'])
        args['PaymentTypeID'] = pt.save()
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
pt = mc.PaymentType.load(args['PaymentTypeID'])
pt.name = args['Name']
pt.comment = args['Comment']
pt.save()
return {'Result': 'OK'}
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
class Classroom(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
cl = mc.Classroom.get_all()
res = [{'Name': c.name,
'Address': c.address,
'Comment': c.comment,
'Active': c.active,
'ClassroomID': c.pk_value} for c in cl]
return {'Result': 'OK' if cl else 'ERROR', 'Records': res}
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
cl = mc.Classroom(args['Name'],
args['Address'],
args['Comment'],
args['Active'] if 'Active' in args else 0
)
args['ClassroomID'] = cl.save()
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
cl = mc.Classroom.load(args['ClassroomID'])
cl.comment = args['Comment']
cl.name = args['Name']
cl.active = args['Active'] if 'Active' in args else 0
cl.address = args['Address']
cl.save()
return {'Result': 'OK'}
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
class Lesson(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
lsns = mc.Lesson.get_all()
res = [{'Name': l.name,
'Comment': l.comment,
'Duration': l.duration,
'LessonID': l.pk_value
} for l in lsns]
return {'Result': 'OK' if lsns else 'ERROR', 'Records': res}
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
l = mc.Lesson(args['Name'], args['Duration'], args['Comment'])
args['LessonID'] = l.save()
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
l = mc.Lesson.load(args['LessonID'])
l.name = args['Name']
l.comment = args['Comment']
l.duration = args['Duration']
l.save()
return {'Result': 'OK'}
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
class Payment(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, **args):
pl = mc.Payment.get_all({'PersonID': args['PersonID']})
res = [{'PersonID': p.person_id,
'PaymentType': p.payment_type_id,
'PaymentTypeID': p.payment_type_id,
'PaymentID': p.pk_value,
'Amount': p.amount,
'Date': '-'.join(reversed(p.date.split('.')))
} for p in pl]
return {'Result': 'OK' if pl else 'ERROR', 'Records': res}
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
p = mc.Payment(args['PersonID'], args['Amount'], args['PaymentType'])
args['PaymentID'] = p.save()
args['Date'] = p.date
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
raise Exception("Not implemented yet")
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
class Visit(MeminCrud):
def __init__(self):
super().__init__()
@cherrypy.expose
@cherrypy.tools.json_out()
def list(self, PersonID, **args):
visits = mc.Visit.get_all({'PersonID': PersonID})
res = [{'VisitID': a.pk_value,
'Classroom': a.classroom_id,
'Lesson': a.lesson_id,
'Date': '-'.join(reversed(a.date.split('.')))
} for a in visits]
return {'Result': 'OK' if visits else 'ERROR', 'Records': res}
@cherrypy.expose
@cherrypy.tools.json_out()
def create(self, **args):
v = mc.Visit(args['PersonID'], args['Classroom'], args['Lesson'], args['Date'])
args['VisitID'] = v.save()
return {'Result': 'OK', 'Record': args}
@cherrypy.expose
@cherrypy.tools.json_out()
def update(self, **args):
v = mc.Visit.load(args.get('VisitID'))
if v:
v.classroom_id = args['Classroom']
v.lesson_id = args['Lesson']
v.date = args['Date']
v.save()
return {'Result': 'OK'}
return {'Result': 'ERROR'}
@cherrypy.expose
@cherrypy.tools.json_out()
def delete(self, **args):
raise Exception("Not implemented yet")
| bsd-3-clause | -6,257,122,399,343,155,000 | 29.134286 | 87 | 0.526216 | false |
Roshan2017/spinnaker | testing/citest/tests/openstack_smoke_test.py | 1 | 9447 | # Copyright 2017 Veritas Technologies, LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Smoke test to see if Spinnaker can interoperate with OpenStack.
See testable_service/integration_test.py and spinnaker_testing/spinnaker.py
for more details.
The test will use the spinnaker configuration parameters from the server
endpoint (gate) to determine the managed project it should verify, and to
determine the spinnaker account name to use when sending it commands.
Note:
This test needs certain environment variables defined in order for the
OpenStack client to work. Please refer testing/citest/README.md for more
details.
Sample Usage:
PYTHONPATH=testing/citest \
python tesing/citest/tests/openstack_smoke_test.py \
--native_hostname=host-running-smoke-test
"""
# Standard python modules.
import sys
# citest modules.
import citest.openstack_testing as os
import citest.json_contract as jc
import citest.json_predicate as jp
import citest.service_testing as st
import citest.base
# Spinnaker modules.
import spinnaker_testing as sk
import spinnaker_testing.gate as gate
class OpenStackSmokeTestScenario(sk.SpinnakerTestScenario):
"""Defines the scenario for the smoke test.
This scenario defines the different test operations.
We're going to:
Create a Spinnaker Application
Create a Spinnaker Security Group
Delete each of the above (in reverse order)
"""
@classmethod
def new_agent(cls, bindings):
"""Implements citest.service_testing.AgentTestScenario.new_agent."""
agent = gate.new_agent(bindings)
agent.default_max_wait_secs = 180
return agent
def __init__(self, bindings, agent=None):
"""Constructor.
Args:
bindings: [dict] The data bindings to use to configure the scenario.
agent: [GateAgent] The agent for invoking the test operations on Gate.
"""
super(OpenStackSmokeTestScenario, self).__init__(bindings, agent)
bindings = self.bindings
# We'll call out the app name because it is widely used
# because it scopes the context of our activities.
# pylint: disable=invalid-name
self.TEST_APP = bindings['TEST_APP']
self.TEST_STACK = bindings['TEST_STACK']
self.TEST_SECURITY_GROUP = 'sec_grp_'+ bindings['TEST_APP']
def create_app(self):
"""Creates OperationContract that creates a new Spinnaker Application."""
contract = jc.Contract()
return st.OperationContract(
self.agent.make_create_app_operation(
bindings=self.bindings, application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_OS_ACCOUNT']),
contract=contract)
def delete_app(self):
"""Creates OperationContract that deletes a new Spinnaker Application."""
contract = jc.Contract()
return st.OperationContract(
self.agent.make_delete_app_operation(
application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_OS_ACCOUNT']),
contract=contract)
  def create_a_security_group(self):
    """Creates OsContract for upsertSecurityGroup.
To verify the operation, we just check that the spinnaker security group
for the given application was created.
"""
rules = [{
"fromPort":80,
"toPort":80,
"prevcidr":"0.0.0.0/0",
"cidr":"0.0.0.0/0",
"ruleType":"TCP",
"remoteSecurityGroupId":"",
"icmpType":-1,
"icmpCode":-1
},
{
"fromPort":10,
"toPort":65530,
"prevcidr":"",
"cidr":"",
"ruleType":"UDP",
"remoteSecurityGroupId":"SELF",
"icmpType":-1,
"icmpCode":-1
},
{
"fromPort":1,
"toPort":65535,
"prevcidr":"",
"cidr":"",
"ruleType":"ICMP",
"remoteSecurityGroupId":"SELF",
"icmpType":12,
"icmpCode":9}]
job = [{
"provider": "openstack",
"region": self.bindings['OS_REGION_NAME'],
"stack": self.TEST_STACK,
"description": "Test - create security group for {app}".format(
app=self.TEST_APP),
"detail": "",
"account": self.bindings['SPINNAKER_OS_ACCOUNT'],
"rules": rules,
"name": self.TEST_SECURITY_GROUP,
"securityGroupName": self.TEST_SECURITY_GROUP,
"cloudProvider": "openstack",
"type": "upsertSecurityGroup",
"user": self.bindings['TEST_OS_USERNAME']
}]
builder = os.OsContractBuilder(self.os_observer)
(builder.new_clause_builder(
'Security Group Created', retryable_for_secs=30)
.show_resource('security group', self.TEST_SECURITY_GROUP)
.contains_pred_list([
jp.DICT_MATCHES({
'name': jp.STR_SUBSTR(self.TEST_SECURITY_GROUP),
'rules': jp.STR_SUBSTR("protocol='tcp'")
and jp.STR_SUBSTR("port_range_min='80'")
and jp.STR_SUBSTR("port_range_max='80'")}),
jp.DICT_MATCHES({
'rules': jp.STR_SUBSTR("protocol='udp'")
and jp.STR_SUBSTR("port_range_min='10'")
and jp.STR_SUBSTR("port_range_max='65530'")}),
jp.DICT_MATCHES({
'rules': jp.STR_SUBSTR("protocol='icmp'")
and jp.STR_SUBSTR("port_range_min='12'")
and jp.STR_SUBSTR("port_range_max='9'")})]))
payload = self.agent.make_json_payload_from_kwargs(
job=job, description=' Test - create security group for {app}'.format(
app=self.TEST_APP),
application=self.TEST_APP)
return st.OperationContract(
self.new_post_operation(
title='create_security_group', data=payload,
path='applications/{app}/tasks'.format(app=self.TEST_APP)),
contract=builder.build())
  def delete_a_security_group(self):
    """Creates OsContract for deleteSecurityGroup.
To verify the operation, we just check that the spinnaker security group
for the given application was deleted.
"""
#Get ID of the created security group
os_agent = os.OsAgent(None)
data = os_agent.get_resource('security group', self.TEST_SECURITY_GROUP)
security_group_id = data['id']
payload = self.agent.make_json_payload_from_kwargs(
job=[{
"Provider": "openstack",
"id": security_group_id,
"region": self.bindings['OS_REGION_NAME'],
"regions": [self.bindings['OS_REGION_NAME']],
"account": self.bindings['SPINNAKER_OS_ACCOUNT'],
"securityGroupName": self.TEST_SECURITY_GROUP,
"cloudProvider": "openstack",
"type": "deleteSecurityGroup",
"user": self.bindings['TEST_OS_USERNAME']
}],
application=self.TEST_APP,
description='Delete Security Group: : ' + self.TEST_SECURITY_GROUP)
builder = os.OsContractBuilder(self.os_observer)
(builder.new_clause_builder(
'Security Group Deleted', retryable_for_secs=30)
.show_resource('security group', self.TEST_SECURITY_GROUP,
no_resources_ok=True)
.excludes_path_eq('name', self.TEST_SECURITY_GROUP)
)
return st.OperationContract(
self.new_post_operation(
title='delete_security_group', data=payload,
path='applications/{app}/tasks'.format(app=self.TEST_APP)),
contract=builder.build())
class OpenStackSmokeTest(st.AgentTestCase):
"""The test fixture for the OpenStackSmokeTest.
This is implemented using citest OperationContract instances that are
created by the OpenStackSmokeTestScenario.
"""
# pylint: disable=missing-docstring
@property
def scenario(self):
return citest.base.TestRunner.global_runner().get_shared_data(
OpenStackSmokeTestScenario)
def test_a_create_app(self):
self.run_test_case(self.scenario.create_app())
def test_z_delete_app(self):
self.run_test_case(self.scenario.delete_app(),
retry_interval_secs=8, max_retries=8)
def test_b_create_security_group(self):
self.run_test_case(self.scenario.create_a_security_group())
def test_y_delete_security_group(self):
self.run_test_case(self.scenario.delete_a_security_group(),
retry_interval_secs=8, max_retries=8)
def main():
"""Implements the main method running this smoke test."""
defaults = {
'TEST_STACK': str(OpenStackSmokeTestScenario.DEFAULT_TEST_ID),
'TEST_APP': 'openstack-smoketest' + OpenStackSmokeTestScenario.DEFAULT_TEST_ID
}
return citest.base.TestRunner.main(
parser_inits=[OpenStackSmokeTestScenario.initArgumentParser],
default_binding_overrides=defaults,
test_case_list=[OpenStackSmokeTest])
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 | -574,020,269,942,373,200 | 34.784091 | 84 | 0.638615 | false |
raintank/graphite-api | setup.py | 1 | 2147 | # coding: utf-8
import sys
from setuptools import setup, find_packages
install_requires = [
'Flask',
'PyYAML',
'cairocffi',
'pyparsing>=1.5.7',
'pytz',
'six',
'tzlocal',
]
if sys.version_info < (2, 7):
install_requires.append('importlib')
install_requires.append('logutils')
install_requires.append('ordereddict')
install_requires.append('structlog<=16.0.0')
else:
install_requires.append('structlog')
with open('README.rst') as f:
long_description = f.read()
setup(
name='graphite-api',
version='1.1.3',
url='https://github.com/brutasse/graphite-api',
author="Bruno Renié, based on Chris Davis's graphite-web",
author_email='[email protected]',
license='Apache Software License 2.0',
description=('Graphite-web, without the interface. '
'Just the rendering HTTP API.'),
long_description=long_description,
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=install_requires,
extras_require={
'sentry': ['raven[flask]'],
'cyanite': ['cyanite'],
'cache': ['Flask-Cache'],
'statsd': ['statsd'],
},
zip_safe=False,
platforms='any',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Flask',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Visualization',
'Topic :: System :: Monitoring',
),
test_suite='tests',
)
| apache-2.0 | 5,227,318,412,880,639,000 | 30.101449 | 66 | 0.60671 | false |
chrplr/AIP2015 | resources/python-scripts/icons.py | 1 | 9336 | goicon=[
"32 32 5 1",
". c None",
"b c #008000",
"a c #00c000",
"# c #00ff00",
"c c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
"..........#.....................",
".........a##.....#..............",
"........ba###...a##.............",
"........ba####..a###............",
"........ba####..a####...........",
"........ba#####.c#####..........",
"........ba######ca#####.........",
"........ba#######ca#####........",
"........ba########ca#####.......",
"........ba#########ca#####......",
"........ba##########ca#####.....",
"........ba#########ac#####a.....",
"........ba########acc####a......",
"........ba#######abc####ab......",
"........ba######abc####ab.......",
"........ba#####abc####ab........",
"........ba####abc####ab.........",
"........ba###abca###ab..........",
"........ba##ab.ca##ab...........",
"........ba#ab..ba#ab............",
"........baab...baab.............",
"........bbb....bab..............",
".........b......b...............",
"................................",
"................................",
"................................",
"................................"]
stopicon=[
"32 32 4 1",
". c None",
"b c #800000",
"a c #c00000",
"# c #ff0000",
"................................",
"................................",
"................................",
"................................",
"................................",
".....####################.......",
"....a####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...baaaaaaaaaaaaaaaaaaaa........",
"...bbbbbbbbbbbbbbbbbbbb.........",
"................................",
"................................",
"................................",
"................................",
"................................"]
print_xpm = ['32 32 12 1',
'a c #ffffff',
'h c #ffff00',
'c c #ffffff',
'f c #dcdcdc',
'b c #c0c0c0',
'j c #a0a0a4',
'e c #808080',
'g c #808000',
'd c #585858',
'i c #00ff00',
'# c #000000',
'. c None',
'................................',
'................................',
'...........###..................',
'..........#abb###...............',
'.........#aabbbbb###............',
'.........#ddaaabbbbb###.........',
'........#ddddddaaabbbbb###......',
'.......#deffddddddaaabbbbb###...',
'......#deaaabbbddddddaaabbbbb###',
'.....#deaaaaaaabbbddddddaaabbbb#',
'....#deaaabbbaaaa#ddedddfggaaad#',
'...#deaaaaaaaaaa#ddeeeeafgggfdd#',
'..#deaaabbbaaaa#ddeeeeabbbbgfdd#',
'.#deeefaaaaaaa#ddeeeeabbhhbbadd#',
'#aabbbeeefaaa#ddeeeeabbbbbbaddd#',
'#bbaaabbbeee#ddeeeeabbiibbadddd#',
'#bbbbbaaabbbeeeeeeabbbbbbaddddd#',
'#bjbbbbbbaaabbbbeabbbbbbadddddd#',
'#bjjjjbbbbbbaaaeabbbbbbaddddddd#',
'#bjaaajjjbbbbbbaaabbbbadddddddd#',
'#bbbbbaaajjjbbbbbbaaaaddddddddd#',
'#bjbbbbbbaaajjjbbbbbbddddddddd#.',
'#bjjjjbbbbbbaaajjjbbbdddddddd#..',
'#bjaaajjjbbbbbbjaajjbddddddd#...',
'#bbbbbaaajjjbbbjbbaabdddddd#....',
'###bbbbbbaaajjjjbbbbbddddd#.....',
'...###bbbbbbaaajbbbbbdddd#......',
'......###bbbbbbjbbbbbddd#.......',
'.........###bbbbbbbbbdd#........',
'............###bbbbbbd#.........',
'...............###bbb#..........',
'..................###...........']
pwspec=[
"32 32 4 1",
". c None",
"b c #0000c0",
"# c #00ff00",
"a c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
".....#####################......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaabaaaaaaaaaaaaaa#......",
".....#aaaabaaaaaaaaaaaaaa#......",
".....#aaaabbaaaaaaaaaaaaa#......",
".....#aaaabbaaaaaaaaaaaaa#......",
".....#aaaabbaaaaaaaaaaaaa#......",
".....#aaaabbaaaaabaaaaaaa#......",
".....#aaaabbaaaaabaaaaaaa#......",
".....#aaaabbaaaaabaaaaaaa#......",
".....#aaaabbaaaaabaaaaaaa#......",
".....#aaabaabaaababaaaaaa#......",
".....#bbbaaaabbbaaabbbbbb#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#####################......",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................"]
scope=[
"32 32 4 1",
". c None",
"b c #0000c0",
"# c #00ff00",
"a c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
".....#####################......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaabaaaaaaaaaaaaa#......",
".....#aaabbabbaaaaaaaaaaa#......",
".....#aabaaaaabaaaaaaaaaa#......",
".....#abaaaaaaabaaaaaaaaa#......",
".....#baaaaaaaaabaaaaaaaa#......",
".....#aaaaaaaaaaabaaaaaaa#......",
".....#aaaaaaaaaaaabaaaaaa#......",
".....#aaaaaaaaaaaaabaaaab#......",
".....#aaaaaaaaaaaaaabbaba#......",
".....#aaaaaaaaaaaaaaaabaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#####################......",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................"]
single=[
"32 32 4 1",
". c None",
"b c #0000c0",
"# c #00ff00",
"a c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
".....#####################......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaabaaaaaaaaa#......",
".....#aaaaaaaabbaaaaaaaaa#......",
".....#abaabaaabbaaaaabaaa#......",
".....#abaabababbaababbbaa#......",
".....#abbabababbaabbbbbaa#......",
".....#bbbbbbbbbbbbbbbbbbb#......",
".....#aabbbbabaabbabbbaaa#......",
".....#aaabbbabaabbabbaaaa#......",
".....#aaababaaaabbabbaaaa#......",
".....#aaaaaaaaaabbabbaaaa#......",
".....#aaaaaaaaaabaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#####################......",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................"]
avge=[
"32 32 4 1",
". c None",
"b c #0000c0",
"# c #00ff00",
"a c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
".....#####################......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#bbbbbbbbbbbbbbbbbbb#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#aaaaaaaaaaaaaaaaaaa#......",
".....#####################......",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................",
"................................"]
| gpl-2.0 | -6,271,612,860,441,996,000 | 32.106383 | 48 | 0.269173 | false |
MShel/ttw | listener/packets/udpPacket.py | 1 | 1408 | from listener.packets.abstractPacket import AbstractPacket
from struct import unpack
class UdpPacket(AbstractPacket):
UNPACK_FORMAT = '!HHHH'
UDP_HEADER_LENGTH = 8
PROTOCOL_NAME = 'UDP'
def __init__(self, binPacket: bytes, margin: int):
self.binPacket = binPacket
self.headerMargin = margin
self.parse()
def parse(self):
AbstractPacket.addMsg(AbstractPacket, 'Started Parsing UDP packet')
binUdpHeader = self.binPacket[self.headerMargin:self.headerMargin + self.UDP_HEADER_LENGTH]
unpackedHeader = unpack(self.UNPACK_FORMAT, binUdpHeader)
self.fromPort = str(unpackedHeader[0])
self.toPort = str(unpackedHeader[1])
self.udpHeaderLength = unpackedHeader[2]
self.udpCheckSum = unpackedHeader[3]
fullHeaderSize = self.headerMargin + self.udpHeaderLength
self.dataSize = len(self.binPacket) - fullHeaderSize
# get data from the packet
self.data = self.binPacket[fullHeaderSize:]
AbstractPacket.addMsg(AbstractPacket, 'Parsed UDP packet from port: ' + self.fromPort + ' to: ' + self.toPort)
AbstractPacket.addMsg(AbstractPacket, 'UDP-PACKET data:\n\n\n ' + str(self.data) +'\n\n')
def getMsg(self):
return self.msg
def getName(self):
return self.PROTOCOL_NAME
def __del__(self):
pass | mit | 2,091,908,008,284,972,500 | 36.078947 | 119 | 0.651989 | false |
aerler/WRF-Tools | Python/wrfavg/wrfout_average.py | 1 | 79431 | '''
Created on 2013-09-28, revised 2014-06-17, added daily output 2020-05-04
A script to average WRF output; the default settings are meant for my 'fineIO' output configuration and
process the smaller diagnostic files.
The script can run in parallel mode, with each process averaging one filetype and domain, producing
exactly one output file.
@author: Andre R. Erler, GPL v3
'''
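# Example invocation (assumed typical setup, run from the experiment root containing wrfout/ and wrfavg/):
#   PYAVG_THREADS=4 PYAVG_FILETYPES='srfc hydro' PYAVG_DOMAINS='12' python wrfout_average.py 1979-1988
# The optional period argument (year[,month[,day]], values or regular expressions) restricts processing.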
#TODO: add time-dependent auxiliary files to file processing (use prerequisites from other files)
#TODO: add option to discard prerequisite variables
#TODO: add base variables for correlation and standard deviation (and (co-)variance).
#TODO: more variables: tropopause height, baroclinicity, PV, water flux (require full 3D fields)
#TODO: add shape-averaged output stream (shapes based on a template file)
## imports
import numpy as np
from collections import OrderedDict
#import numpy.ma as ma
import os, re, sys, shutil, gc
import netCDF4 as nc
# my own netcdf stuff
from utils.nctools import add_coord, copy_dims, copy_ncatts, copy_vars
from processing.multiprocess import asyncPoolEC
# import module providing derived variable classes
import wrfavg.derived_variables as dv
# aliases
days_per_month_365 = dv.days_per_month_365
dtype_float = dv.dtype_float
# thresholds for wet-day variables (from AMS glossary and ETCCDI Climate Change Indices)
from utils.constants import precip_thresholds
# N.B.: importing from WRF Tools to GeoPy causes a name collision
# date error class
class DateError(Exception):
''' Exceptions related to wrfout date strings, e.g. in file names. '''
pass
# date error class
class ArgumentError(Exception):
''' Exceptions related to arguments passed to the script. '''
pass
def getDateRegX(period):
''' function to define averaging period based on argument '''
# use '\d' for any number and [1-3,45] for ranges; '\d\d\d\d'
if period == '1979-1980': prdrgx = '19(79|80)' # 2 year historical period
elif period == '1979-1981': prdrgx = '19(79|8[0-1])' # 3 year historical period
elif period == '1979-1983': prdrgx = '19(79|8[0-3])' # 5 year historical period
elif period == '1979-1988': prdrgx = '19(79|8[0-8])' # 10 year historical period
elif period == '1980-1994': prdrgx = '19(8[0-9]|9[04])' # 15 year historical period
elif period == '2045-2047': prdrgx = '204[5-7]' # 3 year future period
elif period == '2045-2049': prdrgx = '204[5-9]' # 5 year future period
elif period == '2045-2054': prdrgx = '20(4[5-9]|5[0-4])' # 10 year future period
elif period == '2045-2059': prdrgx = '20(4[5-9]|5[0-9])' # 15 year future period
elif period == '2085-2087': prdrgx = '208[5-7]' # 3 year future period
elif period == '2085-2089': prdrgx = '208[5-9]' # 5 year future period
elif period == '2085-2094': prdrgx = '20(8[5-9]|9[0-4])' # 10 year future period
elif period == '2085-2099': prdrgx = '20(8[5-9]|9[0-9])' # 15 year future period
elif period == '2090-2094': prdrgx = '209[0-4]' # 5 year future period
else: prdrgx = None
if prdrgx: print(("\nLoading regular expression for date string: '{:s}'".format(period)))
return prdrgx
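# e.g. getDateRegX('1979-1988') returns '19(79|8[0-8])', which matches the years 1979 through 1988 in
# wrfout date strings; periods that are not listed above fall through and return None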
## read arguments
# number of processes NP
if 'PYAVG_THREADS' in os.environ:
NP = int(os.environ['PYAVG_THREADS'])
else: NP = None
# only compute derived variables
if 'PYAVG_DERIVEDONLY' in os.environ:
lderivedonly = os.environ['PYAVG_DERIVEDONLY'] == 'DERIVEDONLY'
else: lderivedonly = False # i.e. all
# # scale dry-day threshold
# if os.environ.has_key('PYAVG_DRYDAY') and bool(os.environ['PYAVG_DRYDAY']): # i.e. not empty and non-zero
# dryday_correction = float(os.environ['PYAVG_DRYDAY']) # relative to WMO recommendation
#   dv.dryday_threshold = dv.dryday_threshold * dryday_correction # precip threshold for a dry day: 2.3e-7 mm/s
# print("\n *** The dry-day threshold was increased by a factor of {:3.2f} relative to WMO recommendation *** \n".format(dryday_correction))
# recompute last timestep and continue (useful after a crash)
if 'PYAVG_RECOVER' in os.environ:
lrecover = os.environ['PYAVG_RECOVER'] == 'RECOVER'
else: lrecover = False # i.e. normal operation
# just add new and leave old
if 'PYAVG_ADDNEW' in os.environ:
laddnew = os.environ['PYAVG_ADDNEW'] == 'ADDNEW'
else: laddnew = False # i.e. recompute all
# recompute specified variables
if 'PYAVG_RECALC' in os.environ:
if os.environ['PYAVG_RECALC'] == 'DERIVEDONLY':
# recalculate all derived variables and leave others in place
lrecalc = True; lderivedonly = True; recalcvars = []
else:
recalcvars = os.environ['PYAVG_RECALC'].split() # space separated list (other characters cause problems...)
if len(recalcvars) > 0 and len(recalcvars[0]) > 0: lrecalc = True # if there is a variable to recompute
else: lrecalc = False
# lrecalc uses the same pathway, but they can operate independently
else: lrecalc = False # i.e. recompute all
# overwrite existing data
if 'PYAVG_OVERWRITE' in os.environ:
loverwrite = os.environ['PYAVG_OVERWRITE'] == 'OVERWRITE'
if loverwrite: laddnew = False; lrecalc = False
else: loverwrite = False # i.e. append
# N.B.: when loverwrite is True and prdarg is empty, the entire file is replaced,
# otherwise only the selected months are recomputed
# file types to process
if 'PYAVG_FILETYPES' in os.environ:
filetypes = os.environ['PYAVG_FILETYPES'].split() # space separated list (other characters cause problems...)
if len(filetypes) == 1 and len(filetypes[0]) == 0: filetypes = None # empty string, substitute default
else: filetypes = None # defaults are set below
# domains to process
if 'PYAVG_DOMAINS' in os.environ:
domains = os.environ['PYAVG_DOMAINS'].split() # space separated list (other characters cause problems...)
if len(domains) == 1: domains = [int(i) for i in domains[0]] # string of single-digit indices
else: domains = [int(i) for i in domains] # semi-colon separated list
else: domains = None # defaults are set below
# run script in debug mode
if 'PYAVG_DEBUG' in os.environ:
ldebug = os.environ['PYAVG_DEBUG'] == 'DEBUG'
lderivedonly = ldebug or lderivedonly # usually this is what we are debugging, anyway...
else: ldebug = False # operational mode
# wipe temporary storage after every month (no carry-over)
if 'PYAVG_CARRYOVER' in os.environ:
lcarryover = os.environ['PYAVG_CARRYOVER'] == 'CARRYOVER'
else: lcarryover = True # operational mode
# use simple differences or centered differences for accumulated variables
if 'PYAVG_SMPLDIFF' in os.environ:
lsmplDiff = os.environ['PYAVG_SMPLDIFF'] == 'SMPLDIFF'
else: lsmplDiff = False # default mode: centered differences
# generate formatted daily/sub-daily output files for selected variables
if 'PYAVG_DAILY' in os.environ:
lglobaldaily = os.environ['PYAVG_DAILY'] == 'DAILY'
else: lglobaldaily = False # operational mode
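# Example invocation (illustrative only; the script name and folder path are assumptions):
#   cd /path/to/experiment && PYAVG_THREADS=4 PYAVG_FILETYPES='srfc hydro' PYAVG_DOMAINS=12 python wrfout_average.py 1979
# i.e. all switches above are controlled via PYAVG_* environment variables ('12' selects domains 1 and 2),
# while the date selection is passed as command-line arguments (see below)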
# working directories
exproot = os.getcwd()
exp = exproot.split('/')[-1] # root folder name
infolder = exproot + '/wrfout/' # input folder
outfolder = exproot + '/wrfavg/' # output folder
# figure out time period
# N.B.: values or regular expressions can be passed for year, month, and day as arguments in this order; alternatively,
# a single argument with the values/regex separated by commas (',') can be used
if len(sys.argv) == 1 or not any(sys.argv[1:]): # treat empty arguments as no argument
period = [] # means recompute everything
elif len(sys.argv) == 2:
  period = sys.argv[1].split(',') # comma-separated values/regular expressions identifying year, month, and day
else:
period = sys.argv[1:]
# prdarg = '1979'; period = prdarg.split('-') # for tests
# default time intervals
yearstr = r'\d\d\d\d'; monthstr = r'\d\d'; daystr = r'\d\d'
# figure out time interval
if len(period) >= 1:
# first try some common expressions
yearstr = getDateRegX(period[0])
if yearstr is None: yearstr = period[0]
if len(period) >= 2: monthstr = period[1]
if len(period) >= 3: daystr = period[2]
# N.B.: the timestr variables are interpreted as strings and support Python regex syntax
if len(period) > 0 or ldebug: print('Date string interpretation:',yearstr,monthstr,daystr)
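# e.g. passing the (hypothetical) arguments '198[2-5]' '0[1-3]' would restrict processing to
#      January-March of 1982-1985; any Python regex matching the date fields in the file names works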
## definitions
# input files and folders
filetypes = filetypes or ['srfc', 'plev3d', 'xtrm', 'hydro', 'lsm', 'rad', 'snow']
domains = domains or [1,2,3,4]
# filetypes and domains can also be set in a space-separated environment variable (see above)
# file pattern (WRF output and averaged files)
# inputpattern = 'wrf{0:s}_d{1:02d}_{2:s}-{3:s}-{4:s}_\d\d:\d\d:\d\d.nc' # expanded with format(type,domain,year,month)
inputpattern = r'^wrf{0:s}_d{1:s}_{2:s}_\d\d[_:]\d\d[_:]\d\d(?:\.nc$|$)' # expanded with format(type,domain,datestring)
#inputpattern = '^wrf{0:s}_d{1:s}_{2:s}_\d\d[_:]\d\d[_:]\d\d.*$' # expanded with format(type,domain,datestring)
# N.B.: the last section (?:\.nc$|$) matches either .nc at the end or just the end of the string;
# ?: just means that the group defined by () can not be retrieved (it is just to hold "|")
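# e.g. (illustrative) with filetype 'srfc', domain '01', and a datestring of '1979-01-01', the resulting
#      regex matches a file named 'wrfsrfc_d01_1979-01-01_00:00:00.nc'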
constpattern = 'wrfconst_d{0:02d}' # expanded with format(domain), also WRF output
# N.B.: file extension is added automatically for constpattern and handled by regex for inputpattern
monthlypattern = 'wrf{0:s}_d{1:02d}_monthly.nc' # expanded with format(type,domain)
dailypattern = 'wrf{0:s}_d{1:02d}_daily.nc' # expanded with format(type,domain)
# variable attributes
wrftime = 'Time' # time dim in wrfout files
wrfxtime = 'XTIME' # time in minutes since WRF simulation start
wrfaxes = dict(Time='tax', west_east='xax', south_north='yax', num_press_levels_stag='pax')
wrftimestamp = 'Times' # time-stamp variable in WRF
time = 'time' # time dim in monthly mean files
dimlist = ['x','y'] # dimensions we just copy
dimmap = {time:wrftime} #{time:wrftime, 'x':'west_east','y':'south_north'}
midmap = dict(list(zip(list(dimmap.values()),list(dimmap.keys())))) # reverse dimmap
# accumulated variables (only total accumulation since simulation start, not, e.g., daily accumulated)
acclist = dict(RAINNC=100.,RAINC=100.,RAINSH=None,SNOWNC=None,GRAUPELNC=None,SFCEVP=None,POTEVP=None, # srfc vars
SFROFF=None,UDROFF=None,ACGRDFLX=None,ACSNOW=None,ACSNOM=None,ACHFX=None,ACLHF=None, # lsm vars
ACSWUPT=1.e9,ACSWUPTC=1.e9,ACSWDNT=1.e9,ACSWDNTC=1.e9,ACSWUPB=1.e9,ACSWUPBC=1.e9,ACSWDNB=1.e9,ACSWDNBC=1.e9, # rad vars
ACLWUPT=1.e9,ACLWUPTC=1.e9,ACLWDNT=1.e9,ACLWDNTC=1.e9,ACLWUPB=1.e9,ACLWUPBC=1.e9,ACLWDNB=1.e9,ACLWDNBC=1.e9) # rad vars
# N.B.: keys = variables and values = bucket sizes; value = None or 0 means no bucket
bktpfx = 'I_' # prefix for bucket variables; these are processed together with their accumulated variables
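# e.g. for RAINNC the total accumulation is reconstructed as RAINNC + I_RAINNC*100. (the bucket size listed above)
#      before differencing; this is also why bucket variables are excluded from the regular variable list below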
# derived variables
derived_variables = {filetype:[] for filetype in filetypes} # derived variable lists by file type
derived_variables['srfc'] = [dv.Rain(), dv.LiquidPrecipSR(), dv.SolidPrecipSR(), dv.NetPrecip(sfcevp='QFX'),
dv.WaterVapor(), dv.OrographicIndex(), dv.CovOIP(), dv.WindSpeed(),
dv.SummerDays(threshold=25., temp='T2'), dv.FrostDays(threshold=0., temp='T2')]
# N.B.: measures the fraction of 6-hourly samples above/below the threshold (day and night)
derived_variables['xtrm'] = [dv.RainMean(), dv.TimeOfConvection(),
dv.SummerDays(threshold=25., temp='T2MAX'), dv.FrostDays(threshold=0., temp='T2MIN')]
derived_variables['hydro'] = [dv.Rain(), dv.LiquidPrecip(), dv.SolidPrecip(),
dv.NetPrecip(sfcevp='SFCEVP'), dv.NetWaterFlux(), dv.WaterForcing()]
derived_variables['rad'] = [dv.NetRadiation(), dv.NetLWRadiation()]
derived_variables['lsm'] = [dv.RunOff()]
derived_variables['plev3d'] = [dv.OrographicIndexPlev(), dv.Vorticity(), dv.WindSpeed(),
dv.WaterDensity(), dv.WaterFlux_U(), dv.WaterFlux_V(), dv.ColumnWater(),
dv.WaterTransport_U(), dv.WaterTransport_V(),
dv.HeatFlux_U(), dv.HeatFlux_V(), dv.ColumnHeat(),
dv.HeatTransport_U(),dv.HeatTransport_V(),
dv.GHT_Var(), dv.Vorticity_Var()]
# add wet-day variables for different thresholds
wetday_variables = [dv.WetDays, dv.WetDayRain, dv.WetDayPrecip]
for threshold in precip_thresholds:
for wetday_var in wetday_variables:
derived_variables['srfc'].append(wetday_var(threshold=threshold, rain='RAIN'))
derived_variables['hydro'].append(wetday_var(threshold=threshold, rain='RAIN'))
derived_variables['xtrm'].append(wetday_var(threshold=threshold, rain='RAINMEAN'))
# N.B.: derived variables need to be listed in order of computation
# Consecutive exceedance variables
consecutive_variables = {filetype:None for filetype in filetypes} # consecutive variable lists by file type
# skip in debug mode (only specific ones for debug)
if ldebug:
print("Skipping 'Consecutive Days of Exceedance' Variables")
else:
consecutive_variables['srfc'] = {'CFD' : ('T2', 'below', 273.14, 'Consecutive Frost Days (< 0C)'),
'CSD' : ('T2', 'above', 273.14+25., 'Consecutive Summer Days (>25C)'),
# N.B.: night temperatures >25C will rarely happen... so this will be very short
'CNWD' : ('NetPrecip', 'above', 0., 'Consecutive Net Wet Days'),
'CNDD' : ('NetPrecip', 'below', 0., 'Consecutive Net Dry Days'),}
consecutive_variables['xtrm'] = {'CFD' : ('T2MIN', 'below', 273.14, 'Consecutive Frost Days (< 0C)'),
'CSD' : ('T2MAX', 'above', 273.14+25., 'Consecutive Summer Days (>25C)'),}
consecutive_variables['hydro'] = {'CNWD' : ('NetPrecip', 'above', 0., 'Consecutive Net Wet Days'),
'CNDD' : ('NetPrecip', 'below', 0., 'Consecutive Net Dry Days'),
'CWGD' : ('NetWaterFlux', 'above', 0., 'Consecutive Water Gain Days'),
'CWLD' : ('NetWaterFlux', 'below', 0., 'Consecutive Water Loss Days'),}
# add wet-day variables for different thresholds
for threshold in precip_thresholds:
for filetype,rain_var in zip(['srfc','hydro','xtrm'],['RAIN','RAIN','RAINMEAN']):
suffix = '_{:03d}'.format(int(10*threshold)); name_suffix = '{:3.1f} mm/day)'.format(threshold)
consecutive_variables[filetype]['CWD'+suffix] = (rain_var, 'above', threshold/86400.,
'Consecutive Wet Days (>'+name_suffix)
consecutive_variables[filetype]['CDD'+suffix] = (rain_var, 'below', threshold/86400. ,
'Consecutive Dry Days (<'+name_suffix)
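    # e.g. (illustrative) a threshold of 2.5 mm/day, if present in precip_thresholds, yields 'CWD_025'/'CDD_025',
    #      with the threshold converted from mm/day to mm/s (kg m-2 s-1) by the division by 86400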
## single- and multi-step Extrema
maximum_variables = {filetype:[] for filetype in filetypes} # maxima variable lists by file type
daymax_variables = {filetype:[] for filetype in filetypes} # maxima variable lists by file type
daymin_variables = {filetype:[] for filetype in filetypes} # minima variable lists by file type
weekmax_variables = {filetype:[] for filetype in filetypes} # maxima variable lists by file type
minimum_variables = {filetype:[] for filetype in filetypes} # minima variable lists by file type
weekmin_variables = {filetype:[] for filetype in filetypes} # minima variable lists by file type
# skip in debug mode (only specific ones for debug)
if ldebug:
print("Skipping Single- and Multi-step Extrema")
else:
# Maxima (just list base variables; derived variables will be created later)
maximum_variables['srfc'] = ['T2', 'U10', 'V10', 'RAIN', 'RAINC', 'RAINNC', 'NetPrecip', 'WindSpeed']
maximum_variables['xtrm'] = ['T2MEAN', 'T2MAX', 'SPDUV10MEAN', 'SPDUV10MAX',
'RAINMEAN', 'RAINNCVMAX', 'RAINCVMAX']
maximum_variables['hydro'] = ['RAIN', 'RAINC', 'RAINNC', 'ACSNOW', 'ACSNOM', 'NetPrecip', 'NetWaterFlux', 'WaterForcing']
maximum_variables['lsm'] = ['SFROFF', 'Runoff']
maximum_variables['plev3d'] = ['S_PL', 'GHT_PL', 'Vorticity']
# daily (smoothed) maxima
daymax_variables['srfc'] = ['T2','RAIN', 'RAINC', 'RAINNC', 'NetPrecip', 'WindSpeed']
# daily (smoothed) minima
daymin_variables['srfc'] = ['T2']
# weekly (smoothed) maxima
weekmax_variables['xtrm'] = ['T2MEAN', 'T2MAX', 'SPDUV10MEAN']
weekmax_variables['hydro'] = ['RAIN', 'RAINC', 'RAINNC', 'ACSNOW', 'ACSNOM', 'NetPrecip', 'NetWaterFlux', 'WaterForcing']
weekmax_variables['lsm'] = ['SFROFF', 'UDROFF', 'Runoff']
  # Minima (just list base variables; derived variables will be created later)
minimum_variables['srfc'] = ['T2']
minimum_variables['xtrm'] = ['T2MEAN', 'T2MIN', 'SPDUV10MEAN']
minimum_variables['hydro'] = ['RAIN', 'NetPrecip', 'NetWaterFlux', 'WaterForcing']
minimum_variables['plev3d'] = ['GHT_PL', 'Vorticity']
# weekly (smoothed) minima
weekmin_variables['xtrm'] = ['T2MEAN', 'T2MIN', 'SPDUV10MEAN']
weekmin_variables['hydro'] = ['RAIN', 'NetPrecip', 'NetWaterFlux', 'WaterForcing']
weekmin_variables['lsm'] = ['SFROFF','UDROFF','Runoff']
# N.B.: it is important that the derived variables are listed in order of dependency!
# set of pre-requisites
prereq_vars = {key:set() for key in derived_variables.keys()} # pre-requisite variable set by file type
for key in prereq_vars.keys():
prereq_vars[key].update(*[devar.prerequisites for devar in derived_variables[key] if not devar.linear])
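# N.B.: only non-linear derived variables force their prerequisites to be loaded at every time step;
#       linear derived variables are computed from the monthly means at the end (see processFileList)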
## daily variables (can also be 6-hourly or hourly, depending on source file)
if lglobaldaily:
daily_variables = {filetype:[] for filetype in filetypes} # daily variable lists by file type
daily_variables['srfc'] = ['T2', 'PSFC', 'WaterVapor', 'WindSpeed',] # surface climate
daily_variables['xtrm'] = ['T2MIN', 'T2MAX'] # min/max T2
daily_variables['hydro'] = ['RAIN', 'RAINC', 'LiquidPrecip', 'WaterForcing', 'SFCEVP', 'POTEVP'] # water budget
daily_variables['rad'] = ['NetRadiation','ACSWDNB','ACLWDNB','NetLWRadiation',] # surface radiation budget
#daily_variables['lsm'] = [] # runoff and soil temperature
## main work function
# N.B.: the loop iterations should be entirely independent, so that they can be run in parallel
def processFileList(filelist, filetype, ndom, lparallel=False, pidstr='', logger=None, ldebug=False):
''' This function is doing the main work, and is supposed to be run in a multiprocessing environment. '''
## setup files and folders
# load first file to copy some meta data
wrfoutfile = infolder+filelist[0]
logger.debug("\n{0:s} Opening first input file '{1:s}'.".format(pidstr,wrfoutfile))
wrfout = nc.Dataset(wrfoutfile, 'r', format='NETCDF4')
# timeless variables (should be empty, since all timeless variables should be in constant files!)
timeless = [varname for varname,var in wrfout.variables.items() if 'Time' not in var.dimensions]
assert len(timeless) == 0 # actually useless, since all WRF variables have a time dimension...
# time-dependent variables
varlist = [] # list of time-dependent variables to be processed
for varname,var in wrfout.variables.items():
if ('Time' in var.dimensions) and np.issubdtype(var.dtype, np.number) and varname[0:len(bktpfx)] != bktpfx:
varlist.append(varname)
varlist.sort() # alphabetical order...
## derived variables, extrema, and dependencies
# derived variable list
derived_vars = OrderedDict() # it is important that the derived variables are computed in order:
# the reason is that derived variables can depend on other derived variables, and the order in
    # which they are listed should take this into account
for devar in derived_variables[filetype]:
derived_vars[devar.name] = devar
# create consecutive extrema variables
if consecutive_variables[filetype] is not None:
for key,value in consecutive_variables[filetype].items():
if value[0] in derived_vars:
derived_vars[key] = dv.ConsecutiveExtrema(derived_vars[value[0]], value[1], threshold=value[2],
name=key, long_name=value[3])
else:
derived_vars[key] = dv.ConsecutiveExtrema(wrfout.variables[value[0]], value[1], threshold=value[2],
name=key, long_name=value[3], dimmap=midmap)
# method to create derived variables for extrema
def addExtrema(new_variables, mode, interval=0):
for exvar in new_variables[filetype]:
# create derived variable instance
if exvar in derived_vars:
if interval == 0: devar = dv.Extrema(derived_vars[exvar],mode)
else: devar = dv.MeanExtrema(derived_vars[exvar],mode,interval=interval)
else:
if interval == 0: devar = dv.Extrema(wrfout.variables[exvar],mode, dimmap=midmap)
else: devar = dv.MeanExtrema(wrfout.variables[exvar],mode, interval=interval, dimmap=midmap)
# append to derived variables
derived_vars[devar.name] = devar # derived_vars is from the parent scope, not local!
# and now add them
addExtrema(maximum_variables, 'max')
addExtrema(minimum_variables, 'min')
addExtrema(daymax_variables, 'max', interval=1)
addExtrema(daymin_variables, 'min', interval=1)
addExtrema(weekmax_variables, 'max', interval=5) # 5 days is the preferred interval, according to
addExtrema(weekmin_variables, 'min', interval=5) # ETCCDI Climate Change Indices
ldaily = False
if lglobaldaily:
# get varlist (does not include dependencies)
daily_varlist_full = daily_variables[filetype]
if len(daily_varlist_full)>0:
ldaily = True
daily_varlist = []; daily_derived_vars = []
for varname in daily_varlist_full:
if varname in wrfout.variables: daily_varlist.append(varname)
elif varname in derived_vars: daily_derived_vars.append(varname)
else:
raise ArgumentError("Variable '{}' not found in wrfout or derived variables; can only output derived variables that are already being computed for monthly output.".format(varname))
else:
logger.info("\n{0:s} Skipping (sub-)daily output for filetype '{1:s}', since variable list is empty.\n".format(pidstr,filetype))
# if we are only computing derived variables, remove all non-prerequisites
prepq = set().union(*[devar.prerequisites for devar in derived_vars.values()])
if ldaily: prepq |= set(daily_varlist)
if lderivedonly: varlist = [var for var in varlist if var in prepq]
# get some meta info and construct title string (printed after file creation)
begindate = str(nc.chartostring(wrfout.variables[wrftimestamp][0,:10])) # first timestamp in first file
beginyear, beginmonth, beginday = [int(tmp) for tmp in begindate.split('-')]
# always need to begin on the first of a month (discard incomplete data of first month)
if beginday != 1:
beginmonth += 1 # move on to next month
beginday = 1 # and start at the first (always...)
begindate = '{0:04d}-{1:02d}-{2:02d}'.format(beginyear, beginmonth, beginday) # rewrite begin date
# open last file and get last date
lastoutfile = infolder+filelist[-1]
logger.debug("{0:s} Opening last input file '{1:s}'.".format(pidstr,lastoutfile))
lastout = nc.Dataset(lastoutfile, 'r', format='NETCDF4')
lstidx = lastout.variables[wrftimestamp].shape[0]-1 # netcdf library has problems with negative indexing
enddate = str(nc.chartostring(lastout.variables[wrftimestamp][lstidx,:10])) # last timestamp in last file
endyear, endmonth, endday = [int(tmp) for tmp in enddate.split('-')]; del endday # make warning go away...
# the last timestamp should be the next month (i.e. that month is not included)
if endmonth == 1:
endmonth = 12; endyear -= 1 # previous year
else: endmonth -= 1
endday = 1 # first day of last month (always 1st..)
assert 1 <= endday <= 31 and 1 <= endmonth <= 12 # this is kinda trivial...
  enddate = '{0:04d}-{1:02d}-{2:02d}'.format(endyear, endmonth, endday) # rewrite end date
## open/create monthly mean output file
monthly_file = monthlypattern.format(filetype,ndom)
if lparallel: tmppfx = 'tmp_wrfavg_{:s}_'.format(pidstr[1:-1])
else: tmppfx = 'tmp_wrfavg_'
monthly_filepath = outfolder + monthly_file
tmp_monthly_filepath = outfolder + tmppfx + monthly_file
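  # N.B.: all work is done on a temporary copy ('tmp_wrfavg_...'), which is only renamed to its final
  #       name after successful completion (see the os.rename calls at the end of this function)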
if os.path.exists(monthly_filepath):
if loverwrite or os.path.getsize(monthly_filepath) < 1e6: os.remove(monthly_filepath)
# N.B.: NetCDF files smaller than 1MB are usually incomplete header fragments from a previous crashed job
if os.path.exists(tmp_monthly_filepath) and not lrecover: os.remove(tmp_monthly_filepath) # remove old temp files
if os.path.exists(monthly_filepath):
# make a temporary copy of the file to work on (except, if we are recovering a broken temp file)
if not ( lrecover and os.path.exists(tmp_monthly_filepath) ): shutil.copy(monthly_filepath,tmp_monthly_filepath)
# open (temporary) file
logger.debug("{0:s} Opening existing output file '{1:s}'.\n".format(pidstr,monthly_filepath))
monthly_dataset = nc.Dataset(tmp_monthly_filepath, mode='a', format='NETCDF4') # open to append data (mode='a')
# infer start index
meanbeginyear, meanbeginmonth, meanbeginday = [int(tmp) for tmp in monthly_dataset.begin_date.split('-')]
assert meanbeginday == 1, 'always have to begin on the first of a month'
t0 = (beginyear-meanbeginyear)*12 + (beginmonth-meanbeginmonth) + 1
# check time-stamps in old datasets
if monthly_dataset.end_date < begindate: assert t0 == len(monthly_dataset.dimensions[time]) + 1 # another check
    else: assert t0 <= len(monthly_dataset.dimensions[time]) + 1 # get time index where we start (1-based month index relative to the file's begin date)
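    # e.g. (illustrative) if the existing file begins 1979-01 and the new data begins 1980-01,
    #      then t0 = (1980-1979)*12 + (1-1) + 1 = 13, i.e. the 13th (1-based) month slot is processed first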
##
## *** special functions like adding new and recalculating old variables could be added later for daily output ***
##
# checks for new variables
if laddnew or lrecalc:
if t0 != 1: raise DateError("Have to start at the beginning to add new or recompute old variables!") # t0 starts with 1, not 0
meanendyear, meanendmonth, meanendday = [int(tmp) for tmp in monthly_dataset.end_date.split('-')]
assert meanendday == 1
endyear, endmonth = meanendyear, meanendmonth # just adding new, not extending!
enddate = monthly_dataset.end_date # for printing...
# check base variables
if laddnew or lrecalc: newvars = []
for var in varlist:
if var not in monthly_dataset.variables:
if laddnew: newvars.append(var)
else: varlist.remove(var)
#raise IOError, "{0:s} variable '{1:s}' not found in file '{2:s}'".format(pidstr,var.name,monthly_file)
# add new variables to netcdf file
if laddnew and len(newvars) > 0:
# copy remaining dimensions to new datasets
if midmap is not None:
dimlist = [midmap.get(dim,dim) for dim in wrfout.dimensions.keys() if dim != wrftime]
else: dimlist = [dim for dim in wrfout.dimensions.keys() if dim != wrftime]
dimlist = [dim for dim in dimlist if dim not in monthly_dataset.dimensions] # only the new ones!
copy_dims(monthly_dataset, wrfout, dimlist=dimlist, namemap=dimmap, copy_coords=False) # don't have coordinate variables
# create time-dependent variable in new datasets
copy_vars(monthly_dataset, wrfout, varlist=newvars, dimmap=dimmap, copy_data=False) # do not copy data - need to average
# change units of accumulated variables (per second)
for varname in newvars: # only new vars
assert varname in monthly_dataset.variables
if varname in acclist:
meanvar = monthly_dataset.variables[varname]
meanvar.units = meanvar.units + '/s' # units per second!
# add variables that should be recalculated
if lrecalc:
for var in recalcvars:
if var in monthly_dataset.variables and var in wrfout.variables:
if var not in newvars: newvars.append(var)
#else: raise ArgumentError, "Variable '{:s}' scheduled for recalculation is not present in output file '{:s}'.".format(var,monthly_filepath)
# check derived variables
if laddnew or lrecalc: newdevars = []
for varname,var in derived_vars.items():
if varname in monthly_dataset.variables:
var.checkPrerequisites(monthly_dataset)
        if not var.checked: raise ValueError("Prerequisites for derived variable '{:s}' not found.".format(varname))
if lrecalc:
if ( lderivedonly and len(recalcvars) == 0 ) or ( varname in recalcvars ):
newdevars.append(varname)
var.checkPrerequisites(monthly_dataset) # as long as they are sorted correctly...
#del monthly_dataset.variables[varname]; monthly_dataset.sync()
#var.createVariable(monthly_dataset) # this does not seem to work...
else:
if laddnew:
var.checkPrerequisites(monthly_dataset) # as long as they are sorted correctly...
var.createVariable(monthly_dataset)
newdevars.append(varname)
else: del derived_vars[varname] # don't bother
# N.B.: it is not possible that a previously computed variable depends on a missing variable,
# unless it was purposefully deleted, in which case this will crash!
#raise (dv.DerivedVariableError, "{0:s} Derived variable '{1:s}' not found in file '{2:s}'".format(pidstr,var.name,monthly_file))
# now figure out effective variable list
if laddnew or lrecalc:
varset = set(newvars)
devarset = set(newdevars)
ndv = -1
# check prerequisites
while ndv != len(devarset):
ndv = len(devarset)
for devar in list(devarset): # normal variables don't have prerequisites
for pq in derived_vars[devar].prerequisites:
if pq in derived_vars: devarset.add(pq)
else: varset.add(pq)
# N.B.: this algorithm for dependencies relies on the fact that derived_vars is already ordered correctly,
# and unused variables can simply be removed (below), without changing the order;
      #       a stand-alone dependency resolution would require sorting the derived_vars in order of execution
# consolidate lists
      for devar in list(derived_vars.keys()): # iterate over a copy of the keys, since entries are deleted below
        if devar not in devarset: del derived_vars[devar] # don't bother with this one...
      varlist = list(varset) # order doesn't really matter... but whatever...
varlist.sort() # ... alphabetical order...
else:
logger.debug("{0:s} Creating new output file '{1:s}'.\n".format(pidstr,monthly_filepath))
monthly_dataset = nc.Dataset(tmp_monthly_filepath, 'w', format='NETCDF4') # open to start a new file (mode='w')
t0 = 1 # time index where we start (first month)
monthly_dataset.createDimension(time, size=None) # make time dimension unlimited
add_coord(monthly_dataset, time, data=None, dtype='i4', atts=dict(units='month since '+begindate)) # unlimited time dimension
# copy remaining dimensions to new datasets
if midmap is not None:
dimlist = [midmap.get(dim,dim) for dim in wrfout.dimensions.keys() if dim != wrftime]
else: dimlist = [dim for dim in wrfout.dimensions.keys() if dim != wrftime]
copy_dims(monthly_dataset, wrfout, dimlist=dimlist, namemap=dimmap, copy_coords=False) # don't have coordinate variables
# copy time-less variable to new datasets
copy_vars(monthly_dataset, wrfout, varlist=timeless, dimmap=dimmap, copy_data=True) # copy data
# create time-dependent variable in new datasets
copy_vars(monthly_dataset, wrfout, varlist=varlist, dimmap=dimmap, copy_data=False) # do not copy data - need to average
# change units of accumulated variables (per second)
for varname in acclist:
if varname in monthly_dataset.variables:
meanvar = monthly_dataset.variables[varname]
meanvar.units = meanvar.units + '/s' # units per second!
# also create variable for time-stamps in new datasets
if wrftimestamp in wrfout.variables:
      copy_vars(monthly_dataset, wrfout, varlist=[wrftimestamp], dimmap=dimmap, copy_data=False) # do not copy data - need to average
# create derived variables
for var in derived_vars.values():
var.checkPrerequisites(monthly_dataset) # as long as they are sorted correctly...
var.createVariable(monthly_dataset) # derived variables need to be added in order of computation
# copy global attributes
copy_ncatts(monthly_dataset, wrfout, prefix='') # copy all attributes (no need for prefix; all upper case are original)
# some new attributes
monthly_dataset.acc_diff_mode = 'simple' if lsmplDiff else 'centered'
monthly_dataset.description = 'wrf{0:s}_d{1:02d} monthly means'.format(filetype,ndom)
monthly_dataset.begin_date = begindate
monthly_dataset.experiment = exp
monthly_dataset.creator = 'Andre R. Erler'
# sync with file
monthly_dataset.sync()
## open/create daily output file
if ldaily:
# get datetime
begindatetime = dv.getTimeStamp(wrfout, 0, wrftimestamp)
# figure out filename
daily_file = dailypattern.format(filetype,ndom)
if lparallel: tmppfx = 'tmp_wrfavg_{:s}_'.format(pidstr[1:-1])
else: tmppfx = 'tmp_wrfavg_'
daily_filepath = outfolder + daily_file
tmp_daily_filepath = outfolder + tmppfx + daily_file
if os.path.exists(daily_filepath):
if loverwrite or os.path.getsize(daily_filepath) < 1e6: os.remove(daily_filepath)
# N.B.: NetCDF files smaller than 1MB are usually incomplete header fragments from a previous crashed job
if os.path.exists(tmp_daily_filepath) and not lrecover: os.remove(tmp_daily_filepath) # remove old temp files
if os.path.exists(daily_filepath):
raise NotImplementedError("Currently, updating of and appending to (sub-)daily output files is not supported.")
else:
logger.debug("{0:s} Creating new (sub-)daily output file '{1:s}'.\n".format(pidstr,daily_filepath))
daily_dataset = nc.Dataset(tmp_daily_filepath, 'w', format='NETCDF4') # open to start a new file (mode='w')
      timestep_start = 0 # time step where we start (first time step)
daily_dataset.createDimension(time, size=None) # make time dimension unlimited
add_coord(daily_dataset, time, data=None, dtype='i8', atts=dict(units='seconds since '+begindatetime)) # unlimited time dimension
# copy remaining dimensions to new datasets
if midmap is not None:
dimlist = [midmap.get(dim,dim) for dim in wrfout.dimensions.keys() if dim != wrftime]
else: dimlist = [dim for dim in wrfout.dimensions.keys() if dim != wrftime]
copy_dims(daily_dataset, wrfout, dimlist=dimlist, namemap=dimmap, copy_coords=False) # don't have coordinate variables
# copy time-less variable to new datasets
copy_vars(daily_dataset, wrfout, varlist=timeless, dimmap=dimmap, copy_data=True) # copy data
# create time-dependent variable in new datasets
copy_vars(daily_dataset, wrfout, varlist=daily_varlist, dimmap=dimmap, copy_data=False) # do not copy data - need to resolve buckets and straighten time
# change units of accumulated variables (per second)
for varname in acclist:
if varname in daily_dataset.variables:
dayvar = daily_dataset.variables[varname]
dayvar.units = dayvar.units + '/s' # units per second!
# also create variable for time-stamps in new datasets
if wrftimestamp in wrfout.variables:
copy_vars(daily_dataset, wrfout, varlist=[wrftimestamp], dimmap=dimmap, copy_data=False) # do not copy data - need to straighten out time axis
if wrfxtime in wrfout.variables:
copy_vars(daily_dataset, wrfout, varlist=[wrfxtime], dimmap=dimmap, copy_data=False) # do not copy data - need to straighten out time axis
# create derived variables
for devarname in daily_derived_vars:
# don't need to check for prerequisites, since they are already being checked and computed for monthly output
derived_vars[devarname].createVariable(daily_dataset) # derived variables need to be added in order of computation
# copy global attributes
copy_ncatts(daily_dataset, wrfout, prefix='') # copy all attributes (no need for prefix; all upper case are original)
# some new attributes
daily_dataset.acc_diff_mode = 'simple' if lsmplDiff else 'centered'
daily_dataset.description = 'wrf{0:s}_d{1:02d} post-processed timestep output'.format(filetype,ndom)
daily_dataset.begin_date = begindatetime
daily_dataset.experiment = exp
daily_dataset.creator = 'Andre R. Erler'
# sync with file
daily_dataset.sync()
## construct dependencies
# update linearity: dependencies of non-linear variables have to be treated as non-linear themselves
lagain = True
# parse through dependencies until nothing changes anymore
while lagain:
lagain = False
for dename,devar in derived_vars.items():
# variables for daily output can be treated as non-linear, so that they are computed at the native timestep
if ldaily and dename in daily_derived_vars: devar.linear = False
if not devar.linear:
# make sure all dependencies are also treated as non-linear
for pq in devar.prerequisites:
if pq in derived_vars and derived_vars[pq].linear:
lagain = True # indicate modification
derived_vars[pq].linear = False
# construct dependency set (should include extrema now)
pqset = set().union(*[devar.prerequisites for devar in derived_vars.values() if not devar.linear])
if ldaily:
# daily output variables need to be treated as prerequisites, so that full timestep fields are loaded for bucket variables
pqset |= set(daily_varlist)
cset = set().union(*[devar.constants for devar in derived_vars.values() if devar.constants is not None])
# initialize dictionary for temporary storage
tmpdata = dict() # not allocated - use sparingly
# load constants, if necessary
const = dict()
lconst = len(cset) > 0
if lconst:
constfile = infolder+constpattern.format(ndom)
if not os.path.exists(constfile): constfile += '.nc' # try with extension
if not os.path.exists(constfile): raise IOError("No constants file found! ({:s})".format(constfile))
logger.debug("\n{0:s} Opening constants file '{1:s}'.\n".format(pidstr,constfile))
wrfconst = nc.Dataset(constfile, 'r', format='NETCDF4')
# constant variables
for cvar in cset:
if cvar in wrfconst.variables: const[cvar] = wrfconst.variables[cvar][:]
elif cvar in wrfconst.ncattrs(): const[cvar] = wrfconst.getncattr(cvar)
else: raise ValueError("Constant variable/attribute '{:s}' not found in constants file '{:s}'.".format(cvar,constfile))
else: const = None
# check axes order of prerequisits and constants
for devar in derived_vars.values():
for pq in devar.prerequisites:
# get dimensions of prerequisite
if pq in varlist: pqax = wrfout.variables[pq].dimensions
elif lconst and pq in wrfconst.variables: pqax = wrfconst.variables[pq].dimensions
elif lconst and pq in const: pqax = () # a scalar value, i.e. no axes
elif pq in derived_vars: pqax = derived_vars[pq].axes
else: raise ValueError("Prerequisite '{:s} for variable '{:s}' not found!".format(pq,devar.name))
# check axes for consistent order
index = -1
for ax in devar.axes:
if ax in pqax:
idx = pqax.index(ax)
if idx > index: index = idx
else: raise IndexError("The axis order of '{:s}' and '{:s}' is inconsistent - this can lead to unexpected results!".format(devar.name,pq))
# announcement: format title string and print
varstr = ''; devarstr = '' # make variable list, also for derived variables
for var in varlist: varstr += '{}, '.format(var)
for devar in derived_vars.values(): devarstr += '%s, '%devar.name
titlestr = '\n\n{0:s} *** Processing wrf{1:s} files for domain {2:d}. ***'.format(pidstr,filetype,ndom)
titlestr += '\n (monthly means from {0:s} to {1:s}, incl.)'.format(begindate,enddate)
if varstr: titlestr += '\n Variable list: {0:s}'.format(str(varstr),)
else: titlestr += '\n Variable list: None'
if devarstr: titlestr += '\n Derived variables: {0:s}'.format(str(devarstr),)
# print meta info (print everything in one chunk, so output from different processes does not get mangled)
logger.info(titlestr)
# extend time dimension in monthly average
if (endyear < beginyear) or (endyear == beginyear and endmonth < beginmonth):
raise DateError("End date is before begin date: {:04d}-{:02d} < {:04d}-{:02d}".format(endyear,endmonth,beginyear,beginmonth))
times = np.arange(t0,t0+(endyear-beginyear)*12+endmonth-beginmonth+1)
# handling of time intervals for accumulated variables
if wrfxtime in wrfout.variables:
lxtime = True # simply compute differences from XTIME (assuming minutes)
time_desc = wrfout.variables[wrfxtime].description
assert time_desc.startswith("minutes since "), time_desc
assert "simulation start" in time_desc or begindate in time_desc or '**' in time_desc, time_desc
# N.B.: the last check (**) is for cases where the date in WRF is garbled...
if t0 == 1 and not wrfout.variables[wrfxtime][0] == 0:
raise ValueError( 'XTIME in first input file does not start with 0!\n'+
'(this can happen, when the first input file is missing)' )
elif wrftimestamp in wrfout.variables:
lxtime = False # interpret timestamp in Times using datetime module
else: raise TypeError
# check if there is a missing_value flag
if 'P_LEV_MISSING' in wrfout.ncattrs():
missing_value = wrfout.P_LEV_MISSING # usually -999.
# N.B.: this is only used in plev3d files, where pressure levels intersect the ground
else: missing_value = None
# allocate fields
data = dict() # temporary data arrays
for var in varlist:
tmpshape = list(wrfout.variables[var].shape)
del tmpshape[wrfout.variables[var].dimensions.index(wrftime)] # allocated arrays have no time dimension
assert len(tmpshape) == len(wrfout.variables[var].shape) -1
data[var] = np.zeros(tmpshape, dtype=dtype_float) # allocate
#if missing_value is not None:
# data[var] += missing_value # initialize with missing value
# allocate derived data arrays (for non-linear variables)
pqdata = {pqvar:None for pqvar in pqset} # temporary data array holding instantaneous values to compute derived variables
# N.B.: since data is only referenced from existing arrays, allocation is not necessary
dedata = dict() # non-linear derived variables
# N.B.: linear derived variables are computed directly from the monthly averages
for dename,devar in derived_vars.items():
if not devar.linear:
tmpshape = [len(wrfout.dimensions[ax]) for ax in devar.axes if ax != time] # infer shape
assert len(tmpshape) == len(devar.axes) -1 # no time dimension
dedata[dename] = np.zeros(tmpshape, dtype=dtype_float) # allocate
# prepare computation of monthly means
filecounter = 0 # number of wrfout file currently processed
i0 = t0-1 # index position we write to: i = i0 + n (zero-based, of course)
if ldaily: daily_start_idx = daily_end_idx = timestep_start # for each file cycle, the time index where to write the data
## start loop over month
if lparallel: progressstr = '' # a string printing the processed dates
else: logger.info('\n Processed dates:')
try:
# loop over month and progressively stepping through input files
for n,meantime in enumerate(times):
# meantime: (complete) month since simulation start
lasttimestamp = None # carry over start time, when moving to the next file (defined below)
# N.B.: when moving to the next file, the script auto-detects and resets this property, no need to change here!
# However (!) it is necessary to reset this for every month, because it is not consistent!
# extend time array / month counter
meanidx = i0 + n
if meanidx == len(monthly_dataset.variables[time]):
lskip = False # append next data point / time step
elif loverwrite or laddnew or lrecalc:
lskip = False # overwrite this step or add data point for new variables
elif meanidx == len(monthly_dataset.variables[time])-1:
if lrecover or monthly_dataset.variables[time][meanidx] == -1:
lskip = False # recompute last step, because it may be incomplete
else: lskip = True
else:
lskip = True # skip this step, but we still have to verify the timing
# check if we are overwriting existing data
if meanidx != len(monthly_dataset.variables[time]):
assert meanidx < len(monthly_dataset.variables[time])
assert meantime == monthly_dataset.variables[time][meanidx] or monthly_dataset.variables[time][meanidx] == -1
# N.B.: writing records is delayed to avoid incomplete records in case of a crash
# current date
currentyear, currentmonth = divmod(n+beginmonth-1,12)
currentyear += beginyear; currentmonth +=1
# sanity checks
assert meanidx + 1 == meantime
currentdate = '{0:04d}-{1:02d}'.format(currentyear,currentmonth)
# determine appropriate start index
wrfstartidx = 0
while currentdate > str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,0:7])):
wrfstartidx += 1 # count forward
if wrfstartidx != 0: logger.debug('\n{0:s} {1:s}: Starting month at index {2:d}.'.format(pidstr, currentdate, wrfstartidx))
# save WRF time-stamp for beginning of month for the new file, for record
firsttimestamp_chars = wrfout.variables[wrftimestamp][wrfstartidx,:]
#logger.debug('\n{0:s}{1:s}-01_00:00:00, {2:s}'.format(pidstr, currentdate, str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,:])))
if '{0:s}-01_00:00:00'.format(currentdate,) == str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,:])):
pass # proper start of the month
elif meanidx == 0 and '{0:s}-01_06:00:00'.format(currentdate,) == str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,:])):
pass # for some reanalysis... but only at start of simulation
else: raise DateError("{0:s} Did not find first day of month to compute monthly average.".format(pidstr) +
"file: {0:s} date: {1:s}-01_00:00:00".format(monthly_file,currentdate))
# prepare summation of output time steps
lcomplete = False #
ntime = 0 # accumulated output time steps
# time when accumulation starts (in minutes)
# N.B.: the first value is saved as negative, so that adding the last value yields a positive interval
if lxtime: xtime = -1 * wrfout.variables[wrfxtime][wrfstartidx] # minutes
monthlytimestamps = [] # list of timestamps, also used for time period calculation
# clear temporary arrays
for varname,var in data.items(): # base variables
data[varname] = np.zeros(var.shape, dtype=dtype_float) # reset to zero
for dename,devar in dedata.items(): # derived variables
dedata[dename] = np.zeros(devar.shape, dtype=dtype_float) # reset to zero
## loop over files and average
while not lcomplete:
# determine valid end index by checking dates from the end counting backwards
# N.B.: start index is determined above (if a new file was opened in the same month,
# the start index is automatically set to 0 or 1 when the file is opened, below)
wrfendidx = len(wrfout.dimensions[wrftime])-1
while wrfendidx >= 0 and currentdate < str(nc.chartostring(wrfout.variables[wrftimestamp][wrfendidx,0:7])):
if not lcomplete: lcomplete = True # break loop over file if next month is in this file (critical!)
wrfendidx -= 1 # count backwards
#if wrfendidx < len(wrfout.dimensions[wrftime])-1: # check if count-down actually happened
wrfendidx += 1 # reverse last step so that counter sits at first step of next month
        # N.B.: if this is not the last file, there was no iteration and wrfendidx should be the length of the file;
# in this case, wrfendidx is only used to define Python ranges, which are exclusive to the upper boundary;
# if the first date in the file is already the next month, wrfendidx will be 0 and this is the final step;
assert wrfendidx >= wrfstartidx # i.e. wrfendidx = wrfstartidx = 0 is an empty step to finalize accumulation
assert lcomplete or wrfendidx == len(wrfout.dimensions[wrftime])
# if this is the last file and the month is not complete, we have to forcefully terminate
if filecounter == len(filelist)-1 and not lcomplete:
lcomplete = True # end loop
lskip = True # don't write results for this month!
if not lskip:
## compute monthly averages
# loop over variables
for varname in varlist:
logger.debug('{0:s} {1:s}'.format(pidstr,varname))
if varname not in wrfout.variables:
logger.info("{:s} Variable {:s} missing in file '{:s}' - filling with NaN!".format(pidstr,varname,filelist[filecounter]))
data[varname] *= np.NaN # turn everything into NaN, if variable is missing
# N.B.: this can happen, when an output stream was reconfigured between cycle steps
else:
var = wrfout.variables[varname]
tax = var.dimensions.index(wrftime) # index of time axis
slices = [slice(None)]*len(var.shape)
# construct informative IOError message
              ioerror = "An error occurred in file '{:s}'; variable: '{:s}'\n('{:s}')".format(filelist[filecounter], varname, infolder)
# decide how to average
## Accumulated Variables
if varname in acclist:
if missing_value is not None:
raise NotImplementedError("Can't handle accumulated variables with missing values yet.")
# compute mean as difference between end points; normalize by time difference
if ntime == 0: # first time step of the month
slices[tax] = wrfstartidx # relevant time interval
try: tmp = var.__getitem__(slices) # get array
except: raise IOError(ioerror) # informative IO Error
if acclist[varname] is not None: # add bucket level, if applicable
bkt = wrfout.variables[bktpfx+varname]
tmp += bkt.__getitem__(slices) * acclist[varname]
# check that accumulated fields at the beginning of the simulation are zero
if meanidx == 0 and wrfstartidx == 0:
# note that if we are skipping the first step, there is no check
if np.max(tmp) != 0 or np.min(tmp) != 0:
raise ValueError( 'Accumulated fields were not initialized with zero!\n' +
'(this can happen, when the first input file is missing)' )
data[varname] = -1 * tmp # so we can do an in-place operation later
# N.B.: both, begin and end, can be in the same file, hence elif is not appropriate!
if lcomplete: # last step
slices[tax] = wrfendidx # relevant time interval
try: tmp = var.__getitem__(slices) # get array
except: raise IOError(ioerror) # informative IO Error
if acclist[varname] is not None: # add bucket level, if applicable
bkt = wrfout.variables[bktpfx+varname]
tmp += bkt.__getitem__(slices) * acclist[varname]
data[varname] += tmp # the starting data is already negative
# if variable is a prerequisit to others, compute instantaneous values
if varname in pqset:
# compute mean via sum over all elements; normalize by number of time steps
if lsmplDiff: slices[tax] = slice(wrfstartidx,wrfendidx+1) # load longer time interval for diff
else: slices[tax] = slice(wrfstartidx,wrfendidx) # relevant time interval
try: tmp = var.__getitem__(slices) # get array
except: raise IOError(ioerror) # informative IO Error
if acclist[varname] is not None: # add bucket level, if applicable
bkt = wrfout.variables[bktpfx+varname]
tmp = tmp + bkt.__getitem__(slices) * acclist[varname]
if lsmplDiff: pqdata[varname] = np.diff(tmp, axis=tax) # simple differences
else: pqdata[varname] = dv.ctrDiff(tmp, axis=tax, delta=1) # normalization comes later
##
## *** daily values for bucket variables are generated here, ***
## *** but should we really use *centered* differences??? ***
##
elif varname[0:len(bktpfx)] == bktpfx:
pass # do not process buckets
## Normal Variables
else:
# skip "empty" steps (only needed to difference accumulated variables)
if wrfendidx > wrfstartidx:
# compute mean via sum over all elements; normalize by number of time steps
slices[tax] = slice(wrfstartidx,wrfendidx) # relevant time interval
try: tmp = var.__getitem__(slices) # get array
except: raise IOError(ioerror) # informative IO Error
if missing_value is not None:
# N.B.: missing value handling is really only necessary when missing values are time-dependent
tmp = np.where(tmp == missing_value, np.NaN, tmp) # set missing values to NaN
#tmp = ma.masked_equal(tmp, missing_value, copy=False) # mask missing values
data[varname] = data[varname] + tmp.sum(axis=tax) # add to sum
# N.B.: in-place operations with non-masked array destroy the mask, hence need to use this
# keep data in memory if used in computation of derived variables
if varname in pqset: pqdata[varname] = tmp
## compute derived variables
# but first generate a list of timestamps
if lcomplete: tmpendidx = wrfendidx
else: tmpendidx = wrfendidx -1 # end of file
# assemble list of time stamps
currenttimestamps = [] # relevant timestamps in this file
for i in range(wrfstartidx,tmpendidx+1):
timestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][i,:]))
currenttimestamps.append(timestamp)
monthlytimestamps.extend(currenttimestamps) # add to monthly collection
# write daily timestamps
if ldaily:
nsteps = wrfendidx - wrfstartidx
daily_start_idx = daily_end_idx # from previous step
daily_end_idx = daily_start_idx + nsteps
              # set time values to -1, to indicate they are being worked on
daily_dataset.variables[time][daily_start_idx:daily_end_idx] = -1
ncvar = None; vardata = None # dummies, to prevent crash later on, if varlist is empty
# copy timestamp and xtime data
daily_dataset.variables[wrftimestamp][daily_start_idx:daily_end_idx,:] = wrfout.variables[wrftimestamp][wrfstartidx:wrfendidx,:]
if lxtime:
daily_dataset.variables[wrfxtime][daily_start_idx:daily_end_idx] = wrfout.variables[wrfxtime][wrfstartidx:wrfendidx]
daily_dataset.sync()
# normalize accumulated pqdata with output interval time
if wrfendidx > wrfstartidx:
assert tmpendidx > wrfstartidx, 'There should never be a single value in a file: wrfstartidx={:d}, wrfendidx={:d}, lcomplete={:s}'.format(wrfstartidx,wrfendidx,str(lcomplete))
# compute time delta
delta = dv.calcTimeDelta(currenttimestamps)
if lxtime:
xdelta = wrfout.variables[wrfxtime][tmpendidx] - wrfout.variables[wrfxtime][wrfstartidx]
xdelta *= 60. # convert minutes to seconds
if delta != xdelta: raise ValueError("Time calculation from time stamps and model time are inconsistent: {:f} != {:f}".format(delta,xdelta))
delta /= float(tmpendidx - wrfstartidx) # the average interval between output time steps
# loop over time-step data
for pqname,pqvar in pqdata.items():
if pqname in acclist: pqvar /= delta # normalize
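            # e.g. (illustrative) for 6-hourly output delta = 21600 s, so differences of accumulated
            #      precipitation become average rates in mm/s, matching the '/s' unit suffix set above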
# write to daily file
if ldaily:
# loop over variables and save data arrays
for varname in daily_varlist:
ncvar = daily_dataset.variables[varname] # destination variable in daily output
vardata = pqdata[varname] # timestep data
if missing_value is not None: # make sure the missing value flag is preserved
vardata = np.where(np.isnan(vardata), missing_value, vardata)
ncvar.missing_value = missing_value # just to make sure
if ncvar.ndim > 1: ncvar[daily_start_idx:daily_end_idx,:] = vardata # here time is always the outermost index
else: ncvar[daily_start_idx:daily_end_idx] = vardata
daily_dataset.sync()
# loop over derived variables
# special treatment for certain string variables
if 'Times' in pqset: pqdata['Times'] = currenttimestamps[:wrfendidx-wrfstartidx] # need same length as actual time dimension
logger.debug('\n{0:s} Available prerequisites: {1:s}'.format(pidstr, str(list(pqdata.keys()))))
for dename,devar in derived_vars.items():
if not devar.linear: # only non-linear ones here, linear one at the end
logger.debug('{0:s} {1:s} {2:s}'.format(pidstr, dename, str(devar.prerequisites)))
tmp = devar.computeValues(pqdata, aggax=tax, delta=delta, const=const, tmp=tmpdata) # possibly needed as pre-requisite
dedata[dename] = devar.aggregateValues(tmp, aggdata=dedata[dename], aggax=tax)
# N.B.: in-place operations with non-masked array destroy the mask, hence need to use this
if dename in pqset: pqdata[dename] = tmp
# save to daily output
if ldaily:
if dename in daily_derived_vars:
ncvar = daily_dataset.variables[dename] # destination variable in daily output
vardata = tmp
if missing_value is not None: # make sure the missing value flag is preserved
vardata = np.where(np.isnan(vardata), missing_value, vardata)
ncvar.missing_value = missing_value # just to make sure
if ncvar.ndim > 1: ncvar[daily_start_idx:daily_end_idx,:] = vardata # here time is always the outermost index
else: ncvar[daily_start_idx:daily_end_idx] = vardata
# N.B.: missing values should be handled implicitly, following missing values in pre-requisites
del tmp # memory hygiene
if ldaily:
# add time in seconds, based on index and time delta
daily_dataset.variables[time][daily_start_idx:daily_end_idx] = np.arange(daily_start_idx,daily_end_idx, dtype='i8')*int(delta)
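              # N.B.: this assumes an essentially constant output interval, since the global step index
              #       is multiplied by the current chunk's average time delta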
daily_dataset.end_date = dv.getTimeStamp(wrfout, wrfendidx-1, wrftimestamp) # update current end date
# N.B.: adding the time coordinate and attributes finalized this step
# sync data and clear memory
daily_dataset.sync(); daily_dataset.close() # sync and close dataset
del daily_dataset, ncvar, vardata # remove all other references to data
gc.collect() # clean up memory
# N.B.: the netCDF4 module keeps all data written to a netcdf file in memory; there is no flush command
daily_dataset = nc.Dataset(tmp_daily_filepath, mode='a', format='NETCDF4') # re-open to append more data (mode='a')
              # N.B.: flushing (close/re-open) the daily file here, inside the write branch, avoids repeated
              #       close/re-open cycles when no data was written (i.e. the month was skipped);
              #       memory is only flushed when data was actually written
# increment counters
ntime += wrfendidx - wrfstartidx
if lcomplete:
# N.B.: now wrfendidx should be a valid time step
# check time steps for this month
laststamp = monthlytimestamps[0]
for timestamp in monthlytimestamps[1:]:
if laststamp >= timestamp:
raise DateError('Timestamps not in order, or repetition: {:s}'.format(timestamp))
laststamp = timestamp
# calculate time period and check against model time (if available)
timeperiod = dv.calcTimeDelta(monthlytimestamps)
if lxtime:
xtime += wrfout.variables[wrfxtime][wrfendidx] # get final time interval (in minutes)
xtime *= 60. # convert minutes to seconds
if timeperiod != xtime:
logger.info("Time calculation from time stamps and model time are inconsistent: {:f} != {:f}".format(timeperiod,xtime))
# two possible ends: month is done or reached end of file
# if we reached the end of the file, open a new one and go again
if not lcomplete:
# N.B.: here wrfendidx is not a valid time step, but the length of the file, i.e. wrfendidx-1 is the last valid time step
lasttimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][wrfendidx-1,:])) # needed to determine, if first timestep is the same as last
assert lskip or lasttimestamp == monthlytimestamps[-1]
# lasttimestep is also used for leap-year detection later on
assert len(wrfout.dimensions[wrftime]) == wrfendidx, (len(wrfout.dimensions[wrftime]),wrfendidx) # wrfendidx should be the length of the file, not the last index!
## find first timestep (compare to last of previous file) and (re-)set time step counter
# initialize search
tmptimestamp = lasttimestamp; filelen1 = len(wrfout.dimensions[wrftime]) - 1; wrfstartidx = filelen1;
while tmptimestamp <= lasttimestamp:
if wrfstartidx < filelen1:
wrfstartidx += 1 # step forward in current file
else:
# open next file, if we reach the end
wrfout.close() # close file
#del wrfout; gc.collect() # doesn't seem to work here - strange error
# N.B.: filecounter +1 < len(filelist) is already checked above
filecounter += 1 # move to next file
if filecounter < len(filelist):
logger.debug("\n{0:s} Opening input file '{1:s}'.\n".format(pidstr,filelist[filecounter]))
wrfout = nc.Dataset(infolder+filelist[filecounter], 'r', format='NETCDF4') # ... and open new one
filelen1 = len(wrfout.dimensions[wrftime]) - 1 # length of new file
wrfstartidx = 0 # reset index
# check consistency of missing value flag
assert missing_value is None or missing_value == wrfout.P_LEV_MISSING
else: break # this is not really tested...
tmptimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,:]))
# some checks
firsttimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][0,:]))
error_string = "Inconsistent time-stamps between files:\n lasttimestamp='{:s}', firsttimestamp='{:s}', wrfstartidx={:d}"
if firsttimestamp == lasttimestamp: # skip the initialization step (was already processed in last step)
if wrfstartidx != 1: raise DateError(error_string.format(lasttimestamp, firsttimestamp, wrfstartidx))
if firsttimestamp > lasttimestamp: # no duplicates: first timestep in next file was not present in previous file
if wrfstartidx != 0: raise DateError(error_string.format(lasttimestamp, firsttimestamp, wrfstartidx))
if firsttimestamp < lasttimestamp: # files overlap: count up to next timestamp in sequence
#if wrfstartidx == 2: warn(error_string.format(lasttimestamp, firsttimestamp, wrfstartidx))
if wrfstartidx == 0: raise DateError(error_string.format(lasttimestamp, firsttimestamp, wrfstartidx))
else: # month complete
# print feedback (the current month) to indicate completion
if lparallel: progressstr += '{0:s}, '.format(currentdate) # bundle output in parallel mode
else: logger.info('{0:s},'.format(currentdate)) # serial mode
# clear temporary storage
if lcarryover:
for devar in list(derived_vars.values()):
if not (devar.tmpdata is None or devar.carryover):
if devar.tmpdata in tmpdata: del tmpdata[devar.tmpdata]
else: tmpdata = dict() # reset entire temporary storage
# N.B.: now wrfendidx is a valid timestep, but indicates the first of the next month
lasttimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][wrfendidx,:])) # this should be the first timestep of the next month
assert lskip or lasttimestamp == monthlytimestamps[-1]
# open next file (if end of month and file coincide)
if wrfendidx == len(wrfout.dimensions[wrftime])-1: # reach end of file
## find first timestep (compare to last of previous file) and (re-)set time step counter
# initialize search
tmptimestamp = lasttimestamp; filelen1 = len(wrfout.dimensions[wrftime]) - 1; wrfstartidx = filelen1;
while tmptimestamp <= lasttimestamp:
if wrfstartidx < filelen1:
wrfstartidx += 1 # step forward in current file
else:
# open next file, if we reach the end
wrfout.close() # close file
#del wrfout; gc.collect() # doesn't seem to work here - strange error
# N.B.: filecounter +1 < len(filelist) is already checked above
filecounter += 1 # move to next file
if filecounter < len(filelist):
logger.debug("\n{0:s} Opening input file '{1:s}'.\n".format(pidstr,filelist[filecounter]))
wrfout = nc.Dataset(infolder+filelist[filecounter], 'r', format='NETCDF4') # ... and open new one
filelen1 = len(wrfout.dimensions[wrftime]) - 1 # length of new file
wrfstartidx = 0 # reset index
# check consistency of missing value flag
assert missing_value is None or missing_value == wrfout.P_LEV_MISSING
else: break # this is not really tested...
tmptimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][wrfstartidx,:]))
# N.B.: same code as in "not complete" section
# wrfout.close() # close file
# #del wrfout; gc.collect() # doesn't seem to work here - strange error
# filecounter += 1 # move to next file
# if filecounter < len(filelist):
# logger.debug("\n{0:s} Opening input file '{1:s}'.\n".format(pidstr,filelist[filecounter]))
# wrfout = nc.Dataset(infolder+filelist[filecounter], 'r', format='NETCDF4') # ... and open new one
# firsttimestamp = str(nc.chartostring(wrfout.variables[wrftimestamp][0,:])) # check first timestep (compare to last of previous file)
# wrfstartidx = 0 # always use initialization step (but is reset above anyway)
# if firsttimestamp != lasttimestamp:
# raise NotImplementedError, "If the first timestep of the next month is the last timestep in the file, it has to be duplicated in the next file."
      ## now the loop over files has terminated and we need to normalize and save the results
if not lskip:
# extend time axis
monthly_dataset.variables[time][meanidx] = -1 # mark timestep in progress
ncvar = None; vardata = None # dummies, to prevent crash later on, if varlist is empty
# loop over variable names
for varname in varlist:
vardata = data[varname]
# decide how to normalize
if varname in acclist: vardata /= timeperiod
else: vardata /= ntime
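          # e.g. (illustrative) accumulated RAINNC (mm) becomes an average rate in mm/s after division by the
          #      month's length in seconds, while instantaneous fields like T2 become simple arithmetic means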
# save variable
ncvar = monthly_dataset.variables[varname] # this time the destination variable
if missing_value is not None: # make sure the missing value flag is preserved
vardata = np.where(np.isnan(vardata), missing_value, vardata)
ncvar.missing_value = missing_value # just to make sure
if ncvar.ndim > 1: ncvar[meanidx,:] = vardata # here time is always the outermost index
else: ncvar[meanidx] = vardata
# compute derived variables
#logger.debug('\n{0:s} Derived Variable Stats: (mean/min/max)'.format(pidstr))
for dename,devar in derived_vars.items():
if devar.linear:
vardata = devar.computeValues(data) # compute derived variable now from averages
elif devar.normalize:
vardata = dedata[dename] / ntime # no accumulated variables here!
else: vardata = dedata[dename] # just the data...
# not all variables are normalized (e.g. extrema)
#if ldebug:
# mmm = (float(np.nanmean(vardata)),float(np.nanmin(vardata)),float(np.nanmax(vardata)),)
# logger.debug('{0:s} {1:s}, {2:f}, {3:f}, {4:f}'.format(pidstr,dename,*mmm))
data[dename] = vardata # add to data array, so that it can be used to compute linear variables
# save variable
ncvar = monthly_dataset.variables[dename] # this time the destination variable
if missing_value is not None: # make sure the missing value flag is preserved
vardata = np.where(np.isnan(vardata), missing_value, vardata)
ncvar.missing_value = missing_value # just to make sure
if ncvar.ndim > 1: ncvar[meanidx,:] = vardata # here time is always the outermost index
else: ncvar[meanidx] = vardata
#raise dv.DerivedVariableError, "%s Derived variable '%s' is not linear."%(pidstr,devar.name)
# update current end date
monthly_dataset.end_date = str(nc.chartostring(firsttimestamp_chars[:10])) # the date of the first day of the last included month
monthly_dataset.variables[wrftimestamp][meanidx,:] = firsttimestamp_chars
monthly_dataset.variables[time][meanidx] = meantime # update time axis (last action)
# sync data and clear memory
monthly_dataset.sync(); monthly_dataset.close() # sync and close dataset
del monthly_dataset, ncvar, vardata # remove all other references to data
gc.collect() # clean up memory
# N.B.: the netCDF4 module keeps all data written to a netcdf file in memory; there is no flush command
monthly_dataset = nc.Dataset(tmp_monthly_filepath, mode='a', format='NETCDF4') # re-open to append more data (mode='a')
          # N.B.: flushing (close/re-open) the monthly file here, inside the write branch, avoids repeated
          #       close/re-open cycles when no data was written (i.e. the month was skipped);
          #       memory is only flushed when data was actually written
ec = 0 # set zero exit code for this operation
except Exception:
# report error
logger.exception('\n # {0:s} WARNING: an Error occured while stepping through files! '.format(pidstr)+
'\n # Last State: month={0:d}, variable={1:s}, file={2:s}'.format(meanidx,varname,filelist[filecounter])+
'\n # Saving current data and exiting\n')
wrfout.close()
#logger.exception(pidstr) # print stack trace of last exception and current process ID
ec = 1 # set non-zero exit code
# N.B.: this enables us to still close the file!
## here the loop over months finishes and we can close the output file
# print progress
# save to file
if not lparallel: logger.info('') # terminate the line (of dates)
else: logger.info('\n{0:s} Processed dates: {1:s}'.format(pidstr, progressstr))
monthly_dataset.sync()
logger.info("\n{0:s} Writing monthly output to: {1:s}\n('{2:s}')\n".format(pidstr, monthly_file, monthly_filepath))
if ldaily:
daily_dataset.sync()
logger.info("\n{0:s} Writing (sub-)daily output to: {1:s}\n('{2:s}')\n".format(pidstr, daily_file, daily_filepath))
# Finalize: close files and rename to proper names, clean up
monthly_dataset.close() # close NetCDF file
os.rename(tmp_monthly_filepath,monthly_filepath) # rename file to proper name
del monthly_dataset, data # clean up memory
if ldaily:
daily_dataset.close() # close NetCDF file
os.rename(tmp_daily_filepath,daily_filepath) # rename file to proper name
del daily_dataset # clean up memory
gc.collect()
# return exit code
return ec
## now begin execution
if __name__ == '__main__':
# print settings
print('')
print(('OVERWRITE: {:s}, RECOVER: {:s}, CARRYOVER: {:s}, SMPLDIFF: {:s}'.format(
str(loverwrite), str(lrecover), str(lcarryover), str(lsmplDiff))))
print(('DERIVEDONLY: {:s}, ADDNEW: {:s}, RECALC: {:s}'.format(
str(lderivedonly), str(laddnew), str(recalcvars) if lrecalc else str(lrecalc))))
print(('DAILY: {:s}, FILETYPES: {:s}, DOMAINS: {:s}'.format(str(lglobaldaily),str(filetypes),str(domains))))
print(('THREADS: {:s}, DEBUG: {:s}'.format(str(NP),str(ldebug))))
print('')
# compile regular expression, used to infer start and end dates and month (later, during computation)
datestr = '{0:s}-{1:s}-{2:s}'.format(yearstr,monthstr,daystr)
datergx = re.compile(datestr)
# get file list
wrfrgx = re.compile(inputpattern.format('.*','\d\d',datestr,)) # for initial search (all filetypes)
# regular expression to match the name pattern of WRF timestep output files
masterlist = [wrfrgx.match(filename) for filename in os.listdir(infolder)] # list folder and match
masterlist = [match.group() for match in masterlist if match is not None] # assemble valid file list
if len(masterlist) == 0:
raise IOError('No matching WRF output files found for date: {0:s}'.format(datestr))
## loop over filetypes and domains to construct job list
args = []
for filetype in filetypes:
# make list of files
filelist = []
for domain in domains:
typergx = re.compile(inputpattern.format(filetype,"{:02d}".format(domain), datestr))
# N.B.: domain has to be inserted as string, because above it is replaced by a regex
# regular expression to also match type and domain index
filelist = [typergx.match(filename) for filename in masterlist] # list folder and match
filelist = [match.group() for match in filelist if match is not None] # assemble valid file list
filelist.sort() # now, when the list is shortest, we can sort...
      # N.B.: sort alphabetically, so that files are in temporal sequence
# now put everything into the lists
if len(filelist) > 0:
args.append( (filelist, filetype, domain) )
else:
print(("Can not process filetype '{:s}' (domain {:d}): no source files.".format(filetype,domain)))
print('\n')
# call parallel execution function
kwargs = dict() # no keyword arguments
ec = asyncPoolEC(processFileList, args, kwargs, NP=NP, ldebug=ldebug, ltrialnerror=True)
# exit with number of failures plus 10 as exit code
exit(int(10+ec) if ec > 0 else 0)
| gpl-3.0 | 659,906,441,429,333,200 | 64.054054 | 196 | 0.6327 | false |
Aloomaio/googleads-python-lib | examples/ad_manager/v201808/creative_wrapper_service/update_creative_wrappers.py | 1 | 2747 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates a creative wrapper to the 'OUTER' wrapping order.
To determine which creative wrappers exist, run get_all_creative_wrappers.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set the ID of the creative wrapper to update.
CREATIVE_WRAPPER_ID = 'INSERT_CREATIVE_WRAPPER_ID_HERE'
def main(client, creative_wrapper_id):
# Initialize appropriate service.
creative_wrapper_service = client.GetService('CreativeWrapperService',
version='v201808')
# Create statement to get a creative wrapper by ID.
statement = (ad_manager.StatementBuilder(version='v201808')
.Where('id = :creativeWrapperId')
.WithBindVariable('creativeWrapperId',
long(creative_wrapper_id)))
# Get creative wrappers.
response = creative_wrapper_service.getCreativeWrappersByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
updated_creative_wrappers = []
for creative_wrapper in response['results']:
creative_wrapper['ordering'] = 'OUTER'
updated_creative_wrappers.append(creative_wrapper)
# Update the creative wrappers on the server.
creative_wrappers = creative_wrapper_service.updateCreativeWrappers(
updated_creative_wrappers)
# Display results.
for creative_wrapper in creative_wrappers:
print (('Creative wrapper with ID "%s" and wrapping order "%s" '
'was updated.') % (creative_wrapper['id'],
creative_wrapper['ordering']))
else:
print 'No creative wrappers found to update.'
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, CREATIVE_WRAPPER_ID)
| apache-2.0 | 8,009,690,733,829,738,000 | 36.630137 | 78 | 0.705497 | false |
icomms/wqmanager | apps/domain/models.py | 1 | 6972 | from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.db import models
from domain import Permissions
from wqm.models import WqmAuthority
from locations.models import LocationType
##############################################################################################################
#
# Originally had my own hacky global storage of content type, but it turns out that contenttype.models
# wisely caches content types! No hit to the db beyond the first call - no need for us to do our own
# custom caching.
#
# See ContentType.get_for_model() code for details.
class Domain(models.Model):
'''Domain is the highest level collection of people/stuff
in the system. Pretty much everything happens at the
domain-level, including user membership, permission to
see data, reports, charts, etc.'''
name = models.CharField(max_length=64, unique=True)
full_name = models.CharField(max_length = 100, null=True)
is_active = models.BooleanField(default=False)
#description = models.CharField(max_length=255, null=True, blank=True)
#timezone = models.CharField(max_length=64,null=True)
# Utility function - gets active domains in which user has an active membership
# Note that User.is_active is not checked here - we're only concerned about usable
# domains in which the user can theoretically participate, not whether the user
# is cleared to login.
@staticmethod
def active_for_user(user):
return Domain.objects.filter( membership__member_type = ContentType.objects.get_for_model(User),
membership__member_id = user.id,
membership__is_active=True, # Looks in membership table
is_active=True) # Looks in domain table
def save(self, *args, **kwargs):
edit = False
if self.pk is not None:
edit = True
super(Domain, self).save(*args, **kwargs)
if edit:
wqmauthority = WqmAuthority.objects.get(domain=self)
wqmauthority.code = self.name
wqmauthority.name = self.full_name
wqmauthority.save()
else:
type = LocationType.objects.get(name="authority")
wqmauthority = WqmAuthority(name=self.full_name, domain=self, type=type, code=self.name)
wqmauthority.save()
def __unicode__(self):
return self.name
##############################################################################################################
#
# Use cases:
#
# Get all members in a domain:
# Member.objects.filter(member_type = 3, domain = 1) then iterate - slow, because of one query (for User) per row
# User.objects.filter(membership__domain = 2) - fast, but requires the addition of a GenericRelation to User.
# See UserInDomain, below.
#
# Get all domains to which a member belongs:
# User.objects.get(id = 1).membership.all() and then iterate to pick out domains - slow, because of one query
# (for Domain) per row. Requires GenericRelation on User.
# Member.objects.filter(member_type = 3, member_id = 1).query.as_sql() Generate same SQL, and require same
# slow iteration
# Domain.objects.filter(membership__member_type = 3, membership__member_id = 1) - fast, and requires no new fields
# (as Domain is a FK of Member)
#
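# Illustrative queries for the two fast patterns above (some_domain and some_user
# are assumed example objects; domain_membership is the GenericRelation added to
# User near the bottom of this module):
#
#   members = User.objects.filter(domain_membership__domain=some_domain,
#                                 domain_membership__is_active=True)
#   domains = Domain.objects.filter(
#       membership__member_type=ContentType.objects.get_for_model(User),
#       membership__member_id=some_user.id)
#
# Each runs as a single JOINed query instead of one query per related row.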
member_limits = {'model__in':('user', 'formdatagroup')}
class Membership(models.Model):
domain = models.ForeignKey(Domain)
member_type = models.ForeignKey(ContentType, limit_choices_to=member_limits)
member_id = models.PositiveIntegerField()
member_object = generic.GenericForeignKey('member_type', 'member_id')
is_active = models.BooleanField(default=False)
def __unicode__(self):
return str(self.member_type) + str(self.member_id) + str(self.member_object)
##############################################################################################################
class RegistrationRequest(models.Model):
tos_confirmed = models.BooleanField(default=False)
# No verbose name on times and IPs - filled in on server
request_time = models.DateTimeField()
request_ip = models.IPAddressField()
activation_guid = models.CharField(max_length=32, unique=True)
# confirm info is blank until a confirming click is received
confirm_time = models.DateTimeField(null=True, blank=True)
confirm_ip = models.IPAddressField(null=True, blank=True)
domain = models.OneToOneField(Domain)
new_user = models.ForeignKey(User, related_name='new_user') # Not clear if we'll always create a new user - might be many reqs to one user, thus FK
# requesting_user is only filled in if a logged-in user requests a domain.
requesting_user = models.ForeignKey(User, related_name='requesting_user', null=True, blank=True) # blank and null -> FK is optional.
class Meta:
db_table = 'domain_registration_request'
# To be added:
# language
# number pref
# currency pref
# date pref
# time pref
##############################################################################################################
class Settings(models.Model):
domain = models.OneToOneField(Domain)
max_users = models.PositiveIntegerField()
# To be added - all of the date, time, etc. fields that will go into RegistrationRequest
##############################################################################################################
#
# http://bolhoed.net/blog/how-to-dynamically-add-fields-to-a-django-model shows:
#
# User.add_to_class('membership', generic.GenericRelation(Membership, content_type_field='member_type', object_id_field='member_id'))
#
# Rather than that hackery, I tried to implement a trivial proxy model for User, containing just the
# GenericRelation field. Doesn't work, though! Django complains about a field being defined on a proxy model.
#
# Looks like we have to enable the above hackery if we want an easy means of filtering users in a domain. Makes
# life easier, too, in that views will have access to this information.
#
User.add_to_class('domain_membership',
generic.GenericRelation( Membership, content_type_field='member_type', object_id_field='member_id' ) )
##############################################################################################################
# Monkeypatch a function onto User to tell if user is administrator of selected domain
def _admin_p (self):
dom = getattr(self, 'selected_domain', None)
if dom is not None:
return self.has_row_perm(dom, Permissions.ADMINISTRATOR)
else:
return False
User.is_selected_dom_admin = _admin_p | bsd-3-clause | 1,128,780,121,399,136,400 | 44.575163 | 152 | 0.613884 | false |
yarikoptic/seaborn | seaborn/tests/test_algorithms.py | 1 | 6588 | import numpy as np
from scipy import stats
from six.moves import range
import numpy.testing as npt
from numpy.testing import assert_array_equal
import nose.tools
from nose.tools import assert_equal, raises
from .. import algorithms as algo
rs = np.random.RandomState(sum(map(ord, "test_algorithms")))
a_norm = rs.randn(100)
def test_bootstrap():
"""Test that bootstrapping gives the right answer in dumb cases."""
a_ones = np.ones(10)
n_boot = 5
out1 = algo.bootstrap(a_ones, n_boot=n_boot)
assert_array_equal(out1, np.ones(n_boot))
out2 = algo.bootstrap(a_ones, n_boot=n_boot, func=np.median)
assert_array_equal(out2, np.ones(n_boot))
def test_bootstrap_length():
"""Test that we get a bootstrap array of the right shape."""
out = algo.bootstrap(a_norm)
assert_equal(len(out), 10000)
n_boot = 100
out = algo.bootstrap(a_norm, n_boot=n_boot)
assert_equal(len(out), n_boot)
def test_bootstrap_range():
"""Test that boostrapping a random array stays within the right range."""
min, max = a_norm.min(), a_norm.max()
out = algo.bootstrap(a_norm)
nose.tools.assert_less(min, out.min())
nose.tools.assert_greater_equal(max, out.max())
def test_bootstrap_multiarg():
"""Test that bootstrap works with multiple input arrays."""
x = np.vstack([[1, 10] for i in range(10)])
y = np.vstack([[5, 5] for i in range(10)])
test_func = lambda x, y: np.vstack((x, y)).max(axis=0)
out_actual = algo.bootstrap(x, y, n_boot=2, func=test_func)
out_wanted = np.array([[5, 10], [5, 10]])
assert_array_equal(out_actual, out_wanted)
def test_bootstrap_axis():
"""Test axis kwarg to bootstrap function."""
x = rs.randn(10, 20)
n_boot = 100
out_default = algo.bootstrap(x, n_boot=n_boot)
assert_equal(out_default.shape, (n_boot,))
out_axis = algo.bootstrap(x, n_boot=n_boot, axis=0)
assert_equal(out_axis.shape, (n_boot, 20))
def test_bootstrap_random_seed():
"""Test that we can get reproducible resamples by seeding the RNG."""
data = rs.randn(50)
seed = 42
boots1 = algo.bootstrap(data, random_seed=seed)
boots2 = algo.bootstrap(data, random_seed=seed)
assert_array_equal(boots1, boots2)
def test_smooth_bootstrap():
"""Test smooth bootstrap."""
x = rs.randn(15)
n_boot = 100
out_smooth = algo.bootstrap(x, n_boot=n_boot,
smooth=True, func=np.median)
assert(not np.median(out_smooth) in x)
def test_bootstrap_ols():
"""Test bootstrap of OLS model fit."""
ols_fit = lambda X, y: np.dot(np.dot(np.linalg.inv(
np.dot(X.T, X)), X.T), y)
X = np.column_stack((rs.randn(50, 4), np.ones(50)))
w = [2, 4, 0, 3, 5]
y_noisy = np.dot(X, w) + rs.randn(50) * 20
y_lownoise = np.dot(X, w) + rs.randn(50)
n_boot = 500
w_boot_noisy = algo.bootstrap(X, y_noisy,
n_boot=n_boot,
func=ols_fit)
w_boot_lownoise = algo.bootstrap(X, y_lownoise,
n_boot=n_boot,
func=ols_fit)
assert_equal(w_boot_noisy.shape, (n_boot, 5))
assert_equal(w_boot_lownoise.shape, (n_boot, 5))
nose.tools.assert_greater(w_boot_noisy.std(),
w_boot_lownoise.std())
def test_bootstrap_units():
"""Test that results make sense when passing unit IDs to bootstrap."""
data = rs.randn(50)
ids = np.repeat(range(10), 5)
bwerr = rs.normal(0, 2, 10)
bwerr = bwerr[ids]
data_rm = data + bwerr
seed = 77
boots_orig = algo.bootstrap(data_rm, random_seed=seed)
boots_rm = algo.bootstrap(data_rm, units=ids, random_seed=seed)
nose.tools.assert_greater(boots_rm.std(), boots_orig.std())
@raises(ValueError)
def test_bootstrap_arglength():
"""Test that different length args raise ValueError."""
algo.bootstrap(range(5), range(10))
@raises(TypeError)
def test_bootstrap_noncallable():
"""Test that we get a TypeError with noncallable algo.unc."""
non_func = "mean"
algo.bootstrap(a_norm, 100, non_func)
def test_randomize_corrmat():
"""Test the correctness of the correlation matrix p values."""
a = rs.randn(30)
b = a + rs.rand(30) * 3
c = rs.randn(30)
d = [a, b, c]
p_mat, dist = algo.randomize_corrmat(d, tail="upper", corrected=False,
return_dist=True)
nose.tools.assert_greater(p_mat[2, 0], p_mat[1, 0])
corrmat = np.corrcoef(d)
pctile = 100 - stats.percentileofscore(dist[2, 1], corrmat[2, 1])
nose.tools.assert_almost_equal(p_mat[2, 1] * 100, pctile)
d[1] = -a + rs.rand(30)
p_mat = algo.randomize_corrmat(d)
nose.tools.assert_greater(0.05, p_mat[1, 0])
def test_randomize_corrmat_dist():
"""Test that the distribution looks right."""
a = rs.randn(3, 20)
for n_i in [5, 10]:
p_mat, dist = algo.randomize_corrmat(a, n_iter=n_i, return_dist=True)
assert_equal(n_i, dist.shape[-1])
p_mat, dist = algo.randomize_corrmat(a, n_iter=10000, return_dist=True)
diag_mean = dist[0, 0].mean()
assert_equal(diag_mean, 1)
off_diag_mean = dist[0, 1].mean()
nose.tools.assert_greater(0.05, off_diag_mean)
def test_randomize_corrmat_correction():
"""Test that FWE correction works."""
a = rs.randn(3, 20)
p_mat = algo.randomize_corrmat(a, "upper", False)
p_mat_corr = algo.randomize_corrmat(a, "upper", True)
triu = np.triu_indices(3, 1)
npt.assert_array_less(p_mat[triu], p_mat_corr[triu])
def test_randomize_corrmat_tails():
"""Test that the tail argument works."""
a = rs.randn(30)
b = a + rs.rand(30) * 8
c = rs.randn(30)
d = [a, b, c]
p_mat_b = algo.randomize_corrmat(d, "both", False, random_seed=0)
p_mat_u = algo.randomize_corrmat(d, "upper", False, random_seed=0)
p_mat_l = algo.randomize_corrmat(d, "lower", False, random_seed=0)
assert_equal(p_mat_b[0, 1], p_mat_u[0, 1] * 2)
assert_equal(p_mat_l[0, 1], 1 - p_mat_u[0, 1])
def test_randomise_corrmat_seed():
"""Test that we can seed the corrmat randomization."""
a = rs.randn(3, 20)
_, dist1 = algo.randomize_corrmat(a, random_seed=0, return_dist=True)
_, dist2 = algo.randomize_corrmat(a, random_seed=0, return_dist=True)
assert_array_equal(dist1, dist2)
@raises(ValueError)
def test_randomize_corrmat_tail_error():
"""Test that we are strict about tail paramete."""
a = rs.randn(3, 30)
algo.randomize_corrmat(a, "hello")
| bsd-3-clause | -8,255,307,290,111,198,000 | 31.294118 | 77 | 0.615209 | false |
ArcherSys/ArcherSys | Lib/opcode.py | 1 | 16466 | <<<<<<< HEAD
<<<<<<< HEAD
"""
opcode module - potentially shared between dis and other modules which
operate on bytecodes (e.g. peephole optimizers).
"""
__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
"haslocal", "hascompare", "hasfree", "opname", "opmap",
"HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"]
# It's a chicken-and-egg I'm afraid:
# We're imported before _opcode's made.
# With exception unheeded
# (stack_effect is not needed)
# Both our chickens and eggs are allayed.
# --Larry Hastings, 2013/11/23
try:
from _opcode import stack_effect
__all__.append('stack_effect')
except ImportError:
pass
cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is',
'is not', 'exception match', 'BAD')
hasconst = []
hasname = []
hasjrel = []
hasjabs = []
haslocal = []
hascompare = []
hasfree = []
hasnargs = []
opmap = {}
opname = [''] * 256
for op in range(256): opname[op] = '<%r>' % (op,)
del op
def def_op(name, op):
opname[op] = name
opmap[name] = op
def name_op(name, op):
def_op(name, op)
hasname.append(op)
def jrel_op(name, op):
def_op(name, op)
hasjrel.append(op)
def jabs_op(name, op):
def_op(name, op)
hasjabs.append(op)
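# A quick illustration of what these helpers build: once def_op('LOAD_CONST', 100)
# and hasconst.append(100) below have run,
#   opmap['LOAD_CONST'] == 100
#   opname[100] == 'LOAD_CONST'
#   100 in hasconst
# which is how dis knows to render that opcode's argument as an index into
# co_consts.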
# Instruction opcodes for compiled code
# Blank lines correspond to available opcodes
def_op('POP_TOP', 1)
def_op('ROT_TWO', 2)
def_op('ROT_THREE', 3)
def_op('DUP_TOP', 4)
def_op('DUP_TOP_TWO', 5)
def_op('NOP', 9)
def_op('UNARY_POSITIVE', 10)
def_op('UNARY_NEGATIVE', 11)
def_op('UNARY_NOT', 12)
def_op('UNARY_INVERT', 15)
def_op('BINARY_POWER', 19)
def_op('BINARY_MULTIPLY', 20)
def_op('BINARY_MODULO', 22)
def_op('BINARY_ADD', 23)
def_op('BINARY_SUBTRACT', 24)
def_op('BINARY_SUBSCR', 25)
def_op('BINARY_FLOOR_DIVIDE', 26)
def_op('BINARY_TRUE_DIVIDE', 27)
def_op('INPLACE_FLOOR_DIVIDE', 28)
def_op('INPLACE_TRUE_DIVIDE', 29)
def_op('STORE_MAP', 54)
def_op('INPLACE_ADD', 55)
def_op('INPLACE_SUBTRACT', 56)
def_op('INPLACE_MULTIPLY', 57)
def_op('INPLACE_MODULO', 59)
def_op('STORE_SUBSCR', 60)
def_op('DELETE_SUBSCR', 61)
def_op('BINARY_LSHIFT', 62)
def_op('BINARY_RSHIFT', 63)
def_op('BINARY_AND', 64)
def_op('BINARY_XOR', 65)
def_op('BINARY_OR', 66)
def_op('INPLACE_POWER', 67)
def_op('GET_ITER', 68)
def_op('PRINT_EXPR', 70)
def_op('LOAD_BUILD_CLASS', 71)
def_op('YIELD_FROM', 72)
def_op('INPLACE_LSHIFT', 75)
def_op('INPLACE_RSHIFT', 76)
def_op('INPLACE_AND', 77)
def_op('INPLACE_XOR', 78)
def_op('INPLACE_OR', 79)
def_op('BREAK_LOOP', 80)
def_op('WITH_CLEANUP', 81)
def_op('RETURN_VALUE', 83)
def_op('IMPORT_STAR', 84)
def_op('YIELD_VALUE', 86)
def_op('POP_BLOCK', 87)
def_op('END_FINALLY', 88)
def_op('POP_EXCEPT', 89)
HAVE_ARGUMENT = 90 # Opcodes from here have an argument:
name_op('STORE_NAME', 90) # Index in name list
name_op('DELETE_NAME', 91) # ""
def_op('UNPACK_SEQUENCE', 92) # Number of tuple items
jrel_op('FOR_ITER', 93)
def_op('UNPACK_EX', 94)
name_op('STORE_ATTR', 95) # Index in name list
name_op('DELETE_ATTR', 96) # ""
name_op('STORE_GLOBAL', 97) # ""
name_op('DELETE_GLOBAL', 98) # ""
def_op('LOAD_CONST', 100) # Index in const list
hasconst.append(100)
name_op('LOAD_NAME', 101) # Index in name list
def_op('BUILD_TUPLE', 102) # Number of tuple items
def_op('BUILD_LIST', 103) # Number of list items
def_op('BUILD_SET', 104) # Number of set items
def_op('BUILD_MAP', 105) # Number of dict entries (upto 255)
name_op('LOAD_ATTR', 106) # Index in name list
def_op('COMPARE_OP', 107) # Comparison operator
hascompare.append(107)
name_op('IMPORT_NAME', 108) # Index in name list
name_op('IMPORT_FROM', 109) # Index in name list
jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip
jabs_op('JUMP_IF_FALSE_OR_POP', 111) # Target byte offset from beginning of code
jabs_op('JUMP_IF_TRUE_OR_POP', 112) # ""
jabs_op('JUMP_ABSOLUTE', 113) # ""
jabs_op('POP_JUMP_IF_FALSE', 114) # ""
jabs_op('POP_JUMP_IF_TRUE', 115) # ""
name_op('LOAD_GLOBAL', 116) # Index in name list
jabs_op('CONTINUE_LOOP', 119) # Target address
jrel_op('SETUP_LOOP', 120) # Distance to target address
jrel_op('SETUP_EXCEPT', 121) # ""
jrel_op('SETUP_FINALLY', 122) # ""
def_op('LOAD_FAST', 124) # Local variable number
haslocal.append(124)
def_op('STORE_FAST', 125) # Local variable number
haslocal.append(125)
def_op('DELETE_FAST', 126) # Local variable number
haslocal.append(126)
def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3)
def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8)
hasnargs.append(131)
def_op('MAKE_FUNCTION', 132) # Number of args with default values
def_op('BUILD_SLICE', 133) # Number of items
def_op('MAKE_CLOSURE', 134)
def_op('LOAD_CLOSURE', 135)
hasfree.append(135)
def_op('LOAD_DEREF', 136)
hasfree.append(136)
def_op('STORE_DEREF', 137)
hasfree.append(137)
def_op('DELETE_DEREF', 138)
hasfree.append(138)
def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8)
hasnargs.append(140)
def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8)
hasnargs.append(141)
def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8)
hasnargs.append(142)
jrel_op('SETUP_WITH', 143)
def_op('LIST_APPEND', 145)
def_op('SET_ADD', 146)
def_op('MAP_ADD', 147)
def_op('LOAD_CLASSDEREF', 148)
hasfree.append(148)
def_op('EXTENDED_ARG', 144)
EXTENDED_ARG = 144
del def_op, name_op, jrel_op, jabs_op
| mit | -3,256,446,462,933,646,000 | 26.171617 | 80 | 0.635005 | false |
benagricola/exabgp | lib/exabgp/bgp/message/update/nlri/evpn/multicast.py | 1 | 2426 | """
multicast.py
Created by Thomas Morin on 2014-06-23.
Copyright (c) 2014-2015 Orange. All rights reserved.
"""
from exabgp.protocol.ip import IP
from exabgp.bgp.message.update.nlri.qualifier import RouteDistinguisher
from exabgp.bgp.message.update.nlri.qualifier import EthernetTag
from exabgp.bgp.message.update.nlri.evpn.nlri import EVPN
# +---------------------------------------+
# | RD (8 octets) |
# +---------------------------------------+
# | Ethernet Tag ID (4 octets) |
# +---------------------------------------+
# | IP Address Length (1 octet) |
# +---------------------------------------+
# | Originating Router's IP Addr |
# | (4 or 16 octets) |
# +---------------------------------------+
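# Worked size example (values assumed for illustration): with an 8-octet RD,
# Ethernet Tag 0 and an IPv4 originator such as 192.0.2.1, the packed value is
# 8 + 4 + 1 + 4 = 17 octets, and the length octet carries 32, the address length
# in bits, which is what _pack() and unpack() below encode and check.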
# ===================================================================== EVPNNLRI
@EVPN.register
class Multicast (EVPN):
CODE = 3
NAME = "Inclusive Multicast Ethernet Tag"
SHORT_NAME = "Multicast"
def __init__ (self, rd, etag, ip, packed=None,nexthop=None,action=None,addpath=None):
EVPN.__init__(self,action,addpath)
self.nexthop = nexthop
self.rd = rd
self.etag = etag
self.ip = ip
self._pack(packed)
def __ne__ (self, other):
return not self.__eq__(other)
def __str__ (self):
return "%s:%s:%s:%s" % (
self._prefix(),
self.rd._str(),
self.etag,
self.ip,
)
def __hash__ (self):
return hash((self.afi,self.safi,self.CODE,self.rd,self.etag,self.ip))
def _pack (self, packed=None):
if self._packed:
return self._packed
if packed:
self._packed = packed
return packed
self._packed = '%s%s%s%s' % (
self.rd.pack(),
self.etag.pack(),
chr(len(self.ip)*8),
self.ip.pack()
)
return self._packed
@classmethod
def unpack (cls, data):
rd = RouteDistinguisher.unpack(data[:8])
etag = EthernetTag.unpack(data[8:12])
iplen = ord(data[12])
if iplen not in (4*8,16*8):
raise Exception("IP len is %d, but EVPN route currently support only IPv4" % iplen)
ip = IP.unpack(data[13:13+iplen/8])
return cls(rd,etag,ip,data)
def json (self, compact=None):
content = ' "code": %d, ' % self.CODE
content += '"parsed": true, '
content += '"raw": "%s", ' % self._raw()
content += '"name": "%s", ' % self.NAME
content += '%s, ' % self.rd.json()
content += self.etag.json()
if self.ip:
content += ', "ip": "%s"' % str(self.ip)
return '{%s }' % content
| bsd-3-clause | 8,568,785,827,975,385,000 | 25.659341 | 86 | 0.54122 | false |
emoronayuso/beeton | asterisk-bee/asteriskbee/api_status/scripts_graficas/recoge_marcas_graficas.py | 1 | 2307 | #!/usr/bin/python
import matplotlib.pyplot as plt
import numpy as np
#import calendar
from datetime import datetime
from django.conf import settings
settings.configure()
import os
#for the connection to the beeton (asteriskbee) database
import sqlite3 as dbapi
##Application directory
### STATIC_ROOT = '/var/www/asterisk-bee/asteriskbee/'
#directorio = settings.STATIC_ROOT+"api_status/"
directorio = "/var/www/asterisk-bee/asteriskbee/api_status/"
##Maximum number of tuples per graph
num_cpu_dia = 20
def recoge_marcas():
	#Connection to the statistics database
bbdd = dbapi.connect(directorio+"bbdd/estadisticas.db")
cursor = bbdd.cursor()
os.system("ps -e -o pcpu,cpu,nice,state,cputime,args --sort pcpu | sed '/^ 0.0 /d' > "+directorio+"scripts_graficas/temp/temp_cpu_dia; cat "+directorio+"scripts_graficas/temp/temp_cpu_dia | sed 's/^[ \t]*//;s/[ \t]*$//' | grep -v 'recoge_marcas_graficas.py' | cut -d ' ' -f 1 > "+directorio+"scripts_graficas/temp/temp_cpu_dia2")
total = 0.0
f = open(directorio+'scripts_graficas/temp/temp_cpu_dia2','r')
	##Read the first line to skip the header
linea = f.readline()
while True:
linea = f.readline()
if not linea:
break
	#Exclude the CPU usage of the script that collects the samples
else:
total = total + float(linea)
f.close()
res = total
# print str(res)
	#Build the query ordered by date
con_ordenada = """select * from api_status_marcas_graficas where tipo='cpu_dia' order by fecha_hora;"""
cursor.execute(con_ordenada)
p = cursor.fetchall()
if len(p) < num_cpu_dia:
		#insert into the database
insert = "insert into api_status_marcas_graficas (tipo,valor) values ('cpu_dia',?);"
cursor.execute(insert ,(res,))
bbdd.commit()
else:
		#Order by date, remove the oldest entry and insert the new one
# strftime('%d-%m-%Y %H:%M',calldate)
hora_actual = datetime.now()
con_update = " update api_status_marcas_graficas set fecha_hora=datetime(?),valor=? where id=?; "
# print "Antes del update, hora_actual->"+str(hora_actual)+"valor->"+str(res)+ " id->"+str(p[0][0])
cursor.execute(con_update ,(hora_actual,res,p[0][0]))
bbdd.commit()
	##Close the database connection
cursor.close()
bbdd.close()
if __name__ == "__main__":
recoge_marcas()
| gpl-3.0 | 2,008,526,283,982,816,800 | 24.633333 | 330 | 0.688773 | false |
suutari-ai/shoop | shuup_tests/core/test_rounding.py | 3 | 5376 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from decimal import Decimal
from shuup.core.models import OrderLine
from shuup.core.models import OrderLineType
from shuup.core.models import Shop
from shuup.core.models import ShopStatus
from shuup.testing.factories import (
add_product_to_order, create_empty_order, create_product,
get_default_shop, get_default_supplier
)
from shuup.utils.numbers import bankers_round
from shuup_tests.utils.basketish_order_source import BasketishOrderSource
PRICE_SPEC = [
([1,2,3,4]),
([1,2,3,6]),
([1,2,3,8]),
([1.23223, 12.24442, 42.26233]),
([1223.46636, 13.24655, 411.234554]),
([101.74363, 12.99346, 4222.57422]),
([112.93549, 199.2446, 422.29234]),
([1994.49654, 940.23452, 425.24566]),
([1994.496541234566, 940.2345298765, 425.2456612334]), # Those prices that will be cut when put in DB
]
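# A short illustration of the rounding rule exercised below (example values are
# assumptions, not taken from PRICE_SPEC): bankers_round rounds ties to the
# nearest even digit, so with two decimals
#   bankers_round(Decimal("2.005"), 2) == Decimal("2.00")
#   bankers_round(Decimal("2.015"), 2) == Decimal("2.02")
# whereas plain half-up rounding would give 2.01 and 2.02.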
@pytest.mark.parametrize("prices", PRICE_SPEC)
@pytest.mark.django_db
def test_rounding(prices):
expected = 0
for p in prices:
expected += bankers_round(p, 2)
order = create_empty_order(prices_include_tax=False)
order.save()
for x, price in enumerate(prices):
ol = OrderLine(
order=order,
type=OrderLineType.OTHER,
quantity=1,
text="Thing",
ordering=x,
base_unit_price=order.shop.create_price(price)
)
ol.save()
order.cache_prices()
for x, order_line in enumerate(order.lines.all().order_by("ordering")):
price = Decimal(prices[x]).quantize(Decimal(".1") ** 9)
# make sure prices are in database with original precision
assert order_line.base_unit_price == order.shop.create_price(price)
# make sure the line taxless price is rounded
assert order_line.taxless_price == order.shop.create_price(bankers_round(price, 2))
# Check that total prices calculated from priceful parts still matches
assert _get_taxless_price(order_line) == order_line.taxless_price
assert _get_taxful_price(order_line) == order_line.taxful_price
# make sure the line price is rounded
assert order_line.price == order.shop.create_price(price)
# make sure order total is rounded
assert order.taxless_total_price == order.shop.create_price(bankers_round(expected, 2))
@pytest.mark.parametrize("prices", PRICE_SPEC)
@pytest.mark.django_db
def test_order_source_rounding(prices):
shop = Shop.objects.create(
name="test",
identifier="test",
status=ShopStatus.ENABLED,
public_name="test",
prices_include_tax=False
)
expected = 0
for p in prices:
expected += bankers_round(p, 2)
source = BasketishOrderSource(shop)
for x, price in enumerate(prices):
source.add_line(
type=OrderLineType.OTHER,
quantity=1,
text=x,
base_unit_price=source.create_price(price),
ordering=x,
)
for x, order_source in enumerate(source.get_lines()):
price = Decimal(prices[x]).quantize(Decimal(".1") ** 9)
# make sure prices are in database with original precision
assert order_source.base_unit_price == source.shop.create_price(price)
# make sure the line taxless price is rounded
assert order_source.taxless_price == source.shop.create_price(bankers_round(price, 2))
# Check that total prices calculated from priceful parts still matches
assert _get_taxless_price(order_source) == order_source.taxless_price
assert _get_taxful_price(order_source) == order_source.taxful_price
# make sure the line price is rounded
assert order_source.price == source.shop.create_price(price)
# make sure order total is rounded
assert source.taxless_total_price == source.shop.create_price(bankers_round(expected, 2))
@pytest.mark.parametrize("prices", PRICE_SPEC)
@pytest.mark.django_db
def test_rounding_with_taxes(prices):
shop = get_default_shop()
supplier = get_default_supplier()
order = create_empty_order(shop=shop)
order.save()
product = create_product("test_sku", shop=shop, supplier=supplier)
tax_rate = Decimal("0.22222")
for x, price in enumerate(prices):
add_product_to_order(
order, supplier, product, quantity=Decimal("2.22"),
taxless_base_unit_price=Decimal(price), tax_rate=tax_rate)
order.cache_prices()
for x, order_line in enumerate(order.lines.all().order_by("ordering")):
# Check that total prices calculated from priceful parts still matches
assert _get_taxless_price(order_line) == order_line.taxless_price
assert _get_taxful_price(order_line) == order_line.taxful_price
assert order_line.price == (order_line.base_unit_price * order_line.quantity - order_line.discount_amount)
def _get_taxless_price(line):
return bankers_round(line.taxless_base_unit_price*line.quantity - line.taxless_discount_amount, 2)
def _get_taxful_price(line):
return bankers_round(line.taxful_base_unit_price*line.quantity - line.taxful_discount_amount, 2)
| agpl-3.0 | 2,804,682,339,294,635,500 | 35.571429 | 114 | 0.670387 | false |
diedthreetimes/VCrash | pybindgen-0.15.0.795/pybindgen/typehandlers/inttype.py | 1 | 29684 | # docstrings not needed here (the type handler interfaces are fully
# documented in base.py)
# pylint: disable-msg=C0111
import struct
assert struct.calcsize('i') == 4 # assumption is made that sizeof(int) == 4 for all platforms pybindgen runs on
from base import ReturnValue, Parameter, PointerParameter, PointerReturnValue, \
ReverseWrapperBase, ForwardWrapperBase, TypeConfigurationError, NotSupportedError
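# Rough usage sketch (assumed typical pybindgen usage, not something defined in
# this file): these handlers are what make declarations such as
#   mod.add_function('Process', retval('int'), [param('unsigned int', 'flags')])
# work; the 'int' and 'unsigned int' strings are matched against the CTYPES
# lists declared by the classes below.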
class IntParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['int', 'int32_t']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('i', [self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable(self.ctype_no_const, self.name, self.default_value)
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class UnsignedIntParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['unsigned int', 'uint32_t']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('N', ["PyLong_FromUnsignedLong(%s)" % self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable('unsigned int', self.name, self.default_value)
wrapper.parse_params.add_parameter('I', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class UnsignedIntPtrParam(PointerParameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT, Parameter.DIRECTION_INOUT]
CTYPES = ['unsigned int*', 'uint32_t*']
def __init__(self, ctype, name, direction=Parameter.DIRECTION_IN, is_const=False,
default_value=None, transfer_ownership=False, array_length=None):
super(UnsignedIntPtrParam, self).__init__(ctype, name, direction, is_const, default_value, transfer_ownership)
self.array_length = array_length
if transfer_ownership:
raise NotSupportedError("%s: transfer_ownership=True not yet implemented." % ctype)
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('I', ['*'+self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter('I', [self.value], self.name)
def convert_python_to_c(self, wrapper):
#assert self.ctype == 'unsigned int*'
if self.array_length is None:
name = wrapper.declarations.declare_variable(str(self.type_traits.target), self.name)
wrapper.call_params.append('&'+name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('I', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter('I', [name])
else: # complicated code path to deal with arrays...
name = wrapper.declarations.declare_variable(str(self.type_traits.target), self.name, array="[%i]" % self.array_length)
py_list = wrapper.declarations.declare_variable("PyObject*", "py_list")
idx = wrapper.declarations.declare_variable("int", "idx")
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
elem = wrapper.declarations.declare_variable("PyObject*", "element")
wrapper.parse_params.add_parameter('O!', ['&PyList_Type', '&'+py_list], self.name)
wrapper.before_call.write_error_check(
'PyList_Size(%s) != %i' % (py_list, self.array_length),
'PyErr_SetString(PyExc_TypeError, "Parameter `%s\' must be a list of %i ints/longs");'
% (self.name, self.array_length))
wrapper.before_call.write_code(
"for (%s = 0; %s < %i; %s++) {" % (idx, idx, self.array_length, idx))
wrapper.before_call.indent()
wrapper.before_call.write_code("%(elem)s = PyList_GET_ITEM(%(py_list)s, %(idx)s);" % vars())
wrapper.before_call.write_error_check(
'!(PyInt_Check(%(elem)s) || PyLong_Check(%(elem)s))',
'PyErr_SetString(PyExc_TypeError, "Parameter `%s\' must be a list of %i ints / longs");'
% (self.name, self.array_length))
wrapper.before_call.write_code("%(name)s[%(idx)s] = PyLong_AsUnsignedInt(%(elem)s);" % vars())
wrapper.before_call.unindent()
wrapper.before_call.write_code('}')
if self.direction & self.DIRECTION_OUT:
wrapper.after_call.write_code("%s = PyList_New(%i);" % (py_list, self.array_length))
wrapper.after_call.write_code(
"for (%s = 0; %s < %i; %s++) {" % (idx, idx, self.array_length, idx))
wrapper.after_call.indent()
wrapper.after_call.write_code("PyList_SET_ITEM(%(py_list)s, %(idx)s, PyLong_FromUnsignedLong(%(name)s[%(idx)s]));"
% vars())
wrapper.after_call.unindent()
wrapper.after_call.write_code('}')
wrapper.build_params.add_parameter("N", [py_list])
class IntReturn(ReturnValue):
CTYPES = ['int', 'int32_t']
def get_c_error_return(self):
return "return INT_MIN;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("i", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("i", [self.value], prepend=True)
class UnsignedIntReturn(ReturnValue):
CTYPES = ['unsigned int', 'uint32_t']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("I", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter('N', ["PyLong_FromUnsignedLong(%s)" % self.value], prepend=True)
class IntPtrParam(PointerParameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT,
Parameter.DIRECTION_IN|Parameter.DIRECTION_OUT]
CTYPES = ['int*']
def __init__(self, ctype, name, direction=None, is_const=None, transfer_ownership=None):
if direction is None:
if is_const:
direction = Parameter.DIRECTION_IN
else:
raise TypeConfigurationError("direction not given")
super(IntPtrParam, self).__init__(ctype, name, direction, is_const, transfer_ownership)
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('i', ['*'+self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("i", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append('&'+name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('i', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("i", [name])
class IntRefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT,
Parameter.DIRECTION_IN|Parameter.DIRECTION_OUT]
CTYPES = ['int&']
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('i', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("i", [self.value], self.name)
def convert_python_to_c(self, wrapper):
#assert self.ctype == 'int&'
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('i', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("i", [name])
class UnsignedIntRefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT,
Parameter.DIRECTION_IN|Parameter.DIRECTION_OUT]
CTYPES = ['unsigned int&', 'unsigned &']
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('I', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("I", [self.value], self.name)
def convert_python_to_c(self, wrapper):
#assert self.ctype == 'int&'
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('I', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("I", [name])
class UInt16Return(ReturnValue):
CTYPES = ['uint16_t', 'unsigned short', 'unsigned short int', 'short unsigned int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
tmp_var = wrapper.declarations.declare_variable("int", "tmp")
wrapper.parse_params.add_parameter("i", ["&"+tmp_var], prepend=True)
wrapper.after_call.write_error_check('%s > 0xffff' % tmp_var,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.after_call.write_code(
"%s = %s;" % (self.value, tmp_var))
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("i", [self.value], prepend=True)
class Int16Return(ReturnValue):
CTYPES = ['int16_t', 'short', 'short int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
tmp_var = wrapper.declarations.declare_variable("int", "tmp")
wrapper.parse_params.add_parameter("i", ["&"+tmp_var], prepend=True)
wrapper.after_call.write_error_check('%s > 32767 || %s < -32768' % (tmp_var, tmp_var),
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.after_call.write_code(
"%s = %s;" % (self.value, tmp_var))
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("i", [self.value], prepend=True)
class UInt16Param(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['uint16_t', 'unsigned short', 'unsigned short int']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('i', ["(int) "+self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable("int", self.name, self.default_value)
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.before_call.write_error_check('%s > 0xffff' % name,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.call_params.append(name)
class UInt16RefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_INOUT, Parameter.DIRECTION_OUT]
CTYPES = ['uint16_t&', 'unsigned short&', 'unsigned short int&', 'short unsigned&', 'short unsigned int&']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('H', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("H", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('H', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("H", [name])
class Int16Param(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['int16_t', 'short', 'short int']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('i', ["(int) "+self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable("int", self.name, self.default_value)
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.before_call.write_error_check('%s > 0x7fff' % name,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.call_params.append(name)
class Int16RefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_INOUT, Parameter.DIRECTION_OUT]
CTYPES = ['int16_t&', 'short&', 'short int&']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('h', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("h", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('h', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("h", [name])
class UInt8Param(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['uint8_t', 'unsigned char', 'char unsigned']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('i', ["(int) "+self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable("int", self.name, self.default_value)
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.before_call.write_error_check('%s > 0xff' % name,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.call_params.append(name)
class UInt8RefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_INOUT, Parameter.DIRECTION_OUT]
CTYPES = ['uint8_t&', 'unsigned char&', 'char unsigned&']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('B', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("B", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('B', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("B", [name])
class UInt8Return(ReturnValue):
CTYPES = ['uint8_t', 'unsigned char', 'char unsigned']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
tmp_var = wrapper.declarations.declare_variable("int", "tmp")
wrapper.parse_params.add_parameter("i", ["&"+tmp_var], prepend=True)
wrapper.after_call.write_error_check('%s > 0xff' % tmp_var,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.after_call.write_code(
"%s = %s;" % (self.value, tmp_var))
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("i", ['(int)' + self.value], prepend=True)
class Int8Param(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['int8_t', 'signed char', 'char signed']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('i', ["(int) "+self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable("int", self.name, self.default_value)
wrapper.parse_params.add_parameter('i', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.before_call.write_error_check('%s > 0x7f' % name,
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.call_params.append(name)
class Int8RefParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_INOUT, Parameter.DIRECTION_OUT]
CTYPES = ['int8_t&', 'signed char &', 'char signed&']
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('b', [self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("b", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable(self.ctype_no_const[:-1], self.name)
wrapper.call_params.append(name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('b', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("b", [name])
class Int8Return(ReturnValue):
CTYPES = ['int8_t', 'signed char']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
tmp_var = wrapper.declarations.declare_variable("int", "tmp")
wrapper.parse_params.add_parameter("i", ["&"+tmp_var], prepend=True)
        wrapper.after_call.write_error_check('%s > 127 || %s < -128' % (tmp_var, tmp_var),
'PyErr_SetString(PyExc_ValueError, "Out of range");')
wrapper.after_call.write_code(
"%s = %s;" % (self.value, tmp_var))
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("i", [self.value], prepend=True)
class UnsignedLongLongParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['unsigned long long', 'uint64_t', 'unsigned long long int', 'long long unsigned int', 'long long unsigned']
def get_ctype_without_ref(self):
return str(self.type_traits.ctype_no_const)
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('K', [self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable(self.get_ctype_without_ref(), self.name, self.default_value)
wrapper.parse_params.add_parameter('K', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class UnsignedLongLongRefParam(UnsignedLongLongParam):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['unsigned long long&', 'uint64_t&', 'long long unsigned int&']
def get_ctype_without_ref(self):
assert self.type_traits.target is not None
return str(self.type_traits.target)
class UnsignedLongLongReturn(ReturnValue):
CTYPES = ['unsigned long long', 'uint64_t', 'long long unsigned int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("K", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("K", [self.value], prepend=True)
class UnsignedLongParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['unsigned long', 'unsigned long int', 'long unsigned', 'long unsigned int']
def get_ctype_without_ref(self):
return str(self.type_traits.ctype_no_const)
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('k', [self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable(self.get_ctype_without_ref(), self.name, self.default_value)
wrapper.parse_params.add_parameter('k', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class UnsignedLongRefParam(UnsignedLongParam):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['unsigned long&', 'long unsigned&', 'long unsigned int&', 'unsigned long int&']
def get_ctype_without_ref(self):
assert self.type_traits.target is not None
return str(self.type_traits.target)
class UnsignedLongReturn(ReturnValue):
CTYPES = ['unsigned long', 'long unsigned int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("k", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("k", [self.value], prepend=True)
class LongParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['signed long', 'signed long int', 'long', 'long int', 'long signed', 'long signed int']
def get_ctype_without_ref(self):
return str(self.type_traits.ctype_no_const)
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('l', [self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable(self.get_ctype_without_ref(), self.name, self.default_value)
wrapper.parse_params.add_parameter('l', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class LongRefParam(LongParam):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['signed long&', 'long signed&', 'long&', 'long int&', 'long signed int&', 'signed long int&']
def get_ctype_without_ref(self):
assert self.type_traits.target is not None
return str(self.type_traits.target)
class LongReturn(ReturnValue):
CTYPES = ['signed long', 'long signed int', 'long', 'long int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("l", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("l", [self.value], prepend=True)
class SizeTReturn(ReturnValue):
CTYPES = ['size_t',]
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
# using the intermediate variable is not always necessary but
# it's safer this way in case of weird platforms where
# sizeof(size_t) != sizeof(unsigned PY_LONG_LONG).
name = wrapper.declarations.declare_variable("unsigned PY_LONG_LONG", "retval_tmp", self.value)
wrapper.parse_params.add_parameter("K", ["&"+name], prepend=True)
wrapper.after_call.write_code("retval = %s;" % (name))
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("K", ["((unsigned PY_LONG_LONG) %s)" % self.value], prepend=True)
class SizeTParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['size_t']
def get_ctype_without_ref(self):
return str(self.type_traits.ctype_no_const)
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('K', ["((unsigned PY_LONG_LONG) %s)" % self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable("unsigned PY_LONG_LONG", self.name, self.default_value)
wrapper.parse_params.add_parameter('K', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class LongLongParam(Parameter):
DIRECTIONS = [Parameter.DIRECTION_IN]
CTYPES = ['long long', 'int64_t', 'long long int']
def get_ctype_without_ref(self):
return str(self.type_traits.ctype_no_const)
def convert_c_to_python(self, wrapper):
assert isinstance(wrapper, ReverseWrapperBase)
wrapper.build_params.add_parameter('L', [self.value])
def convert_python_to_c(self, wrapper):
assert isinstance(wrapper, ForwardWrapperBase)
name = wrapper.declarations.declare_variable(self.get_ctype_without_ref(), self.name, self.default_value)
wrapper.parse_params.add_parameter('L', ['&'+name], self.name, optional=bool(self.default_value))
wrapper.call_params.append(name)
class LongLongRefParam(LongLongParam):
DIRECTIONS = [Parameter.DIRECTION_IN] # other directions not yet implemented
CTYPES = ['long long&', 'int64_t&', 'long long int&']
def get_ctype_without_ref(self):
assert self.type_traits.target is not None
return str(self.type_traits.target)
class LongLongReturn(ReturnValue):
CTYPES = ['long long', 'int64_t', 'long long int']
def get_c_error_return(self):
return "return 0;"
def convert_python_to_c(self, wrapper):
wrapper.parse_params.add_parameter("L", ["&"+self.value], prepend=True)
def convert_c_to_python(self, wrapper):
wrapper.build_params.add_parameter("L", [self.value], prepend=True)
class Int8PtrParam(PointerParameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT,
Parameter.DIRECTION_IN|Parameter.DIRECTION_OUT]
CTYPES = ['int8_t*']
def __init__(self, ctype, name, direction=None, is_const=None, default_value=None, transfer_ownership=None):
if direction is None:
if is_const:
direction = Parameter.DIRECTION_IN
else:
raise TypeConfigurationError("direction not given")
super(Int8PtrParam, self).__init__(ctype, name, direction, is_const, default_value, transfer_ownership)
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('b', ['*'+self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("b", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable('int8_t', self.name)
wrapper.call_params.append('&'+name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('b', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("b", [name])
class UInt8PtrParam(PointerParameter):
DIRECTIONS = [Parameter.DIRECTION_IN, Parameter.DIRECTION_OUT,
Parameter.DIRECTION_IN|Parameter.DIRECTION_OUT]
CTYPES = ['uint8_t*']
def __init__(self, ctype, name, direction=None, is_const=None, default_value=None, transfer_ownership=None):
if direction is None:
if is_const:
direction = Parameter.DIRECTION_IN
else:
raise TypeConfigurationError("direction not given")
super(UInt8PtrParam, self).__init__(ctype, name, direction, is_const, default_value, transfer_ownership)
def convert_c_to_python(self, wrapper):
if self.direction & self.DIRECTION_IN:
wrapper.build_params.add_parameter('B', ['*'+self.value])
if self.direction & self.DIRECTION_OUT:
wrapper.parse_params.add_parameter("B", [self.value], self.name)
def convert_python_to_c(self, wrapper):
name = wrapper.declarations.declare_variable('uint8_t', self.name)
wrapper.call_params.append('&'+name)
if self.direction & self.DIRECTION_IN:
wrapper.parse_params.add_parameter('B', ['&'+name], self.name)
if self.direction & self.DIRECTION_OUT:
wrapper.build_params.add_parameter("B", [name])
| gpl-2.0 | 8,058,617,483,119,672,000 | 41.164773 | 131 | 0.638997 | false |
SSSD/sssd | src/tests/intg/test_files_provider.py | 1 | 37497 | #
# SSSD files domain tests
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import stat
import time
import config
import signal
import subprocess
import pwd
import grp
import pytest
import tempfile
import ent
import sssd_id
from sssd_nss import NssReturnCode
from sssd_passwd import (call_sssd_getpwnam,
call_sssd_getpwuid)
from sssd_group import call_sssd_getgrnam, call_sssd_getgrgid
from files_ops import passwd_ops_setup, group_ops_setup, PasswdOps, GroupOps
from util import unindent
# Sync this with files_ops.c
FILES_REALLOC_CHUNK = 64
CANARY = dict(name='canary', passwd='x', uid=100001, gid=200001,
gecos='Used to check if passwd is resolvable',
dir='/home/canary',
shell='/bin/bash')
USER1 = dict(name='user1', passwd='x', uid=10001, gid=20001,
gecos='User for tests',
dir='/home/user1',
shell='/bin/bash')
USER2 = dict(name='user2', passwd='x', uid=10002, gid=20001,
gecos='User2 for tests',
dir='/home/user2',
shell='/bin/bash')
OV_USER1 = dict(name='ov_user1', passwd='x', uid=10010, gid=20010,
gecos='Overriden User 1',
dir='/home/ov/user1',
shell='/bin/ov_user1_shell')
ALT_USER1 = dict(name='alt_user1', passwd='x', uid=60001, gid=70001,
gecos='User for tests from alt files',
dir='/home/altuser1',
shell='/bin/bash')
ALL_USERS = [CANARY, USER1, USER2, OV_USER1, ALT_USER1]
CANARY_GR = dict(name='canary',
gid=300001,
mem=[])
GROUP1 = dict(name='group1',
gid=30001,
mem=['user1'])
OV_GROUP1 = dict(name='ov_group1',
gid=30002,
mem=['user1'])
GROUP12 = dict(name='group12',
gid=30012,
mem=['user1', 'user2'])
GROUP_NOMEM = dict(name='group_nomem',
gid=40000,
mem=[])
ALT_GROUP1 = dict(name='alt_group1',
gid=80001,
mem=['alt_user1'])
def start_sssd():
"""Start sssd and add teardown for stopping it and removing state"""
os.environ["SSS_FILES_PASSWD"] = os.environ["NSS_WRAPPER_PASSWD"]
os.environ["SSS_FILES_GROUP"] = os.environ["NSS_WRAPPER_GROUP"]
if subprocess.call(["sssd", "-D", "--logger=files"]) != 0:
raise Exception("sssd start failed")
def stop_sssd():
pid_file = open(config.PIDFILE_PATH, "r")
pid = int(pid_file.read())
os.kill(pid, signal.SIGTERM)
while True:
try:
os.kill(pid, signal.SIGCONT)
        except OSError:
break
time.sleep(1)
def restart_sssd():
stop_sssd()
start_sssd()
def create_conf_fixture(request, contents):
"""Generate sssd.conf and add teardown for removing it"""
conf = open(config.CONF_PATH, "w")
conf.write(contents)
conf.close()
os.chmod(config.CONF_PATH, stat.S_IRUSR | stat.S_IWUSR)
request.addfinalizer(lambda: os.unlink(config.CONF_PATH))
def create_sssd_fixture(request):
start_sssd()
def teardown():
try:
stop_sssd()
        except Exception:
pass
for path in os.listdir(config.DB_PATH):
os.unlink(config.DB_PATH + "/" + path)
for path in os.listdir(config.MCACHE_PATH):
os.unlink(config.MCACHE_PATH + "/" + path)
request.addfinalizer(teardown)
# Fixtures
@pytest.fixture
def files_domain_only(request):
conf = unindent("""\
[sssd]
domains = files
services = nss
[domain/files]
id_provider = files
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def files_multiple_sources(request):
_, alt_passwd_path = tempfile.mkstemp(prefix='altpasswd')
request.addfinalizer(lambda: os.unlink(alt_passwd_path))
alt_pwops = PasswdOps(alt_passwd_path)
_, alt_group_path = tempfile.mkstemp(prefix='altgroup')
request.addfinalizer(lambda: os.unlink(alt_group_path))
alt_grops = GroupOps(alt_group_path)
passwd_list = ",".join([os.environ["NSS_WRAPPER_PASSWD"], alt_passwd_path])
group_list = ",".join([os.environ["NSS_WRAPPER_GROUP"], alt_group_path])
conf = unindent("""\
[sssd]
domains = files
services = nss
[nss]
debug_level = 10
[domain/files]
id_provider = files
passwd_files = {passwd_list}
group_files = {group_list}
debug_level = 10
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return alt_pwops, alt_grops
@pytest.fixture
def files_multiple_sources_nocreate(request):
"""
Sets up SSSD with multiple sources, but does not actually create
the files.
"""
alt_passwd_path = tempfile.mktemp(prefix='altpasswd')
request.addfinalizer(lambda: os.unlink(alt_passwd_path))
alt_group_path = tempfile.mktemp(prefix='altgroup')
request.addfinalizer(lambda: os.unlink(alt_group_path))
passwd_list = ",".join([os.environ["NSS_WRAPPER_PASSWD"], alt_passwd_path])
group_list = ",".join([os.environ["NSS_WRAPPER_GROUP"], alt_group_path])
conf = unindent("""\
[sssd]
domains = files
services = nss
[nss]
debug_level = 10
[domain/files]
id_provider = files
passwd_files = {passwd_list}
group_files = {group_list}
debug_level = 10
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return alt_passwd_path, alt_group_path
@pytest.fixture
def proxy_to_files_domain_only(request):
conf = unindent("""\
[sssd]
domains = proxy, local
services = nss
[domain/local]
id_provider = local
[domain/proxy]
id_provider = proxy
proxy_lib_name = files
auth_provider = none
resolver_provider = none
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def no_sssd_domain(request):
conf = unindent("""\
[sssd]
services = nss
enable_files_domain = true
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def no_files_domain(request):
conf = unindent("""\
[sssd]
domains = local
services = nss
enable_files_domain = true
[domain/local]
id_provider = local
[domain/disabled.files]
id_provider = files
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def disabled_files_domain(request):
conf = unindent("""\
[sssd]
domains = local
services = nss
enable_files_domain = false
[domain/local]
id_provider = local
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def no_sssd_conf(request):
create_sssd_fixture(request)
return None
@pytest.fixture
def domain_resolution_order(request):
conf = unindent("""\
[sssd]
domains = files
services = nss
domain_resolution_order = foo
[domain/files]
id_provider = files
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def default_domain_suffix(request):
conf = unindent("""\
[sssd]
domains = files
services = nss
default_domain_suffix = foo
[domain/files]
id_provider = files
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
@pytest.fixture
def override_homedir_and_shell(request):
conf = unindent("""\
[sssd]
domains = files
services = nss
[domain/files]
id_provider = files
override_homedir = /test/bar
override_shell = /bin/bar
[nss]
override_homedir = /test/foo
override_shell = /bin/foo
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
def setup_pw_with_list(pwd_ops, user_list):
for user in user_list:
pwd_ops.useradd(**user)
ent.assert_passwd_by_name(CANARY['name'], CANARY)
return pwd_ops
@pytest.fixture
def add_user_with_canary(passwd_ops_setup):
return setup_pw_with_list(passwd_ops_setup, [CANARY, USER1])
@pytest.fixture
def setup_pw_with_canary(passwd_ops_setup):
return setup_pw_with_list(passwd_ops_setup, [CANARY])
def add_group_members(pwd_ops, group):
members = {x['name']: x for x in ALL_USERS}
for member in group['mem']:
if pwd_ops.userexist(member):
continue
pwd_ops.useradd(**members[member])
def setup_gr_with_list(pwd_ops, grp_ops, group_list):
for group in group_list:
add_group_members(pwd_ops, group)
grp_ops.groupadd(**group)
ent.assert_group_by_name(CANARY_GR['name'], CANARY_GR)
return grp_ops
@pytest.fixture
def add_group_with_canary(passwd_ops_setup, group_ops_setup):
return setup_gr_with_list(
passwd_ops_setup, group_ops_setup, [GROUP1, CANARY_GR]
)
@pytest.fixture
def setup_gr_with_canary(passwd_ops_setup, group_ops_setup):
return setup_gr_with_list(passwd_ops_setup, group_ops_setup, [CANARY_GR])
def poll_canary(fn, name, threshold=20):
"""
If we query SSSD while it's updating its cache, it would return NOTFOUND
rather than a result from potentially outdated or incomplete cache. In
reality this doesn't hurt because the order of the modules is normally
"sss files" so the user lookup would fall back to files. But in tests
we use this loop to wait until the canary user who is always there is
resolved.
"""
for _ in range(0, threshold):
res, _ = fn(name)
if res == NssReturnCode.SUCCESS:
return True
elif res == NssReturnCode.NOTFOUND:
time.sleep(0.1)
continue
else:
return False
return False
def sssd_getpwnam_sync(name):
ret = poll_canary(call_sssd_getpwnam, CANARY["name"])
if ret is False:
return NssReturnCode.NOTFOUND, None
return call_sssd_getpwnam(name)
def sssd_getpwuid_sync(uid):
ret = poll_canary(call_sssd_getpwnam, CANARY["name"])
if ret is False:
return NssReturnCode.NOTFOUND, None
return call_sssd_getpwuid(uid)
def sssd_getgrnam_sync(name):
ret = poll_canary(call_sssd_getgrnam, CANARY_GR["name"])
if ret is False:
return NssReturnCode.NOTFOUND, None
return call_sssd_getgrnam(name)
def sssd_getgrgid_sync(name):
ret = poll_canary(call_sssd_getgrnam, CANARY_GR["name"])
if ret is False:
return NssReturnCode.NOTFOUND, None
return call_sssd_getgrgid(name)
def sssd_id_sync(name):
sssd_getpwnam_sync(CANARY["name"])
res, _, groups = sssd_id.get_user_groups(name)
return res, groups
# Helper functions
def user_generator(seqnum):
return dict(name='user%d' % seqnum,
passwd='x',
uid=10000 + seqnum,
gid=20000 + seqnum,
gecos='User for tests',
dir='/home/user%d' % seqnum,
shell='/bin/bash')
def check_user(exp_user, delay=1.0):
if delay > 0:
time.sleep(delay)
res, found_user = sssd_getpwnam_sync(exp_user["name"])
assert res == NssReturnCode.SUCCESS
assert found_user == exp_user
def group_generator(seqnum):
return dict(name='group%d' % seqnum,
gid=30000 + seqnum,
mem=[])
def check_group(exp_group, delay=1.0):
if delay > 0:
time.sleep(delay)
res, found_group = sssd_getgrnam_sync(exp_group["name"])
assert res == NssReturnCode.SUCCESS
assert found_group == exp_group
def check_group_by_gid(exp_group, delay=1.0):
if delay > 0:
time.sleep(delay)
res, found_group = sssd_getgrgid_sync(exp_group["gid"])
assert res == NssReturnCode.SUCCESS
assert found_group == exp_group
def check_group_list(exp_groups_list):
for exp_group in exp_groups_list:
check_group(exp_group)
def assert_user_overriden():
# There is an issue in nss_wrapper [0] and nss_wrapper always looks into
# the files first before using the NSS module. This lets this check fail
# because the user is found in the file and hence will be returned
# without overridden values.
# In order to work this around while there's no fix for nss_wrapper, let's
# use the fully-qualified name when looking up the USER1
#
# https://bugzilla.samba.org/show_bug.cgi?id=12883)
ent.assert_passwd_by_name(USER1["name"]+"@files", OV_USER1)
ent.assert_passwd_by_name(OV_USER1["name"], OV_USER1)
def assert_group_overriden():
# There is an issue in nss_wrapper [0] and nss_wrapper always looks into
# the files first before using the NSS module. This lets this check fail
# because the user is found in the file and hence will be returned
# without overridden values.
# In order to work this around while there's no fix for nss_wrapper, let's
# use the fully-qualified name when looking up the GROUP1
#
# https://bugzilla.samba.org/show_bug.cgi?id=12883)
ent.assert_group_by_name(GROUP1["name"]+"@files", OV_GROUP1)
ent.assert_group_by_name(OV_GROUP1["name"], OV_GROUP1)
# User tests
def test_getpwnam_after_start(add_user_with_canary, files_domain_only):
"""
Test that after startup without any additional operations, a user
can be resolved through sssd
"""
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
def test_getpwuid_after_start(add_user_with_canary, files_domain_only):
"""
Test that after startup without any additional operations, a user
can be resolved through sssd
"""
res, user = sssd_getpwuid_sync(USER1["uid"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
def test_user_overriden(add_user_with_canary, files_domain_only):
"""
Test that user override works with files domain only
"""
# Override
subprocess.check_call(["sss_override", "user-add", USER1["name"],
"-u", str(OV_USER1["uid"]),
"-g", str(OV_USER1["gid"]),
"-n", OV_USER1["name"],
"-c", OV_USER1["gecos"],
"-h", OV_USER1["dir"],
"-s", OV_USER1["shell"]])
restart_sssd()
assert_user_overriden()
def test_group_overriden(add_group_with_canary, files_domain_only):
"""
Test that user override works with files domain only
"""
# Override
subprocess.check_call(["sss_override", "group-add", GROUP1["name"],
"-n", OV_GROUP1["name"],
"-g", str(OV_GROUP1["gid"])])
restart_sssd()
assert_group_overriden()
def test_getpwnam_neg(files_domain_only):
"""
Test that a nonexistent user cannot be resolved by name
"""
res, _ = call_sssd_getpwnam("nosuchuser")
assert res == NssReturnCode.NOTFOUND
def test_getpwuid_neg(files_domain_only):
"""
Test that a nonexistent user cannot be resolved by UID
"""
res, _ = call_sssd_getpwuid(12345)
assert res == NssReturnCode.NOTFOUND
def test_root_does_not_resolve(files_domain_only):
"""
SSSD currently does not resolve the root user even though it can
be resolved through the NSS interface
"""
nss_root = pwd.getpwnam("root")
assert nss_root is not None
res, _ = call_sssd_getpwnam("root")
assert res == NssReturnCode.NOTFOUND
def test_uid_zero_does_not_resolve(files_domain_only):
"""
SSSD currently does not resolve the UID 0 even though it can
be resolved through the NSS interface
"""
nss_root = pwd.getpwuid(0)
assert nss_root is not None
res, _ = call_sssd_getpwuid(0)
assert res == NssReturnCode.NOTFOUND
def test_add_remove_add_file_user(setup_pw_with_canary, files_domain_only):
"""
Test that removing a user is detected and the user
is removed from the sssd database. Similarly, an add
should be detected. Do this several times to test retaining
the inotify watch for moved and unlinked files.
"""
res, _ = call_sssd_getpwnam(USER1["name"])
assert res == NssReturnCode.NOTFOUND
setup_pw_with_canary.useradd(**USER1)
check_user(USER1)
setup_pw_with_canary.userdel(USER1["name"])
time.sleep(1.0)
res, _ = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.NOTFOUND
setup_pw_with_canary.useradd(**USER1)
check_user(USER1)
def test_mod_user_shell(add_user_with_canary, files_domain_only):
"""
Test that modifying a user shell is detected and the user
is modified in the sssd database
"""
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
moduser = dict(USER1)
moduser['shell'] = '/bin/zsh'
add_user_with_canary.usermod(**moduser)
check_user(moduser)
def incomplete_user_setup(pwd_ops, del_field, exp_field):
adduser = dict(USER1)
del adduser[del_field]
exp_user = dict(USER1)
exp_user[del_field] = exp_field
pwd_ops.useradd(**adduser)
return exp_user
def test_user_no_shell(setup_pw_with_canary, files_domain_only):
"""
Test that resolving a user without a shell defined works and returns
a fallback value
"""
check_user(incomplete_user_setup(setup_pw_with_canary, 'shell', ''))
def test_user_no_dir(setup_pw_with_canary, files_domain_only):
"""
Test that resolving a user without a homedir defined works and returns
a fallback value
"""
check_user(incomplete_user_setup(setup_pw_with_canary, 'dir', ''))
def test_user_no_gecos(setup_pw_with_canary, files_domain_only):
"""
Test that resolving a user without a gecos defined works and returns
a fallback value
"""
check_user(incomplete_user_setup(setup_pw_with_canary, 'gecos', ''))
def test_user_no_passwd(setup_pw_with_canary, files_domain_only):
"""
Test that resolving a user without a password defined works and returns
a fallback value
"""
check_user(incomplete_user_setup(setup_pw_with_canary, 'passwd', 'x'))
def bad_incomplete_user_setup(pwd_ops, del_field):
adduser = dict(USER1)
adduser[del_field] = ''
pwd_ops.useradd(**adduser)
def test_incomplete_user_fail(setup_pw_with_canary, files_domain_only):
"""
Test resolving an incomplete user where the missing field is required
to be present in the user record and thus the user shouldn't resolve.
    We cannot test a missing UID or GID because nss_wrapper would not even
    load the malformed passwd file in that case.
"""
bad_incomplete_user_setup(setup_pw_with_canary, 'name')
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.NOTFOUND
def test_getgrnam_after_start(add_group_with_canary, files_domain_only):
"""
Test that after startup without any additional operations, a group
can be resolved through sssd by name
"""
check_group(GROUP1)
def test_getgrgid_after_start(add_group_with_canary, files_domain_only):
"""
Test that after startup without any additional operations, a group
can be resolved through sssd by GID
"""
check_group_by_gid(GROUP1)
def test_getgrnam_neg(files_domain_only):
"""
Test that a nonexistent group cannot be resolved
"""
res, user = sssd_getgrnam_sync("nosuchgroup")
assert res == NssReturnCode.NOTFOUND
def test_getgrgid_neg(files_domain_only):
"""
Test that a nonexistent group cannot be resolved
"""
res, user = sssd_getgrgid_sync(123456)
assert res == NssReturnCode.NOTFOUND
def test_root_group_does_not_resolve(files_domain_only):
"""
SSSD currently does not resolve the root group even though it can
be resolved through the NSS interface
"""
nss_root = grp.getgrnam("root")
assert nss_root is not None
res, user = call_sssd_getgrnam("root")
assert res == NssReturnCode.NOTFOUND
def test_gid_zero_does_not_resolve(files_domain_only):
"""
SSSD currently does not resolve the group with GID 0 even though it
can be resolved through the NSS interface
"""
nss_root = grp.getgrgid(0)
assert nss_root is not None
res, user = call_sssd_getgrgid(0)
assert res == NssReturnCode.NOTFOUND
def test_add_remove_add_file_group(
setup_pw_with_canary, setup_gr_with_canary, files_domain_only
):
"""
Test that removing a group is detected and the group
is removed from the sssd database. Similarly, an add
should be detected. Do this several times to test retaining
the inotify watch for moved and unlinked files.
"""
res, group = call_sssd_getgrnam(GROUP1["name"])
assert res == NssReturnCode.NOTFOUND
add_group_members(setup_pw_with_canary, GROUP1)
setup_gr_with_canary.groupadd(**GROUP1)
check_group(GROUP1)
setup_gr_with_canary.groupdel(GROUP1["name"])
time.sleep(1)
res, group = call_sssd_getgrnam(GROUP1["name"])
assert res == NssReturnCode.NOTFOUND
setup_gr_with_canary.groupadd(**GROUP1)
check_group(GROUP1)
def test_mod_group_name(add_group_with_canary, files_domain_only):
"""
Test that modifying a group name is detected and the group
is modified in the sssd database
"""
check_group(GROUP1)
modgroup = dict(GROUP1)
modgroup['name'] = 'group1_mod'
add_group_with_canary.groupmod(old_name=GROUP1["name"], **modgroup)
check_group(modgroup)
def test_mod_group_gid(add_group_with_canary, files_domain_only):
"""
Test that modifying a group name is detected and the group
is modified in the sssd database
"""
check_group(GROUP1)
modgroup = dict(GROUP1)
modgroup['gid'] = 30002
add_group_with_canary.groupmod(old_name=GROUP1["name"], **modgroup)
check_group(modgroup)
@pytest.fixture
def add_group_nomem_with_canary(passwd_ops_setup, group_ops_setup):
return setup_gr_with_list(
passwd_ops_setup, group_ops_setup, [GROUP_NOMEM, CANARY_GR]
)
def test_getgrnam_no_members(add_group_nomem_with_canary, files_domain_only):
"""
Test that after startup without any additional operations, a group
can be resolved through sssd
"""
check_group(GROUP_NOMEM)
def groupadd_list(grp_ops, groups):
for grp in groups:
grp_ops.groupadd(**grp)
def useradd_list(pwd_ops, users):
for usr in users:
pwd_ops.useradd(**usr)
def user_and_group_setup(pwd_ops, grp_ops, users, groups, reverse):
"""
The reverse is added so that we test cases where a group is added first,
then a user for this group is created -- in that case, we need to properly
link the group after the user is added.
"""
if reverse is False:
useradd_list(pwd_ops, users)
groupadd_list(grp_ops, groups)
else:
groupadd_list(grp_ops, groups)
useradd_list(pwd_ops, users)
def members_check(added_groups):
# Test that users are members as per getgrnam
check_group_list(added_groups)
# Test that users are members as per initgroups
for group in added_groups:
for member in group['mem']:
res, groups = sssd_id_sync(member)
assert res == sssd_id.NssReturnCode.SUCCESS
assert group['name'] in groups
def test_getgrnam_members_users_first(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
A user is linked with a group
"""
user_and_group_setup(setup_pw_with_canary,
setup_gr_with_canary,
[USER1],
[GROUP1],
False)
members_check([GROUP1])
def test_getgrnam_members_users_multiple(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
Multiple users are linked with a group
"""
user_and_group_setup(setup_pw_with_canary,
setup_gr_with_canary,
[USER1, USER2],
[GROUP12],
False)
members_check([GROUP12])
def test_getgrnam_members_groups_first(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
A group is linked with a user
"""
user_and_group_setup(setup_pw_with_canary,
setup_gr_with_canary,
[USER1],
[GROUP1],
True)
members_check([GROUP1])
def test_getgrnam_ghost(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
    Test that the group is not found (and will be handled by nss_files) if
    there are any ghost members.
"""
user_and_group_setup(setup_pw_with_canary,
setup_gr_with_canary,
[],
[GROUP12],
False)
time.sleep(1)
res, group = call_sssd_getgrnam(GROUP12["name"])
assert res == NssReturnCode.NOTFOUND
for member in GROUP12['mem']:
res, _ = call_sssd_getpwnam(member)
assert res == NssReturnCode.NOTFOUND
def ghost_and_member_test(pw_ops, grp_ops, reverse):
user_and_group_setup(pw_ops,
grp_ops,
[USER1],
[GROUP12],
reverse)
time.sleep(1)
res, group = call_sssd_getgrnam(GROUP12["name"])
assert res == NssReturnCode.NOTFOUND
# We checked that the group added has the same members as group12,
# so both user1 and user2. Now check that user1 is a member of
# group12 and its own primary GID but user2 doesn't exist, it's
# just a ghost entry
res, groups = sssd_id_sync('user1')
assert res == sssd_id.NssReturnCode.SUCCESS
assert len(groups) == 2
assert 'group12' in groups
res, _ = call_sssd_getpwnam('user2')
assert res == NssReturnCode.NOTFOUND
def test_getgrnam_user_ghost_and_member(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
    Test a group with one member and one ghost member.
"""
ghost_and_member_test(setup_pw_with_canary,
setup_gr_with_canary,
False)
def test_getgrnam_user_member_and_ghost(setup_pw_with_canary,
setup_gr_with_canary,
files_domain_only):
"""
Test that a group with one member and one ghost, adding the group
first and then linking the member
"""
ghost_and_member_test(setup_pw_with_canary,
setup_gr_with_canary,
True)
def test_getgrnam_add_remove_members(setup_pw_with_canary,
add_group_nomem_with_canary,
files_domain_only):
"""
    Test that group members can be added to and removed from a group
"""
pwd_ops = setup_pw_with_canary
check_group(GROUP_NOMEM)
for usr in [USER1, USER2]:
pwd_ops.useradd(**usr)
modgroup = dict(GROUP_NOMEM)
modgroup['mem'] = ['user1', 'user2']
add_group_nomem_with_canary.groupmod(old_name=modgroup['name'], **modgroup)
check_group(modgroup)
res, groups = sssd_id_sync('user1')
assert res == sssd_id.NssReturnCode.SUCCESS
assert len(groups) == 2
assert 'group_nomem' in groups
res, groups = sssd_id_sync('user2')
assert res == sssd_id.NssReturnCode.SUCCESS
assert 'group_nomem' in groups
modgroup['mem'] = ['user2']
add_group_nomem_with_canary.groupmod(old_name=modgroup['name'], **modgroup)
check_group(modgroup)
# User1 exists, but is not a member of any supplementary group anymore
res, _ = call_sssd_getpwnam('user1')
assert res == sssd_id.NssReturnCode.SUCCESS
res, groups = sssd_id_sync('user1')
assert res == sssd_id.NssReturnCode.NOTFOUND
# user2 still is
res, groups = sssd_id_sync('user2')
assert res == sssd_id.NssReturnCode.SUCCESS
assert len(groups) == 2
assert 'group_nomem' in groups
def test_getgrnam_add_remove_ghosts(setup_pw_with_canary,
add_group_nomem_with_canary,
files_domain_only):
"""
    Test that ghost members can be added to and removed from a group
"""
pwd_ops = setup_pw_with_canary
check_group(GROUP_NOMEM)
modgroup = dict(GROUP_NOMEM)
modgroup['mem'] = ['user1', 'user2']
add_group_nomem_with_canary.groupmod(old_name=modgroup['name'], **modgroup)
time.sleep(1)
res, group = call_sssd_getgrnam(modgroup['name'])
assert res == sssd_id.NssReturnCode.NOTFOUND
modgroup['mem'] = ['user2']
add_group_nomem_with_canary.groupmod(old_name=modgroup['name'], **modgroup)
time.sleep(1)
res, group = call_sssd_getgrnam(modgroup['name'])
assert res == sssd_id.NssReturnCode.NOTFOUND
res, _ = call_sssd_getpwnam('user1')
assert res == NssReturnCode.NOTFOUND
res, _ = call_sssd_getpwnam('user2')
assert res == NssReturnCode.NOTFOUND
def realloc_users(pwd_ops, num):
# Intentionally not including the last one because
# canary is added first
for i in range(1, num):
user = user_generator(i)
pwd_ops.useradd(**user)
user = user_generator(num-1)
check_user(user)
def test_realloc_users_exact(setup_pw_with_canary, files_domain_only):
"""
Test that returning exactly FILES_REALLOC_CHUNK users (see files_ops.c)
works fine to test reallocation logic. Test exact number of users to
check for off-by-one errors.
"""
realloc_users(setup_pw_with_canary, FILES_REALLOC_CHUNK)
def test_realloc_users(setup_pw_with_canary, files_domain_only):
"""
    Test that returning more than FILES_REALLOC_CHUNK users (see files_ops.c)
    works fine, to exercise the reallocation logic.
"""
realloc_users(setup_pw_with_canary, FILES_REALLOC_CHUNK*3)
def realloc_groups(grp_ops, num):
for i in range(1, num):
group = group_generator(i)
grp_ops.groupadd(**group)
group = group_generator(num-1)
check_group(group)
def test_realloc_groups_exact(setup_gr_with_canary, files_domain_only):
"""
Test that returning exactly FILES_REALLOC_CHUNK groups (see files_ops.c)
works fine to test reallocation logic. Test exact number of groups to
check for off-by-one errors.
"""
    realloc_groups(setup_gr_with_canary, FILES_REALLOC_CHUNK)
def test_realloc_groups(setup_gr_with_canary, files_domain_only):
"""
    Test that returning more than FILES_REALLOC_CHUNK groups (see files_ops.c)
    works fine, to exercise the reallocation logic.
"""
realloc_groups(setup_gr_with_canary, FILES_REALLOC_CHUNK*3)
# Files domain autoconfiguration tests
def test_no_sssd_domain(add_user_with_canary, no_sssd_domain):
"""
Test that if no sssd domain is configured, sssd will add the implicit one
"""
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
def test_proxy_to_files_domain_only(add_user_with_canary,
proxy_to_files_domain_only):
"""
Test that implicit_files domain is not started together with proxy to files
"""
local_user1 = dict(name='user1', passwd='*', uid=10009, gid=10009,
gecos='user1', dir='/home/user1', shell='/bin/bash')
# Add a user with a different UID than the one in files
subprocess.check_call(
["sss_useradd", "-u", "10009", "-M", USER1["name"]])
res, user = call_sssd_getpwnam(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == local_user1
res, _ = call_sssd_getpwnam("{0}@implicit_files".format(USER1["name"]))
assert res == NssReturnCode.NOTFOUND
def test_no_files_domain(add_user_with_canary, no_files_domain):
"""
Test that if no files domain is configured, sssd will add the implicit one
before any explicitly configured domains
"""
# Add a user with a different UID than the one in files
subprocess.check_call(
["sss_useradd", "-u", "10009", "-M", USER1["name"]])
# Even though the local domain is the only one configured,
# files will be resolved first
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
def test_disable_files_domain(add_user_with_canary, disabled_files_domain):
"""
    Test that when the implicit files domain is explicitly disabled, users
    from the files domain are not resolvable through SSSD
"""
# The local user will not be resolvable through nss_sss now
res, user = sssd_getpwnam_sync(USER1["name"])
assert res != NssReturnCode.SUCCESS
def test_no_sssd_conf(add_user_with_canary, no_sssd_conf):
"""
Test that running without sssd.conf implicitly configures one with
id_provider=files
"""
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user == USER1
def test_multiple_passwd_group_files(add_user_with_canary,
add_group_with_canary,
files_multiple_sources):
"""
Test that users and groups can be mirrored from multiple files
"""
alt_pwops, alt_grops = files_multiple_sources
alt_pwops.useradd(**ALT_USER1)
alt_grops.groupadd(**ALT_GROUP1)
check_user(USER1)
check_user(ALT_USER1)
check_group(GROUP1)
check_group(ALT_GROUP1)
def test_multiple_files_created_after_startup(add_user_with_canary,
add_group_with_canary,
files_multiple_sources_nocreate):
"""
Test that users and groups can be mirrored from multiple files,
but those files are not created when SSSD starts, only afterwards.
"""
alt_passwd_path, alt_group_path = files_multiple_sources_nocreate
check_user(USER1)
check_group(GROUP1)
# touch the files
for fpath in (alt_passwd_path, alt_group_path):
with open(fpath, "w") as f:
pass
alt_pwops = PasswdOps(alt_passwd_path)
alt_grops = GroupOps(alt_group_path)
alt_pwops.useradd(**ALT_USER1)
alt_grops.groupadd(**ALT_GROUP1)
check_user(ALT_USER1)
check_group(ALT_GROUP1)
def test_files_with_domain_resolution_order(add_user_with_canary,
domain_resolution_order):
"""
    Test that when using domain_resolution_order, the user can be resolved
    without using its fully-qualified name.
"""
check_user(USER1)
def test_files_with_default_domain_suffix(add_user_with_canary,
default_domain_suffix):
"""
    Test that when using default_domain_suffix, the user can still be
    resolved by UID without using its fully-qualified name.
"""
ret = poll_canary(call_sssd_getpwuid, CANARY["uid"])
if ret is False:
return NssReturnCode.NOTFOUND, None
res, found_user = call_sssd_getpwuid(USER1["uid"])
assert res == NssReturnCode.SUCCESS
assert found_user == USER1
def test_files_with_override_homedir(add_user_with_canary,
override_homedir_and_shell):
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user["dir"] == USER1["dir"]
def test_files_with_override_shell(add_user_with_canary,
override_homedir_and_shell):
res, user = sssd_getpwnam_sync(USER1["name"])
assert res == NssReturnCode.SUCCESS
assert user["shell"] == USER1["shell"]
| gpl-3.0 | -4,390,004,282,251,433,000 | 28.501967 | 79 | 0.61973 | false |
persandstrom/home-assistant | homeassistant/components/sensor/netatmo_public.py | 1 | 4390 | """
Support for Sensors using public Netatmo data.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.netatmo_public/.
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (CONF_NAME, CONF_TYPE)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['netatmo']
CONF_AREAS = 'areas'
CONF_LAT_NE = 'lat_ne'
CONF_LON_NE = 'lon_ne'
CONF_LAT_SW = 'lat_sw'
CONF_LON_SW = 'lon_sw'
DEFAULT_NAME = 'Netatmo Public Data'
DEFAULT_TYPE = 'max'
SENSOR_TYPES = {'max', 'avg'}
# NetAtmo Data is uploaded to server every 10 minutes
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_AREAS): vol.All(cv.ensure_list, [
{
vol.Required(CONF_LAT_NE): cv.latitude,
vol.Required(CONF_LAT_SW): cv.latitude,
vol.Required(CONF_LON_NE): cv.longitude,
vol.Required(CONF_LON_SW): cv.longitude,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE):
vol.In(SENSOR_TYPES)
}
]),
})
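# An illustrative configuration.yaml entry for this platform (the coordinates
# below are made-up values; replace them with the bounding box you want to
# monitor):
#
#   sensor:
#     - platform: netatmo_public
#       areas:
#         - lat_ne: 40.719
#           lon_ne: -73.735
#           lat_sw: 40.552
#           lon_sw: -74.105
#           name: Home rain
#           type: avg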
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the access to Netatmo binary sensor."""
netatmo = hass.components.netatmo
sensors = []
areas = config.get(CONF_AREAS)
for area_conf in areas:
data = NetatmoPublicData(netatmo.NETATMO_AUTH,
lat_ne=area_conf.get(CONF_LAT_NE),
lon_ne=area_conf.get(CONF_LON_NE),
lat_sw=area_conf.get(CONF_LAT_SW),
lon_sw=area_conf.get(CONF_LON_SW),
calculation=area_conf.get(CONF_TYPE))
sensors.append(NetatmoPublicSensor(area_conf.get(CONF_NAME), data))
add_entities(sensors)
class NetatmoPublicSensor(Entity):
"""Represent a single sensor in a Netatmo."""
def __init__(self, name, data):
"""Initialize the sensor."""
self.netatmo_data = data
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend."""
return 'mdi:weather-rainy'
@property
def device_class(self):
"""Return the device class of the sensor."""
return None
@property
def state(self):
"""Return true if binary sensor is on."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return 'mm'
def update(self):
"""Get the latest data from NetAtmo API and updates the states."""
self.netatmo_data.update()
self._state = self.netatmo_data.data
class NetatmoPublicData:
"""Get the latest data from NetAtmo."""
def __init__(self, auth, lat_ne, lon_ne, lat_sw, lon_sw, calculation):
"""Initialize the data object."""
self.auth = auth
self.data = None
self.lat_ne = lat_ne
self.lon_ne = lon_ne
self.lat_sw = lat_sw
self.lon_sw = lon_sw
self.calculation = calculation
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Request an update from the Netatmo API."""
import pyatmo
raindata = pyatmo.PublicData(self.auth,
LAT_NE=self.lat_ne,
LON_NE=self.lon_ne,
LAT_SW=self.lat_sw,
LON_SW=self.lon_sw,
required_data_type="rain")
if raindata.CountStationInArea() == 0:
_LOGGER.warning('No Rain Station available in this area.')
return
raindata_live = raindata.getLive()
if self.calculation == 'avg':
self.data = sum(raindata_live.values()) / len(raindata_live)
else:
self.data = max(raindata_live.values())
| apache-2.0 | -397,387,515,900,327,800 | 30.134752 | 75 | 0.591116 | false |
ryfeus/lambda-packs | pytorch/source/torch/nn/parallel/deprecated/distributed_cpu.py | 1 | 4290 | import torch
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
import torch.distributed.deprecated as dist
from torch.nn.modules import Module
from collections import defaultdict
from torch.autograd import Variable
import torch.utils.hooks
class DistributedDataParallelCPU(Module):
r"""Implements distributed data parallelism for CPU at the module level.
    This module supports the ``mpi``, ``gloo``, and ``tcp`` backends.
This container parallelizes the application of the given module by
splitting the input across the specified devices by chunking in the batch
dimension. The module is replicated on each machine, and each such replica
handles a portion of the input. During the backwards pass, gradients from
each node are averaged.
    This module can be used in conjunction with the DistributedSampler
    (see :class:`torch.utils.data.distributed.DistributedSampler`),
    which will load a subset of the original dataset for each node with the same
batch size. So strong scaling should be configured like this:
n = 1, batch size = 128
n = 2, batch size = 64
n = 4, batch size = 32
n = 8, batch size = 16
Creation of this class requires the distributed package to be already
initialized in the process group mode
(see :func:`torch.distributed.deprecated.init_process_group`).
.. warning::
Constructor, forward method, and differentiation of the output (or a
function of the output of this module) is a distributed synchronization
point. Take that into account in case different node might be
executing different code.
.. warning::
This module assumes all parameters are registered in the model by the
time it is created. No parameters should be added nor removed later.
.. warning::
This module assumes all gradients are dense.
.. warning::
This module doesn't work with :func:`torch.autograd.grad` (i.e. it will
only work if gradients are to be accumulated in ``.grad`` attributes of
parameters).
.. note::
Parameters are broadcast between nodes in the __init__() function. The
module performs an all-reduce step on gradients and assumes that they
will be modified by the optimizer in all nodes in the same way.
.. warning::
Forward and backward hooks defined on :attr:`module` and its submodules
won't be invoked anymore, unless the hooks are initialized in the
:meth:`forward` method.
Args:
module: module to be parallelized
Example::
>>> torch.distributed.deprecated.init_process_group(world_size=4, init_method='...')
>>> net = torch.nn.DistributedDataParallelCPU(model)
"""
def __init__(self, module):
super(DistributedDataParallelCPU, self).__init__()
self.module = module
self.sync_parameters()
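        # The closure below buckets gradients by tensor type, flattens each
        # bucket, all-reduces it in a single call, divides by the world size
        # and copies the averaged values back into the individual .grad
        # tensors once the backward pass has queued the callback.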
def allreduce_params():
if self.needs_reduction:
self.needs_reduction = False
buckets = defaultdict(list)
for param in self.module.parameters():
if param.requires_grad and param.grad is not None:
tp = type(param.data)
buckets[tp].append(param)
for bucket in buckets.values():
grads = [param.grad.data for param in bucket]
coalesced = _flatten_dense_tensors(grads)
dist.all_reduce(coalesced)
coalesced /= dist.get_world_size()
for buf, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)):
buf.copy_(synced)
for param in list(self.module.parameters()):
@torch.utils.hooks.unserializable_hook
def allreduce_hook(*unused):
Variable._execution_engine.queue_callback(allreduce_params)
if param.requires_grad:
param.register_hook(allreduce_hook)
def sync_parameters(self):
for param in self.module.parameters():
dist.broadcast(param.data, 0)
def forward(self, *inputs, **kwargs):
self.needs_reduction = True
return self.module(*inputs, **kwargs)
| mit | -3,092,668,904,383,977,500 | 39.093458 | 94 | 0.648485 | false |
geky/pyOCD | pyOCD/target/target_lpc4330.py | 1 | 2872 | """
mbed CMSIS-DAP debugger
Copyright (c) 2006-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cortex_m import CortexM
class LPC4330(CortexM):
memoryMapXML = """<?xml version="1.0"?>
<!DOCTYPE memory-map PUBLIC "+//IDN gnu.org//DTD GDB Memory Map V1.0//EN" "http://sourceware.org/gdb/gdb-memory-map.dtd">
<memory-map>
<memory type="flash" start="0x14000000" length="0x4000000"> <property name="blocksize">0x400</property></memory>
<memory type="ram" start="0x10000000" length="0x20000"> </memory>
<memory type="ram" start="0x10080000" length="0x12000"> </memory>
<memory type="ram" start="0x20000000" length="0x8000"> </memory>
<memory type="ram" start="0x20008000" length="0x8000"> </memory>
</memory-map>
"""
def __init__(self, transport):
super(LPC4330, self).__init__(transport)
self.ignoreReset = False
def setFlash(self, flash):
self.flash = flash
def reset(self, software_reset = False):
# Always use software reset for LPC4330 since the hardware version
# will reset the DAP.
CortexM.reset(self, True)
def resetStopOnReset(self, software_reset = False):
if self.ignoreReset:
return
# Set core up to run some code in RAM that is guaranteed to be valid
# since FLASH could be corrupted and that is what user is trying to fix.
self.writeMemory(0x10000000, 0x10087ff0) # Initial SP
self.writeMemory(0x10000004, 0x1000000d) # Reset Handler
self.writeMemory(0x10000008, 0x1000000d) # Hard Fault Handler
self.writeMemory(0x1000000c, 0xe7fee7fe) # Infinite loop
self.writeMemory(0x40043100, 0x10000000) # Shadow 0x0 to RAM
# Always use software reset for LPC4330 since the hardware version
# will reset the DAP.
CortexM.resetStopOnReset(self, True)
# Map shadow memory to SPIFI FLASH
self.writeMemory(0x40043100, 0x80000000)
# The LPC4330 flash init routine can be used to remount FLASH.
self.ignoreReset = True
self.flash.init()
self.ignoreReset = False
# Set SP and PC based on interrupt vector in SPIFI_FLASH
sp = self.readMemory(0x14000000)
pc = self.readMemory(0x14000004)
self.writeCoreRegisterRaw('sp', sp)
self.writeCoreRegisterRaw('pc', pc)
| apache-2.0 | -5,108,212,287,236,866,000 | 38.342466 | 121 | 0.68071 | false |
opennode/nodeconductor-assembly-waldur | src/waldur_slurm/apps.py | 1 | 2931 | from django.apps import AppConfig
from django.db.models import signals
class SlurmConfig(AppConfig):
name = 'waldur_slurm'
verbose_name = 'SLURM'
service_name = 'SLURM'
def ready(self):
from waldur_core.quotas.fields import QuotaField, CounterQuotaField
from waldur_core.structure import SupportedServices
from waldur_core.structure import models as structure_models
from waldur_core.structure import signals as structure_signals
from waldur_freeipa import models as freeipa_models
from .backend import SlurmBackend
from . import handlers, models, utils
SupportedServices.register_backend(SlurmBackend)
signals.post_save.connect(
handlers.process_user_creation,
sender=freeipa_models.Profile,
dispatch_uid='waldur_slurm.handlers.process_user_creation',
)
signals.pre_delete.connect(
handlers.process_user_deletion,
sender=freeipa_models.Profile,
dispatch_uid='waldur_slurm.handlers.process_user_deletion',
)
structure_models_with_roles = (
structure_models.Customer,
structure_models.Project,
)
for model in structure_models_with_roles:
structure_signals.structure_role_granted.connect(
handlers.process_role_granted,
sender=model,
dispatch_uid='waldur_slurm.handlers.process_role_granted.%s'
                % model.__name__,
)
structure_signals.structure_role_revoked.connect(
handlers.process_role_revoked,
sender=model,
dispatch_uid='waldur_slurm.handlers.process_role_revoked.%s'
                % model.__name__,
)
for quota in utils.QUOTA_NAMES:
structure_models.Customer.add_quota_field(
name=quota, quota_field=QuotaField(is_backend=True)
)
structure_models.Project.add_quota_field(
name=quota, quota_field=QuotaField(is_backend=True)
)
structure_models.Project.add_quota_field(
name='nc_allocation_count',
quota_field=CounterQuotaField(
target_models=lambda: [models.Allocation],
path_to_scope='service_project_link.project',
),
)
structure_models.Customer.add_quota_field(
name='nc_allocation_count',
quota_field=CounterQuotaField(
target_models=lambda: [models.Allocation],
path_to_scope='service_project_link.project.customer',
),
)
signals.post_save.connect(
handlers.update_quotas_on_allocation_usage_update,
sender=models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_quotas_on_allocation_usage_update',
)
| mit | -963,469,003,066,524,500 | 34.743902 | 90 | 0.606619 | false |
addisonElliott/SmartShopTouchScreen | Windows/ExpirationBox_ui.py | 1 | 8236 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ExpirationBox.ui'
#
# Created by: PyQt5 UI code generator 5.7.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ExpirationBox(object):
def setupUi(self, ExpirationBox):
ExpirationBox.setObjectName("ExpirationBox")
ExpirationBox.resize(506, 364)
font = QtGui.QFont()
font.setPointSize(19)
ExpirationBox.setFont(font)
ExpirationBox.setStyleSheet("QDialog\n"
"{\n"
" border: 1px solid #76797C;\n"
"}")
self.gridLayout = QtWidgets.QGridLayout(ExpirationBox)
self.gridLayout.setContentsMargins(5, 5, 5, 5)
self.gridLayout.setObjectName("gridLayout")
self.day_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.day_label.setFont(font)
self.day_label.setAlignment(QtCore.Qt.AlignCenter)
self.day_label.setObjectName("day_label")
self.gridLayout.addWidget(self.day_label, 3, 2, 1, 1)
self.day_combo = QtWidgets.QComboBox(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(16)
font.setBold(False)
font.setWeight(50)
self.day_combo.setFont(font)
self.day_combo.setObjectName("day_combo")
self.day_combo.addItem("")
self.day_combo.setItemText(0, "")
self.gridLayout.addWidget(self.day_combo, 4, 2, 1, 1)
self.month_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.month_label.setFont(font)
self.month_label.setAlignment(QtCore.Qt.AlignCenter)
self.month_label.setObjectName("month_label")
self.gridLayout.addWidget(self.month_label, 3, 1, 1, 1)
self.month_combo = QtWidgets.QComboBox(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.month_combo.setFont(font)
self.month_combo.setStyleSheet("QDialog\n"
"{\n"
" border: 1px solid #76797C;\n"
"}")
self.month_combo.setObjectName("month_combo")
self.month_combo.addItem("")
self.month_combo.setItemText(0, "")
self.gridLayout.addWidget(self.month_combo, 4, 1, 1, 1)
self.year_combo = QtWidgets.QComboBox(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.year_combo.setFont(font)
self.year_combo.setObjectName("year_combo")
self.year_combo.addItem("")
self.year_combo.setItemText(0, "")
self.gridLayout.addWidget(self.year_combo, 4, 3, 1, 1)
spacerItem = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
self.gridLayout.addItem(spacerItem, 13, 1, 1, 1)
self.year_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.year_label.setFont(font)
self.year_label.setAlignment(QtCore.Qt.AlignCenter)
self.year_label.setObjectName("year_label")
self.gridLayout.addWidget(self.year_label, 3, 3, 1, 1)
self.qty_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.qty_label.setFont(font)
self.qty_label.setObjectName("qty_label")
self.gridLayout.addWidget(self.qty_label, 6, 1, 1, 2)
self.horizontalLayout_1 = QtWidgets.QHBoxLayout()
self.horizontalLayout_1.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_1.setSpacing(15)
self.horizontalLayout_1.setObjectName("horizontalLayout_1")
self.cancel_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.cancel_label.setFont(font)
self.cancel_label.setObjectName("cancel_label")
self.horizontalLayout_1.addWidget(self.cancel_label)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_1.addItem(spacerItem1)
self.accept_button = TouchButton(ExpirationBox)
self.accept_button.setMinimumSize(QtCore.QSize(48, 48))
self.accept_button.setMaximumSize(QtCore.QSize(48, 48))
self.accept_button.setStyleSheet("background-color: transparent;\n"
"border: 0;")
self.accept_button.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/Icons/Icons/GreenCheckIcon_Finished.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.accept_button.setIcon(icon)
self.accept_button.setIconSize(QtCore.QSize(48, 48))
self.accept_button.setObjectName("accept_button")
self.horizontalLayout_1.addWidget(self.accept_button)
self.cancel_button = TouchButton(ExpirationBox)
self.cancel_button.setMinimumSize(QtCore.QSize(48, 48))
self.cancel_button.setMaximumSize(QtCore.QSize(48, 48))
self.cancel_button.setStyleSheet("background-color: transparent;\n"
"border: 0;")
self.cancel_button.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/Icons/Icons/RedCancelIcon_Finished.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.cancel_button.setIcon(icon1)
self.cancel_button.setIconSize(QtCore.QSize(48, 48))
self.cancel_button.setObjectName("cancel_button")
self.horizontalLayout_1.addWidget(self.cancel_button)
self.gridLayout.addLayout(self.horizontalLayout_1, 14, 1, 1, 3)
self.qty_combo = QtWidgets.QComboBox(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(16)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.qty_combo.setFont(font)
self.qty_combo.setObjectName("qty_combo")
self.gridLayout.addWidget(self.qty_combo, 7, 1, 1, 3)
self.label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.label.setFont(font)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 2, 1, 1, 1)
self.itemNameLabel = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(15)
self.itemNameLabel.setFont(font)
self.itemNameLabel.setObjectName("itemNameLabel")
self.gridLayout.addWidget(self.itemNameLabel, 2, 2, 1, 2)
self.exp_label = QtWidgets.QLabel(ExpirationBox)
font = QtGui.QFont()
font.setFamily("Cronus Round")
font.setPointSize(21)
self.exp_label.setFont(font)
self.exp_label.setObjectName("exp_label")
self.gridLayout.addWidget(self.exp_label, 1, 1, 1, 3, QtCore.Qt.AlignHCenter)
self.retranslateUi(ExpirationBox)
QtCore.QMetaObject.connectSlotsByName(ExpirationBox)
def retranslateUi(self, ExpirationBox):
_translate = QtCore.QCoreApplication.translate
ExpirationBox.setWindowTitle(_translate("ExpirationBox", "Dialog"))
self.day_label.setText(_translate("ExpirationBox", "Day"))
self.month_label.setText(_translate("ExpirationBox", "Month"))
self.year_label.setText(_translate("ExpirationBox", "Year"))
self.qty_label.setText(_translate("ExpirationBox", "Quantity"))
self.cancel_label.setText(_translate("ExpirationBox", "Scan to continue"))
self.label.setText(_translate("ExpirationBox", "Item Name:"))
self.itemNameLabel.setText(_translate("ExpirationBox", "Label"))
self.exp_label.setText(_translate("ExpirationBox", "Expiration Date"))
from Widgets.touchButton import TouchButton
import Resource_BY_rc
import style_rc
| agpl-3.0 | -5,871,618,067,304,195,000 | 43.76087 | 119 | 0.662943 | false |
goddardl/gaffer | apps/gui/gui-1.py | 1 | 5810 | ##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import gc
import IECore
import Gaffer
import GafferUI
class gui( Gaffer.Application ) :
def __init__( self ) :
Gaffer.Application.__init__(
self,
"This application provides a graphical user interface for editing node graphs."
)
self.parameters().addParameters(
[
IECore.StringVectorParameter(
name = "scripts",
description = "A list of scripts to edit.",
defaultValue = IECore.StringVectorData(),
),
IECore.BoolParameter(
name = "fullScreen",
description = "Opens the UI in full screen mode.",
defaultValue = False,
),
]
)
self.parameters().userData()["parser"] = IECore.CompoundObject(
{
"flagless" : IECore.StringVectorData( [ "scripts" ] )
}
)
self.__setupClipboardSync()
def _run( self, args ) :
GafferUI.ScriptWindow.connect( self.root() )
if len( args["scripts"] ) :
for fileName in args["scripts"] :
scriptNode = Gaffer.ScriptNode()
scriptNode["fileName"].setValue( os.path.abspath( fileName ) )
# \todo: Display load errors in a dialog, like in python/GafferUI/FileMenu.py
scriptNode.load( continueOnError = True )
self.root()["scripts"].addChild( scriptNode )
GafferUI.FileMenu.addRecentFile( self, fileName )
del scriptNode
else :
self.root()["scripts"].addChild( Gaffer.ScriptNode() )
if args["fullScreen"].value :
primaryScript = self.root()["scripts"][-1]
primaryWindow = GafferUI.ScriptWindow.acquire( primaryScript )
primaryWindow.setFullScreen( True )
GafferUI.EventLoop.mainEventLoop().start()
return 0
def __setupClipboardSync( self ) :
## This function sets up two way syncing between the clipboard held in the Gaffer::ApplicationRoot
# and the global QtGui.QClipboard which is shared with external applications, and used by the cut and paste
# operations in GafferUI's underlying QWidgets. This is very useful, as it allows nodes to be copied from
# the graph and pasted into emails/chats etc, and then copied out of emails/chats and pasted into the node graph.
#
## \todo I don't think this is the ideal place for this functionality. Firstly, we need it in all apps
# rather than just the gui app. Secondly, we want a way of using the global clipboard using GafferUI
# public functions without needing an ApplicationRoot. Thirdly, it's questionable that ApplicationRoot should
# have a clipboard anyway - it seems like a violation of separation between the gui and non-gui libraries.
# Perhaps we should abolish the ApplicationRoot clipboard and the ScriptNode cut/copy/paste routines, relegating
# them all to GafferUI functionality?
QtGui = GafferUI._qtImport( "QtGui" )
self.__clipboardContentsChangedConnection = self.root().clipboardContentsChangedSignal().connect( Gaffer.WeakMethod( self.__clipboardContentsChanged ) )
QtGui.QApplication.clipboard().dataChanged.connect( Gaffer.WeakMethod( self.__qtClipboardContentsChanged ) )
self.__ignoreQtClipboardContentsChanged = False
def __clipboardContentsChanged( self, applicationRoot ) :
assert( applicationRoot.isSame( self.root() ) )
data = applicationRoot.getClipboardContents()
QtGui = GafferUI._qtImport( "QtGui" )
clipboard = QtGui.QApplication.clipboard()
try :
			self.__ignoreQtClipboardContentsChanged = True # avoid triggering an unnecessary copy back in __qtClipboardContentsChanged
clipboard.setText( str( data ) )
finally :
self.__ignoreQtClipboardContentsChanged = False
def __qtClipboardContentsChanged( self ) :
if self.__ignoreQtClipboardContentsChanged :
return
QtGui = GafferUI._qtImport( "QtGui" )
text = str( QtGui.QApplication.clipboard().text() )
if text :
with Gaffer.BlockedConnection( self.__clipboardContentsChangedConnection ) :
self.root().setClipboardContents( IECore.StringData( text ) )
IECore.registerRunTimeTyped( gui )
| bsd-3-clause | -2,702,933,978,917,300,000 | 36.973856 | 154 | 0.704819 | false |
eonpatapon/contrail-controller | src/container/kube-manager/kube_manager/common/args.py | 1 | 7928 | #
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import sys
import argparse
import ConfigParser
from vnc_api.vnc_api import *
from pysandesh.sandesh_base import Sandesh, SandeshSystem, SandeshConfig
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
from sandesh_common.vns.constants import (HttpPortKubeManager,ApiServerPort,\
DiscoveryServerPort)
from enum import Enum
class MandatoryArgs(Enum):
"""
Enum of mandatory arguments to kube-manager.
Kube-manager arguments will be validated against these arguments to
enforce the presence of these mandatory arguments and optionally to
enforce the correctness/validity of the supplied value for an argument.
Each mandatory argument is represented by an enum member and the following
info is captured for each argument, as a dictionary:
a. arg_str - String which identifies the argument in config file.
b. validatefn (optional) - Pointer to function that validates configured
value for an argument.
A validate function (if specified) can be any custom function that returns
a value that evaluates to bool True when validation is successful.
It should return bool False if its validation fails.
Example:
    An argument "foo" is configured in the config file as follows:
foo = foo_value
    It can be enforced as a mandatory argument by adding the following member to
this enum.
FOO = {"arg_str": "foo", "validatefn": foo_fn()}
If a validation function is not required then:
FOO = {"arg_str": "foo"}
"""
POD_SUBNET = {
"arg_str": "pod_subnets",
"validatefn": lambda x: x
}
SERVICE_SUBNET = {
"arg_str": "service_subnets",
"validatefn": lambda x: x
}
IP_FABRIC_SUBNET = {
"arg_str": "ip_fabric_subnets",
"validatefn": lambda x: x
}
def parse_args(args_str=None):
if not args_str:
args_str = sys.argv[1:]
conf_parser = argparse.ArgumentParser(add_help=False)
conf_parser.add_argument("-c", "--config-file", action='append',
help="Specify config file", metavar="FILE")
args, remaining_argv = conf_parser.parse_known_args(args_str)
defaults = {
'http_server_port': HttpPortKubeManager,
'worker_id': '0',
'collectors': '',
'logger_class': None,
'logging_conf': '',
'log_local': False,
'log_category': '',
'use_syslog': False,
'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
'kube_object_cache': 'True',
'disc_server_ip': 'localhost',
'disc_server_port': DiscoveryServerPort,
'log_level': SandeshLevel.SYS_DEBUG,
'log_file': '/var/log/contrail/contrail-kube-manager.log',
'api_service_link_local' : 'True',
'orchestrator' : 'kubernetes',
'token' : '',
'nested_mode': '0',
'global_tags': '1',
'aps_name': '',
'kube_timer_interval': '60',
'secure_project': 'True'
}
defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
vnc_opts = {
'rabbit_server': 'localhost',
'rabbit_port': '5672',
'rabbit_user': 'guest',
'rabbit_password': 'guest',
'rabbit_vhost': None,
'rabbit_ha_mode': False,
'rabbit_use_ssl': False,
'kombu_ssl_version': '',
'kombu_ssl_keyfile': '',
'kombu_ssl_certfile': '',
'kombu_ssl_ca_certs': '',
'cassandra_user': None,
'cassandra_password': None,
'cassandra_server_list': '',
'cluster_id': '',
'vnc_endpoint_ip': '[127.0.0.1]',
'vnc_endpoint_port': ApiServerPort,
'admin_user' : '',
'admin_password' : '',
'admin_tenant' : '',
'public_fip_pool': '{}',
'zk_server_ip': '127.0.0.1:2181',
}
k8s_opts = {
'kubernetes_api_server': 'localhost',
'kubernetes_api_port': '8080',
'kubernetes_api_secure_port': 8443,
'kubernetes_service_name': 'kubernetes',
MandatoryArgs.SERVICE_SUBNET.value['arg_str']: None,
MandatoryArgs.POD_SUBNET.value['arg_str']: None,
MandatoryArgs.IP_FABRIC_SUBNET.value['arg_str']: None,
'kubernetes_cluster_owner': 'k8s',
'kubernetes_cluster_domain' : 'default-domain',
'cluster_name': None,
'cluster_project' : "{}",
'cluster_network' : "{}",
'cluster_pod_network' : None,
'cluster_service_network' : None,
'ip_fabric_forwarding': False,
'ip_fabric_snat': False,
}
sandesh_opts = SandeshConfig.get_default_options()
auth_opts = {
'auth_token_url': None,
'auth_user': 'admin',
'auth_password': 'admin',
'auth_tenant': 'admin',
}
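    # Values from the config file, when present, override the hard-coded defaults above.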
config = ConfigParser.SafeConfigParser()
if args.config_file:
config.read(args.config_file)
if 'VNC' in config.sections():
vnc_opts.update(dict(config.items("VNC")))
if 'KUBERNETES' in config.sections():
k8s_opts.update(dict(config.items("KUBERNETES")))
SandeshConfig.update_options(sandesh_opts, config)
if 'AUTH' in config.sections():
auth_opts.update(dict(config.items("AUTH")))
if 'DEFAULTS' in config.sections():
defaults.update(dict(config.items("DEFAULTS")))
parser = argparse.ArgumentParser(
parents=[conf_parser],
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
defaults.update(vnc_opts)
defaults.update(k8s_opts)
defaults.update(sandesh_opts)
defaults.update(auth_opts)
parser.set_defaults(**defaults)
args = parser.parse_args(args_str)
if type(args.cassandra_server_list) is str:
args.cassandra_server_list = args.cassandra_server_list.split()
if type(args.collectors) is str:
args.collectors = args.collectors.split()
if type(args.pod_subnets) is str:
args.pod_subnets = args.pod_subnets.split()
if type(args.service_subnets) is str:
args.service_subnets = args.service_subnets.split()
if type(args.ip_fabric_subnets) is str:
args.ip_fabric_subnets = args.ip_fabric_subnets.split()
if type(args.ip_fabric_forwarding) is str:
if args.ip_fabric_forwarding.lower() == 'true':
args.ip_fabric_forwarding = True
else:
args.ip_fabric_forwarding = False
if type(args.ip_fabric_snat) is str:
if args.ip_fabric_snat.lower() == 'true':
args.ip_fabric_snat = True
else:
args.ip_fabric_snat = False
args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    # Validate input arguments.
validate_mandatory_args(args)
return args
def rabbitmq_args(args):
return {
'servers': args.rabbit_server, 'port': args.rabbit_port,
'user': args.rabbit_user, 'password': args.rabbit_password,
'vhost': args.rabbit_vhost, 'ha_mode': args.rabbit_ha_mode,
'use_ssl': args.rabbit_use_ssl,
'ssl_version': args.kombu_ssl_version,
'ssl_keyfile': args.kombu_ssl_keyfile,
'ssl_certfile': args.kombu_ssl_certfile,
'ssl_ca_certs': args.kombu_ssl_ca_certs
}
def validate_mandatory_args(args):
for mandatory_arg in MandatoryArgs:
arg_name = mandatory_arg.value['arg_str']
if not hasattr(args, arg_name):
print("Mandatory Argument %s not found in config"
% arg_name)
sys.exit("Mandatory argument [%s] not found in config" % arg_name)
validatefn = mandatory_arg.value.get('validatefn', None)
arg_value = getattr(args, arg_name)
if validatefn and not validatefn(arg_value):
sys.exit("Validation of mandatory argument [%s] configured with"\
" value [%s] failed." % (arg_name, arg_value))
| apache-2.0 | 1,123,368,809,067,095,000 | 33.92511 | 78 | 0.611882 | false |
yousseb/django_pytds | tests/runtests.py | 1 | 14761 | #!/usr/bin/env python
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import warnings
import django
from django import contrib
from django.utils._os import upath
from django.utils import six
CONTRIB_MODULE_PATH = 'django.contrib'
TEST_TEMPLATE_DIR = 'templates'
DJANGO_RUNTESTS_DIR = os.path.abspath(os.path.join(os.path.dirname(upath(django.__file__)), '..', 'tests'))
RUNTESTS_DIR = os.path.abspath(os.path.dirname(upath(__file__)))
CONTRIB_DIR = os.path.dirname(upath(contrib.__file__))
TEMP_DIR = tempfile.mkdtemp(prefix='django_mssql_')
os.environ['DJANGO_TEST_TEMP_DIR'] = TEMP_DIR
MSSQL_DIR = os.path.abspath(os.path.join(RUNTESTS_DIR, '..'))
if MSSQL_DIR not in sys.path:
sys.path.append(MSSQL_DIR)
if DJANGO_RUNTESTS_DIR not in sys.path:
sys.path.append(DJANGO_RUNTESTS_DIR)
SUBDIRS_TO_SKIP = [
TEST_TEMPLATE_DIR,
CONTRIB_DIR,
'test_main',
]
DJANGO_TESTS_TO_INCLUDE = [
'aggregation',
'aggregation_regress',
'backends',
'basic',
'bulk_create',
'cache',
'commands_sql',
'custom_columns',
'custom_columns_regress',
'custom_managers',
'custom_managers_regress',
'custom_methods',
'custom_pk',
'datatypes',
'dates',
'datetimes',
'db_typecasts',
'defer',
'defer_regress',
'delete',
'delete_regress',
'expressions',
'expressions_regress',
'generic_relations',
'generic_relations_regress',
'get_object_or_404',
'get_or_create',
'get_or_create_regress',
'initial_sql_regress',
'inspectdb',
'introspection',
'known_related_objects',
'lookup',
'max_lengths',
'model_inheritance',
'model_inheritance_regress',
'model_inheritance_same_model_name',
'model_inheritance_select_related',
'model_regress',
'multiple_databases',
'mutually_referential',
'nested_foreign_keys',
'null_fk',
'null_fk_ordering',
'null_queries',
'ordering',
'pagination',
'prefetch_related',
'queries',
'raw_query',
'reserved_names',
'reverse_lookup',
'reverse_single_related',
'schema',
'select_for_update',
'select_related',
'select_related_onetoone',
'select_related_regress',
'string_lookup',
'tablespaces',
'timezones',
'transactions',
'transactions_regress',
'update_only_fields',
]
ALWAYS_INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'sqlserver_ado',
'sqlserver_ado.sql_app',
]
def get_test_modules():
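    # Collect candidate test app dirs from both this repo and Django's own test suite,
    # skipping helper dirs and any Django test not in DJANGO_TESTS_TO_INCLUDE.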
test_dirs = [
(None, RUNTESTS_DIR),
(None, DJANGO_RUNTESTS_DIR),
]
modules = []
for modpath, dirpath in test_dirs:
for f in os.listdir(dirpath):
if ('.' in f or
# Python 3 byte code dirs (PEP 3147)
f == '__pycache__' or
f.startswith('sql') or
os.path.basename(f) in SUBDIRS_TO_SKIP or
os.path.isfile(f)):
continue
if dirpath.startswith(DJANGO_RUNTESTS_DIR) and os.path.basename(f) not in DJANGO_TESTS_TO_INCLUDE:
continue
modules.append((modpath, f))
return modules
def get_installed():
from django.db.models.loading import get_apps
return [app.__name__.rsplit('.', 1)[0] for app in get_apps() if not app.__name__.startswith('django.contrib')]
def setup(verbosity, test_labels):
from django.conf import settings
from django.db.models.loading import get_apps, load_app
from django.test.testcases import TransactionTestCase, TestCase
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception("Please define available_apps in TransactionTestCase "
"and its subclasses.")
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None
state = {
'INSTALLED_APPS': settings.INSTALLED_APPS,
'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
'TEMPLATE_DIRS': settings.TEMPLATE_DIRS,
'LANGUAGE_CODE': settings.LANGUAGE_CODE,
'STATIC_URL': settings.STATIC_URL,
'STATIC_ROOT': settings.STATIC_ROOT,
}
# Redirect some settings for the duration of these tests.
settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
settings.ROOT_URLCONF = 'urls'
settings.STATIC_URL = '/static/'
settings.STATIC_ROOT = os.path.join(TEMP_DIR, 'static')
settings.TEMPLATE_DIRS = (os.path.join(RUNTESTS_DIR, TEST_TEMPLATE_DIR),)
settings.LANGUAGE_CODE = 'en'
settings.SITE_ID = 1
if verbosity > 0:
# Ensure any warnings captured to logging are piped through a verbose
# logging handler. If any -W options were passed explicitly on command
# line, warnings are not captured, and this has no effect.
logger = logging.getLogger('py.warnings')
handler = logging.StreamHandler()
logger.addHandler(handler)
# Load all the ALWAYS_INSTALLED_APPS.
with warnings.catch_warnings():
warnings.filterwarnings('ignore', 'django.contrib.comments is deprecated and will be removed before Django 1.8.', PendingDeprecationWarning)
get_apps()
# Load all the test model apps.
test_modules = get_test_modules()
# Reduce given test labels to just the app module path
test_labels_set = set()
for label in test_labels:
bits = label.split('.')
if bits[:2] == ['django', 'contrib']:
bits = bits[:3]
else:
bits = bits[:1]
test_labels_set.add('.'.join(bits))
# If GeoDjango, then we'll want to add in the test applications
# that are a part of its test suite.
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
if HAS_SPATIAL_DB:
from django.contrib.gis.tests import geo_apps
test_modules.extend(geo_apps())
settings.INSTALLED_APPS.extend(['django.contrib.gis', 'django.contrib.sitemaps'])
for modpath, module_name in test_modules:
if modpath:
module_label = '.'.join([modpath, module_name])
else:
module_label = module_name
# if the module (or an ancestor) was named on the command line, or
# no modules were named (i.e., run all), import
# this module and add it to INSTALLED_APPS.
if not test_labels:
module_found_in_labels = True
else:
match = lambda label: (
module_label == label or # exact match
module_label.startswith(label + '.') # ancestor match
)
module_found_in_labels = any(match(l) for l in test_labels_set)
if module_found_in_labels:
if verbosity >= 2:
print("Importing application %s" % module_name)
mod = load_app(module_label)
if mod:
if module_label not in settings.INSTALLED_APPS:
settings.INSTALLED_APPS.append(module_label)
return state
def teardown(state):
from django.conf import settings
try:
# Removing the temporary TEMP_DIR. Ensure we pass in unicode
# so that it will successfully remove temp trees containing
# non-ASCII filenames on Windows. (We're assuming the temp dir
# name itself does not contain non-ASCII characters.)
shutil.rmtree(six.text_type(TEMP_DIR))
except OSError:
print('Failed to remove temp directory: %s' % TEMP_DIR)
# Restore the old settings.
for key, value in state.items():
setattr(settings, key, value)
def django_tests(verbosity, interactive, failfast, test_labels):
from django.conf import settings
state = setup(verbosity, test_labels)
extra_tests = []
# Run the test suite, including the extra validation tests.
from django.test.utils import get_runner
if not hasattr(settings, 'TEST_RUNNER'):
settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
TestRunner = get_runner(settings)
test_runner = TestRunner(
verbosity=verbosity,
interactive=interactive,
failfast=failfast,
)
failures = test_runner.run_tests(
test_labels or get_installed(), extra_tests=extra_tests)
teardown(state)
return failures
def bisect_tests(bisection_label, options, test_labels):
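    # Repeatedly split the test list in half and run each half together with the
    # bisection label, narrowing down which test interferes with it.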
state = setup(int(options.verbosity), test_labels)
test_labels = test_labels or get_installed()
print('***** Bisecting test suite: %s' % ' '.join(test_labels))
# Make sure the bisection point isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [bisection_label, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = [
sys.executable, upath(__file__), '--settings=%s' % options.settings]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
iteration = 1
while len(test_labels) > 1:
midpoint = len(test_labels)/2
test_labels_a = test_labels[:midpoint] + [bisection_label]
test_labels_b = test_labels[midpoint:] + [bisection_label]
print('***** Pass %da: Running the first half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_a))
failures_a = subprocess.call(subprocess_args + test_labels_a)
print('***** Pass %db: Running the second half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_b))
print('')
failures_b = subprocess.call(subprocess_args + test_labels_b)
if failures_a and not failures_b:
print("***** Problem found in first half. Bisecting again...")
iteration = iteration + 1
test_labels = test_labels_a[:-1]
elif failures_b and not failures_a:
print("***** Problem found in second half. Bisecting again...")
iteration = iteration + 1
test_labels = test_labels_b[:-1]
elif failures_a and failures_b:
print("***** Multiple sources of failure found")
break
else:
print("***** No source of failure found... try pair execution (--pair)")
break
if len(test_labels) == 1:
print("***** Source of error: %s" % test_labels[0])
teardown(state)
def paired_tests(paired_test, options, test_labels):
state = setup(int(options.verbosity), test_labels)
test_labels = test_labels or get_installed()
print('***** Trying paired execution')
# Make sure the constant member of the pair isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [paired_test, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = [
sys.executable, upath(__file__), '--settings=%s' % options.settings]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
for i, label in enumerate(test_labels):
print('***** %d of %d: Check test pairing with %s' % (
i + 1, len(test_labels), label))
failures = subprocess.call(subprocess_args + [label, paired_test])
if failures:
print('***** Found problem pair with %s' % label)
return
print('***** No problem pair found')
teardown(state)
if __name__ == "__main__":
from optparse import OptionParser
usage = "%prog [options] [module module module ...]"
parser = OptionParser(usage=usage)
parser.add_option(
'-v', '--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2', '3'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all '
'output')
parser.add_option(
'--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.')
parser.add_option(
'--failfast', action='store_true', dest='failfast', default=False,
help='Tells Django to stop running the test suite after first failed '
'test.')
parser.add_option(
'--settings',
help='Python path to settings module, e.g. "myproject.settings". If '
'this isn\'t provided, the DJANGO_SETTINGS_MODULE environment '
'variable will be used.')
parser.add_option(
'--bisect', action='store', dest='bisect', default=None,
help='Bisect the test suite to discover a test that causes a test '
'failure when combined with the named test.')
parser.add_option(
'--pair', action='store', dest='pair', default=None,
help='Run the test suite in pairs with the named test to find problem '
'pairs.')
parser.add_option(
'--liveserver', action='store', dest='liveserver', default=None,
help='Overrides the default address where the live server (used with '
'LiveServerTestCase) is expected to run from. The default value '
'is localhost:8081.')
parser.add_option(
'--selenium', action='store_true', dest='selenium',
default=False,
help='Run the Selenium tests as well (if Selenium is installed)')
options, args = parser.parse_args()
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
elif "DJANGO_SETTINGS_MODULE" not in os.environ:
parser.error("DJANGO_SETTINGS_MODULE is not set in the environment. "
"Set it or use --settings.")
else:
options.settings = os.environ['DJANGO_SETTINGS_MODULE']
if options.liveserver is not None:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options.liveserver
if options.selenium:
os.environ['DJANGO_SELENIUM_TESTS'] = '1'
if options.bisect:
bisect_tests(options.bisect, options, args)
elif options.pair:
paired_tests(options.pair, options, args)
else:
failures = django_tests(int(options.verbosity), options.interactive,
options.failfast, args)
if failures:
sys.exit(bool(failures))
| mit | -2,331,804,514,326,747,000 | 33.488318 | 148 | 0.622316 | false |
MaStanford/AnglishWordbook | Anglish/SyncWikia.py | 1 | 4522 | __author__ = 'm.stanford'
import string
from socket import error as SocketError
import json, httplib
STARTING_PAGE = 72;
ENDING_PAGE = 98;
invalidWords = ["un-English", "Anglish/English", "attested", "unattested", "Class"]
delimiter = "\'\'\'"
wierdfunkInSomeWords = ["\'\' \'\'\'", "\'\'\',", '\'\'\'\'\'', '\"\'\'']
def getWordPage(page):
connection = httplib.HTTPConnection('anglish.wikia.com', 80)
connection.connect()
connection.request('GET', '/api.php?action=query&prop=revisions&rvprop=content&format=json&pageids=' + str(page))
result = json.loads(connection.getresponse().read())
print result
return result
def processRawPage(page, number):
words = page['query']
words = words['pages']
words = words[str(number)]
words = words['revisions']
words = words[0]
listOfWords = []
for key, value in words.iteritems():
listOfLines = value
for strings in wierdfunkInSomeWords:
listOfLines = listOfLines.replace(strings, '')
        listOfLines = listOfLines.split(delimiter)
print 'Raw Line: ' + str(listOfLines)
length = len(listOfLines)
i = 10;
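        # Entries alternate between headwords and definition bodies; skip past the
        # page preamble until the first valid headword is found.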
while not isValidWord(listOfLines[i]):
i += 1
even = i % 2
while i < length:
            # Check if we have an invalid word where a valid one is expected; if so,
            # append that line to the previous entry in the list of words.
if not isValidWord(listOfLines[i]) and i % 2 == even:
out = listOfWords[len(listOfWords)-1] + listOfLines[i]
out = out.replace("\'\'", '').replace('|', '\n')
listOfWords.remove(listOfWords[len(listOfWords)-1])
listOfWords.append(out)
print 'Found odd line: ' + out.replace('\n', ' ')
i += 1
even = i % 2
else:
print 'Valid Line: ' + listOfLines[i].replace("\'\'", '').replace('|', '').replace('\n', ' ')
listOfWords.append(listOfLines[i].replace("\'\'", '').replace('|', '\n'))
i += 1
return listOfWords
def buildWordDef(processedHead, processedDef):
word = {}
word['word'] = processedHead.lower()
listOfDefs = [x for x in processedDef.split('\n') if x]
# print 'Def: ' + processedHead + ' : ' + str(listOfDefs)
if len(listOfDefs) > 3:
word['attested_definitions'] = listOfDefs[1].replace('-\n', '').replace('\n', '').replace(' ', '').split(',')
word['unattested_definitions'] = listOfDefs[2].replace('-\n', '').replace('\n', '').replace(' ', '').split(',')
word['type'] = listOfDefs[0].replace("\'", "")
else:
word['attested_definitions'] = []
word['unattested_definitions'] = []
word['type'] = ''
print "buildWordDef" + str(word)
return word
def addWord(wordDef):
word = wordDef['word']
attested = wordDef['attested_definitions']
unattested = wordDef['unattested_definitions']
wordType = wordDef['type']
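    # POST the entry to the Parse-style REST backend; retry on transient socket errors.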
try:
        connection = httplib.HTTPSConnection('anglishwordbook.herokuapp.com', 443)
connection.connect()
connection.request('POST', '/1/classes/Word', json.dumps({
"Word": word,
"Attested": attested,
"Unattested": unattested,
"Type": wordType
}), {
"X-Parse-Application-Id": "ApuxkukQC9mFuLIdIjG3qC27ms5kZ4XZbopxUohp",
"X-Parse-Master-Key ": "ME6doa9GdB2PTGesScr8DwNQVzlzMwmoEurf3kIX",
"Content-Type": "application/json"
})
result = json.loads(connection.getresponse().read())
if 'objectId' in result:
print result
return True
else:
return False
except SocketError as e:
return addWord(wordDef)
def isValidWord(line):
if len(line.split(' ')) > 2:
return False
if line in invalidWords:
return False
if all(c in string.punctuation for c in line.replace(' ', '').replace('\n','')):
return False
return True
for j in range(STARTING_PAGE, ENDING_PAGE):
rawPage = getWordPage(j)
processedPage = processRawPage(rawPage, j)
index = len(processedPage)
k = 0
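    # Walk the processed page two entries at a time (headword, then definition);
    # retry the same pair if the upload fails.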
while k < index - 1:
# print 'Obj 1 ' + processedPage[i]
# print 'Obj 2 ' + processedPage[i+1]
wordDef = buildWordDef(processedPage[k], processedPage[k+1])
if addWord(wordDef):
k += 2
else:
k = k
| apache-2.0 | -7,765,113,058,141,094,000 | 28.363636 | 156 | 0.570102 | false |
jsirois/pants | src/python/pants/backend/python/goals/setup_py.py | 1 | 37779 | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import enum
import io
import itertools
import logging
import os
import pickle
from abc import ABC, abstractmethod
from collections import abc, defaultdict
from dataclasses import dataclass
from typing import Any, Dict, List, Mapping, Set, Tuple, cast
from pants.backend.python.macros.python_artifact import PythonArtifact
from pants.backend.python.subsystems.setuptools import Setuptools
from pants.backend.python.target_types import (
PexEntryPointField,
PythonProvidesField,
PythonRequirementsField,
PythonSources,
ResolvedPexEntryPoint,
ResolvePexEntryPointRequest,
SetupPyCommandsField,
)
from pants.backend.python.util_rules.pex import (
PexInterpreterConstraints,
PexRequest,
PexRequirements,
VenvPex,
VenvPexProcess,
)
from pants.backend.python.util_rules.python_sources import (
PythonSourceFilesRequest,
StrippedPythonSourceFiles,
)
from pants.backend.python.util_rules.python_sources import rules as python_sources_rules
from pants.base.specs import AddressSpecs, AscendantAddresses
from pants.core.goals.package import BuiltPackage, BuiltPackageArtifact, PackageFieldSet
from pants.core.target_types import FilesSources, ResourcesSources
from pants.engine.addresses import Address, UnparsedAddressInputs
from pants.engine.collection import Collection, DeduplicatedCollection
from pants.engine.fs import (
AddPrefix,
CreateDigest,
Digest,
DigestContents,
DigestSubset,
FileContent,
MergeDigests,
PathGlobs,
RemovePrefix,
Snapshot,
)
from pants.engine.process import ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import (
Dependencies,
DependenciesRequest,
Sources,
Target,
Targets,
TransitiveTargets,
TransitiveTargetsRequest,
)
from pants.engine.unions import UnionMembership, UnionRule, union
from pants.option.subsystem import Subsystem
from pants.python.python_setup import PythonSetup
from pants.util.docutil import docs_url
from pants.util.logging import LogLevel
from pants.util.memo import memoized_property
from pants.util.meta import frozen_after_init
from pants.util.strutil import ensure_text
logger = logging.getLogger(__name__)
class InvalidSetupPyArgs(Exception):
"""Indicates invalid arguments to setup.py."""
class TargetNotExported(Exception):
"""Indicates a target that was expected to be exported is not."""
class InvalidEntryPoint(Exception):
"""Indicates that a specified binary entry point was invalid."""
class OwnershipError(Exception):
"""An error related to target ownership calculation."""
def __init__(self, msg: str):
super().__init__(
f"{msg} See {docs_url('python-distributions')} for "
f"how python_library targets are mapped to distributions."
)
class NoOwnerError(OwnershipError):
"""Indicates an exportable target has no owning exported target."""
class AmbiguousOwnerError(OwnershipError):
"""Indicates an exportable target has more than one owning exported target."""
@dataclass(frozen=True)
class ExportedTarget:
"""A target that explicitly exports a setup.py artifact, using a `provides=` stanza.
The code provided by this artifact can be from this target or from any targets it owns.
"""
target: Target # In practice, a PythonDistribution.
@property
def provides(self) -> PythonArtifact:
return self.target[PythonProvidesField].value
@dataclass(frozen=True)
class DependencyOwner:
"""An ExportedTarget in its role as an owner of other targets.
We need this type to prevent rule ambiguities when computing the list of targets owned by an
ExportedTarget (which involves going from ExportedTarget -> dep -> owner (which is itself an
ExportedTarget) and checking if owner is the original ExportedTarget.
"""
exported_target: ExportedTarget
@dataclass(frozen=True)
class OwnedDependency:
"""A target that is owned by some ExportedTarget.
Code in this target is published in the owner's distribution.
The owner of a target T is T's closest filesystem ancestor among the python_distribution
targets that directly or indirectly depend on it (including T itself).
"""
target: Target
class OwnedDependencies(Collection[OwnedDependency]):
pass
class ExportedTargetRequirements(DeduplicatedCollection[str]):
"""The requirements of an ExportedTarget.
Includes:
- The "normal" 3rdparty requirements of the ExportedTarget and all targets it owns.
- The published versions of any other ExportedTargets it depends on.
"""
sort_input = True
@dataclass(frozen=True)
class PythonDistributionFieldSet(PackageFieldSet):
required_fields = (PythonProvidesField,)
provides: PythonProvidesField
@dataclass(frozen=True)
class SetupPySourcesRequest:
targets: Targets
py2: bool # Whether to use py2 or py3 package semantics.
@dataclass(frozen=True)
class SetupPySources:
"""The sources required by a setup.py command.
Includes some information derived from analyzing the source, namely the packages, namespace
packages and resource files in the source.
"""
digest: Digest
packages: Tuple[str, ...]
namespace_packages: Tuple[str, ...]
package_data: Tuple["PackageDatum", ...]
@dataclass(frozen=True)
class SetupPyChrootRequest:
"""A request to create a chroot containing a setup.py and the sources it operates on."""
exported_target: ExportedTarget
py2: bool # Whether to use py2 or py3 package semantics.
@frozen_after_init
@dataclass(unsafe_hash=True)
class SetupKwargs:
"""The keyword arguments to the `setup()` function in the generated `setup.py`."""
_pickled_bytes: bytes
def __init__(
self, kwargs: Mapping[str, Any], *, address: Address, _allow_banned_keys: bool = False
) -> None:
super().__init__()
if "version" not in kwargs:
raise ValueError(f"Missing a `version` kwarg in the `provides` field for {address}.")
if not _allow_banned_keys:
for arg in {
"data_files",
"namespace_packages",
"package_dir",
"package_data",
"packages",
"install_requires",
}:
if arg in kwargs:
raise ValueError(
f"{arg} cannot be set in the `provides` field for {address}, but it was "
f"set to {kwargs[arg]}. Pants will dynamically set the value for you."
)
        # We serialize with `pickle` so that it is hashable. We don't use `FrozenDict` because it
# would require that all values are immutable, and we may have lists and dictionaries as
# values. It's too difficult/clunky to convert those all, then to convert them back out of
# `FrozenDict`. We don't use JSON because it does not preserve data types like `tuple`.
self._pickled_bytes = pickle.dumps({k: v for k, v in sorted(kwargs.items())}, protocol=4)
@memoized_property
def kwargs(self) -> Dict[str, Any]:
return cast(Dict[str, Any], pickle.loads(self._pickled_bytes))
@property
def name(self) -> str:
return cast(str, self.kwargs["name"])
@property
def version(self) -> str:
return cast(str, self.kwargs["version"])
# Note: This only exists as a hook for additional logic for the `setup()` kwargs, e.g. for plugin
# authors. To resolve `SetupKwargs`, call `await Get(SetupKwargs, ExportedTarget)`, which handles
# running any custom implementations vs. using the default implementation.
@union
@dataclass(frozen=True) # type: ignore[misc]
class SetupKwargsRequest(ABC):
"""A request to allow setting the kwargs passed to the `setup()` function.
By default, Pants will pass the kwargs provided in the BUILD file unchanged. To customize this
behavior, subclass `SetupKwargsRequest`, register the rule `UnionRule(SetupKwargsRequest,
MyCustomSetupKwargsRequest)`, and add a rule that takes your subclass as a parameter and returns
`SetupKwargs`.
"""
target: Target
@classmethod
@abstractmethod
def is_applicable(cls, target: Target) -> bool:
"""Whether the kwargs implementation should be used for this target or not."""
@property
def explicit_kwargs(self) -> Dict[str, Any]:
return self.target[PythonProvidesField].value.kwargs
class FinalizedSetupKwargs(SetupKwargs):
"""The final kwargs used for the `setup()` function, after Pants added requirements and sources
information."""
def __init__(self, kwargs: Mapping[str, Any], *, address: Address) -> None:
super().__init__(kwargs, address=address, _allow_banned_keys=True)
@dataclass(frozen=True)
class SetupPyChroot:
"""A chroot containing a generated setup.py and the sources it operates on."""
digest: Digest
setup_kwargs: FinalizedSetupKwargs
@dataclass(frozen=True)
class RunSetupPyRequest:
"""A request to run a setup.py command."""
exported_target: ExportedTarget
interpreter_constraints: PexInterpreterConstraints
chroot: SetupPyChroot
args: Tuple[str, ...]
@dataclass(frozen=True)
class RunSetupPyResult:
"""The result of running a setup.py command."""
output: Digest # The state of the chroot after running setup.py.
@enum.unique
class FirstPartyDependencyVersionScheme(enum.Enum):
EXACT = "exact" # i.e., ==
COMPATIBLE = "compatible" # i.e., ~=
ANY = "any" # i.e., no specifier
class SetupPyGeneration(Subsystem):
options_scope = "setup-py-generation"
help = "Options to control how setup.py is generated from a `python_distribution` target."
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--first-party-dependency-version-scheme",
type=FirstPartyDependencyVersionScheme,
default=FirstPartyDependencyVersionScheme.EXACT,
help=(
"What version to set in `install_requires` when a `python_distribution` depends on "
"other `python_distribution`s. If `exact`, will use `==`. If `compatible`, will "
"use `~=`. If `any`, will leave off the version. See "
"https://www.python.org/dev/peps/pep-0440/#version-specifiers."
),
)
def first_party_dependency_version(self, version: str) -> str:
"""Return the version string (e.g. '~=4.0') for a first-party dependency.
If the user specified to use "any" version, then this will return an empty string.
"""
scheme = self.options.first_party_dependency_version_scheme
if scheme == FirstPartyDependencyVersionScheme.ANY:
return ""
specifier = "==" if scheme == FirstPartyDependencyVersionScheme.EXACT else "~="
return f"{specifier}{version}"
def validate_commands(commands: Tuple[str, ...]):
# We rely on the dist dir being the default, so we know where to find the created dists.
if "--dist-dir" in commands or "-d" in commands:
raise InvalidSetupPyArgs(
"Cannot set --dist-dir/-d in setup.py args. To change where dists "
"are written, use the global --pants-distdir option."
)
# We don't allow publishing via setup.py, as we don't want the setup.py running rule,
# which is not a @goal_rule, to side-effect (plus, we'd need to ensure that publishing
# happens in dependency order). Note that `upload` and `register` were removed in
# setuptools 42.0.0, in favor of Twine, but we still check for them in case the user modified
# the default version used by our Setuptools subsystem.
# TODO: A `publish` rule, that can invoke Twine to do the actual uploading.
# See https://github.com/pantsbuild/pants/issues/8935.
if "upload" in commands or "register" in commands:
raise InvalidSetupPyArgs("Cannot use the `upload` or `register` setup.py commands")
@rule
async def package_python_dist(
field_set: PythonDistributionFieldSet,
python_setup: PythonSetup,
) -> BuiltPackage:
transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
exported_target = ExportedTarget(transitive_targets.roots[0])
interpreter_constraints = PexInterpreterConstraints.create_from_targets(
transitive_targets.closure, python_setup
)
chroot = await Get(
SetupPyChroot,
SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
)
# If commands were provided, run setup.py with them; Otherwise just dump chroots.
commands = exported_target.target.get(SetupPyCommandsField).value or ()
if commands:
validate_commands(commands)
setup_py_result = await Get(
RunSetupPyResult,
RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
)
dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
return BuiltPackage(
setup_py_result.output,
tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
)
else:
dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
# We write .py sources into the chroot under this dir.
CHROOT_SOURCE_ROOT = "src"
SETUP_BOILERPLATE = """
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Target: {target_address_spec}
from setuptools import setup
setup(**{setup_kwargs_str})
"""
@rule
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools) -> RunSetupPyResult:
"""Run a setup.py command on a single exported target."""
# Note that this pex has no entrypoint. We use it to run our generated setup.py, which
# in turn imports from and invokes setuptools.
setuptools_pex = await Get(
VenvPex,
PexRequest(
output_filename="setuptools.pex",
internal_only=True,
requirements=PexRequirements(setuptools.all_requirements),
interpreter_constraints=(
req.interpreter_constraints
if setuptools.options.is_default("interpreter_constraints")
else PexInterpreterConstraints(setuptools.interpreter_constraints)
),
),
)
# The setuptools dist dir, created by it under the chroot (not to be confused with
# pants's own dist dir, at the buildroot).
dist_dir = "dist/"
result = await Get(
ProcessResult,
VenvPexProcess(
setuptools_pex,
argv=("setup.py", *req.args),
input_digest=req.chroot.digest,
# setuptools commands that create dists write them to the distdir.
# TODO: Could there be other useful files to capture?
output_directories=(dist_dir,),
description=f"Run setuptools for {req.exported_target.target.address}",
level=LogLevel.DEBUG,
),
)
output_digest = await Get(Digest, RemovePrefix(result.output_digest, dist_dir))
return RunSetupPyResult(output_digest)
@rule
async def determine_setup_kwargs(
exported_target: ExportedTarget, union_membership: UnionMembership
) -> SetupKwargs:
target = exported_target.target
setup_kwargs_requests = union_membership.get(SetupKwargsRequest) # type: ignore[misc]
applicable_setup_kwargs_requests = tuple(
request for request in setup_kwargs_requests if request.is_applicable(target)
)
# If no provided implementations, fall back to our default implementation that simply returns
# what the user explicitly specified in the BUILD file.
if not applicable_setup_kwargs_requests:
return SetupKwargs(exported_target.provides.kwargs, address=target.address)
if len(applicable_setup_kwargs_requests) > 1:
possible_requests = sorted(plugin.__name__ for plugin in applicable_setup_kwargs_requests)
raise ValueError(
f"Multiple of the registered `SetupKwargsRequest`s can work on the target "
f"{target.address}, and it's ambiguous which to use: {possible_requests}\n\nPlease "
"activate fewer implementations, or make the classmethod `is_applicable()` more "
"precise so that only one implementation is applicable for this target."
)
setup_kwargs_request = tuple(applicable_setup_kwargs_requests)[0]
return await Get(SetupKwargs, SetupKwargsRequest, setup_kwargs_request(target))
@rule
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
exported_target = request.exported_target
exported_addr = exported_target.target.address
owned_deps, transitive_targets = await MultiGet(
Get(OwnedDependencies, DependencyOwner(exported_target)),
Get(TransitiveTargets, TransitiveTargetsRequest([exported_target.target.address])),
)
# files() targets aren't owned by a single exported target - they aren't code, so
# we allow them to be in multiple dists. This is helpful for, e.g., embedding
# a standard license file in a dist.
files_targets = (tgt for tgt in transitive_targets.closure if tgt.has_field(FilesSources))
targets = Targets(itertools.chain((od.target for od in owned_deps), files_targets))
sources, requirements = await MultiGet(
Get(SetupPySources, SetupPySourcesRequest(targets, py2=request.py2)),
Get(ExportedTargetRequirements, DependencyOwner(exported_target)),
)
# Generate the kwargs for the setup() call. In addition to using the kwargs that are either
# explicitly provided or generated via a user's plugin, we add additional kwargs based on the
# resolved requirements and sources.
target = exported_target.target
resolved_setup_kwargs = await Get(SetupKwargs, ExportedTarget, exported_target)
setup_kwargs = resolved_setup_kwargs.kwargs.copy()
# NB: We are careful to not overwrite these values, but we also don't expect them to have been
    # set. The user must have gone out of their way to use a `SetupKwargs` plugin, and to have
# specified `SetupKwargs(_allow_banned_keys=True)`.
setup_kwargs.update(
{
"package_dir": {"": CHROOT_SOURCE_ROOT, **setup_kwargs.get("package_dir", {})},
"packages": (*sources.packages, *(setup_kwargs.get("packages", []))),
"namespace_packages": (
*sources.namespace_packages,
*setup_kwargs.get("namespace_packages", []),
),
"package_data": {**dict(sources.package_data), **setup_kwargs.get("package_data", {})},
"install_requires": (*requirements, *setup_kwargs.get("install_requires", [])),
}
)
# Add any `pex_binary` targets from `setup_py().with_binaries()` to the dist's entry points.
key_to_binary_spec = exported_target.provides.binaries
binaries = await Get(
Targets, UnparsedAddressInputs(key_to_binary_spec.values(), owning_address=target.address)
)
entry_point_requests = []
for binary in binaries:
if not binary.has_field(PexEntryPointField):
raise InvalidEntryPoint(
"Expected addresses to `pex_binary` targets in `.with_binaries()` for the "
f"`provides` field for {exported_addr}, but found {binary.address} with target "
f"type {binary.alias}."
)
entry_point = binary[PexEntryPointField].value
url = "https://python-packaging.readthedocs.io/en/latest/command-line-scripts.html#the-console-scripts-entry-point"
if not entry_point:
raise InvalidEntryPoint(
"Every `pex_binary` used in `.with_binaries()` for the `provides` field for "
f"{exported_addr} must explicitly set the `entry_point` field, but "
f"{binary.address} left the field off. Set `entry_point` to either "
f"`app.py:func` or the longhand `path.to.app:func`. See {url}."
)
if not entry_point.function:
raise InvalidEntryPoint(
"Every `pex_binary` used in `with_binaries()` for the `provides()` field for "
f"{exported_addr} must end in the format `:my_func` for the `entry_point` field, "
f"but {binary.address} set it to {entry_point.spec!r}. For example, set "
f"`entry_point='{entry_point.module}:main'. See {url}."
)
entry_point_requests.append(ResolvePexEntryPointRequest(binary[PexEntryPointField]))
binary_entry_points = await MultiGet(
Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest, request)
for request in entry_point_requests
)
for key, binary_entry_point in zip(key_to_binary_spec.keys(), binary_entry_points):
entry_points = setup_kwargs.setdefault("entry_points", {})
console_scripts = entry_points.setdefault("console_scripts", [])
if binary_entry_point.val is not None:
console_scripts.append(f"{key}={binary_entry_point.val.spec}")
# Generate the setup script.
setup_py_content = SETUP_BOILERPLATE.format(
target_address_spec=target.address.spec,
setup_kwargs_str=distutils_repr(setup_kwargs),
).encode()
files_to_create = [
FileContent("setup.py", setup_py_content),
FileContent("MANIFEST.in", "include *.py".encode()),
]
extra_files_digest, src_digest = await MultiGet(
Get(Digest, CreateDigest(files_to_create)),
# Nest the sources under the src/ prefix.
Get(Digest, AddPrefix(sources.digest, CHROOT_SOURCE_ROOT)),
)
chroot_digest = await Get(Digest, MergeDigests((src_digest, extra_files_digest)))
return SetupPyChroot(chroot_digest, FinalizedSetupKwargs(setup_kwargs, address=target.address))
@rule
async def get_sources(request: SetupPySourcesRequest) -> SetupPySources:
python_sources_request = PythonSourceFilesRequest(
targets=request.targets, include_resources=False, include_files=False
)
all_sources_request = PythonSourceFilesRequest(
targets=request.targets, include_resources=True, include_files=True
)
python_sources, all_sources = await MultiGet(
Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, python_sources_request),
Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, all_sources_request),
)
python_files = set(python_sources.stripped_source_files.snapshot.files)
all_files = set(all_sources.stripped_source_files.snapshot.files)
resource_files = all_files - python_files
init_py_digest_contents = await Get(
DigestContents,
DigestSubset(
python_sources.stripped_source_files.snapshot.digest, PathGlobs(["**/__init__.py"])
),
)
packages, namespace_packages, package_data = find_packages(
python_files=python_files,
resource_files=resource_files,
init_py_digest_contents=init_py_digest_contents,
py2=request.py2,
)
return SetupPySources(
digest=all_sources.stripped_source_files.snapshot.digest,
packages=packages,
namespace_packages=namespace_packages,
package_data=package_data,
)
@rule(desc="Compute distribution's 3rd party requirements")
async def get_requirements(
dep_owner: DependencyOwner,
union_membership: UnionMembership,
setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
transitive_targets = await Get(
TransitiveTargets, TransitiveTargetsRequest([dep_owner.exported_target.target.address])
)
ownable_tgts = [
tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
]
owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
owned_by_us: Set[Target] = set()
owned_by_others: Set[Target] = set()
for tgt, owner in zip(ownable_tgts, owners):
(owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)
# Get all 3rdparty deps of our owned deps.
#
# Note that we need only consider requirements that are direct dependencies of our owned deps:
# If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
# if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
# then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
# will require ET.
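    # Illustrative sketch (added commentary, not in the original): suppose exported
    # dist A owns library L1, and L1 depends on `requests` plus a library L2 owned by
    # another exported dist B. The lookup below picks up `requests` from L1 directly,
    # skips L2's own requirements, and instead adds a requirement on B further down.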
direct_deps_tgts = await MultiGet(
Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
)
reqs = PexRequirements.create_from_requirement_fields(
tgt[PythonRequirementsField]
for tgt in itertools.chain.from_iterable(direct_deps_tgts)
if tgt.has_field(PythonRequirementsField)
)
req_strs = list(reqs)
# Add the requirements on any exported targets on which we depend.
kwargs_for_exported_targets_we_depend_on = await MultiGet(
Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
)
req_strs.extend(
f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
for kwargs in set(kwargs_for_exported_targets_we_depend_on)
)
return ExportedTargetRequirements(req_strs)
@rule(desc="Find all code to be published in the distribution", level=LogLevel.DEBUG)
async def get_owned_dependencies(
dependency_owner: DependencyOwner, union_membership: UnionMembership
) -> OwnedDependencies:
"""Find the dependencies of dependency_owner that are owned by it.
Includes dependency_owner itself.
"""
transitive_targets = await Get(
TransitiveTargets,
TransitiveTargetsRequest([dependency_owner.exported_target.target.address]),
)
ownable_targets = [
tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
]
owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_targets)
owned_dependencies = [
tgt
for owner, tgt in zip(owners, ownable_targets)
if owner == dependency_owner.exported_target
]
return OwnedDependencies(OwnedDependency(t) for t in owned_dependencies)
@rule(desc="Get exporting owner for target")
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
"""Find the exported target that owns the given target (and therefore exports it).
The owner of T (i.e., the exported target in whose artifact T's code is published) is:
1. An exported target that depends on T (or is T itself).
2. Is T's closest filesystem ancestor among those satisfying 1.
If there are multiple such exported targets at the same degree of ancestry, the ownership
is ambiguous and an error is raised. If there is no exported target that depends on T
and is its ancestor, then there is no owner and an error is raised.
"""
target = owned_dependency.target
ancestor_addrs = AscendantAddresses(target.address.spec_path)
ancestor_tgts = await Get(Targets, AddressSpecs([ancestor_addrs]))
# Note that addresses sort by (spec_path, target_name), and all these targets are
# ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
# address will effectively sort by closeness of ancestry to the given target.
exported_ancestor_tgts = sorted(
[t for t in ancestor_tgts if t.has_field(PythonProvidesField)],
key=lambda t: t.address,
reverse=True,
)
exported_ancestor_iter = iter(exported_ancestor_tgts)
for exported_ancestor in exported_ancestor_iter:
transitive_targets = await Get(
TransitiveTargets, TransitiveTargetsRequest([exported_ancestor.address])
)
if target in transitive_targets.closure:
owner = exported_ancestor
# Find any exported siblings of owner that also depend on target. They have the
# same spec_path as it, so they must immediately follow it in ancestor_iter.
sibling_owners = []
sibling = next(exported_ancestor_iter, None)
while sibling and sibling.address.spec_path == owner.address.spec_path:
transitive_targets = await Get(
TransitiveTargets, TransitiveTargetsRequest([sibling.address])
)
if target in transitive_targets.closure:
sibling_owners.append(sibling)
sibling = next(exported_ancestor_iter, None)
if sibling_owners:
all_owners = [exported_ancestor] + sibling_owners
raise AmbiguousOwnerError(
f"Found multiple sibling python_distribution targets that are the closest "
f"ancestor dependees of {target.address} and are therefore candidates to "
f"own it: {', '.join(o.address.spec for o in all_owners)}. Only a "
f"single such owner is allowed, to avoid ambiguity."
)
return ExportedTarget(owner)
raise NoOwnerError(
f"No python_distribution target found to own {target.address}. Note that "
f"the owner must be in or above the owned target's directory, and must "
f"depend on it (directly or indirectly)."
)
def is_ownable_target(tgt: Target, union_membership: UnionMembership) -> bool:
return (
# Note that we check for a PythonProvides field so that a python_distribution
# target can be owned (by itself). This is so that if there are any 3rdparty
# requirements directly on the python_distribution target, we apply them to the dist.
# This isn't particularly useful (3rdparty requirements should be on the python_library
# that consumes them)... but users may expect it to work anyway.
tgt.has_field(PythonProvidesField)
or tgt.has_field(PythonSources)
or tgt.has_field(ResourcesSources)
or tgt.get(Sources).can_generate(PythonSources, union_membership)
)
# Convenient type alias for the pair (package name, data files in the package).
PackageDatum = Tuple[str, Tuple[str, ...]]
# Distutils does not support unicode strings in setup.py, so we must explicitly convert to binary
# strings as pants uses unicode_literals. A natural and prior technique was to use `pprint.pformat`,
# but that embeds u's in the string itself during conversion. For that reason we roll our own
# literal pretty-printer here.
#
# Note that we must still keep this code, even though Pants only runs with Python 3, because
# the created product may still be run by Python 2.
#
# For more information, see http://bugs.python.org/issue13943.
def distutils_repr(obj):
"""Compute a string repr suitable for use in generated setup.py files."""
output = io.StringIO()
linesep = os.linesep
def _write(data):
output.write(ensure_text(data))
def _write_repr(o, indent=False, level=0):
pad = " " * 4 * level
if indent:
_write(pad)
level += 1
if isinstance(o, (bytes, str)):
# The py2 repr of str (unicode) is `u'...'` and we don't want the `u` prefix; likewise,
# the py3 repr of bytes is `b'...'` and we don't want the `b` prefix so we hand-roll a
# repr here.
o_txt = ensure_text(o)
if linesep in o_txt:
_write('"""{}"""'.format(o_txt.replace('"""', r"\"\"\"")))
else:
_write("'{}'".format(o_txt.replace("'", r"\'")))
elif isinstance(o, abc.Mapping):
_write("{" + linesep)
for k, v in o.items():
_write_repr(k, indent=True, level=level)
_write(": ")
_write_repr(v, indent=False, level=level)
_write("," + linesep)
_write(pad + "}")
elif isinstance(o, abc.Iterable):
if isinstance(o, abc.MutableSequence):
open_collection, close_collection = "[]"
elif isinstance(o, abc.Set):
open_collection, close_collection = "{}"
else:
open_collection, close_collection = "()"
_write(open_collection + linesep)
for i in o:
_write_repr(i, indent=True, level=level)
_write("," + linesep)
_write(pad + close_collection)
else:
_write(repr(o)) # Numbers and bools.
_write_repr(obj)
return output.getvalue()
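# Illustrative sketch (added for clarity, not part of the original file): for a small
# kwargs mapping the pretty-printer above emits output along these lines; the sample
# value is hypothetical.
#
#   distutils_repr({"name": "example-dist"})
#   # ->
#   # {
#   #     'name': 'example-dist',
#   # }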
def find_packages(
*,
python_files: Set[str],
resource_files: Set[str],
init_py_digest_contents: DigestContents,
py2: bool,
) -> Tuple[Tuple[str, ...], Tuple[str, ...], Tuple[PackageDatum, ...]]:
"""Analyze the package structure for the given sources.
Returns a tuple (packages, namespace_packages, package_data), suitable for use as setup()
kwargs.
"""
# Find all packages implied by the sources.
packages: Set[str] = set()
package_data: Dict[str, List[str]] = defaultdict(list)
for python_file in python_files:
# Python 2: An __init__.py file denotes a package.
# Python 3: Any directory containing python source files is a package.
if not py2 or os.path.basename(python_file) == "__init__.py":
packages.add(os.path.dirname(python_file).replace(os.path.sep, "."))
# Now find all package_data.
for resource_file in resource_files:
# Find the closest enclosing package, if any. Resources will be loaded relative to that.
maybe_package: str = os.path.dirname(resource_file).replace(os.path.sep, ".")
while maybe_package and maybe_package not in packages:
maybe_package = maybe_package.rpartition(".")[0]
# If resource is not in a package, ignore it. There's no principled way to load it anyway.
if maybe_package:
package_data[maybe_package].append(
os.path.relpath(resource_file, maybe_package.replace(".", os.path.sep))
)
# See which packages are pkg_resources-style namespace packages.
# Note that implicit PEP 420 namespace packages and pkgutil-style namespace packages
# should *not* be listed in the setup namespace_packages kwarg. That's for pkg_resources-style
# namespace packages only. See https://github.com/pypa/sample-namespace-packages/.
namespace_packages: Set[str] = set()
init_py_by_path: Dict[str, bytes] = {ipc.path: ipc.content for ipc in init_py_digest_contents}
for pkg in packages:
path = os.path.join(pkg.replace(".", os.path.sep), "__init__.py")
if path in init_py_by_path and declares_pkg_resources_namespace_package(
init_py_by_path[path].decode()
):
namespace_packages.add(pkg)
return (
tuple(sorted(packages)),
tuple(sorted(namespace_packages)),
tuple((pkg, tuple(sorted(files))) for pkg, files in package_data.items()),
)
def declares_pkg_resources_namespace_package(python_src: str) -> bool:
"""Given .py file contents, determine if it declares a pkg_resources-style namespace package.
Detects pkg_resources-style namespaces. See here for details:
https://packaging.python.org/guides/packaging-namespace-packages/.
Note: Accepted namespace package decls are valid Python syntax in all Python versions,
so this code can, e.g., detect namespace packages in Python 2 code while running on Python 3.
"""
import ast
def is_name(node: ast.AST, name: str) -> bool:
return isinstance(node, ast.Name) and node.id == name
def is_call_to(node: ast.AST, func_name: str) -> bool:
if not isinstance(node, ast.Call):
return False
func = node.func
return (isinstance(func, ast.Attribute) and func.attr == func_name) or is_name(
func, func_name
)
def has_args(call_node: ast.Call, required_arg_ids: Tuple[str, ...]) -> bool:
args = call_node.args
if len(args) != len(required_arg_ids):
return False
actual_arg_ids = tuple(arg.id for arg in args if isinstance(arg, ast.Name))
return actual_arg_ids == required_arg_ids
try:
python_src_ast = ast.parse(python_src)
except SyntaxError:
# The namespace package incantations we check for are valid code in all Python versions.
# So if the code isn't parseable we know it isn't a valid namespace package.
return False
# Note that these checks are slightly heuristic. It is possible to construct adversarial code
# that would defeat them. But the only consequence would be an incorrect namespace_packages list
# in setup.py, and we're assuming our users aren't trying to shoot themselves in the foot.
for ast_node in ast.walk(python_src_ast):
# pkg_resources-style namespace, e.g.,
# __import__('pkg_resources').declare_namespace(__name__).
if is_call_to(ast_node, "declare_namespace") and has_args(
cast(ast.Call, ast_node), ("__name__",)
):
return True
return False
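# Illustrative sketch (added for clarity, not part of the original file): an
# __init__.py containing either of the following lines is flagged by the check above
# as a pkg_resources-style namespace package.
#
#   __import__('pkg_resources').declare_namespace(__name__)
#   # or
#   import pkg_resources; pkg_resources.declare_namespace(__name__)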
def rules():
return [
*python_sources_rules(),
*collect_rules(),
UnionRule(PackageFieldSet, PythonDistributionFieldSet),
]
| apache-2.0 | -2,219,731,043,789,042,400 | 39.710129 | 123 | 0.672675 | false |
azumimuo/family-xbmc-addon | plugin.video.bubbles/resources/lib/externals/hachoir/hachoir_parser/image/bmp.py | 1 | 6874 | """
Microsoft Bitmap picture parser.
- file extension: ".bmp"
Author: Victor Stinner
Creation: 16 december 2005
"""
from resources.lib.externals.hachoir.hachoir_parser import Parser
from resources.lib.externals.hachoir.hachoir_core.field import (FieldSet,
UInt8, UInt16, UInt32, Bits,
String, RawBytes, Enum,
PaddingBytes, NullBytes, createPaddingField)
from resources.lib.externals.hachoir.hachoir_core.endian import LITTLE_ENDIAN
from resources.lib.externals.hachoir.hachoir_core.text_handler import textHandler, hexadecimal
from resources.lib.externals.hachoir.hachoir_parser.image.common import RGB, PaletteRGBA
from resources.lib.externals.hachoir.hachoir_core.tools import alignValue
class Pixel4bit(Bits):
static_size = 4
def __init__(self, parent, name):
Bits.__init__(self, parent, name, 4)
class ImageLine(FieldSet):
def __init__(self, parent, name, width, pixel_class):
FieldSet.__init__(self, parent, name)
self._pixel = pixel_class
self._width = width
self._size = alignValue(self._width * self._pixel.static_size, 32)
def createFields(self):
for x in xrange(self._width):
yield self._pixel(self, "pixel[]")
size = self.size - self.current_size
if size:
yield createPaddingField(self, size)
class ImagePixels(FieldSet):
def __init__(self, parent, name, width, height, pixel_class, size=None):
FieldSet.__init__(self, parent, name, size=size)
self._width = width
self._height = height
self._pixel = pixel_class
def createFields(self):
for y in xrange(self._height-1, -1, -1):
yield ImageLine(self, "line[%u]" % y, self._width, self._pixel)
size = (self.size - self.current_size) // 8
if size:
yield NullBytes(self, "padding", size)
class CIEXYZ(FieldSet):
def createFields(self):
yield UInt32(self, "x")
yield UInt32(self, "y")
yield UInt32(self, "z")
class BmpHeader(FieldSet):
color_space_name = {
1: "Business (Saturation)",
2: "Graphics (Relative)",
4: "Images (Perceptual)",
8: "Absolute colormetric (Absolute)",
}
def getFormatVersion(self):
if "gamma_blue" in self:
return 4
if "important_color" in self:
return 3
return 2
def createFields(self):
# Version 2 (12 bytes)
yield UInt32(self, "header_size", "Header size")
yield UInt32(self, "width", "Width (pixels)")
yield UInt32(self, "height", "Height (pixels)")
yield UInt16(self, "nb_plan", "Number of plan (=1)")
yield UInt16(self, "bpp", "Bits per pixel") # may be zero for PNG/JPEG picture
# Version 3 (40 bytes)
if self["header_size"].value < 40:
return
yield Enum(UInt32(self, "compression", "Compression method"), BmpFile.COMPRESSION_NAME)
yield UInt32(self, "image_size", "Image size (bytes)")
yield UInt32(self, "horizontal_dpi", "Horizontal DPI")
yield UInt32(self, "vertical_dpi", "Vertical DPI")
yield UInt32(self, "used_colors", "Number of color used")
yield UInt32(self, "important_color", "Number of import colors")
# Version 4 (108 bytes)
if self["header_size"].value < 108:
return
yield textHandler(UInt32(self, "red_mask"), hexadecimal)
yield textHandler(UInt32(self, "green_mask"), hexadecimal)
yield textHandler(UInt32(self, "blue_mask"), hexadecimal)
yield textHandler(UInt32(self, "alpha_mask"), hexadecimal)
yield Enum(UInt32(self, "color_space"), self.color_space_name)
yield CIEXYZ(self, "red_primary")
yield CIEXYZ(self, "green_primary")
yield CIEXYZ(self, "blue_primary")
yield UInt32(self, "gamma_red")
yield UInt32(self, "gamma_green")
yield UInt32(self, "gamma_blue")
def parseImageData(parent, name, size, header):
if ("compression" not in header) or (header["compression"].value in (0, 3)):
width = header["width"].value
height = header["height"].value
bpp = header["bpp"].value
if bpp == 32:
cls = UInt32
elif bpp == 24:
cls = RGB
elif bpp == 8:
cls = UInt8
elif bpp == 4:
cls = Pixel4bit
else:
cls = None
if cls:
return ImagePixels(parent, name, width, height, cls, size=size*8)
return RawBytes(parent, name, size)
class BmpFile(Parser):
PARSER_TAGS = {
"id": "bmp",
"category": "image",
"file_ext": ("bmp",),
"mime": (u"image/x-ms-bmp", u"image/x-bmp"),
"min_size": 30*8,
# "magic": (("BM", 0),),
"magic_regex": ((
# "BM", <filesize>, <reserved>, header_size=(12|40|108)
"BM.{4}.{8}[\x0C\x28\x6C]\0{3}",
0),),
"description": "Microsoft bitmap (BMP) picture"
}
endian = LITTLE_ENDIAN
COMPRESSION_NAME = {
0: u"Uncompressed",
1: u"RLE 8-bit",
2: u"RLE 4-bit",
3: u"Bitfields",
4: u"JPEG",
5: u"PNG",
}
def validate(self):
if self.stream.readBytes(0, 2) != 'BM':
return "Wrong file signature"
if self["header/header_size"].value not in (12, 40, 108):
return "Unknown header size (%s)" % self["header_size"].value
if self["header/nb_plan"].value != 1:
return "Invalid number of planes"
return True
def createFields(self):
yield String(self, "signature", 2, "Header (\"BM\")", charset="ASCII")
yield UInt32(self, "file_size", "File size (bytes)")
yield PaddingBytes(self, "reserved", 4, "Reserved")
yield UInt32(self, "data_start", "Data start position")
yield BmpHeader(self, "header")
# Compute number of color
header = self["header"]
bpp = header["bpp"].value
if 0 < bpp <= 8:
if "used_colors" in header and header["used_colors"].value:
nb_color = header["used_colors"].value
else:
nb_color = (1 << bpp)
else:
nb_color = 0
# Color palette (if any)
if nb_color:
yield PaletteRGBA(self, "palette", nb_color)
# Seek to data start
field = self.seekByte(self["data_start"].value)
if field:
yield field
# Image pixels
size = min(self["file_size"].value-self["data_start"].value, (self.size - self.current_size)//8)
yield parseImageData(self, "pixels", size, header)
def createDescription(self):
return u"Microsoft Bitmap version %s" % self["header"].getFormatVersion()
def createContentSize(self):
return self["file_size"].value * 8
| gpl-2.0 | 6,517,626,528,385,999,000 | 34.251282 | 104 | 0.586267 | false |
urbn/kombu | t/unit/utils/test_functional.py | 1 | 9030 | from __future__ import absolute_import, unicode_literals
import pickle
import pytest
from itertools import count
from case import Mock, mock, skip
from kombu.five import (
items, PY3,
)
from kombu.utils import functional as utils
from kombu.utils.functional import (
ChannelPromise, LRUCache, fxrange, fxrangemax, memoize, lazy,
maybe_evaluate, maybe_list, reprcall, reprkwargs, retry_over_time,
accepts_argument,
)
class test_ChannelPromise:
def test_repr(self):
obj = Mock(name='cb')
assert 'promise' in repr(ChannelPromise(obj))
obj.assert_not_called()
class test_shufflecycle:
def test_shuffles(self):
prev_repeat, utils.repeat = utils.repeat, Mock()
try:
utils.repeat.return_value = list(range(10))
values = {'A', 'B', 'C'}
cycle = utils.shufflecycle(values)
seen = set()
for i in range(10):
next(cycle)
utils.repeat.assert_called_with(None)
assert seen.issubset(values)
with pytest.raises(StopIteration):
next(cycle)
next(cycle)
finally:
utils.repeat = prev_repeat
def double(x):
return x * 2
class test_LRUCache:
def test_expires(self):
limit = 100
x = LRUCache(limit=limit)
slots = list(range(limit * 2))
for i in slots:
x[i] = i
assert list(x.keys()) == list(slots[limit:])
assert x.items()
assert x.values()
def test_is_pickleable(self):
x = LRUCache(limit=10)
x.update(luke=1, leia=2)
y = pickle.loads(pickle.dumps(x))
assert y.limit == y.limit
assert y == x
def test_update_expires(self):
limit = 100
x = LRUCache(limit=limit)
slots = list(range(limit * 2))
for i in slots:
x.update({i: i})
assert list(x.keys()) == list(slots[limit:])
def test_least_recently_used(self):
x = LRUCache(3)
x[1], x[2], x[3] = 1, 2, 3
        assert list(x.keys()) == [1, 2, 3]
x[4], x[5] = 4, 5
        assert list(x.keys()) == [3, 4, 5]
# access 3, which makes it the last used key.
x[3]
x[6] = 6
        assert list(x.keys()) == [5, 3, 6]
x[7] = 7
        assert list(x.keys()) == [3, 6, 7]
def test_update_larger_than_cache_size(self):
x = LRUCache(2)
x.update({x: x for x in range(100)})
        assert list(x.keys()) == [98, 99]
def test_items(self):
c = LRUCache()
c.update(a=1, b=2, c=3)
assert list(items(c))
def test_incr(self):
c = LRUCache()
c.update(a='1')
c.incr('a')
assert c['a'] == '2'
def test_memoize():
counter = count(1)
@memoize(maxsize=2)
def x(i):
return next(counter)
assert x(1) == 1
assert x(1) == 1
assert x(2) == 2
assert x(3) == 3
assert x(1) == 4
x.clear()
assert x(3) == 5
class test_lazy:
def test__str__(self):
assert (str(lazy(lambda: 'the quick brown fox')) ==
'the quick brown fox')
def test__repr__(self):
assert repr(lazy(lambda: 'fi fa fo')).strip('u') == "'fi fa fo'"
@skip.if_python3()
def test__cmp__(self):
assert lazy(lambda: 10).__cmp__(lazy(lambda: 20)) == -1
assert lazy(lambda: 10).__cmp__(5) == 1
def test_evaluate(self):
assert lazy(lambda: 2 + 2)() == 4
assert lazy(lambda x: x * 4, 2) == 8
assert lazy(lambda x: x * 8, 2)() == 16
def test_cmp(self):
assert lazy(lambda: 10) == lazy(lambda: 10)
assert lazy(lambda: 10) != lazy(lambda: 20)
def test__reduce__(self):
x = lazy(double, 4)
y = pickle.loads(pickle.dumps(x))
assert x() == y()
def test__deepcopy__(self):
from copy import deepcopy
x = lazy(double, 4)
y = deepcopy(x)
assert x._fun == y._fun
assert x._args == y._args
assert x() == y()
@pytest.mark.parametrize('obj,expected', [
(lazy(lambda: 10), 10),
(20, 20),
])
def test_maybe_evaluate(obj, expected):
assert maybe_evaluate(obj) == expected
class test_retry_over_time:
class Predicate(Exception):
pass
def setup(self):
self.index = 0
def myfun(self):
if self.index < 9:
raise self.Predicate()
return 42
def errback(self, exc, intervals, retries):
interval = next(intervals)
sleepvals = (None, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 16.0)
self.index += 1
assert interval == sleepvals[self.index]
return interval
@mock.sleepdeprived(module=utils)
def test_simple(self):
prev_count, utils.count = utils.count, Mock()
try:
utils.count.return_value = list(range(1))
x = retry_over_time(self.myfun, self.Predicate,
errback=None, interval_max=14)
assert x is None
utils.count.return_value = list(range(10))
cb = Mock()
x = retry_over_time(self.myfun, self.Predicate,
errback=self.errback, callback=cb,
interval_max=14)
assert x == 42
assert self.index == 9
cb.assert_called_with()
finally:
utils.count = prev_count
def test_retry_timeout(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
errback=self.errback, interval_max=14, timeout=1
)
assert self.index == 1
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
errback=None, timeout=1,
)
@mock.sleepdeprived(module=utils)
def test_retry_zero(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=0, errback=self.errback, interval_max=14,
)
assert self.index == 0
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=0, errback=None, interval_max=14,
)
@mock.sleepdeprived(module=utils)
def test_retry_once(self):
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=1, errback=self.errback, interval_max=14,
)
assert self.index == 1
# no errback
with pytest.raises(self.Predicate):
retry_over_time(
self.myfun, self.Predicate,
max_retries=1, errback=None, interval_max=14,
)
@mock.sleepdeprived(module=utils)
def test_retry_always(self):
Predicate = self.Predicate
class Fun(object):
def __init__(self):
self.calls = 0
def __call__(self, *args, **kwargs):
try:
if self.calls >= 10:
return 42
raise Predicate()
finally:
self.calls += 1
fun = Fun()
assert retry_over_time(
fun, self.Predicate,
max_retries=None, errback=None, interval_max=14) == 42
assert fun.calls == 11
@pytest.mark.parametrize('obj,expected', [
(None, None),
(1, [1]),
([1, 2, 3], [1, 2, 3]),
])
def test_maybe_list(obj, expected):
assert maybe_list(obj) == expected
def test_fxrange__no_repeatlast():
assert list(fxrange(1.0, 3.0, 1.0)) == [1.0, 2.0, 3.0]
@pytest.mark.parametrize('args,expected', [
((1.0, 3.0, 1.0, 30.0),
[1.0, 2.0, 3.0, 3.0, 3.0, 3.0,
3.0, 3.0, 3.0, 3.0, 3.0]),
((1.0, None, 1.0, 30.0),
[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]),
])
def test_fxrangemax(args, expected):
assert list(fxrangemax(*args)) == expected
def test_reprkwargs():
assert reprkwargs({'foo': 'bar', 1: 2, 'k': 'v'})
def test_reprcall():
assert reprcall('add', (2, 2), {'copy': True})
class test_accepts_arg:
def function(self, foo, bar, baz="baz"):
pass
def test_valid_argument(self):
assert accepts_argument(self.function, 'self')
assert accepts_argument(self.function, 'foo')
assert accepts_argument(self.function, 'baz')
def test_invalid_argument(self):
assert not accepts_argument(self.function, 'random_argument')
if PY3:
assert not accepts_argument(test_accepts_arg, 'foo')
def test_raise_exception(self):
with pytest.raises(Exception):
accepts_argument(None, 'foo')
if not PY3:
with pytest.raises(Exception):
accepts_argument(test_accepts_arg, 'foo')
| bsd-3-clause | 7,319,310,932,030,114,000 | 25.715976 | 76 | 0.531561 | false |
msullivan/advent-of-code | 2020/17a.py | 1 | 1655 | #!/usr/bin/env python3
import copy
from collections import defaultdict
import sys
import re
def extract(s):
return [int(x) for x in re.findall(r'-?\d+', s)]
def first(grid, x, y, dx, dy):
while True:
x += dx
y += dy
if x < 0 or x >= len(grid[0]) or y < 0 or y >= len(grid):
return ''
if grid[y][x] in ('L', '#'):
return grid[y][x]
nbrs = [(x, y, z) for x in range(-1, 2) for y in range(-1, 2) for z in range(-1, 2) if not x == y == z == 0]
def add(v1, v2):
return tuple(x + y for x, y in zip(v1, v2))
def step(grid):
ngrid = copy.deepcopy(grid)
# ngrid = [x[:] for x in grid]
change = False
for pos in list(grid):
for dx in nbrs + [(0, 0, 0)]:
npos = add(dx, pos)
cnt = 0
for d in nbrs:
if grid[add(npos, d)] == "#":
cnt += 1
print(cnt)
if grid[npos] == '#' and not (cnt == 2 or cnt == 3):
ngrid[npos] = '.'
change = True
elif grid[npos] == '.' and cnt == 3:
ngrid[npos] = '#'
change = True
return ngrid, change
def main(args):
# data = [x.split('\n') for x in sys.stdin.read().split('\n\n')]
data = [list(s.strip()) for s in sys.stdin]
grid = defaultdict(lambda: ".")
for y in range(len(data)):
for x in range(len(data[0])):
grid[x,y,0] = data[y][x]
for i in range(6):
print(i, grid)
grid, _ = step(grid)
print(len([x for x in grid.values() if x == '#']))
if __name__ == '__main__':
sys.exit(main(sys.argv))
| mit | 4,169,982,579,958,940,000 | 24.859375 | 108 | 0.467674 | false |
asamerh4/mesos | support/push-commits.py | 1 | 4982 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is typically used by Mesos committers to push a locally applied
review chain to ASF git repo and mark the reviews as submitted on ASF
ReviewBoard.
Example Usage:
> git checkout master
> git pull origin
> ./support/apply-reviews.py -c -r 1234
> ./support/push-commits.py
"""
# TODO(vinod): Also post the commit message to the corresponding ASF JIRA
# tickets and resolve them if necessary.
import argparse
import os
import re
import sys
from subprocess import check_output
REVIEWBOARD_URL = 'https://reviews.apache.org'
def get_reviews(revision_range):
"""Return the list of reviews found in the commits in the revision range."""
reviews = [] # List of (review id, commit log) tuples
rev_list = check_output(['git',
'rev-list',
'--reverse',
revision_range]).strip().split('\n')
for rev in rev_list:
commit_log = check_output(['git',
'--no-pager',
'show',
'--no-color',
'--no-patch',
rev]).strip()
pos = commit_log.find('Review: ')
if pos != -1:
pattern = re.compile('Review: ({url})$'.format(
url=os.path.join(REVIEWBOARD_URL, 'r', '[0-9]+')))
match = pattern.search(commit_log.strip().strip('/'))
if match is None:
print "\nInvalid ReviewBoard URL: '{}'".format(commit_log[pos:])
sys.exit(1)
url = match.group(1)
reviews.append((os.path.basename(url), commit_log))
return reviews
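# Illustrative sketch (added commentary, not part of the original script); the review
# id below is hypothetical. A commit qualifies when its log contains a line such as
#
#   Review: https://reviews.apache.org/r/12345
#
# in which case ('12345', <full commit log>) is appended to the returned list.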
def close_reviews(reviews, options):
"""Mark the given reviews as submitted on ReviewBoard."""
for review_id, commit_log in reviews:
print 'Closing review', review_id
if not options['dry_run']:
check_output(['rbt',
'close',
'--description',
commit_log,
review_id])
def parse_options():
"""Return a dictionary of options parsed from command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('-n',
'--dry-run',
action='store_true',
help='Perform a dry run.')
args = parser.parse_args()
options = {}
options['dry_run'] = args.dry_run
return options
def main():
"""Main function to push the commits in this branch as review requests."""
options = parse_options()
current_branch_ref = check_output(['git', 'symbolic-ref', 'HEAD']).strip()
current_branch = current_branch_ref.replace('refs/heads/', '', 1)
if current_branch != 'master':
print 'Please run this script from master branch'
sys.exit(1)
remote_tracking_branch = check_output(['git',
'rev-parse',
'--abbrev-ref',
'master@{upstream}']).strip()
merge_base = check_output([
'git',
'merge-base',
remote_tracking_branch,
'master']).strip()
if merge_base == current_branch_ref:
print 'No new commits found to push'
sys.exit(1)
reviews = get_reviews(merge_base + ".." + current_branch_ref)
# Push the current branch to remote master.
remote = check_output(['git',
'config',
'--get',
'branch.master.remote']).strip()
print 'Pushing commits to', remote
if options['dry_run']:
check_output(['git',
'push',
'--dry-run',
remote,
'master:master'])
else:
check_output(['git',
'push',
remote,
'master:master'])
# Now mark the reviews as submitted.
close_reviews(reviews, options)
if __name__ == '__main__':
main()
| apache-2.0 | -8,114,206,134,426,273,000 | 30.732484 | 80 | 0.545163 | false |
ATSTI/administra | open_myplace/boleto/boleto.py | 1 | 4123 | # -*- coding: utf-8 -*-
#################################################################################
# #
# Copyright (C) 2011 Vinicius Dittgen - PROGE, Leonardo Santagada - PROGE #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU General Public License for more details. #
# #
#You should have received a copy of the GNU General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
#################################################################################
from openerp.osv import fields, osv
class boleto_partner_config(osv.osv):
"""Boleto Partner Configuration"""
_name = 'boleto.partner_config'
_columns = {
'name': fields.char('Name', size=20, required=True),
'carteira': fields.integer('Carteira', size=20, required=True),
# 'juros': fields.float('Juros', digits=(1, 6)),
# 'multa': fields.float('Multa', digits=(12, 6)),
'instrucoes': fields.text(u'Instruções'),
}
boleto_partner_config()
class boleto_company_config(osv.osv):
"""Boleto Company Configuration"""
_name = 'boleto.company_config'
_columns = {
'name': fields.char('Name', size=20, required=True),
'banco': fields.selection([('bb', 'Banco do Brasil'), ('real', 'Banco Real'), ('bradesco', 'Banco Bradesco'), ('caixa', 'Banco Caixa Federal'),('sicredi', 'Sicredi'),('itau', 'Banco Itau')], 'Banco', required=True),
'agencia_cedente': fields.integer('Agencia', size=6, required=True),
'conta_cedente': fields.integer('Conta', size=8, required=True),
'convenio': fields.integer(u'Convenio', size=8, required=True),
'nosso_numero': fields.integer(u'Nosso Número'),
}
boleto_company_config()
class boleto_boleto(osv.osv):
"""Boleto"""
_name = 'boleto.boleto'
_columns = {
'name': fields.char('Name', size=20, required=True),
        # partner (customer) settings
'carteira': fields.char('Carteira', size=10),
# 'juros': fields.float('Juros', digits=(12, 6)),
# 'multa': fields.float('Multa', digits=(12, 6)),
'instrucoes': fields.text(u'Instruções'),
'sacado': fields.many2one('res.partner', 'Sacado'),
        # company settings
'banco': fields.selection([('bb', 'Banco do Brasil'), ('real', 'Banco Real'), ('bradesco', 'Banco Bradesco'), ('caixa', 'Banco Caixa Federal'),('sicredi', 'Sicredi'),('itau','Banco Itau')], 'Banco'),
'agencia_cedente': fields.char('Agencia', size=6),
'conta_cedente': fields.char('Conta', size=8),
'convenio': fields.char('Convenio', size=8),
'nosso_numero': fields.integer(u'Nosso Número'),
'cedente': fields.many2one('res.company', 'Empresa'),
        # invoice data
'move_line_id': fields.many2one('account.move.line', 'Move Line'),
'data_vencimento': fields.date('Data do Vencimento'),
'data_documento': fields.date('Data do Documento'),
'data_processamento': fields.date('Data do Processamento'),
'valor': fields.float('Valor', digits=(12, 6)),
'numero_documento': fields.char(u'Número do Documento', size=20),
'endereco': fields.char(u'Endereço', size=20),
}
boleto_boleto()
| gpl-2.0 | -8,144,782,495,104,841,000 | 50.4375 | 223 | 0.531227 | false |
quixey/scrapy-cluster | crawler/tests/tests_online.py | 1 | 3938 | '''
Online link spider test
'''
import unittest
from unittest import TestCase
import time
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import scrapy
import redis
from redis.exceptions import ConnectionError
import json
import threading, time
from crawling.spiders.link_spider import LinkSpider
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from kafka import KafkaClient, SimpleConsumer
class CustomSpider(LinkSpider):
'''
Overridden link spider for testing
'''
name = "test-spider"
class TestLinkSpider(TestCase):
example_feed = "\x80\x02}q\x00(X\x0f\x00\x00\x00allowed_domainsq\x01NX"\
"\x0b\x00\x00\x00allow_regexq\x02NX\a\x00\x00\x00crawlidq\x03X\x19"\
"\x00\x00\x0001234567890abcdefghijklmnq\x04X\x03\x00\x00\x00urlq\x05X"\
"\x13\x00\x00\x00www.istresearch.comq\x06X\a\x00\x00\x00expiresq\aK"\
"\x00X\b\x00\x00\x00priorityq\bK\x01X\n\x00\x00\x00deny_regexq\tNX\b"\
"\x00\x00\x00spideridq\nX\x0b\x00\x00\x00test-spiderq\x0bX\x05\x00"\
"\x00\x00attrsq\x0cNX\x05\x00\x00\x00appidq\rX\a\x00\x00\x00testappq"\
"\x0eX\x06\x00\x00\x00cookieq\x0fNX\t\x00\x00\x00useragentq\x10NX\x0f"\
"\x00\x00\x00deny_extensionsq\x11NX\b\x00\x00\x00maxdepthq\x12K\x00u."
def setUp(self):
self.settings = get_project_settings()
self.settings.set('KAFKA_TOPIC_PREFIX', "demo_test")
# set up redis
self.redis_conn = redis.Redis(host=self.settings['REDIS_HOST'],
port=self.settings['REDIS_PORT'])
try:
self.redis_conn.info()
except ConnectionError:
print "Could not connect to Redis"
# plugin is essential to functionality
sys.exit(1)
# clear out older test keys if any
keys = self.redis_conn.keys("test-spider:*")
for key in keys:
self.redis_conn.delete(key)
# set up kafka to consumer potential result
self.kafka_conn = KafkaClient(self.settings['KAFKA_HOSTS'])
self.kafka_conn.ensure_topic_exists("demo_test.crawled_firehose")
self.consumer = SimpleConsumer(
self.kafka_conn,
"demo-id",
"demo_test.crawled_firehose",
buffer_size=1024*100,
fetch_size_bytes=1024*100,
max_buffer_size=None
)
# move cursor to end of kafka topic
self.consumer.seek(0, 2)
def test_crawler_process(self):
runner = CrawlerRunner(self.settings)
d = runner.crawl(CustomSpider)
d.addBoth(lambda _: reactor.stop())
# add crawl to redis
key = "test-spider:istresearch.com:queue"
self.redis_conn.zadd(key, self.example_feed, -99)
# run the spider, give 20 seconds to see the url, crawl it,
# and send to kafka. Then we kill the reactor
def thread_func():
time.sleep(20)
reactor.stop()
thread = threading.Thread(target=thread_func)
thread.start()
reactor.run()
# ensure it was sent out to kafka
message_count = 0
for message in self.consumer.get_messages():
if message is None:
break
else:
the_dict = json.loads(message.message.value)
if the_dict is not None and the_dict['appid'] == 'testapp' \
and the_dict['crawlid'] == '01234567890abcdefghijklmn':
message_count += 1
self.assertEquals(message_count, 1)
def tearDown(self):
keys = self.redis_conn.keys('stats:crawler:*:test-spider:*')
keys = keys + self.redis_conn.keys('test-spider:*')
for key in keys:
self.redis_conn.delete(key)
if __name__ == '__main__':
unittest.main()
| mit | 2,532,433,231,757,263,000 | 33.243478 | 79 | 0.623667 | false |
yfauser/maxwindownotify | setup.py | 1 | 1252 | from setuptools import setup
import io
def read(*filenames, **kwargs):
encoding = kwargs.get('encoding', 'utf-8')
sep = kwargs.get('sep', '\n')
buf = []
for filename in filenames:
with io.open(filename, encoding=encoding) as f:
buf.append(f.read())
return sep.join(buf)
long_description = read('README.rst')
setup(
name='maxwindownotify',
version='1.1.1',
packages=['maxwindownotify'],
    package_data={'maxwindownotify': ['*', 'notifier_modules/*']},
url='http://github.com/yfauser/maxwindownotify',
license='MIT',
author='yfauser',
author_email='[email protected]',
description='This little script (daemon) will poll for the status of all window sensors known to a MAX Cube system and check for open windows',
long_description=long_description,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7'],
install_requires=['requests>=2.7.0', 'netaddr>=0.7.18'],
entry_points={
'console_scripts': ['maxwindownotify = maxwindownotify.maxwindownotify:main']
}
)
| mit | 2,280,797,815,360,075,000 | 33.777778 | 147 | 0.654153 | false |
hazybluedot/indie_helper | util.py | 1 | 2278 | import requests
import bleach
import sys
if sys.version < '3':
from urlparse import urlparse
text_type = unicode
text_types = [ str, unicode ]
binary_type = str
else:
from urllib.parse import urlparse
text_type = str
text_types = [ str ]
binary_type = bytes
def is_url(url):
try:
parts = urlparse(url)
except TypeError:
return False
return parts.scheme in [ 'http', 'https' ]
def flatten(item):
if type(item) in [ list, tuple ] and len(item) == 1:
return item[0]
else:
return item
#bleach.ALLOWED_TAGS + ['p']
ALLOWED_TAGS=bleach.ALLOWED_TAGS + ['p', 'span']
def clean(text):
return bleach.clean(text, tags=ALLOWED_TAGS)
def clean_url(url):
if url.startswith('javascript:'):
return '';
return url
def bleachify(entry, key=None):
## todo for each property
if key == 'url':
bleached = bleachify(entry)
return [ clean_url(u) for u in bleached ]
if hasattr(entry, 'items'):
return dict([ (prop, bleachify(value, prop)) for prop, value in entry.items() ])
elif type(entry) is list:
## to flatten the list-of-one values that mf2py generates
## I have revisited this and decided to keep single element lists as this seems to be part of the mf2 defined format
#if len(entry) == 1:
# return bleachify(entry[0])
#else:
return map(bleachify, entry)
elif type(entry) in text_types:
return clean(entry)
else:
print('unhandled type of entry: {0}'.format(type(entry)))
return None
def follow_redirects(url, max_depth):
"""perform http GET url, following any redirects up to max_depth.
return resolved url.
Raises TooManyRedirects exception if max_depth is exceeded"""
def _wrapped(url, depth, acc):
if depth > max_depth:
raise TooManyRedirects('following redirects on {0} exceeded maximum depth of {1}'.format(url, max_depth))
r = requests.head(url)
acc.append( { 'url': url, 'status_code': r.status_code} )
if r.status_code in [ 301, 302 ]:
return _wrapped(r.headers['Location'], depth+1, acc)
else:
return acc
return _wrapped(url, 0, [])
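# Illustrative usage (added commentary, not part of the original module); the URL and
# status codes are hypothetical:
#
#   hops = follow_redirects('http://example.com/short', max_depth=5)
#   # -> [{'url': 'http://example.com/short', 'status_code': 301},
#   #     {'url': 'http://example.com/', 'status_code': 200}]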
| gpl-3.0 | 7,210,100,983,337,184,000 | 28.205128 | 124 | 0.611501 | false |
jenshenrik/destiny-trader | destiny.py | 1 | 3112 | import sys
import re
TYPE_BATTLEFIELD = "Battlefield"
def print_usage():
print("""
Star Wars: Destiny trade list builder
Usage:
$>python destiny.py <target-file>
where <target-file> is the text-file to process.
This file should be generated by logging into swdestiny.com, going to 'My collection',
selecting all (Ctrl/Cmd + A), pasting into an empty file, and saving.
""")
# Opens file, and returns it as a list of lines
def open_file(path):
f = open(path, 'r+')
lines = f.readlines()
f.close()
return lines
def write_file(path, haves, wants):
output = open(path, 'w')
output.write("HAVES")
for card in haves:
qty = 0
if card.type == TYPE_BATTLEFIELD:
qty = card.qty - 1
else:
qty = card.qty - 2
output.write("\n%dx %s\t\t(%s)" % (qty, card.name, card.set_string))
output.write("\n\nWANTS")
for card in wants:
qty = 0
if card.type == TYPE_BATTLEFIELD:
qty = 1 #you always only want 1 battlefield
else:
qty = 2 - card.qty
output.write("\n%dx %s\t\t(%s)" % (qty, card.name, card.set_string))
output.close()
def strip_header(lines):
return lines[19:]
def strip_footer(lines):
return lines[:-11]
class Card:
def __init__(self, line):
split = line.split("\t")
self.name = split[0].lstrip().rstrip()
self.qty = self.parse_qty(split[1])
self.type = split[6]
self.rarity = split[7]
self.set = self.parse_set(split[-1].lstrip().rstrip())
self.number = self.parse_number(split[-1])
self.set_string = split[-1].lstrip().rstrip()
# Pulls number from quantity string
def parse_qty(self, qty_string):
found = re.findall(r'\d+', qty_string)
return int(found[0])
# Parse the card's set name.
# Assumes the last word is set number
def parse_set(self, set_string):
return set_string.rsplit(" ", 1)[0]
# Parse the card's number in the set.
# Assumes the last word is set number
def parse_number(self, number_string):
return int(number_string.rsplit(" ", 1)[1])
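# Illustrative sketch (added commentary, not from the original): each collection row is
# tab-separated; Card() takes the name from column 0, the owned quantity from the first
# number in column 1, the type from column 6, the rarity from column 7, and the set name
# plus card number from the last column (e.g. a hypothetical "Awakenings 38").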
def check_usage():
num_args = len(sys.argv)
if num_args < 2:
print_usage()
sys.exit()
def extract_filename_and_extension(filename):
split_name = filename.rsplit(".", 1)
return (split_name[0], split_name[1])
# run script
check_usage()
input_file = sys.argv[1]
file_lines = open_file(input_file)
file_lines = strip_header(file_lines)
file_lines = strip_footer(file_lines)
cards = []
for line in file_lines:
cards.append(Card(line))
haves = []
wants = []
for card in cards:
if card.type == TYPE_BATTLEFIELD:
if card.qty < 1:
wants.append(card)
elif card.qty > 1:
haves.append(card)
else:
if card.qty < 2:
wants.append(card)
elif card.qty > 2:
haves.append(card)
(filename, extension) = extract_filename_and_extension(input_file)
write_file(filename+"_trades."+extension, haves, wants)
| gpl-3.0 | 8,064,256,526,302,666,000 | 24.719008 | 90 | 0.593509 | false |
bfarr/kombine | examples/kepler/correlated_likelihood.py | 1 | 2577 | import numpy as np
import numpy.linalg as nl
import numpy.random as nr
import rv_model as rv
import scipy.linalg as sl
import scipy.stats as ss
def generate_covariance(ts, sigma, tau):
r"""Generates a covariance matrix according to an
squared-exponential autocovariance
.. math::
\left\langle x_i x_j \right\rangle = \sigma_0^2 \delta_{ij} + \sigma^2 \exp\left[ - \frac{\left| t_i - t_j\right|^2}{2 \tau^2} \right]
"""
ndim = ts.shape[0]
tis = ts[:, np.newaxis]
tjs = ts[np.newaxis, :]
return sigma*sigma*np.exp(-np.square(tis-tjs)/(2.0*tau*tau))
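# Illustrative sketch (added commentary, not part of the original file): evaluating the
# kernel on a tiny, hypothetical time grid.
#
#   ts = np.array([0.0, 1.0, 2.0])
#   cov = generate_covariance(ts, sigma=2.0, tau=5.0)
#   # cov[i, j] == 4.0 * np.exp(-(ts[i] - ts[j]) ** 2 / (2.0 * 5.0 ** 2))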
params_dtype = np.dtype([('mu', np.float),
('K', np.float),
('e', np.float),
('omega', np.float),
('chi', np.float),
('P', np.float),
('nu', np.float),
('sigma', np.float),
('tau', np.float)])
class Log1PPosterior(object):
"""Log of the posterior for a single planet system observed with a
single telescope. """
def __init__(self, ts, vs, dvs):
self.ts = np.sort(ts)
self.vs = vs
self.dvs = dvs
self.T = self.ts[-1] - self.ts[0]
self.dt_min = np.min(np.diff(self.ts))
def to_params(self, p):
p = np.atleast_1d(p)
return p.view(params_dtype)
def log_prior(self, p):
p = self.to_params(p)
# Bounds
if p['K'] < 0.0 or p['e'] < 0.0 or p['e'] > 1.0 or p['omega'] < 0.0 or p['omega'] > 2.0*np.pi or p['P'] < 0.0 or p['nu'] < 0.1 or p['nu'] > 10.0 or p['sigma'] < 0.0 or p['tau'] < 0.0 or p['tau'] > self.T:
return np.NINF
# Otherwise, flat prior on everything.
return 0.0
def log_likelihood(self, p):
p = self.to_params(p)
v = self.rvs(p)
res = self.vs - v - p['mu']
cov = p['nu']*p['nu']*np.diag(self.dvs*self.dvs)
cov += generate_covariance(self.ts, p['sigma'], p['tau'])
cfactor = sl.cho_factor(cov)
cc, lower = cfactor
n = self.ts.shape[0]
return -0.5*n*np.log(2.0*np.pi) - np.sum(np.log(np.diag(cc))) - 0.5*np.dot(res, sl.cho_solve(cfactor, res))
def __call__(self, p):
lp = self.log_prior(p)
if lp == np.NINF:
return np.NINF
else:
return lp + self.log_likelihood(p)
def rvs(self, p):
p = self.to_params(p)
return rv.rv_model(self.ts, p['K'], p['e'], p['omega'], p['chi'], p['P'])
| mit | -2,341,818,866,456,086,000 | 27.955056 | 212 | 0.498642 | false |
FrancescoRizzi/AWSomesauce | articles/BAS4-pws/custauth/custauth.py | 1 | 18186 | #!/usr/bin/env python
import os
import json
import StringIO
from contextlib import closing
import re
import time
import pprint
import boto3
from boto3.session import Session
import botocore
import jwt
from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.backends import default_backend
# Simplest form of logging using the standard logging module:
# ============================================================
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Top-Level Handler:
# ============================================================
def lambda_handler(event, context):
logger.info("CustAuth Triggered.")
authToken = event.get('authorizationToken', '')
methodArn = event.get('methodArn', '')
authHeader = event.get('Authorization', '')
logger.info("Authorization Token : '{0!s}'.".format(authToken))
logger.info("Method ARN : '{0!s}'.".format(methodArn))
logger.info("Authorization Header: '{0!s}'.".format(authHeader))
# Check Configuration before wasting time
# ========================================================
# AUTH_APP_ID: required
auth_app_id = os.environ.get('AUTH_APP_ID', None)
if not auth_app_id:
logger.error("Missing Required 'AUTH_APP_ID' Environmental Variable.")
raise ValueError("Missing/blank 'AUTH_APP_ID'")
logger.info("Auth App ID : '{0!s}'.".format(auth_app_id))
# AUTH_TENANT_ID: required
auth_tenant_id = os.environ.get('AUTH_TENANT_ID', None)
if not auth_tenant_id:
logger.error("Missing Required 'AUTH_TENANT_ID' Environmental Variable.")
raise ValueError("Missing/blank 'AUTH_TENANT_ID'")
logger.info("Auth Tenant ID : '{0!s}'.".format(auth_tenant_id))
# CERTS_BUCKET: required
certs_bucket = os.environ.get('CERTS_BUCKET', None)
if not certs_bucket:
logger.error("Missing Required 'CERTS_BUCKET' Environmental Variable.")
raise ValueError("Missing/blank 'CERTS_BUCKET'")
logger.info("Certificates Bucket : '{0!s}'.".format(certs_bucket))
# ========================================================
# Client credentials expected in the authorizationToken, in the form:
# 'Bearer <id_token>'
# Missing authorizationToken:
# response 401 - Unauthorized (although we don't send back a 'WWW-Authenticate' header as we should)
if not authToken:
logger.warn("Missing Authorization Token: will trigger 401-Unauthorized response.")
raise Exception('Unauthorized')
validator = TokenValidator()
validToken = validator.ValidateToken(authToken, auth_app_id, auth_tenant_id, certs_bucket)
logger.info("Is the Authorization Token valid? {0!s}".format(validToken))
# authorizationToken invalid (format or contents):
# respond with Policy DENYING access, which will trigger API Gateway to respond with
# response 403 - Forbidden
# authorizationToken valid (format and contents):
# respond with Policy ALLOWING access, which will trigger API Gateway to
# proceed with the backend integration configured on the method.
principalId = auth_app_id
arnParts = event['methodArn'].split(':')
apiGatewayArnTmp = arnParts[5].split('/')
awsAccountId = arnParts[4]
policy = AuthPolicy(principalId, awsAccountId)
policy.restApiId = apiGatewayArnTmp[0]
policy.region = arnParts[3]
policy.stage = apiGatewayArnTmp[1]
policyDesc = ''
if validToken:
policy.allowAllMethods()
policyDesc = 'ALLOW'
else:
policy.denyAllMethods()
policyDesc = 'DENY'
authResponse = policy.build()
# Optional: context
# The response can also include a 'context' key-value pairs mapping,
# which will be rendered available to the configured backend
# (if the policy is such that the request handling continues)
# as $context.authorizer.<key>
# This mapping is part of the cached response.
#
# context = {
# 'key': 'value', # $context.authorizer.key -> value
# 'number' : 1,
# 'bool' : True
# }
# authResponse['context'] = context
#
# INVALID formats:
# context['arr'] = ['foo']
# context['obj'] = {'foo':'bar'}
logger.info("CustAuth completed: returning policy to {0!s} access.".format(policyDesc))
return authResponse
# TokenValidator
# ============================================================
class TokenValidator(object):
PEMSTART = "-----BEGIN CERTIFICATE-----\n"
PEMEND = "\n-----END CERTIFICATE-----\n"
def __init__(self):
self._session = None
self._client = None
def ValidateToken(self, auth_header, auth_app_id, auth_tenant_id, certs_bucket):
# auth_header expected to be in the form:
# 'Bearer <id_token>'
(pre, encoded_token) = auth_header.split(' ', 2)
if (not pre) or (pre.upper() != "BEARER"):
logger.warn("Authorization Token did not match expected 'Bearer <id_token>' format.")
return False
expected_issuer = 'https://sts.windows.net/{0!s}/'.format(auth_tenant_id)
unverified_headers = jwt.get_unverified_header(encoded_token)
#unverified_token = jwt.decode(encoded_token, algorithms=['RS256'], audience=auth_app_id, issuer=expected_issuer, options={'verify_signature': False})
#x5t = unverified_token.get('x5t', None)
#kid = unverified_token.get('kid', None)
kid = unverified_headers.get('kid', None)
logger.info("Token 'kid': '{0!s}'.".format(kid))
if not kid:
logger.warn("Could not extract 'kid' property from token.")
return False
cert_pem = self.GetSigningCertificate(certs_bucket, kid)
if cert_pem:
logger.info("Retrieved Signing Certificate.")
#if isinstance(cert_pem, unicode):
# logger.info("Signing Certificate is unicode. Will attempt STRICT conversion.")
# cert_pem = cert_pem.encode('ascii', 'strict')
# logger.info("Signing Certificate unicode encoded to ASCII.")
cert = load_pem_x509_certificate(cert_pem, default_backend())
logger.info("Loaded Signing Certificate.")
public_key = cert.public_key()
logger.info("Extracted Public Key from Signing Certificate.")
decoded = jwt.decode(encoded_token, public_key, algorithms=['RS256'], audience=auth_app_id, issuer=expected_issuer)
# NOTE: the JWT decode method verifies
# - general format of the encoded token
# - signature, using the given public key
# - aud claim (Audience) vs audience value
# - exp claim (Expiration) vs current datetime (UTC)
# - nbf claim (Not Before) vs current datetime (UTC)
# - iss claim (Issuer) vs issuer value
if decoded:
logger.info("Token Decoded and Validated Successfully.")
return True
else:
logger.warn("Failed to Decode Token when verifying signature.")
return False
else:
logger.warn("Could not retrieve signing certificate matching token's 'kid' property ('{0!s}').".format(kid))
return False
def GetSigningCertificate(self, certs_bucket, kid):
self.EnsureClient()
discovery_record_str = None
with closing(StringIO.StringIO()) as dest:
self._client.download_fileobj(
Bucket=certs_bucket,
Key=kid,
Fileobj=dest)
discovery_record_str = dest.getvalue()
if not discovery_record_str:
logger.warn("Could not retrieve Discovery Record from Bucket.")
return None
logger.info("Retrieved Discovery Record Payload from Bucket.")
# discovery_record_str is the payload extracted from
# the bucket, presumed to be the JSON-formatted string
# of the signing certificate discovery record. eg:
# {
# "x5t": "...",
# "use": "...",
# "e": "...",
# "kty": "...",
# "n": "...",
# "x5c": [
# "..."
# ],
# "issuer": "...",
# "kid": "..."
# }
# What we need to extract as 'certificate' is
# the first value in the "x5c" property list
discovery_record = json.loads(discovery_record_str)
logger.info("Parsed Discovery Record JSON.")
x5c = discovery_record.get('x5c', None)
if not x5c:
logger.warn("Could not find 'x5c' property from Discovery Record.")
return None
logger.info("Discovery Record x5c found.")
raw_cert = ""
if isinstance(x5c, list):
raw_cert = x5c[0]
elif isinstance(x5c, basestring):
raw_cert = x5c
else:
logger.warn("Unexpected data type for x5c value from Discovery Record (expected string or list).")
return None
logger.info("Raw Cert:|{0!s}|".format(raw_cert))
if isinstance(raw_cert, unicode):
logger.info("Raw Certificate is unicode. Attempting STRICT conversion to ASCII.")
raw_cert = raw_cert.encode('ascii', 'strict')
logger.info("Raw Certificate encoded to ASCII.")
logger.info("Formatting Raw Certificate according to PEM 64-characters lines.")
raw_cert = self.InsertNewLines(raw_cert)
logger.info("Raw Certificate lines length normalized to PEM.")
pem_cert = self.PEMSTART + raw_cert + self.PEMEND
logger.info("After wrapping Raw certificate in PEM Markers:")
logger.info(pem_cert)
#tmp = "is NOT"
#if isinstance(raw_cert, unicode):
# tmp = "is"
#logger.info("Before Wrapping in PEM delimiters, the raw_cert data type {0!s} unicode.".format(tmp))
#
#pem_cert = self.PEMSTART + raw_cert + self.PEMEND
#logger.info("PEM Cert:|{0!s}|".format(pem_cert))
#
#tmp = "is NOT"
#if isinstance(pem_cert, unicode):
# tmp = "is"
#logger.info("After Wrapping in PEM delimiters, the pem_cert data type {0!s} unicode.".format(tmp))
#
#if isinstance(pem_cert, unicode):
# logger.info("Signing Certificate is unicode. Will attempt STRICT conversion.")
# pem_cert = pem_cert.encode('ascii', 'strict')
# logger.info("Signing Certificate unicode encoded to ASCII.")
#
#logger.info("Splitting according to PEM format (64 characters per line).")
#pem_cert = self.InsertNewLines(pem_cert)
#logger.info("After splitting in 64-character long lines:")
#logger.info(pem_cert)
return pem_cert
def InsertNewLines(self, s, every=64):
lines = []
for i in xrange(0, len(s), every):
lines.append(s[i:i+every])
return '\n'.join(lines)
def EnsureClient(self):
self.EnsureSession()
if not self._client:
self._client = self._session.client('s3')
def EnsureSession(self):
if not self._session:
self._session = boto3.Session()
# HttpVerbs
# ============================================================
class HttpVerb:
GET = "GET"
POST = "POST"
PUT = "PUT"
PATCH = "PATCH"
HEAD = "HEAD"
DELETE = "DELETE"
OPTIONS = "OPTIONS"
ALL = "*"
# AuthPolicy
# ============================================================
class AuthPolicy(object):
awsAccountId = ""
"""The AWS account id the policy will be generated for. This is used to create the method ARNs."""
principalId = ""
"""The principal used for the policy, this should be a unique identifier for the end user."""
version = "2012-10-17"
"""The policy version used for the evaluation. This should always be '2012-10-17'"""
pathRegex = "^[/.a-zA-Z0-9-\*]+$"
"""The regular expression used to validate resource paths for the policy"""
"""these are the internal lists of allowed and denied methods. These are lists
of objects and each object has 2 properties: A resource ARN and a nullable
conditions statement.
the build method processes these lists and generates the approriate
statements for the final policy"""
allowMethods = []
denyMethods = []
restApiId = "*"
"""The API Gateway API id. By default this is set to '*'"""
region = "*"
"""The region where the API is deployed. By default this is set to '*'"""
stage = "*"
"""The name of the stage used in the policy. By default this is set to '*'"""
def __init__(self, principal, awsAccountId):
self.awsAccountId = awsAccountId
self.principalId = principal
self.allowMethods = []
self.denyMethods = []
def _addMethod(self, effect, verb, resource, conditions):
"""Adds a method to the internal lists of allowed or denied methods. Each object in
the internal list contains a resource ARN and a condition statement. The condition
statement can be null."""
if verb != "*" and not hasattr(HttpVerb, verb):
raise NameError("Invalid HTTP verb " + verb + ". Allowed verbs in HttpVerb class")
resourcePattern = re.compile(self.pathRegex)
if not resourcePattern.match(resource):
raise NameError("Invalid resource path: " + resource + ". Path should match " + self.pathRegex)
if resource[:1] == "/":
resource = resource[1:]
resourceArn = ("arn:aws:execute-api:" +
self.region + ":" +
self.awsAccountId + ":" +
self.restApiId + "/" +
self.stage + "/" +
verb + "/" +
resource)
if effect.lower() == "allow":
self.allowMethods.append({
'resourceArn' : resourceArn,
'conditions' : conditions
})
elif effect.lower() == "deny":
self.denyMethods.append({
'resourceArn' : resourceArn,
'conditions' : conditions
})
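    # Illustrative ARN built by the code above (all identifiers hypothetical):
    # with region 'us-east-1', account '123456789012', restApiId 'abcdef123',
    # stage 'prod', verb 'GET' and resource 'pets', resourceArn becomes
    # 'arn:aws:execute-api:us-east-1:123456789012:abcdef123/prod/GET/pets'.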
def _getEmptyStatement(self, effect):
"""Returns an empty statement object prepopulated with the correct action and the
desired effect."""
statement = {
'Action': 'execute-api:Invoke',
'Effect': effect[:1].upper() + effect[1:].lower(),
'Resource': []
}
return statement
def _getStatementForEffect(self, effect, methods):
"""This function loops over an array of objects containing a resourceArn and
conditions statement and generates the array of statements for the policy."""
statements = []
if len(methods) > 0:
statement = self._getEmptyStatement(effect)
for curMethod in methods:
if curMethod['conditions'] is None or len(curMethod['conditions']) == 0:
statement['Resource'].append(curMethod['resourceArn'])
else:
conditionalStatement = self._getEmptyStatement(effect)
conditionalStatement['Resource'].append(curMethod['resourceArn'])
conditionalStatement['Condition'] = curMethod['conditions']
statements.append(conditionalStatement)
            # Only emit the aggregated statement when it actually lists resources;
            # an empty Resource array would make the generated policy invalid.
            if statement['Resource']:
                statements.append(statement)
return statements
def allowAllMethods(self):
"""Adds a '*' allow to the policy to authorize access to all methods of an API"""
self._addMethod("Allow", HttpVerb.ALL, "*", [])
def denyAllMethods(self):
"""Adds a '*' allow to the policy to deny access to all methods of an API"""
self._addMethod("Deny", HttpVerb.ALL, "*", [])
def allowMethod(self, verb, resource):
"""Adds an API Gateway method (Http verb + Resource path) to the list of allowed
methods for the policy"""
self._addMethod("Allow", verb, resource, [])
def denyMethod(self, verb, resource):
"""Adds an API Gateway method (Http verb + Resource path) to the list of denied
methods for the policy"""
self._addMethod("Deny", verb, resource, [])
def allowMethodWithConditions(self, verb, resource, conditions):
"""Adds an API Gateway method (Http verb + Resource path) to the list of allowed
methods and includes a condition for the policy statement. More on AWS policy
conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition"""
self._addMethod("Allow", verb, resource, conditions)
def denyMethodWithConditions(self, verb, resource, conditions):
"""Adds an API Gateway method (Http verb + Resource path) to the list of denied
methods and includes a condition for the policy statement. More on AWS policy
conditions here: http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition"""
self._addMethod("Deny", verb, resource, conditions)
def build(self):
"""Generates the policy document based on the internal lists of allowed and denied
conditions. This will generate a policy with two main statements for the effect:
one statement for Allow and one statement for Deny.
Methods that includes conditions will have their own statement in the policy."""
if ((self.allowMethods is None or len(self.allowMethods) == 0) and
(self.denyMethods is None or len(self.denyMethods) == 0)):
raise NameError("No statements defined for the policy")
policy = {
'principalId' : self.principalId,
'policyDocument' : {
'Version' : self.version,
'Statement' : []
}
}
policy['policyDocument']['Statement'].extend(self._getStatementForEffect("Allow", self.allowMethods))
policy['policyDocument']['Statement'].extend(self._getStatementForEffect("Deny", self.denyMethods))
return policy | mit | 3,410,329,240,392,538,600 | 38.027897 | 158 | 0.600352 | false |
CTSRD-SOAAP/chromium-42.0.2311.135 | native_client/buildbot/buildbot_selector.py | 1 | 18629 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import subprocess
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import pynacl.platform
python = sys.executable
bash = '/bin/bash'
echo = 'echo'
BOT_ASSIGNMENT = {
######################################################################
# Buildbots.
######################################################################
'xp-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'xp-bare-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-bare-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'precise-64-validator-opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
# Clang.
'precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'mac10.7-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# ASan.
'precise_64-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 64 newlib --asan',
'mac10.7-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 32 newlib --asan',
# PNaCl.
'oneiric_32-newlib-arm_hw-pnacl-panda-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-dbg',
'oneiric_32-newlib-arm_hw-pnacl-panda-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-opt',
'precise_64-newlib-arm_qemu-pnacl-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-dbg',
'precise_64-newlib-arm_qemu-pnacl-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-opt',
'precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'mac10.8-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'win7-64-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
# PNaCl Spec
'precise_64-newlib-arm_qemu-pnacl-buildonly-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-buildonly',
'oneiric_32-newlib-arm_hw-pnacl-panda-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-hw',
'lucid_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8632',
'lucid_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8664',
# NaCl Spec
'lucid_64-newlib-x86_32-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8632',
'lucid_64-newlib-x86_64-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8664',
# Android bots.
'precise64-newlib-dbg-android':
python + ' buildbot/buildbot_standard.py dbg arm newlib --android',
'precise64-newlib-opt-android':
python + ' buildbot/buildbot_standard.py opt arm newlib --android',
# Valgrind bots.
'precise-64-newlib-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
'precise-64-glibc-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
# Coverage.
'mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'xp-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
######################################################################
# Trybots.
######################################################################
'nacl-precise64_validator_opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
'nacl-precise64_newlib_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh newlib',
'nacl-precise64_glibc_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh glibc',
# Android trybots.
'nacl-precise64-newlib-dbg-android':
python + ' buildbot/buildbot_standard.py dbg arm newlib --android',
'nacl-precise64-newlib-opt-android':
python + ' buildbot/buildbot_standard.py opt arm newlib --android',
# Coverage trybots.
'nacl-mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'nacl-precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-win32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
# Clang trybots.
'nacl-precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'nacl-mac10.6-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# ASan.
'nacl-precise_64-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 64 newlib --asan',
'nacl-mac10.7-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 32 newlib --asan',
# Pnacl main trybots
'nacl-precise_64-newlib-arm_qemu-pnacl':
bash + ' buildbot/buildbot_pnacl.sh mode-trybot-qemu arm',
'nacl-precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'nacl-precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'nacl-precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
'nacl-arm_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-try',
'nacl-arm_hw_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-try',
'nacl-mac10.8_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'nacl-win7_64_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
# Pnacl spec2k trybots
'nacl-precise_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8632',
'nacl-precise_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8664',
'nacl-arm_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-buildonly',
'nacl-arm_hw_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-hw',
# Toolchain glibc.
'precise64-glibc': bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'mac-glibc': bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'win7-glibc': 'buildbot\\buildbot_windows-glibc-makefile.bat',
# Toolchain newlib x86.
'win7-toolchain_x86': 'buildbot\\buildbot_toolchain_win.bat',
'mac-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh mac',
'precise64-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh linux',
# Toolchain newlib arm.
'win7-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --buildbot'
' toolchain_build',
'mac-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --buildbot'
' toolchain_build',
'precise64-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --buildbot'
' --test_toolchain nacl_arm_newlib'
' toolchain_build',
# BIONIC toolchain builders.
'precise64-toolchain_bionic':
python +
' buildbot/buildbot_toolchain_build.py'
' --buildbot'
' toolchain_build_bionic',
# Pnacl toolchain builders.
'linux-pnacl-x86_32':
python +
' buildbot/buildbot_pnacl_toolchain.py --buildbot --tests-arch x86-32',
'linux-pnacl-x86_64':
python +
' buildbot/buildbot_pnacl_toolchain.py --buildbot --tests-arch x86-64',
'mac-pnacl-x86_32':
python +
' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'win-pnacl-x86_32':
python +
' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# Pnacl toolchain testers
'linux-pnacl-x86_64-tests-x86_64':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-64',
'linux-pnacl-x86_64-tests-x86_32':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-32',
'linux-pnacl-x86_64-tests-arm':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot arm',
# MIPS toolchain buildbot.
'linux-pnacl-x86_32-tests-mips':
bash + ' buildbot/buildbot_pnacl.sh mode-trybot-qemu mips32',
# Toolchain trybots.
'nacl-toolchain-precise64-newlib':
bash + ' buildbot/buildbot_toolchain.sh linux',
'nacl-toolchain-mac-newlib': bash + ' buildbot/buildbot_toolchain.sh mac',
'nacl-toolchain-win7-newlib': 'buildbot\\buildbot_toolchain_win.bat',
'nacl-toolchain-precise64-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --trybot'
' --test_toolchain nacl_arm_newlib'
' toolchain_build',
'nacl-toolchain-mac-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --trybot'
' toolchain_build',
'nacl-toolchain-win7-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' --trybot'
' toolchain_build',
'nacl-toolchain-precise64-glibc':
bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'nacl-toolchain-mac-glibc':
bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'nacl-toolchain-win7-glibc':
'buildbot\\buildbot_windows-glibc-makefile.bat',
# Pnacl toolchain trybots.
'nacl-toolchain-linux-pnacl-x86_32':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-32',
'nacl-toolchain-linux-pnacl-x86_64':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-64',
'nacl-toolchain-linux-pnacl-mips': echo + ' "TODO(mseaborn)"',
'nacl-toolchain-mac-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-win7-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
# Sanitizer Pnacl toolchain trybots.
'nacl-toolchain-asan':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-64 '
' --sanitize address --skip-tests',
# TODO(kschimpf): Bot is currently broken: --sanitize memory not understood.
'nacl-toolchain-msan':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-64 '
' --sanitize memory --skip-tests',
# TODO(kschimpf): Bot is currently broken: --sanitize thread not understood.
'nacl-toolchain-tsan':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-64 '
' --sanitize thread --skip-tests',
# TODO(kschimpf): Bot is currently broken: --sanitize undefined not understood.
'nacl-toolchain-ubsan':
python +
' buildbot/buildbot_pnacl_toolchain.py --trybot --tests-arch x86-64 '
' --sanitize undefined --skip-tests',
}
special_for_arm = [
'win7_64',
'win7-64',
'lucid-64',
'lucid64',
'precise-64',
'precise64'
]
for platform in [
'vista', 'win7', 'win8', 'win',
'mac10.6', 'mac10.7', 'mac10.8',
'lucid', 'precise'] + special_for_arm:
if platform in special_for_arm:
arch_variants = ['arm']
else:
arch_variants = ['', '32', '64', 'arm']
for arch in arch_variants:
arch_flags = ''
real_arch = arch
arch_part = '-' + arch
# Disable GYP build for win32 bots and arm cross-builders. In this case
# "win" means Windows XP, not Vista, Windows 7, etc.
#
# Building via GYP always builds all toolchains by default, but the win32
# XP pnacl builds are pathologically slow (e.g. ~38 seconds per compile on
# the nacl-win32_glibc_opt trybot). There are other builders that test
# Windows builds via gyp, so the reduced test coverage should be slight.
if arch == 'arm' or (platform == 'win' and arch == '32'):
arch_flags += ' --no-gyp'
if platform == 'win7' and arch == '32':
arch_flags += ' --no-goma'
if arch == '':
arch_part = ''
real_arch = '32'
# Test with Breakpad tools only on basic Linux builds.
if sys.platform.startswith('linux'):
arch_flags += ' --use-breakpad-tools'
for mode in ['dbg', 'opt']:
for libc in ['newlib', 'glibc']:
# Buildbots.
for bare in ['', '-bare']:
name = platform + arch_part + bare + '-' + libc + '-' + mode
assert name not in BOT_ASSIGNMENT, name
BOT_ASSIGNMENT[name] = (
python + ' buildbot/buildbot_standard.py ' +
mode + ' ' + real_arch + ' ' + libc + arch_flags)
# Trybots
for arch_sep in ['', '-', '_']:
name = 'nacl-' + platform + arch_sep + arch + '_' + libc + '_' + mode
assert name not in BOT_ASSIGNMENT, name
BOT_ASSIGNMENT[name] = (
python + ' buildbot/buildbot_standard.py ' +
mode + ' ' + real_arch + ' ' + libc + arch_flags)
def EscapeJson(data):
return '"' + json.dumps(data).replace('"', r'\"') + '"'
def HasNoPerfResults(builder):
if 'pnacl-buildonly-spec' in builder:
return True
return builder in [
'mac-toolchain_arm',
'win-pnacl-x86_32',
'linux-pnacl-x86_32-tests-mips',
'precise64-toolchain_bionic',
]
def Main():
builder = os.environ.get('BUILDBOT_BUILDERNAME')
build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
build_revision = os.environ.get('BUILDBOT_GOT_REVISION',
os.environ.get('BUILDBOT_REVISION'))
slave_type = os.environ.get('BUILDBOT_SLAVE_TYPE')
cmd = BOT_ASSIGNMENT.get(builder)
if not cmd:
sys.stderr.write('ERROR - unset/invalid builder name\n')
sys.exit(1)
env = os.environ.copy()
# Don't write out .pyc files because in cases in which files move around or
# the PYTHONPATH / sys.path change, old .pyc files can be mistakenly used.
# This avoids the need for admin changes on the bots in this case.
env['PYTHONDONTWRITEBYTECODE'] = '1'
# Use .boto file from home-dir instead of buildbot supplied one.
if 'AWS_CREDENTIAL_FILE' in env:
del env['AWS_CREDENTIAL_FILE']
alt_boto = os.path.expanduser('~/.boto')
if os.path.exists(alt_boto):
env['BOTO_CONFIG'] = alt_boto
cwd_drive = os.path.splitdrive(os.getcwd())[0]
env['GSUTIL'] = cwd_drive + '/b/build/third_party/gsutil/gsutil'
# When running from cygwin, we sometimes want to use a native python.
# The native python will use the depot_tools version by invoking python.bat.
if pynacl.platform.IsWindows():
env['NATIVE_PYTHON'] = 'python.bat'
else:
env['NATIVE_PYTHON'] = 'python'
if sys.platform == 'win32':
# If the temp directory is not on the same drive as the working directory,
# there can be random failures when cleaning up temp directories, so use
# a directory on the current drive. Use __file__ here instead of os.getcwd()
# because toolchain_main picks its working directories relative to __file__
filedrive, _ = os.path.splitdrive(__file__)
tempdrive, _ = os.path.splitdrive(env['TEMP'])
if tempdrive != filedrive:
env['TEMP'] = filedrive + '\\temp'
env['TMP'] = env['TEMP']
if not os.path.exists(env['TEMP']):
os.mkdir(env['TEMP'])
# Run through runtest.py to get upload of perf data.
build_properties = {
'buildername': builder,
'mastername': 'client.nacl',
'buildnumber': str(build_number),
}
factory_properties = {
'perf_id': builder,
'show_perf_results': True,
'step_name': 'naclperf', # Seems unused, but is required.
'test_name': 'naclperf', # Really "Test Suite"
}
  # Locate the buildbot build directory by relative path, as its absolute
  # location varies by platform and configuration.
buildbot_build_dir = os.path.join(* [os.pardir] * 4)
runtest = os.path.join(buildbot_build_dir, 'scripts', 'slave', 'runtest.py')
# For builds with an actual build number, require that the script is present
# (i.e. that we're run from an actual buildbot).
if build_number is not None and not os.path.exists(runtest):
raise Exception('runtest.py script not found at: %s\n' % runtest)
cmd_exe = cmd.split(' ')[0]
cmd_exe_ext = os.path.splitext(cmd_exe)[1]
# Do not wrap these types of builds with runtest.py:
# - tryjobs
# - commands beginning with 'echo '
# - batch files
# - debug builders
# - builds with no perf tests
if not (slave_type == 'Trybot' or
cmd_exe == echo or
cmd_exe_ext == '.bat' or
'-dbg' in builder or
HasNoPerfResults(builder)):
# Perf dashboards are now generated by output scraping that occurs in the
# script runtest.py, which lives in the buildbot repository.
# Non-trybot builds should be run through runtest, allowing it to upload
# perf data if relevant.
cmd = ' '.join([
python, runtest,
'--revision=' + build_revision,
'--build-dir=src/out',
'--results-url=https://chromeperf.appspot.com',
'--annotate=graphing',
'--no-xvfb', # We provide our own xvfb invocation.
'--factory-properties', EscapeJson(factory_properties),
'--build-properties', EscapeJson(build_properties),
cmd,
])
print "%s runs: %s\n" % (builder, cmd)
retcode = subprocess.call(cmd, env=env, shell=True)
sys.exit(retcode)
if __name__ == '__main__':
Main()
| bsd-3-clause | 1,836,903,225,074,248,400 | 38.890792 | 83 | 0.617854 | false |
omemo/python-omemo | src/omemo/liteprekeystore.py | 1 | 2577 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Tarek Galal <[email protected]>
#
# This file is part of Gajim-OMEMO plugin.
#
# The Gajim-OMEMO plugin is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# Gajim-OMEMO is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# the Gajim-OMEMO plugin. If not, see <http://www.gnu.org/licenses/>.
#
from axolotl.state.prekeyrecord import PreKeyRecord
from axolotl.state.prekeystore import PreKeyStore
class LitePreKeyStore(PreKeyStore):
def __init__(self, dbConn):
"""
:type dbConn: Connection
"""
self.dbConn = dbConn
dbConn.execute("CREATE TABLE IF NOT EXISTS prekeys(" +
"_id INTEGER PRIMARY KEY AUTOINCREMENT," +
"prekey_id INTEGER UNIQUE, sent_to_server BOOLEAN, " +
" record BLOB);")
def loadPreKey(self, preKeyId):
q = "SELECT record FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
result = cursor.fetchone()
if not result:
raise Exception("No such prekeyRecord!")
return PreKeyRecord(serialized=result[0])
def loadPendingPreKeys(self):
q = "SELECT record FROM prekeys"
cursor = self.dbConn.cursor()
cursor.execute(q)
result = cursor.fetchall()
return [PreKeyRecord(serialized=r[0]) for r in result]
def storePreKey(self, preKeyId, preKeyRecord):
# self.removePreKey(preKeyId)
q = "INSERT INTO prekeys (prekey_id, record) VALUES(?,?)"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, preKeyRecord.serialize()))
self.dbConn.commit()
def containsPreKey(self, preKeyId):
q = "SELECT record FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
return cursor.fetchone() is not None
def removePreKey(self, preKeyId):
q = "DELETE FROM prekeys WHERE prekey_id = ?"
cursor = self.dbConn.cursor()
cursor.execute(q, (preKeyId, ))
self.dbConn.commit()
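# Minimal usage sketch (illustrative only, not part of the original module). It
# assumes a sqlite3 connection and a PreKeyRecord produced elsewhere, e.g. by
# axolotl's KeyHelper:
#   store = LitePreKeyStore(sqlite3.connect('omemo.db'))
#   store.storePreKey(record.getId(), record)
#   assert store.containsPreKey(record.getId())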
| gpl-3.0 | 4,332,987,200,107,825,700 | 34.791667 | 80 | 0.648428 | false |
nitmir/django-cas-server | cas_server/admin.py | 1 | 6813 | # This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License version 3 for
# more details.
#
# You should have received a copy of the GNU General Public License version 3
# along with this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# (c) 2015-2016 Valentin Samir
"""module for the admin interface of the app"""
from .default_settings import settings
from django.contrib import admin
from .models import ServiceTicket, ProxyTicket, ProxyGrantingTicket, User, ServicePattern
from .models import Username, ReplaceAttributName, ReplaceAttributValue, FilterAttributValue
from .models import FederatedIendityProvider, FederatedUser, UserAttributes
from .forms import TicketForm
class BaseInlines(admin.TabularInline):
"""
Bases: :class:`django.contrib.admin.TabularInline`
Base class for inlines in the admin interface.
"""
#: This controls the number of extra forms the formset will display in addition to
#: the initial forms.
extra = 0
class UserAdminInlines(BaseInlines):
"""
Bases: :class:`BaseInlines`
Base class for inlines in :class:`UserAdmin` interface
"""
#: The form :class:`TicketForm<cas_server.forms.TicketForm>` used to display tickets.
form = TicketForm
#: Fields to display on a object that are read only (not editable).
readonly_fields = (
'validate', 'service', 'service_pattern',
'creation', 'renew', 'single_log_out', 'value'
)
#: Fields to display on a object.
fields = (
'validate', 'service', 'service_pattern',
'creation', 'renew', 'single_log_out'
)
class ServiceTicketInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ServiceTicket<cas_server.models.ServiceTicket>` in admin interface
"""
#: The model which the inline is using.
model = ServiceTicket
class ProxyTicketInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ProxyTicket<cas_server.models.ProxyTicket>` in admin interface
"""
#: The model which the inline is using.
model = ProxyTicket
class ProxyGrantingInline(UserAdminInlines):
"""
Bases: :class:`UserAdminInlines`
:class:`ProxyGrantingTicket<cas_server.models.ProxyGrantingTicket>` in admin interface
"""
#: The model which the inline is using.
model = ProxyGrantingTicket
class UserAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`User<cas_server.models.User>` in admin interface
"""
#: See :class:`ServiceTicketInline`, :class:`ProxyTicketInline`, :class:`ProxyGrantingInline`
#: objects below the :class:`UserAdmin` fields.
inlines = (ServiceTicketInline, ProxyTicketInline, ProxyGrantingInline)
#: Fields to display on a object that are read only (not editable).
readonly_fields = ('username', 'date', "session_key")
#: Fields to display on a object.
fields = ('username', 'date', "session_key")
#: Fields to display on the list of class:`UserAdmin` objects.
list_display = ('username', 'date', "session_key")
class UsernamesInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`Username<cas_server.models.Username>` in admin interface
"""
#: The model which the inline is using.
model = Username
class ReplaceAttributNameInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`ReplaceAttributName<cas_server.models.ReplaceAttributName>` in admin interface
"""
#: The model which the inline is using.
model = ReplaceAttributName
class ReplaceAttributValueInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`ReplaceAttributValue<cas_server.models.ReplaceAttributValue>` in admin interface
"""
#: The model which the inline is using.
model = ReplaceAttributValue
class FilterAttributValueInline(BaseInlines):
"""
Bases: :class:`BaseInlines`
:class:`FilterAttributValue<cas_server.models.FilterAttributValue>` in admin interface
"""
#: The model which the inline is using.
model = FilterAttributValue
class ServicePatternAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`ServicePattern<cas_server.models.ServicePattern>` in admin interface
"""
#: See :class:`UsernamesInline`, :class:`ReplaceAttributNameInline`,
#: :class:`ReplaceAttributValueInline`, :class:`FilterAttributValueInline` objects below
#: the :class:`ServicePatternAdmin` fields.
inlines = (
UsernamesInline,
ReplaceAttributNameInline,
ReplaceAttributValueInline,
FilterAttributValueInline
)
#: Fields to display on the list of class:`ServicePatternAdmin` objects.
list_display = ('pos', 'name', 'pattern', 'proxy',
'single_log_out', 'proxy_callback', 'restrict_users')
class FederatedIendityProviderAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`FederatedIendityProvider<cas_server.models.FederatedIendityProvider>` in admin
interface
"""
#: Fields to display on a object.
fields = ('pos', 'suffix', 'server_url', 'cas_protocol_version', 'verbose_name', 'display')
#: Fields to display on the list of class:`FederatedIendityProviderAdmin` objects.
list_display = ('verbose_name', 'suffix', 'display')
class FederatedUserAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`FederatedUser<cas_server.models.FederatedUser>` in admin
interface
"""
#: Fields to display on a object.
fields = ('username', 'provider', 'last_update')
#: Fields to display on the list of class:`FederatedUserAdmin` objects.
list_display = ('username', 'provider', 'last_update')
class UserAttributesAdmin(admin.ModelAdmin):
"""
Bases: :class:`django.contrib.admin.ModelAdmin`
:class:`UserAttributes<cas_server.models.UserAttributes>` in admin
interface
"""
#: Fields to display on a object.
fields = ('username', '_attributs')
admin.site.register(ServicePattern, ServicePatternAdmin)
admin.site.register(FederatedIendityProvider, FederatedIendityProviderAdmin)
if settings.DEBUG: # pragma: no branch (we always test with DEBUG True)
admin.site.register(User, UserAdmin)
admin.site.register(FederatedUser, FederatedUserAdmin)
admin.site.register(UserAttributes, UserAttributesAdmin)
| gpl-3.0 | 5,034,193,972,385,483,000 | 32.895522 | 97 | 0.693821 | false |
classner/fertilized-devtools | binding_generator/ordered_set.py | 1 | 1936 | # See http://code.activestate.com/recipes/576694/.
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def update(self, setvalues):
for key in setvalues:
self.add(key)
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
| bsd-2-clause | -3,656,923,377,244,034,600 | 25.520548 | 78 | 0.497934 | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/services/types/feed_item_target_service.py | 1 | 5926 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import response_content_type as gage_response_content_type
from google.ads.googleads.v7.resources.types import feed_item_target as gagr_feed_item_target
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.services',
marshal='google.ads.googleads.v7',
manifest={
'GetFeedItemTargetRequest',
'MutateFeedItemTargetsRequest',
'FeedItemTargetOperation',
'MutateFeedItemTargetsResponse',
'MutateFeedItemTargetResult',
},
)
class GetFeedItemTargetRequest(proto.Message):
r"""Request message for
[FeedItemTargetService.GetFeedItemTarget][google.ads.googleads.v7.services.FeedItemTargetService.GetFeedItemTarget].
Attributes:
resource_name (str):
Required. The resource name of the feed item
targets to fetch.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
class MutateFeedItemTargetsRequest(proto.Message):
r"""Request message for
[FeedItemTargetService.MutateFeedItemTargets][google.ads.googleads.v7.services.FeedItemTargetService.MutateFeedItemTargets].
Attributes:
customer_id (str):
Required. The ID of the customer whose feed
item targets are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.FeedItemTargetOperation]):
Required. The list of operations to perform
on individual feed item targets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(
proto.STRING,
number=1,
)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='FeedItemTargetOperation',
)
partial_failure = proto.Field(
proto.BOOL,
number=4,
)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
validate_only = proto.Field(
proto.BOOL,
number=3,
)
class FeedItemTargetOperation(proto.Message):
r"""A single operation (create, remove) on an feed item target.
Attributes:
create (google.ads.googleads.v7.resources.types.FeedItemTarget):
Create operation: No resource name is
expected for the new feed item target.
remove (str):
Remove operation: A resource name for the removed feed item
target is expected, in this format:
``customers/{customer_id}/feedItemTargets/{feed_id}~{feed_item_id}~{feed_item_target_type}~{feed_item_target_id}``
"""
create = proto.Field(
proto.MESSAGE,
number=1,
oneof='operation',
message=gagr_feed_item_target.FeedItemTarget,
)
remove = proto.Field(
proto.STRING,
number=2,
oneof='operation',
)
class MutateFeedItemTargetsResponse(proto.Message):
r"""Response message for an feed item target mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateFeedItemTargetResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE,
number=3,
message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MutateFeedItemTargetResult',
)
class MutateFeedItemTargetResult(proto.Message):
r"""The result for the feed item target mutate.
Attributes:
resource_name (str):
Returned for successful operations.
feed_item_target (google.ads.googleads.v7.resources.types.FeedItemTarget):
The mutated feed item target with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
feed_item_target = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_feed_item_target.FeedItemTarget,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 2,457,467,938,042,463,700 | 32.862857 | 128 | 0.662842 | false |
ifaoe/daisi-tk | daisi_images.py | 1 | 5255 | #!/usr/bin/python3
import logging
import psycopg2
from argparse import ArgumentParser
from gdal_tif2geo import process
import multiprocessing
import subprocess
from joblib import Parallel, delayed
from math import ceil
import tempfile
import os
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# function which is called in parallel
def parallel_process(row, linco_path, linco_args, threads, overwrite, temppath, compress, opencl):
# split row from database query into single variables
[epsg, iiq_file, geo_file, ne_x, ne_y, nw_x, nw_y, sw_x, sw_y, se_x, se_y] = row
if not overwrite:
if os.path.isfile(geo_file) and os.path.exists(geo_file):
print('{file} already exists.'.format(file=geo_file))
return
print("Processing {0} -> {1}".format(iiq_file, geo_file))
# convert iiq -> tiff
# create temporary file
temp_file = tempfile.NamedTemporaryFile()
# run linco
    # Join into a single shell command string: with shell=True a sequence would
    # only execute its first element ('nice') and drop the linco invocation.
    linco_command = ' '.join(('nice', '-n', '19', linco_path, iiq_file, temp_file.name,
                              '-cputhreads={threads}'.format(threads=threads), linco_args))
    logger.debug(linco_command)
    linco_log = subprocess.run(linco_command, shell=True, check=True, stdout=subprocess.PIPE).stdout.decode('utf8')
logger.debug(linco_log)
# create geotiff
process(temp_file.name, geo_file, [ne_x, ne_y], [nw_x, nw_y], [se_x, se_y], [sw_x, sw_y], threads,
0.02, compress, 95, 'lanczos', epsg, [256, 256], args.verbose, opencl, overwrite, temppath)
if __name__ == '__main__':
parser = ArgumentParser(description='Georeference DAISI images from tif.')
parser.add_argument('-v', '--verbose', action='store_true', help='Verbosity.')
parser.add_argument('-s', '--session', type=str, default='.*', help='Session pattern (default: .*).')
parser.add_argument('-t', '--transect', type=str, default='.*', help='Transect pattern (default: .*).')
parser.add_argument('-c', '--camera', type=str, default='.*', help='Camera pattern (default: .*).')
parser.add_argument('-i', '--image', type=str, default='.*', help='Image pattern (default: .*).')
parser.add_argument('-H', '--host', type=str, default='127.0.0.1', help='Database host (default: 127.0.0.1).')
parser.add_argument('-d', '--database', type=str, default='daisi', help='Database name (default: DAISI).')
parser.add_argument('-u', '--user', type=str, default='daisi', help='Database user (default: DAISI).')
parser.add_argument('-P', '--password', type=str, default='18ifaoe184', help='Database password.')
parser.add_argument('-p', '--port', type=str, default='5432', help='Database port (default: 5432).')
parser.add_argument('-l', '--location', type=str, default='rostock', help='Image data location (default: rostock)')
parser.add_argument('-o', '--overwrite', action='store_true', help='Overwrite image if it already exists.')
parser.add_argument('--linco-path', type=str, default='/usr/local/bin/linco', help='Location of linco executable.')
parser.add_argument('--linco-args', type=str, default='-bits=16 -shadowRecovery=75 -highlightRecovery=75',
help='Set linco arguments (default: -bits=16 -shadowRecovery=75 -highlightRecovery=75).')
parser.add_argument('--linco-help', action='store_true', help='Get linco help (overwrites all other arguments).')
parser.add_argument('--temp-path', type=str, help='Path for temporary files')
parser.add_argument('--compress', action='store_true', help='Enable JPEG compression (default: off).')
parser.add_argument('--opencl', action='store_true', help='Enable OpenCL (default: off, requires working OpenCL setup.).')
args = parser.parse_args()
if args.linco_help:
subprocess.run([args.linco_path, '--help'])
exit(1)
if args.verbose:
logger.setLevel(logging.DEBUG)
# connecting to database
connection = psycopg2.connect(database=args.database, host=args.host, port=args.port, user=args.user, password=args.password)
cursor = connection.cursor()
cursor.execute("SELECT epsg, iiq_path, geo_path, ne_x, ne_y, nw_x, nw_y, sw_x, sw_y, se_x, se_y FROM daisi_dev.gdal_images "
"WHERE location=%s AND session~%s AND transect~%s AND camera~%s AND image~%s",
(args.location, args.session, args.transect, args.camera, args.image))
rows = cursor.fetchall()
row_count = len(rows)
if row_count == 0:
logger.critical('No images match the query {0}'.format(cursor.query))
exit(1)
logger.debug('{0} images match the query {1}'.format(row_count, cursor.query))
connection.commit()
cpu_count = multiprocessing.cpu_count()
thread_count = min(cpu_count, ceil(cpu_count/row_count))
process_count = min(cpu_count, ceil(cpu_count/thread_count))
logger.debug('Found {0} CPUs. Using {1} processes with {2} thread(s) each.'.format(cpu_count, process_count, thread_count))
Parallel(n_jobs=process_count)(delayed(parallel_process)
(
row, args.linco_path, args.linco_args, thread_count, args.overwrite, args.temp_path, args.compress, args.opencl
) for row in rows)
| gpl-2.0 | 3,208,551,945,965,820,400 | 52.622449 | 150 | 0.654234 | false |
fnoorian/Free-buck-boost | drivers/json_server.py | 1 | 1329 | from werkzeug.wrappers import Request, Response
from werkzeug.serving import run_simple
# this uses the json-rpc package (not jsonrpc!)
from jsonrpc import JSONRPCResponseManager, dispatcher
from drivers.boost_driver import FCCBoostDriver
from drivers.buck_driver import FCCBuckDriver, FCCMPPTDriver
from drivers.mighty_driver import MightyWattDriver
@dispatcher.add_method
def get_version():
version = ["fcc_json_server", 1]
return version
@Request.application
def application(request):
dispatcher["mightywatt_readstatus"] = mightywatt.read_status
dispatcher["mightywatt_setpower"] = mightywatt.set_power
dispatcher["charger_readstatus"] = charger.read_status
dispatcher["discharger_readstatus"] = discharger.read_status
dispatcher["mppt_readstatus"] = mppt.read_status
response = JSONRPCResponseManager.handle(
request.data, dispatcher)
return Response(response.json, mimetype='application/json')
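# Example JSON-RPC request body handled by this WSGI app (illustrative values;
# the real parameters are whatever the registered driver methods accept):
#   {"jsonrpc": "2.0", "method": "mightywatt_readstatus", "params": [], "id": 1}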
if __name__ == '__main__':
mightywatt = MightyWattDriver(u'8533434373835120D1C2')
charger = FCCBoostDriver(u'75439333635351719221')
discharger = FCCBuckDriver(u'75439333635351712071')
mppt = FCCMPPTDriver(u'75439333635351918140')
#run_simple('localhost', 4000, application)
run_simple('0.0.0.0', 4002, application)
| bsd-2-clause | -2,386,581,520,251,007,000 | 32.973684 | 64 | 0.734387 | false |
googleinterns/cabby | cabby/model/datasets.py | 1 | 4391 | # coding=utf-8
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl import logging
import os
import pandas as pd
from sklearn.utils import shuffle
from cabby.geo import regions
from cabby.geo import util as gutil
class RUNDataset:
def __init__(self, data_dir: str, s2level: int, lines: bool = False):
train_ds, valid_ds, test_ds, ds = self.load_data(data_dir, lines=lines)
# Get labels.
map_1 = regions.get_region("RUN-map1")
map_2 = regions.get_region("RUN-map2")
map_3 = regions.get_region("RUN-map3")
logging.info(map_1.polygon.wkt)
logging.info(map_2.polygon.wkt)
logging.info(map_3.polygon.wkt)
unique_cellid_map_1 = gutil.cellids_from_polygon(map_1.polygon, s2level)
unique_cellid_map_2 = gutil.cellids_from_polygon(map_2.polygon, s2level)
unique_cellid_map_3 = gutil.cellids_from_polygon(map_3.polygon, s2level)
unique_cellid = (
unique_cellid_map_1 + unique_cellid_map_2 + unique_cellid_map_3)
label_to_cellid = {idx: cellid for idx, cellid in enumerate(unique_cellid)}
cellid_to_label = {cellid: idx for idx, cellid in enumerate(unique_cellid)}
self.train = train_ds
self.valid = valid_ds
self.test = test_ds
self.ds = ds
self.unique_cellid = unique_cellid
self.label_to_cellid = label_to_cellid
self.cellid_to_label = cellid_to_label
def load_data(self, data_dir: str, lines: bool):
ds = pd.read_json(os.path.join(data_dir, 'dataset.json'), lines=lines)
ds['instructions'] = ds.groupby(
['id'])['instruction'].transform(lambda x: ' '.join(x))
ds = ds.drop_duplicates(subset='id', keep="last")
    columns_to_drop = ds.columns.difference(
      ['map', 'id', 'instructions', 'end_point', 'start_point'])
    ds.drop(columns_to_drop, 1, inplace=True)
ds = shuffle(ds)
ds.reset_index(inplace=True, drop=True)
dataset_size = ds.shape[0]
logging.info(f"Size of dataset: {ds.shape[0]}")
train_size = round(dataset_size * 80 / 100)
valid_size = round(dataset_size * 10 / 100)
train_ds = ds.iloc[:train_size]
valid_ds = ds.iloc[train_size:train_size + valid_size]
test_ds = ds.iloc[train_size + valid_size:]
return train_ds, valid_ds, test_ds, ds
class RVSDataset:
def __init__(self, data_dir: str, s2level: int, region: str, lines: bool = True):
ds = pd.read_json(os.path.join(data_dir, 'dataset.json'), lines=lines)
logging.info(f"Size of dataset before removal of duplication: {ds.shape[0]}")
ds = pd.concat([ds.drop(['geo_landmarks'], axis=1), ds['geo_landmarks'].apply(pd.Series)], axis=1)
lengths = ds.end_point.apply(lambda x: x if len(x) == 3 else "").tolist()
ds['end_osmid'] = ds.end_point.apply(lambda x: x[1])
ds['start_osmid'] = ds.start_point.apply(lambda x: x[1])
ds['end_pivot'] = ds.end_point
ds['end_point'] = ds.end_point.apply(lambda x: x[3])
ds['start_point'] = ds.start_point.apply(lambda x: x[3])
ds = ds.drop_duplicates(subset=['end_osmid', 'start_osmid'], keep='last')
logging.info(f"Size of dataset after removal of duplication: {ds.shape[0]}")
dataset_size = ds.shape[0]
train_size = round(dataset_size * 80 / 100)
valid_size = round(dataset_size * 10 / 100)
train_ds = ds.iloc[:train_size]
valid_ds = ds.iloc[train_size:train_size + valid_size]
test_ds = ds.iloc[train_size + valid_size:]
# Get labels.
active_region = regions.get_region(region)
unique_cellid = gutil.cellids_from_polygon(active_region.polygon, s2level)
label_to_cellid = {idx: cellid for idx, cellid in enumerate(unique_cellid)}
cellid_to_label = {cellid: idx for idx, cellid in enumerate(unique_cellid)}
self.train = train_ds
self.valid = valid_ds
self.test = test_ds
self.unique_cellid = unique_cellid
self.label_to_cellid = label_to_cellid
self.cellid_to_label = cellid_to_label
| apache-2.0 | 5,425,208,701,046,947,000 | 37.858407 | 102 | 0.675473 | false |
0sw4l/villas-de-san-pablo | apps/utils/views.py | 1 | 1899 | from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import CreateView
from django.views.generic import ListView
from django.views.generic import TemplateView
from django.views.generic import UpdateView, DetailView
from apps.utils.shortcuts import get_object_or_none
class BaseListView(LoginRequiredMixin, ListView):
pass
class BaseCreateView(LoginRequiredMixin, CreateView):
template_name = 'apps/base/base_form.html'
def get_context_data(self, **kwargs):
context = super(BaseCreateView, self).get_context_data(**kwargs)
context['action'] = 'Crear'
return context
class BaseListViewDinamicHeader(LoginRequiredMixin, ListView):
context_object_name = "list"
query_fields = ()
HEADER = None
def __init__(self):
super(BaseListViewDinamicHeader, self).__init__()
self.HEADER += ('Acciones',)
def get_queryset(self):
return self.model.objects.all()
def get_context_data(self, **kwargs):
context = super(BaseListViewDinamicHeader, self).get_context_data(**kwargs)
context['header_table'] = self.get_header_table()
return context
def get_header_table(self):
return self.HEADER
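# Hypothetical subclass sketch (model and header names are illustrative, not from
# this project); note that __init__ appends an 'Acciones' column automatically:
#   class PersonaListView(BaseListViewDinamicHeader):
#       model = Persona
#       HEADER = ('Nombre', 'Direccion')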
class DirectDeleteMixin(object):
def get(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
class BaseUpdateView(LoginRequiredMixin, UpdateView):
template_name = 'apps/base/base_form.html'
def get_context_data(self, **kwargs):
context = super(BaseUpdateView, self).get_context_data(**kwargs)
context['action'] = 'Modificar'
return context
def get_object(self, queryset=None):
obj = self.model.objects.get(id=self.kwargs['pk'])
return obj
class BaseTemplateView(LoginRequiredMixin, TemplateView):
pass
class BaseDetailView(LoginRequiredMixin, DetailView):
pass
| mit | -484,693,667,937,133,630 | 27.772727 | 83 | 0.699315 | false |
tekton/DocuCanvas | accounts/migrations/0007_auto__add_recordpermission__add_unique_recordpermission_contentType_us.py | 1 | 8125 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RecordPermission'
db.create_table(u'accounts_recordpermission', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('contentType', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('recordID', self.gf('django.db.models.fields.IntegerField')()),
('canView', self.gf('django.db.models.fields.BooleanField')(default=False)),
('canUpdate', self.gf('django.db.models.fields.BooleanField')(default=False)),
('canDelete', self.gf('django.db.models.fields.BooleanField')(default=False)),
('viewableFields', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True)),
('updatableFields', self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True)),
))
db.send_create_signal(u'accounts', ['RecordPermission'])
# Adding unique constraint on 'RecordPermission', fields ['contentType', 'user', 'recordID']
db.create_unique(u'accounts_recordpermission', ['contentType_id', 'user_id', 'recordID'])
def backwards(self, orm):
# Removing unique constraint on 'RecordPermission', fields ['contentType', 'user', 'recordID']
db.delete_unique(u'accounts_recordpermission', ['contentType_id', 'user_id', 'recordID'])
# Deleting model 'RecordPermission'
db.delete_table(u'accounts_recordpermission')
models = {
u'accounts.account': {
'Meta': {'object_name': 'Account'},
'avatar': ('django.db.models.fields.CharField', [], {'default': "'/static/img/pony.png'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'git_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'github_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'google_plus': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.googleaccount': {
'Meta': {'object_name': 'GoogleAccount'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Account']", 'null': 'True'}),
'account_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'credentials': ('oauth2client.django_orm.CredentialsField', [], {'null': 'True'}),
'google_account_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'accounts.recordpermission': {
'Meta': {'unique_together': "(('contentType', 'user', 'recordID'),)", 'object_name': 'RecordPermission'},
'canDelete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canUpdate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'canView': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'contentType': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recordID': ('django.db.models.fields.IntegerField', [], {}),
'updatableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'viewableFields': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['accounts'] | gpl-3.0 | -734,166,846,986,416,400 | 72.872727 | 187 | 0.571323 | false |
Jumpscale/jumpscale_portal8 | apps/portalbase/AYS81/.macros/wiki/aysservice/3_aysservice.py | 1 | 2580 | from collections import OrderedDict
def main(j, args, params, tags, tasklet):
try:
role = args.getTag('aysrole')
name = args.getTag('aysname')
ayspath = args.getTag('ayspath') or ''
repo = j.atyourservice.repoGet(ayspath)
service = repo.serviceGet(role, name, die=False)
if service:
prods = {}
for prod_role, producers in service.producers.items():
prods.setdefault(prod_role, [])
for producer in producers:
prods[prod_role].append('[{name}|/ays81/Service?aysrole={role}&aysname={name}&ayspath={path}]'.format(
role=prod_role, path=ayspath, name=producer.model.dbobj.name))
parent = {}
if service.parent is not None:
parent['role'] = service.parent.model.role
parent['link'] = '[{name}|/ays81/Service?aysrole={role}&aysname={name}&ayspath={path}]'.format(
role=service.parent.model.role, path=ayspath, name=service.parent.model.dbobj.name)
link_to_template = ('[%s|ays81/ActorTemplate?ayspath=%s&aysname=%s]' % (role,
ayspath, role))
# we prepend service path with '$codedir' to make it work in the explorer.
# because of this line :
# https://github.com/Jumpscale/jumpscale_portal8/blob/master/apps/portalbase/macros/page/explorer/1_main.py#L25
hidden = ['key.priv', 'password', 'passwd', 'pwd', 'oauth.jwt_key', 'keyPriv']
data = j.data.serializer.json.loads(service.model.dataJSON)
data_revised = dict()
for k, v in data.items():
if k.strip() in hidden:
continue
else:
data_revised[k] = v.replace('\\n', '') if isinstance(v, str) else v
args.doc.applyTemplate({
'service': service,
'type': link_to_template,
'data': data_revised,
'name': name,
'role': role,
'producers': OrderedDict(sorted(prods.items())),
'parent': parent,
'actions': service.model.actions,
'reponame': service.aysrepo.name,
})
else:
args.doc.applyTemplate({'error': 'service not found'})
except Exception as e:
args.doc.applyTemplate({'error': e.__str__()})
params.result = (args.doc, args.doc)
return params
| apache-2.0 | 1,651,762,895,446,285,600 | 41.295082 | 123 | 0.526357 | false |
DailyActie/Surrogate-Model | 01-codes/scikit-learn-master/sklearn/externals/funcsigs.py | 1 | 29802 | # Copyright 2001-2013 Python Software Foundation; All Rights Reserved
"""Function signature objects for callables
Backport of Python 3.3's function signature tools from the inspect module,
modified to be compatible with Python 2.6, 2.7 and 3.2+.
"""
from __future__ import absolute_import, division, print_function
import functools
import itertools
import re
import types
try:
from collections import OrderedDict
except ImportError:
from .odict import OrderedDict
__version__ = "0.4"
__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature']
_WrapperDescriptor = type(type.__call__)
_MethodWrapper = type(all.__call__)
_NonUserDefinedCallables = (_WrapperDescriptor,
_MethodWrapper,
types.BuiltinFunctionType)
def formatannotation(annotation, base_module=None):
if isinstance(annotation, type):
if annotation.__module__ in ('builtins', '__builtin__', base_module):
return annotation.__name__
return annotation.__module__ + '.' + annotation.__name__
return repr(annotation)
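# Illustrative behaviour sketch (added commentary, not part of the original
# funcsigs code), assuming OrderedDict was imported from collections:
#
#   >>> formatannotation(int)
#   'int'
#   >>> formatannotation(OrderedDict)
#   'collections.OrderedDict'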
def _get_user_defined_method(cls, method_name, *nested):
try:
if cls is type:
return
meth = getattr(cls, method_name)
for name in nested:
meth = getattr(meth, name, meth)
except AttributeError:
return
else:
if not isinstance(meth, _NonUserDefinedCallables):
# Once '__signature__' will be added to 'C'-level
# callables, this check won't be necessary
return meth
def signature(obj):
'''Get a signature object for the passed callable.'''
if not callable(obj):
raise TypeError('{0!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
sig = signature(obj.__func__)
if obj.__self__ is None:
# Unbound method: the first parameter becomes positional-only
if sig.parameters:
first = sig.parameters.values()[0].replace(
kind=_POSITIONAL_ONLY)
return sig.replace(
parameters=(first,) + tuple(sig.parameters.values())[1:])
else:
return sig
else:
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
try:
sig = obj.__signature__
except AttributeError:
pass
else:
if sig is not None:
return sig
try:
# Was this function wrapped by a decorator?
wrapped = obj.__wrapped__
except AttributeError:
pass
else:
return signature(wrapped)
if isinstance(obj, types.FunctionType):
return Signature.from_function(obj)
if isinstance(obj, functools.partial):
sig = signature(obj.func)
new_params = OrderedDict(sig.parameters.items())
partial_args = obj.args or ()
partial_keywords = obj.keywords or {}
try:
ba = sig.bind_partial(*partial_args, **partial_keywords)
except TypeError as ex:
msg = 'partial object {0!r} has incorrect arguments'.format(obj)
raise ValueError(msg)
for arg_name, arg_value in ba.arguments.items():
param = new_params[arg_name]
if arg_name in partial_keywords:
# We set a new default value, because the following code
# is correct:
#
# >>> def foo(a): print(a)
# >>> print(partial(partial(foo, a=10), a=20)())
# 20
# >>> print(partial(partial(foo, a=10), a=20)(a=30))
# 30
#
# So, with 'partial' objects, passing a keyword argument is
# like setting a new default value for the corresponding
# parameter
#
# We also mark this parameter with '_partial_kwarg'
# flag. Later, in '_bind', the 'default' value of this
# parameter will be added to 'kwargs', to simulate
# the 'functools.partial' real call.
new_params[arg_name] = param.replace(default=arg_value,
_partial_kwarg=True)
elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and
not param._partial_kwarg):
new_params.pop(arg_name)
return sig.replace(parameters=new_params.values())
sig = None
if isinstance(obj, type):
# obj is a class or a metaclass
# First, let's see if it has an overloaded __call__ defined
# in its metaclass
call = _get_user_defined_method(type(obj), '__call__')
if call is not None:
sig = signature(call)
else:
# Now we check if the 'obj' class has a '__new__' method
new = _get_user_defined_method(obj, '__new__')
if new is not None:
sig = signature(new)
else:
# Finally, we should have at least __init__ implemented
init = _get_user_defined_method(obj, '__init__')
if init is not None:
sig = signature(init)
elif not isinstance(obj, _NonUserDefinedCallables):
# An object with __call__
# We also check that the 'obj' is not an instance of
# _WrapperDescriptor or _MethodWrapper to avoid
# infinite recursion (and even potential segfault)
call = _get_user_defined_method(type(obj), '__call__', 'im_func')
if call is not None:
sig = signature(call)
if sig is not None:
# For classes and objects we skip the first parameter of their
# __call__, __new__, or __init__ methods
return sig.replace(parameters=tuple(sig.parameters.values())[1:])
if isinstance(obj, types.BuiltinFunctionType):
# Raise a nicer error message for builtins
msg = 'no signature found for builtin function {0!r}'.format(obj)
raise ValueError(msg)
raise ValueError('callable {0!r} is not supported by signature'.format(obj))
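# Illustrative usage sketch (added commentary, not part of the original
# funcsigs code). Assuming a hypothetical helper such as:
#
#   >>> def greet(name, punctuation='!'):
#   ...     return name + punctuation
#
# the public entry point above is expected to behave roughly like:
#
#   >>> sig = signature(greet)
#   >>> list(sig.parameters)
#   ['name', 'punctuation']
#   >>> sig.parameters['punctuation'].default
#   '!'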
class _void(object):
'''A private marker - used in Parameter & Signature'''
class _empty(object):
pass
class _ParameterKind(int):
def __new__(self, *args, **kwargs):
obj = int.__new__(self, *args)
obj._name = kwargs['name']
return obj
def __str__(self):
return self._name
def __repr__(self):
return '<_ParameterKind: {0!r}>'.format(self._name)
_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
class Parameter(object):
'''Represents a parameter in a function signature.
Has the following public attributes:
* name : str
The name of the parameter as a string.
* default : object
The default value for the parameter if specified. If the
parameter has no default value, this attribute is not set.
* annotation
The annotation for the parameter if specified. If the
parameter has no annotation, this attribute is not set.
* kind : str
Describes how argument values are bound to the parameter.
Possible values: `Parameter.POSITIONAL_ONLY`,
`Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
`Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
'''
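    # Illustrative sketch (added commentary, not part of the original funcsigs
    # code). For a hypothetical ``def download(url, timeout=30): ...`` the
    # parameter describing ``timeout`` would roughly report:
    #
    #   >>> param = signature(download).parameters['timeout']
    #   >>> param.name, param.default, str(param.kind)
    #   ('timeout', 30, 'POSITIONAL_OR_KEYWORD')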
__slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg')
POSITIONAL_ONLY = _POSITIONAL_ONLY
POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
VAR_POSITIONAL = _VAR_POSITIONAL
KEYWORD_ONLY = _KEYWORD_ONLY
VAR_KEYWORD = _VAR_KEYWORD
empty = _empty
def __init__(self, name, kind, default=_empty, annotation=_empty,
_partial_kwarg=False):
if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
_VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
raise ValueError("invalid value for 'Parameter.kind' attribute")
self._kind = kind
if default is not _empty:
if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
msg = '{0} parameters cannot have default values'.format(kind)
raise ValueError(msg)
self._default = default
self._annotation = annotation
if name is None:
if kind != _POSITIONAL_ONLY:
raise ValueError("None is not a valid name for a "
"non-positional-only parameter")
self._name = name
else:
name = str(name)
if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I):
msg = '{0!r} is not a valid parameter name'.format(name)
raise ValueError(msg)
self._name = name
self._partial_kwarg = _partial_kwarg
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def annotation(self):
return self._annotation
@property
def kind(self):
return self._kind
def replace(self, name=_void, kind=_void, annotation=_void,
default=_void, _partial_kwarg=_void):
'''Creates a customized copy of the Parameter.'''
if name is _void:
name = self._name
if kind is _void:
kind = self._kind
if annotation is _void:
annotation = self._annotation
if default is _void:
default = self._default
if _partial_kwarg is _void:
_partial_kwarg = self._partial_kwarg
return type(self)(name, kind, default=default, annotation=annotation,
_partial_kwarg=_partial_kwarg)
def __str__(self):
kind = self.kind
formatted = self._name
if kind == _POSITIONAL_ONLY:
if formatted is None:
formatted = ''
formatted = '<{0}>'.format(formatted)
# Add annotation and default value
if self._annotation is not _empty:
formatted = '{0}:{1}'.format(formatted,
formatannotation(self._annotation))
if self._default is not _empty:
formatted = '{0}={1}'.format(formatted, repr(self._default))
if kind == _VAR_POSITIONAL:
formatted = '*' + formatted
elif kind == _VAR_KEYWORD:
formatted = '**' + formatted
return formatted
def __repr__(self):
return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__,
id(self), self.name)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, Parameter) and
self._name == other._name and
self._kind == other._kind and
self._default == other._default and
self._annotation == other._annotation)
def __ne__(self, other):
return not self.__eq__(other)
class BoundArguments(object):
'''Result of `Signature.bind` call. Holds the mapping of arguments
to the function's parameters.
Has the following public attributes:
* arguments : OrderedDict
An ordered mutable mapping of parameters' names to arguments' values.
Does not contain arguments' default values.
* signature : Signature
The Signature object that created this instance.
* args : tuple
Tuple of positional arguments values.
* kwargs : dict
Dict of keyword arguments values.
'''
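    # Illustrative sketch (added commentary, not part of the original funcsigs
    # code). Assuming a hypothetical ``def add(a, b=2): ...``:
    #
    #   >>> ba = signature(add).bind(1, b=5)
    #   >>> ba.arguments
    #   OrderedDict([('a', 1), ('b', 5)])
    #   >>> ba.args, ba.kwargs
    #   ((1, 5), {})
    #
    # ``b`` ends up in ``args`` rather than ``kwargs`` because it is a
    # positional-or-keyword parameter that was successfully bound.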
def __init__(self, signature, arguments):
self.arguments = arguments
self._signature = signature
@property
def signature(self):
return self._signature
@property
def args(self):
args = []
for param_name, param in self._signature.parameters.items():
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
# Keyword arguments mapped by 'functools.partial'
# (Parameter._partial_kwarg is True) are mapped
# in 'BoundArguments.kwargs', along with VAR_KEYWORD &
# KEYWORD_ONLY
break
try:
arg = self.arguments[param_name]
except KeyError:
# We're done here. Other arguments
# will be mapped in 'BoundArguments.kwargs'
break
else:
if param.kind == _VAR_POSITIONAL:
# *args
args.extend(arg)
else:
# plain argument
args.append(arg)
return tuple(args)
@property
def kwargs(self):
kwargs = {}
kwargs_started = False
for param_name, param in self._signature.parameters.items():
if not kwargs_started:
if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or
param._partial_kwarg):
kwargs_started = True
else:
if param_name not in self.arguments:
kwargs_started = True
continue
if not kwargs_started:
continue
try:
arg = self.arguments[param_name]
except KeyError:
pass
else:
if param.kind == _VAR_KEYWORD:
# **kwargs
kwargs.update(arg)
else:
# plain keyword argument
kwargs[param_name] = arg
return kwargs
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
return (issubclass(other.__class__, BoundArguments) and
self.signature == other.signature and
self.arguments == other.arguments)
def __ne__(self, other):
return not self.__eq__(other)
class Signature(object):
'''A Signature object represents the overall signature of a function.
It stores a Parameter object for each parameter accepted by the
function, as well as information specific to the function itself.
A Signature object has the following public attributes and methods:
* parameters : OrderedDict
An ordered mapping of parameters' names to the corresponding
Parameter objects (keyword-only arguments are in the same order
as listed in `code.co_varnames`).
* return_annotation : object
The annotation for the return type of the function if specified.
If the function has no annotation for its return type, this
attribute is not set.
* bind(*args, **kwargs) -> BoundArguments
Creates a mapping from positional and keyword arguments to
parameters.
* bind_partial(*args, **kwargs) -> BoundArguments
Creates a partial mapping from positional and keyword arguments
to parameters (simulating 'functools.partial' behavior.)
'''
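    # Illustrative sketch (added commentary, not part of the original funcsigs
    # code). For a hypothetical Python 3 function
    # ``def ping(host, *, retries=3): ...`` one would roughly expect:
    #
    #   >>> sig = Signature.from_function(ping)
    #   >>> [str(p.kind) for p in sig.parameters.values()]
    #   ['POSITIONAL_OR_KEYWORD', 'KEYWORD_ONLY']
    #   >>> str(sig.replace(return_annotation=bool))
    #   '(host, *, retries=3) -> bool'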
__slots__ = ('_return_annotation', '_parameters')
_parameter_cls = Parameter
_bound_arguments_cls = BoundArguments
empty = _empty
def __init__(self, parameters=None, return_annotation=_empty,
__validate_parameters__=True):
'''Constructs Signature from the given list of Parameter
objects and 'return_annotation'. All arguments are optional.
'''
if parameters is None:
params = OrderedDict()
else:
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
for idx, param in enumerate(parameters):
kind = param.kind
if kind < top_kind:
msg = 'wrong parameter order: {0} before {1}'
msg = msg.format(top_kind, param.kind)
raise ValueError(msg)
else:
top_kind = kind
name = param.name
if name is None:
name = str(idx)
param = param.replace(name=name)
if name in params:
msg = 'duplicate parameter name: {0!r}'.format(name)
raise ValueError(msg)
params[name] = param
else:
params = OrderedDict(((param.name, param)
for param in parameters))
self._parameters = params
self._return_annotation = return_annotation
@classmethod
def from_function(cls, func):
'''Constructs Signature for the given python function'''
if not isinstance(func, types.FunctionType):
raise TypeError('{0!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0)
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
annotations = getattr(func, '__annotations__', {})
defaults = func.__defaults__
kwdefaults = getattr(func, '__kwdefaults__', None)
if defaults:
pos_default_count = len(defaults)
else:
pos_default_count = 0
parameters = []
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
default=defaults[offset]))
# *args
if func_code.co_flags & 0x04:
name = arg_names[pos_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
# Keyword-only parameters.
for name in keyword_only:
default = _empty
if kwdefaults is not None:
default = kwdefaults.get(name, _empty)
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_KEYWORD_ONLY,
default=default))
# **kwargs
if func_code.co_flags & 0x08:
index = pos_count + keyword_only_count
if func_code.co_flags & 0x04:
index += 1
name = arg_names[index]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_KEYWORD))
return cls(parameters,
return_annotation=annotations.get('return', _empty),
__validate_parameters__=False)
@property
def parameters(self):
try:
return types.MappingProxyType(self._parameters)
except AttributeError:
return OrderedDict(self._parameters.items())
@property
def return_annotation(self):
return self._return_annotation
def replace(self, parameters=_void, return_annotation=_void):
'''Creates a customized copy of the Signature.
Pass 'parameters' and/or 'return_annotation' arguments
to override them in the new copy.
'''
if parameters is _void:
parameters = self.parameters.values()
if return_annotation is _void:
return_annotation = self._return_annotation
return type(self)(parameters,
return_annotation=return_annotation)
def __hash__(self):
msg = "unhashable type: '{0}'".format(self.__class__.__name__)
raise TypeError(msg)
def __eq__(self, other):
if (not issubclass(type(other), Signature) or
self.return_annotation != other.return_annotation or
len(self.parameters) != len(other.parameters)):
return False
other_positions = dict((param, idx)
for idx, param in enumerate(other.parameters.keys()))
for idx, (param_name, param) in enumerate(self.parameters.items()):
if param.kind == _KEYWORD_ONLY:
try:
other_param = other.parameters[param_name]
except KeyError:
return False
else:
if param != other_param:
return False
else:
try:
other_idx = other_positions[param_name]
except KeyError:
return False
else:
if (idx != other_idx or
param != other.parameters[param_name]):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def _bind(self, args, kwargs, partial=False):
'''Private method. Don't use directly.'''
arguments = OrderedDict()
parameters = iter(self.parameters.values())
parameters_ex = ()
arg_vals = iter(args)
if partial:
# Support for binding arguments to 'functools.partial' objects.
# See 'functools.partial' case in 'signature()' implementation
# for details.
for param_name, param in self.parameters.items():
if (param._partial_kwarg and param_name not in kwargs):
# Simulating 'functools.partial' behavior
kwargs[param_name] = param.default
while True:
# Let's iterate through the positional arguments and corresponding
# parameters
try:
arg_val = next(arg_vals)
except StopIteration:
# No more positional arguments
try:
param = next(parameters)
except StopIteration:
# No more parameters. That's it. Just need to check that
# we have no `kwargs` after this while loop
break
else:
if param.kind == _VAR_POSITIONAL:
# That's OK, just empty *args. Let's start parsing
# kwargs
break
elif param.name in kwargs:
if param.kind == _POSITIONAL_ONLY:
msg = '{arg!r} parameter is positional only, ' \
'but was passed as a keyword'
msg = msg.format(arg=param.name)
raise TypeError(msg)
parameters_ex = (param,)
break
elif (param.kind == _VAR_KEYWORD or
param.default is not _empty):
# That's fine too - we have a default value for this
                        # parameter. So, let's start parsing `kwargs`, starting
# with the current parameter
parameters_ex = (param,)
break
else:
if partial:
parameters_ex = (param,)
break
else:
msg = '{arg!r} parameter lacking default value'
msg = msg.format(arg=param.name)
raise TypeError(msg)
else:
# We have a positional argument to process
try:
param = next(parameters)
except StopIteration:
raise TypeError('too many positional arguments')
else:
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
# Looks like we have no parameter for this positional
# argument
raise TypeError('too many positional arguments')
if param.kind == _VAR_POSITIONAL:
# We have an '*args'-like argument, let's fill it with
# all positional arguments we have left and move on to
# the next phase
values = [arg_val]
values.extend(arg_vals)
arguments[param.name] = tuple(values)
break
if param.name in kwargs:
raise TypeError('multiple values for argument '
'{arg!r}'.format(arg=param.name))
arguments[param.name] = arg_val
# Now, we iterate through the remaining parameters to process
# keyword arguments
kwargs_param = None
for param in itertools.chain(parameters_ex, parameters):
if param.kind == _POSITIONAL_ONLY:
# This should never happen in case of a properly built
# Signature object (but let's have this check here
# to ensure correct behaviour just in case)
raise TypeError('{arg!r} parameter is positional only, '
'but was passed as a keyword'. \
format(arg=param.name))
if param.kind == _VAR_KEYWORD:
# Memorize that we have a '**kwargs'-like parameter
kwargs_param = param
continue
param_name = param.name
try:
arg_val = kwargs.pop(param_name)
except KeyError:
# We have no value for this parameter. It's fine though,
# if it has a default value, or it is an '*args'-like
# parameter, left alone by the processing of positional
# arguments.
if (not partial and param.kind != _VAR_POSITIONAL and
param.default is _empty):
raise TypeError('{arg!r} parameter lacking default value'. \
format(arg=param_name))
else:
arguments[param_name] = arg_val
if kwargs:
if kwargs_param is not None:
# Process our '**kwargs'-like parameter
arguments[kwargs_param.name] = kwargs
else:
raise TypeError('too many keyword arguments')
return self._bound_arguments_cls(self, arguments)
def bind(self, *args, **kwargs):
'''Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
'''
return self._bind(args, kwargs)
def bind_partial(self, *args, **kwargs):
        '''Get a BoundArguments object that partially maps the
        passed `args` and `kwargs` to the function's signature.
        Raises `TypeError` if the passed arguments cannot be bound.
'''
return self._bind(args, kwargs, partial=True)
def __str__(self):
result = []
render_kw_only_separator = True
for idx, param in enumerate(self.parameters.values()):
formatted = str(param)
kind = param.kind
if kind == _VAR_POSITIONAL:
# OK, we have an '*args'-like parameter, so we won't need
# a '*' to separate keyword-only arguments
render_kw_only_separator = False
elif kind == _KEYWORD_ONLY and render_kw_only_separator:
# We have a keyword-only parameter to render and we haven't
# rendered an '*args'-like parameter before, so add a '*'
# separator to the parameters list ("foo(arg1, *, arg2)" case)
result.append('*')
# This condition should be only triggered once, so
# reset the flag
render_kw_only_separator = False
result.append(formatted)
rendered = '({0})'.format(', '.join(result))
if self.return_annotation is not _empty:
anno = formatannotation(self.return_annotation)
rendered += ' -> {0}'.format(anno)
return rendered
| mit | -1,135,423,231,880,870,100 | 35.432763 | 84 | 0.537648 | false |
cockroachdb/examples-orms | python/django/cockroach_example/settings.py | 1 | 3362 | """
Django settings for cockroach_example project.
Generated by 'django-admin startproject' using Django 2.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
from urllib.parse import urlparse
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0pld^66i)iv4df8km5vc%1^sskuqjf16jk&z=c^rk--oh6i0i^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cockroach_example',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'cockroach_example.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cockroach_example.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
port = 26257
addr = os.getenv('ADDR')
if addr is not None:
url = urlparse(addr)
port = url.port
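# Illustrative example (added commentary, not part of the original settings):
# with ADDR="postgresql://root@localhost:26258" the urlparse() call above
# yields url.port == 26258, overriding the default CockroachDB port of 26257.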
DATABASES = {
'default': {
'ENGINE' : 'django_cockroachdb',
'NAME' : 'company_django',
'USER' : 'root',
'PASSWORD': '',
'HOST' : 'localhost',
'PORT' : port,
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | -6,756,807,042,726,074,000 | 24.278195 | 91 | 0.678168 | false |
jbzdak/data-base-checker | bazydanych2/settingsdev.py | 1 | 1181 |
from bazydanych2.settingsshared import *
DEBUG=True
TEMPLATE_DEBUG=True
STATIC_ROOT = '/tmp/staticfiles'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
"level": 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler'
}
},
'root':{
'handlers' : ['console']
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = '/tmp/app-messages'
INSTALLED_APPS += ('celery_test_app', )
ALLOW_OFFILNE_GRADING = False
SCHEMA_CHECKER_HOST = '192.168.56.30' | gpl-3.0 | -979,873,808,556,966,700 | 21.730769 | 66 | 0.524979 | false |
ted-gould/nova | nova/tests/unit/compute/test_compute_mgr.py | 1 | 197801 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for ComputeManager()."""
import contextlib
import time
import uuid
from cinderclient import exceptions as cinder_exception
from eventlet import event as eventlet_event
import mock
from mox3 import mox
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
import nova
from nova.compute import build_results
from nova.compute import manager
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor import api as conductor_api
from nova import context
from nova import db
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.unit.compute import fake_resource_tracker
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network_cache_model
from nova.tests.unit import fake_server_actions
from nova.tests.unit.objects import test_instance_fault
from nova.tests.unit.objects import test_instance_info_cache
from nova import utils
from nova.virt import driver as virt_driver
from nova.virt import event as virtevent
from nova.virt import fake as fake_driver
from nova.virt import hardware
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
class ComputeManagerUnitTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerUnitTestCase, self).setUp()
self.flags(use_local=True, group='conductor')
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(manager.ComputeManager, '_get_power_state')
@mock.patch.object(manager.ComputeManager, '_sync_instance_power_state')
@mock.patch.object(objects.Instance, 'get_by_uuid')
def _test_handle_lifecycle_event(self, mock_get, mock_sync,
mock_get_power_state, transition,
event_pwr_state, current_pwr_state):
event = mock.Mock()
event.get_instance_uuid.return_value = mock.sentinel.uuid
event.get_transition.return_value = transition
mock_get_power_state.return_value = current_pwr_state
self.compute.handle_lifecycle_event(event)
mock_get.assert_called_with(mock.ANY, mock.sentinel.uuid,
expected_attrs=[])
if event_pwr_state == current_pwr_state:
mock_sync.assert_called_with(mock.ANY, mock_get.return_value,
event_pwr_state)
else:
self.assertFalse(mock_sync.called)
def test_handle_lifecycle_event(self):
event_map = {virtevent.EVENT_LIFECYCLE_STOPPED: power_state.SHUTDOWN,
virtevent.EVENT_LIFECYCLE_STARTED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_PAUSED: power_state.PAUSED,
virtevent.EVENT_LIFECYCLE_RESUMED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_SUSPENDED:
power_state.SUSPENDED,
}
for transition, pwr_state in six.iteritems(event_map):
self._test_handle_lifecycle_event(transition=transition,
event_pwr_state=pwr_state,
current_pwr_state=pwr_state)
def test_handle_lifecycle_event_state_mismatch(self):
self._test_handle_lifecycle_event(
transition=virtevent.EVENT_LIFECYCLE_STOPPED,
event_pwr_state=power_state.SHUTDOWN,
current_pwr_state=power_state.RUNNING)
def test_delete_instance_info_cache_delete_ordering(self):
call_tracker = mock.Mock()
call_tracker.clear_events_for_instance.return_value = None
mgr_class = self.compute.__class__
orig_delete = mgr_class._delete_instance
specd_compute = mock.create_autospec(mgr_class)
# spec out everything except for the method we really want
# to test, then use call_tracker to verify call sequence
specd_compute._delete_instance = orig_delete
mock_inst = mock.Mock()
mock_inst.uuid = 'inst-1'
mock_inst.save = mock.Mock()
mock_inst.destroy = mock.Mock()
mock_inst.system_metadata = mock.Mock()
def _mark_notify(*args, **kwargs):
call_tracker._notify_about_instance_usage(*args, **kwargs)
def _mark_shutdown(*args, **kwargs):
call_tracker._shutdown_instance(*args, **kwargs)
specd_compute.instance_events = call_tracker
specd_compute._notify_about_instance_usage = _mark_notify
specd_compute._shutdown_instance = _mark_shutdown
mock_inst.info_cache = call_tracker
specd_compute._delete_instance(specd_compute,
self.context,
mock_inst,
mock.Mock(),
mock.Mock())
methods_called = [n for n, a, k in call_tracker.mock_calls]
self.assertEqual(['clear_events_for_instance',
'_notify_about_instance_usage',
'_shutdown_instance', 'delete'],
methods_called)
@mock.patch.object(manager.ComputeManager, '_get_resource_tracker')
@mock.patch.object(fake_driver.FakeDriver, 'get_available_nodes')
@mock.patch.object(manager.ComputeManager, '_get_compute_nodes_in_db')
def test_update_available_resource(self, get_db_nodes, get_avail_nodes,
get_rt):
info = {'cn_id': 1}
def _make_compute_node(hyp_hostname):
cn = mock.Mock(spec_set=['hypervisor_hostname', 'id',
'destroy'])
cn.id = info['cn_id']
info['cn_id'] += 1
cn.hypervisor_hostname = hyp_hostname
return cn
def _make_rt(node):
n = mock.Mock(spec_set=['update_available_resource',
'nodename'])
n.nodename = node
return n
ctxt = mock.Mock()
db_nodes = [_make_compute_node('node1'),
_make_compute_node('node2'),
_make_compute_node('node3'),
_make_compute_node('node4')]
avail_nodes = set(['node2', 'node3', 'node4', 'node5'])
avail_nodes_l = list(avail_nodes)
rts = [_make_rt(node) for node in avail_nodes_l]
# Make the 2nd and 3rd ones raise
exc = exception.ComputeHostNotFound(host='fake')
rts[1].update_available_resource.side_effect = exc
exc = test.TestingException()
rts[2].update_available_resource.side_effect = exc
rts_iter = iter(rts)
def _get_rt_side_effect(*args, **kwargs):
return next(rts_iter)
expected_rt_dict = {avail_nodes_l[0]: rts[0],
avail_nodes_l[2]: rts[2],
avail_nodes_l[3]: rts[3]}
get_db_nodes.return_value = db_nodes
get_avail_nodes.return_value = avail_nodes
get_rt.side_effect = _get_rt_side_effect
self.compute.update_available_resource(ctxt)
get_db_nodes.assert_called_once_with(ctxt, use_slave=True)
self.assertEqual([mock.call(node) for node in avail_nodes],
get_rt.call_args_list)
for rt in rts:
rt.update_available_resource.assert_called_once_with(ctxt)
self.assertEqual(expected_rt_dict,
self.compute._resource_tracker_dict)
# First node in set should have been removed from DB
for db_node in db_nodes:
if db_node.hypervisor_hostname == 'node1':
db_node.destroy.assert_called_once_with()
else:
self.assertFalse(db_node.destroy.called)
def test_delete_instance_without_info_cache(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
host=self.compute.host,
expected_attrs=['system_metadata'])
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
with contextlib.nested(
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_shutdown_instance'),
mock.patch.object(instance, 'obj_load_attr'),
mock.patch.object(instance, 'save'),
mock.patch.object(instance, 'destroy')
) as (
            compute_notify_about_instance_usage, compute_shutdown_instance,
instance_obj_load_attr, instance_save, instance_destroy
):
instance.info_cache = None
self.compute._delete_instance(self.context, instance, [], quotas)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(time, 'sleep')
def test_allocate_network_succeeds_after_retries(
self, mock_sleep, mock_save, mock_allocate_for_instance):
self.flags(network_allocate_retries=8)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
final_result = 'meow'
dhcp_options = None
mock_allocate_for_instance.side_effect = [
test.TestingException()] * 7 + [final_result]
expected_sleep_times = [1, 2, 4, 8, 16, 30, 30, 30]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
mock_sleep.has_calls(expected_sleep_times)
self.assertEqual(final_result, res)
        # Ensure save is not called while allocating networks; the instance
        # is saved after the allocation.
self.assertFalse(mock_save.called)
self.assertEqual('True', instance.system_metadata['network_allocated'])
def test_allocate_network_fails(self):
self.flags(network_allocate_retries=0)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
def test_allocate_network_neg_conf_value_treated_as_zero(self):
self.flags(network_allocate_retries=-1)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
# Only attempted once.
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(manager.ComputeManager, '_instance_update')
@mock.patch.object(time, 'sleep')
def test_allocate_network_with_conf_value_is_one(
self, sleep, _instance_update, allocate_for_instance):
self.flags(network_allocate_retries=1)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
final_result = 'zhangtralon'
allocate_for_instance.side_effect = [test.TestingException(),
final_result]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
self.assertEqual(final_result, res)
self.assertEqual(1, sleep.call_count)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_build_and_run_instance')
def _test_max_concurrent_builds(self, mock_dbari, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
with mock.patch.object(self.compute,
'_build_semaphore') as mock_sem:
instance = objects.Instance(uuid=str(uuid.uuid4()))
for i in (1, 2, 3):
self.compute.build_and_run_instance(self.context, instance,
mock.sentinel.image,
mock.sentinel.request_spec,
{})
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_builds_limited(self):
self.flags(max_concurrent_builds=2)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_unlimited(self):
self.flags(max_concurrent_builds=0)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_semaphore_limited(self):
self.flags(max_concurrent_builds=123)
self.assertEqual(123,
manager.ComputeManager()._build_semaphore.balance)
def test_max_concurrent_builds_semaphore_unlimited(self):
self.flags(max_concurrent_builds=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._build_semaphore.balance)
self.assertIsInstance(compute._build_semaphore,
compute_utils.UnlimitedSemaphore)
def test_nil_out_inst_obj_host_and_node_sets_nil(self):
instance = fake_instance.fake_instance_obj(self.context,
uuid='foo-uuid',
host='foo-host',
node='foo-node')
self.assertIsNotNone(instance.host)
self.assertIsNotNone(instance.node)
self.compute._nil_out_instance_obj_host_and_node(instance)
self.assertIsNone(instance.host)
self.assertIsNone(instance.node)
def test_init_host(self):
our_host = self.compute.host
inst = fake_instance.fake_db_instance(
vm_state=vm_states.ACTIVE,
info_cache=dict(test_instance_info_cache.fake_info_cache,
network_info=None),
security_groups=None)
startup_instances = [inst, inst, inst]
def _do_mock_calls(defer_iptables_apply):
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(
self.context, our_host, columns_to_join=['info_cache'],
use_slave=False
).AndReturn(startup_instances)
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_on()
self.compute._destroy_evacuated_instances(self.context)
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_off()
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_on')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_off')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute,
'_destroy_evacuated_instances')
self.mox.StubOutWithMock(self.compute,
'_init_instance')
# Test with defer_iptables_apply
self.flags(defer_iptables_apply=True)
_do_mock_calls(True)
self.mox.ReplayAll()
self.compute.init_host()
self.mox.VerifyAll()
# Test without defer_iptables_apply
self.mox.ResetAll()
self.flags(defer_iptables_apply=False)
_do_mock_calls(False)
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
@mock.patch('nova.objects.InstanceList')
@mock.patch('nova.objects.MigrationList.get_by_filters')
def test_cleanup_host(self, mock_miglist_get, mock_instance_list):
# just testing whether the cleanup_host method
# when fired will invoke the underlying driver's
# equivalent method.
mock_miglist_get.return_value = []
mock_instance_list.get_by_host.return_value = []
with mock.patch.object(self.compute, 'driver') as mock_driver:
self.compute.init_host()
mock_driver.init_host.assert_called_once_with(host='fake-mini')
self.compute.cleanup_host()
# register_event_listener is called on startup (init_host) and
# in cleanup_host
mock_driver.register_event_listener.assert_has_calls([
mock.call(self.compute.handle_events), mock.call(None)])
mock_driver.cleanup_host.assert_called_once_with(host='fake-mini')
def test_init_virt_events_disabled(self):
self.flags(handle_virt_lifecycle_events=False, group='workarounds')
with mock.patch.object(self.compute.driver,
'register_event_listener') as mock_register:
self.compute.init_virt_events()
self.assertFalse(mock_register.called)
@mock.patch('nova.objects.MigrationList.get_by_filters')
@mock.patch('nova.objects.Migration.save')
def test_init_host_with_evacuated_instance(self, mock_save, mock_mig_get):
our_host = self.compute.host
not_our_host = 'not-' + our_host
deleted_instance = fake_instance.fake_instance_obj(
self.context, host=not_our_host, uuid='fake-uuid')
migration = objects.Migration(instance_uuid=deleted_instance.uuid)
mock_mig_get.return_value = [migration]
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver, 'destroy')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute, 'init_virt_events')
self.mox.StubOutWithMock(self.compute, '_get_instances_on_driver')
self.mox.StubOutWithMock(self.compute, '_init_instance')
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(self.context, our_host,
columns_to_join=['info_cache'],
use_slave=False
).AndReturn([])
self.compute.init_virt_events()
# simulate failed instance
self.compute._get_instances_on_driver(
self.context, {'deleted': False}).AndReturn([deleted_instance])
self.compute.network_api.get_instance_nw_info(
self.context, deleted_instance).AndRaise(
exception.InstanceNotFound(instance_id=deleted_instance['uuid']))
# ensure driver.destroy is called so that driver may
# clean up any dangling files
self.compute.driver.destroy(self.context, deleted_instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_init_instance_with_binding_failed_vif_type(self):
# this instance will plug a 'binding_failed' vif
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
with contextlib.nested(
mock.patch.object(context, 'get_admin_context',
return_value=self.context),
mock.patch.object(compute_utils, 'get_nw_info_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute.driver, 'plug_vifs',
side_effect=exception.VirtualInterfacePlugException(
"Unexpected vif_type=binding_failed")),
mock.patch.object(self.compute, '_set_instance_obj_error_state')
) as (get_admin_context, get_nw_info, plug_vifs, set_error_state):
self.compute._init_instance(self.context, instance)
set_error_state.assert_called_once_with(self.context, instance)
def test__get_power_state_InstanceNotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.InstanceNotFound(instance_id=1)):
self.assertEqual(self.compute._get_power_state(self.context,
instance),
power_state.NOSTATE)
def test__get_power_state_NotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.NotFound()):
self.assertRaises(exception.NotFound,
self.compute._get_power_state,
self.context, instance)
def test_init_instance_failed_resume_sets_error(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
self.flags(resume_guests_state_on_host_boot=True)
self.mox.StubOutWithMock(self.compute, '_get_power_state')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'resume_state_on_host_boot')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute.driver.plug_vifs(instance, mox.IgnoreArg())
self.compute._get_instance_block_device_info(mox.IgnoreArg(),
instance).AndReturn('fake-bdm')
self.compute.driver.resume_state_on_host_boot(mox.IgnoreArg(),
instance, mox.IgnoreArg(),
'fake-bdm').AndRaise(test.TestingException)
self.compute._set_instance_obj_error_state(mox.IgnoreArg(), instance)
self.mox.ReplayAll()
self.compute._init_instance('fake-context', instance)
def test_init_instance_stuck_in_deleting(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
@mock.patch.object(objects.Instance, 'get_by_uuid')
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def test_init_instance_stuck_in_deleting_raises_exception(
self, mock_get_by_instance_uuid, mock_get_by_uuid):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
metadata={},
system_metadata={},
host=self.compute.host,
vm_state=vm_states.ACTIVE,
task_state=task_states.DELETING,
expected_attrs=['metadata', 'system_metadata'])
bdms = []
reservations = ['fake-resv']
def _create_patch(name, attr):
patcher = mock.patch.object(name, attr)
mocked_obj = patcher.start()
self.addCleanup(patcher.stop)
return mocked_obj
mock_delete_instance = _create_patch(self.compute, '_delete_instance')
mock_set_instance_error_state = _create_patch(
self.compute, '_set_instance_obj_error_state')
mock_create_reservations = _create_patch(self.compute,
'_create_reservations')
mock_create_reservations.return_value = reservations
mock_get_by_instance_uuid.return_value = bdms
mock_get_by_uuid.return_value = instance
mock_delete_instance.side_effect = test.TestingException('test')
self.compute._init_instance(self.context, instance)
mock_set_instance_error_state.assert_called_once_with(
self.context, instance)
def _test_init_instance_reverts_crashed_migrations(self,
old_vm_state=None):
power_on = True if (not old_vm_state or
old_vm_state == vm_states.ACTIVE) else False
sys_meta = {
'old_vm_state': old_vm_state
}
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ERROR,
task_state=task_states.RESIZE_MIGRATING,
power_state=power_state.SHUTDOWN,
system_metadata=sys_meta,
host=self.compute.host,
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'finish_revert_migration')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver, 'get_info')
self.mox.StubOutWithMock(instance, 'save')
self.mox.StubOutWithMock(self.compute, '_retry_reboot')
self.compute._retry_reboot(self.context, instance).AndReturn(
(False, None))
compute_utils.get_nw_info_for_instance(instance).AndReturn(
network_model.NetworkInfo())
self.compute.driver.plug_vifs(instance, [])
self.compute._get_instance_block_device_info(
self.context, instance).AndReturn([])
self.compute.driver.finish_revert_migration(self.context, instance,
[], [], power_on)
instance.save()
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.assertIsNone(instance.task_state)
def test_init_instance_reverts_crashed_migration_from_active(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.ACTIVE)
def test_init_instance_reverts_crashed_migration_from_stopped(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.STOPPED)
def test_init_instance_reverts_crashed_migration_no_old_state(self):
self._test_init_instance_reverts_crashed_migrations(old_vm_state=None)
def test_init_instance_resets_crashed_live_migration(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.MIGRATING)
with contextlib.nested(
mock.patch.object(instance, 'save'),
mock.patch('nova.compute.utils.get_nw_info_for_instance',
return_value=network_model.NetworkInfo())
) as (save, get_nw_info):
self.compute._init_instance(self.context, instance)
save.assert_called_once_with(expected_task_state=['migrating'])
get_nw_info.assert_called_once_with(instance)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def _test_init_instance_sets_building_error(self, vm_state,
task_state=None):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_state,
host=self.compute.host,
task_state=task_state)
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_error(self):
self._test_init_instance_sets_building_error(vm_states.BUILDING)
def test_init_instance_sets_rebuilding_errors(self):
tasks = [task_states.REBUILDING,
task_states.REBUILD_BLOCK_DEVICE_MAPPING,
task_states.REBUILD_SPAWNING]
vms = [vm_states.ACTIVE, vm_states.STOPPED]
for vm_state in vms:
for task_state in tasks:
self._test_init_instance_sets_building_error(
vm_state, task_state)
def _test_init_instance_sets_building_tasks_error(self, instance):
instance.host = self.compute.host
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_tasks_error_scheduling(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=None,
task_state=task_states.SCHEDULING)
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_block_device(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.BLOCK_DEVICE_MAPPING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_networking(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.NETWORKING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_spawning(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.SPAWNING
self._test_init_instance_sets_building_tasks_error(instance)
def _test_init_instance_cleans_image_states(self, instance):
with mock.patch.object(instance, 'save') as save:
self.compute._get_power_state = mock.Mock()
self.compute.driver.post_interrupted_snapshot_cleanup = mock.Mock()
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.host = self.compute.host
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.compute.driver.post_interrupted_snapshot_cleanup.\
assert_called_once_with(self.context, instance)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def _test_init_instance_cleans_task_states(self, powerstate, state,
mock_get_uuid, mock_get_power_state):
instance = objects.Instance(self.context)
instance.uuid = 'fake-uuid'
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.vm_state = vm_states.ACTIVE
instance.task_state = state
instance.host = self.compute.host
mock_get_power_state.return_value = powerstate
self.compute._init_instance(self.context, instance)
return instance
def test_init_instance_cleans_image_state_pending_upload(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_PENDING_UPLOAD
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_uploading(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_UPLOADING
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
self._test_init_instance_cleans_image_states(instance)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_pausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.PAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_unpausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.UNPAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager.unpause_instance')
def test_init_instance_cleans_paused_unpausing(self, mock_unpause):
def fake_unpause(context, instance):
instance.task_state = None
mock_unpause.side_effect = fake_unpause
instance = self._test_init_instance_cleans_task_states(
power_state.PAUSED, task_states.UNPAUSING)
mock_unpause.assert_called_once_with(self.context, instance)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_init_instance_errors_when_not_migrating(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ERROR
instance.task_state = task_states.IMAGE_UPLOADING
instance.host = self.compute.host
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_deletes_error_deleting_instance(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
vm_state=vm_states.ERROR,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(objects.quotas, 'ids_from_instance')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
objects.quotas.ids_from_instance(self.context, instance).AndReturn(
(instance.project_id, instance.user_id))
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_resize_prep(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.RESIZE_PREP,
power_state=power_state.RUNNING)
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(compute_utils, 'get_nw_info_for_instance'),
mock.patch.object(instance, 'save', autospec=True)
) as (mock_get_power_state, mock_nw_info, mock_instance_save):
self.compute._init_instance(self.context, instance)
mock_instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
@mock.patch('nova.context.RequestContext.elevated')
@mock.patch('nova.compute.utils.get_nw_info_for_instance')
@mock.patch(
'nova.compute.manager.ComputeManager._get_instance_block_device_info')
@mock.patch('nova.virt.driver.ComputeDriver.destroy')
@mock.patch('nova.virt.driver.ComputeDriver.get_volume_connector')
def _test_shutdown_instance_exception(self, exc, mock_connector,
mock_destroy, mock_blk_device_info, mock_nw_info, mock_elevated):
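        # Common helper: make get_volume_connector raise the given exception
        # and verify that _shutdown_instance completes without propagating
        # it.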
mock_connector.side_effect = exc
mock_elevated.return_value = self.context
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
task_state=task_states.DELETING)
bdms = [mock.Mock(id=1, is_volume=True)]
self.compute._shutdown_instance(self.context, instance, bdms,
notify=False, try_deallocate_networks=False)
def test_shutdown_instance_endpoint_not_found(self):
exc = cinder_exception.EndpointNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_client_exception(self):
exc = cinder_exception.ClientException
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_volume_not_found(self):
exc = exception.VolumeNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_disk_not_found(self):
exc = exception.DiskNotFound
self._test_shutdown_instance_exception(exc)
def _test_init_instance_retries_reboot(self, instance, reboot_type,
return_power_state):
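        # Common helper: run _init_instance on an instance with a pending or
        # started reboot and verify that reboot_instance is re-invoked with
        # the expected reboot type.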
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=return_power_state),
mock.patch.object(self.compute, 'reboot_instance'),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
reboot_instance,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, block_device_info=None,
reboot_type=reboot_type)
reboot_instance.assert_has_calls([call])
def test_init_instance_retries_reboot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_SOFT_REBOOT:
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'SOFT',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
for state in vm_states.ALLOW_HARD_REBOOT:
            # NOTE(dave-mcnally): while a reboot of a VM in the error state
            # is possible, we don't attempt to recover from an error during
            # init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_soft_became_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_HARD_REBOOT:
            # NOTE(dave-mcnally): while a reboot of a VM in the error state
            # is possible, we don't attempt to recover from an error during
            # init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.SHUTDOWN)
self.assertEqual(task_states.REBOOT_PENDING_HARD,
instance.task_state)
def test_init_instance_retries_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def test_init_instance_retries_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def _test_init_instance_cleans_reboot_state(self, instance):
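        # Common helper: verify that _init_instance clears a stale
        # REBOOT_STARTED/REBOOT_STARTED_HARD task state when the guest is
        # already running.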
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save', autospec=True),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
instance_save,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def test_init_instance_cleans_image_state_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_cleans_image_state_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_retries_power_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
with mock.patch.object(self.compute, 'stop_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
def test_init_instance_retries_power_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
with mock.patch.object(self.compute, 'start_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
def test_init_instance_retries_power_on_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
with mock.patch.object(self.compute, 'start_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_init_instance_retries_power_off_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
with mock.patch.object(self.compute, 'stop_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_get_instances_on_driver(self):
driver_instances = []
for x in range(10):
driver_instances.append(fake_instance.fake_db_instance())
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndReturn(
[inst['uuid'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context,
{'uuid': [inst['uuid'] for
inst in driver_instances]},
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(
driver_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
@mock.patch('nova.virt.driver.ComputeDriver.list_instance_uuids')
@mock.patch('nova.db.api.instance_get_all_by_filters')
def test_get_instances_on_driver_empty(self, mock_list, mock_db):
mock_list.return_value = []
result = self.compute._get_instances_on_driver(self.context)
# instance_get_all_by_filters should not be called
self.assertEqual(0, mock_db.call_count)
self.assertEqual([],
[x['uuid'] for x in result])
def test_get_instances_on_driver_fallback(self):
# Test getting instances when driver doesn't support
# 'list_instance_uuids'
self.compute.host = 'host'
filters = {'host': self.compute.host}
self.flags(instance_name_template='inst-%i')
all_instances = []
driver_instances = []
for x in range(10):
instance = fake_instance.fake_db_instance(name='inst-%i' % x,
id=x)
if x % 2:
driver_instances.append(instance)
all_instances.append(instance)
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(self.compute.driver,
'list_instances')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndRaise(
NotImplementedError())
self.compute.driver.list_instances().AndReturn(
[inst['name'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context, filters,
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(all_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context, filters)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
def test_instance_usage_audit(self):
instances = [objects.Instance(uuid='foo')]
@classmethod
def fake_task_log(*a, **k):
pass
@classmethod
def fake_get(*a, **k):
return instances
self.flags(instance_usage_audit=True)
self.stubs.Set(objects.TaskLog, 'get', fake_task_log)
self.stubs.Set(objects.InstanceList,
'get_active_by_window_joined', fake_get)
self.stubs.Set(objects.TaskLog, 'begin_task', fake_task_log)
self.stubs.Set(objects.TaskLog, 'end_task', fake_task_log)
self.mox.StubOutWithMock(compute_utils, 'notify_usage_exists')
compute_utils.notify_usage_exists(self.compute.notifier,
self.context, instances[0], ignore_missing_network_data=False)
self.mox.ReplayAll()
self.compute._instance_usage_audit(self.context)
@mock.patch.object(objects.InstanceList, 'get_by_host')
def test_sync_power_states(self, mock_get):
instance = mock.Mock()
mock_get.return_value = [instance]
with mock.patch.object(self.compute._sync_power_pool,
'spawn_n') as mock_spawn:
self.compute._sync_power_states(mock.sentinel.context)
mock_get.assert_called_with(mock.sentinel.context,
self.compute.host, expected_attrs=[],
use_slave=True)
mock_spawn.assert_called_once_with(mock.ANY, instance)
def _get_sync_instance(self, power_state, vm_state, task_state=None,
shutdown_terminate=False):
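        # Build a minimal instance for the power-state sync tests, with
        # refresh() and save() stubbed out via mox.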
instance = objects.Instance()
instance.uuid = 'fake-uuid'
instance.power_state = power_state
instance.vm_state = vm_state
instance.host = self.compute.host
instance.task_state = task_state
instance.shutdown_terminate = shutdown_terminate
self.mox.StubOutWithMock(instance, 'refresh')
self.mox.StubOutWithMock(instance, 'save')
return instance
def test_sync_instance_power_state_match(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.RUNNING)
def test_sync_instance_power_state_running_stopped(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
instance.save()
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.SHUTDOWN)
self.assertEqual(instance.power_state, power_state.SHUTDOWN)
def _test_sync_to_stop(self, power_state, vm_state, driver_power_state,
stop=True, force=False, shutdown_terminate=False):
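        # Common helper: record the compute API call (stop, force_stop or
        # delete) that _sync_instance_power_state is expected to make for
        # the given combination of power/vm states.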
instance = self._get_sync_instance(
power_state, vm_state, shutdown_terminate=shutdown_terminate)
instance.refresh(use_slave=False)
instance.save()
self.mox.StubOutWithMock(self.compute.compute_api, 'stop')
self.mox.StubOutWithMock(self.compute.compute_api, 'delete')
self.mox.StubOutWithMock(self.compute.compute_api, 'force_stop')
if shutdown_terminate:
self.compute.compute_api.delete(self.context, instance)
elif stop:
if force:
self.compute.compute_api.force_stop(self.context, instance)
else:
self.compute.compute_api.stop(self.context, instance)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
driver_power_state)
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_sync_instance_power_state_to_stop(self):
for ps in (power_state.SHUTDOWN, power_state.CRASHED,
power_state.SUSPENDED):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps)
for ps in (power_state.SHUTDOWN, power_state.CRASHED):
self._test_sync_to_stop(power_state.PAUSED, vm_states.PAUSED, ps,
force=True)
self._test_sync_to_stop(power_state.SHUTDOWN, vm_states.STOPPED,
power_state.RUNNING, force=True)
def test_sync_instance_power_state_to_terminate(self):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE,
power_state.SHUTDOWN,
force=False, shutdown_terminate=True)
def test_sync_instance_power_state_to_no_stop(self):
for ps in (power_state.PAUSED, power_state.NOSTATE):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps,
stop=False)
for vs in (vm_states.SOFT_DELETED, vm_states.DELETED):
for ps in (power_state.NOSTATE, power_state.SHUTDOWN):
self._test_sync_to_stop(power_state.RUNNING, vs, ps,
stop=False)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_pending_task(
self, mock_sync_power_state):
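        # A pending task state short-circuits the sync: neither the driver
        # nor the power-state sync should be consulted.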
with mock.patch.object(self.compute.driver,
'get_info') as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid',
task_state=task_states.POWERING_OFF)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
self.assertFalse(mock_get_info.called)
self.assertFalse(mock_sync_power_state.called)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_not_found_driver(
self, mock_sync_power_state):
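        # If the driver no longer knows about the instance, the power state
        # should be synced as NOSTATE.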
error = exception.InstanceNotFound(instance_id=1)
with mock.patch.object(self.compute.driver,
'get_info', side_effect=error) as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid', task_state=None)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
mock_get_info.assert_called_once_with(db_instance)
mock_sync_power_state.assert_called_once_with(self.context,
db_instance,
power_state.NOSTATE,
use_slave=True)
def test_run_pending_deletes(self):
self.flags(instance_delete_interval=10)
class FakeInstance(object):
def __init__(self, uuid, name, smd):
self.uuid = uuid
self.name = name
self.system_metadata = smd
self.cleaned = False
def __getitem__(self, name):
return getattr(self, name)
def save(self):
pass
a = FakeInstance('123', 'apple', {'clean_attempts': '100'})
b = FakeInstance('456', 'orange', {'clean_attempts': '3'})
c = FakeInstance('789', 'banana', {})
self.mox.StubOutWithMock(objects.InstanceList,
'get_by_filters')
objects.InstanceList.get_by_filters(
{'read_deleted': 'yes'},
{'deleted': True, 'soft_deleted': False, 'host': 'fake-mini',
'cleaned': False},
expected_attrs=['info_cache', 'security_groups',
'system_metadata'],
use_slave=True).AndReturn([a, b, c])
self.mox.StubOutWithMock(self.compute.driver, 'delete_instance_files')
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(True)
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(False)
self.mox.ReplayAll()
self.compute._run_pending_deletes({})
self.assertFalse(a.cleaned)
self.assertEqual('100', a.system_metadata['clean_attempts'])
self.assertTrue(b.cleaned)
self.assertEqual('4', b.system_metadata['clean_attempts'])
self.assertFalse(c.cleaned)
self.assertEqual('1', c.system_metadata['clean_attempts'])
@mock.patch.object(objects.Migration, 'obj_as_admin')
@mock.patch.object(objects.Migration, 'save')
@mock.patch.object(objects.MigrationList, 'get_by_filters')
@mock.patch.object(objects.InstanceList, 'get_by_filters')
def _test_cleanup_incomplete_migrations(self, inst_host,
mock_inst_get_by_filters,
mock_migration_get_by_filters,
mock_save, mock_obj_as_admin):
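        # Common helper: build two instances and two migrations in 'error'
        # status, run the cleanup task and verify that migrations belonging
        # to instances not owned by this host end up marked as 'failed'.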
def fake_inst(context, uuid, host):
inst = objects.Instance(context)
inst.uuid = uuid
inst.host = host
return inst
def fake_migration(uuid, status, inst_uuid, src_host, dest_host):
migration = objects.Migration()
migration.uuid = uuid
migration.status = status
migration.instance_uuid = inst_uuid
migration.source_compute = src_host
migration.dest_compute = dest_host
return migration
fake_instances = [fake_inst(self.context, '111', inst_host),
fake_inst(self.context, '222', inst_host)]
fake_migrations = [fake_migration('123', 'error', '111',
'fake-host', 'fake-mini'),
fake_migration('456', 'error', '222',
'fake-host', 'fake-mini')]
mock_migration_get_by_filters.return_value = fake_migrations
mock_inst_get_by_filters.return_value = fake_instances
with mock.patch.object(self.compute.driver, 'delete_instance_files'):
self.compute._cleanup_incomplete_migrations(self.context)
        # Ensure that the migration status is set to 'failed' after instance
        # file deletion for instances whose instance.host is not the same as
        # the compute host where this periodic task is running.
for inst in fake_instances:
if inst.host != CONF.host:
for mig in fake_migrations:
if inst.uuid == mig.instance_uuid:
self.assertEqual('failed', mig.status)
def test_cleanup_incomplete_migrations_dest_node(self):
"""Test to ensure instance files are deleted from destination node.
If instance gets deleted during resizing/revert-resizing operation,
in that case instance files gets deleted from instance.host (source
host here), but there is possibility that instance files could be
present on destination node.
This test ensures that `_cleanup_incomplete_migration` periodic
task deletes orphaned instance files from destination compute node.
"""
self.flags(host='fake-mini')
self._test_cleanup_incomplete_migrations('fake-host')
def test_cleanup_incomplete_migrations_source_node(self):
"""Test to ensure instance files are deleted from source node.
If instance gets deleted during resizing/revert-resizing operation,
in that case instance files gets deleted from instance.host (dest
host here), but there is possibility that instance files could be
present on source node.
This test ensures that `_cleanup_incomplete_migration` periodic
task deletes orphaned instance files from source compute node.
"""
self.flags(host='fake-host')
self._test_cleanup_incomplete_migrations('fake-mini')
def test_attach_interface_failure(self):
# Test that the fault methods are invoked when an attach fails
db_instance = fake_instance.fake_db_instance()
f_instance = objects.Instance._from_db_object(self.context,
objects.Instance(),
db_instance)
e = exception.InterfaceAttachFailed(instance_uuid=f_instance.uuid)
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute.network_api,
'allocate_port_for_instance',
side_effect=e)
@mock.patch.object(self.compute, '_instance_update',
side_effect=lambda *a, **k: {})
def do_test(update, meth, add_fault):
self.assertRaises(exception.InterfaceAttachFailed,
self.compute.attach_interface,
self.context, f_instance, 'net_id', 'port_id',
None)
add_fault.assert_has_calls([
mock.call(self.context, f_instance, e,
mock.ANY)])
do_test()
def test_detach_interface_failure(self):
# Test that the fault methods are invoked when a detach fails
# Build test data that will cause a PortNotFound exception
f_instance = mock.MagicMock()
f_instance.info_cache = mock.MagicMock()
f_instance.info_cache.network_info = []
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute, '_set_instance_obj_error_state')
def do_test(meth, add_fault):
self.assertRaises(exception.PortNotFound,
self.compute.detach_interface,
self.context, f_instance, 'port_id')
add_fault.assert_has_calls(
[mock.call(self.context, f_instance, mock.ANY, mock.ANY)])
do_test()
def test_swap_volume_volume_api_usage(self):
# This test ensures that volume_id arguments are passed to volume_api
# and that volume states are OK
volumes = {}
old_volume_id = uuidutils.generate_uuid()
volumes[old_volume_id] = {'id': old_volume_id,
'display_name': 'old_volume',
'status': 'detaching',
'size': 1}
new_volume_id = uuidutils.generate_uuid()
volumes[new_volume_id] = {'id': new_volume_id,
'display_name': 'new_volume',
'status': 'available',
'size': 2}
def fake_vol_api_roll_detaching(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'detaching':
volumes[volume_id]['status'] = 'in-use'
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(
{'device_name': '/dev/vdb', 'source_type': 'volume',
'destination_type': 'volume', 'instance_uuid': 'fake',
'connection_info': '{"foo": "bar"}'})
def fake_vol_api_func(context, volume, *args):
self.assertTrue(uuidutils.is_uuid_like(volume))
return {}
def fake_vol_get(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
return volumes[volume_id]
def fake_vol_unreserve(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'attaching':
volumes[volume_id]['status'] = 'available'
def fake_vol_migrate_volume_completion(context, old_volume_id,
new_volume_id, error=False):
self.assertTrue(uuidutils.is_uuid_like(old_volume_id))
self.assertTrue(uuidutils.is_uuid_like(new_volume_id))
volumes[old_volume_id]['status'] = 'in-use'
return {'save_volume_id': new_volume_id}
def fake_func_exc(*args, **kwargs):
raise AttributeError # Random exception
def fake_swap_volume(old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
self.assertEqual(resize_to, 2)
def fake_block_device_mapping_update(ctxt, id, updates, legacy):
self.assertEqual(2, updates['volume_size'])
return fake_bdm
self.stubs.Set(self.compute.volume_api, 'roll_detaching',
fake_vol_api_roll_detaching)
self.stubs.Set(self.compute.volume_api, 'get', fake_vol_get)
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_vol_api_func)
self.stubs.Set(self.compute.volume_api, 'unreserve_volume',
fake_vol_unreserve)
self.stubs.Set(self.compute.volume_api, 'terminate_connection',
fake_vol_api_func)
self.stubs.Set(db, 'block_device_mapping_get_by_volume_id',
lambda x, y, z: fake_bdm)
self.stubs.Set(self.compute.driver, 'get_volume_connector',
lambda x: {})
self.stubs.Set(self.compute.driver, 'swap_volume',
fake_swap_volume)
self.stubs.Set(self.compute.volume_api, 'migrate_volume_completion',
fake_vol_migrate_volume_completion)
self.stubs.Set(db, 'block_device_mapping_update',
fake_block_device_mapping_update)
self.stubs.Set(db,
'instance_fault_create',
lambda x, y:
test_instance_fault.fake_faults['fake-uuid'][0])
self.stubs.Set(self.compute, '_instance_update',
lambda c, u, **k: {})
# Good path
self.compute.swap_volume(self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
# Error paths
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.driver, 'swap_volume', fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
@mock.patch.object(compute_utils, 'EventReporter')
def test_check_can_live_migrate_source(self, event_mock):
is_volume_backed = 'volume_backed'
dest_check_data = dict(foo='bar')
db_instance = fake_instance.fake_db_instance()
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
expected_dest_check_data = dict(dest_check_data,
is_volume_backed=is_volume_backed)
self.mox.StubOutWithMock(self.compute.compute_api,
'is_volume_backed_instance')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_source')
self.compute.compute_api.is_volume_backed_instance(
self.context, instance).AndReturn(is_volume_backed)
self.compute._get_instance_block_device_info(
self.context, instance, refresh_conn_info=True
).AndReturn({'block_device_mapping': 'fake'})
self.compute.driver.check_can_live_migrate_source(
self.context, instance, expected_dest_check_data,
{'block_device_mapping': 'fake'})
self.mox.ReplayAll()
self.compute.check_can_live_migrate_source(
self.context, instance=instance,
dest_check_data=dest_check_data)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_source',
instance.uuid)
@mock.patch.object(compute_utils, 'EventReporter')
def _test_check_can_live_migrate_destination(self, event_mock,
do_raise=False,
has_mig_data=False):
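        # Common helper: drive check_can_live_migrate_destination through
        # the driver and source-host checks, optionally raising from the
        # source check, and verify the returned migrate data and the
        # cleanup call.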
db_instance = fake_instance.fake_db_instance(host='fake-host')
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
instance.host = 'fake-host'
block_migration = 'block_migration'
disk_over_commit = 'disk_over_commit'
src_info = 'src_info'
dest_info = 'dest_info'
dest_check_data = dict(foo='bar')
mig_data = dict(cow='moo')
expected_result = dict(mig_data)
if has_mig_data:
dest_check_data['migrate_data'] = dict(cat='meow')
expected_result.update(cat='meow')
self.mox.StubOutWithMock(self.compute, '_get_compute_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
self.compute._get_compute_info(self.context,
'fake-host').AndReturn(src_info)
self.compute._get_compute_info(self.context,
CONF.host).AndReturn(dest_info)
self.compute.driver.check_can_live_migrate_destination(
self.context, instance, src_info, dest_info,
block_migration, disk_over_commit).AndReturn(dest_check_data)
mock_meth = self.compute.compute_rpcapi.check_can_live_migrate_source(
self.context, instance, dest_check_data)
if do_raise:
mock_meth.AndRaise(test.TestingException())
self.mox.StubOutWithMock(db, 'instance_fault_create')
db.instance_fault_create(
self.context, mox.IgnoreArg()).AndReturn(
test_instance_fault.fake_faults['fake-uuid'][0])
else:
mock_meth.AndReturn(mig_data)
self.compute.driver.check_can_live_migrate_destination_cleanup(
self.context, dest_check_data)
self.mox.ReplayAll()
result = self.compute.check_can_live_migrate_destination(
self.context, instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
self.assertEqual(expected_result, result)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_destination',
instance.uuid)
def test_check_can_live_migrate_destination_success(self):
self._test_check_can_live_migrate_destination()
def test_check_can_live_migrate_destination_success_w_mig_data(self):
self._test_check_can_live_migrate_destination(has_mig_data=True)
def test_check_can_live_migrate_destination_fail(self):
self.assertRaises(
test.TestingException,
self._test_check_can_live_migrate_destination,
do_raise=True)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_prepare_for_instance_event(self, lock_name_mock):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_pop_instance_event(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
result = self.compute.instance_events.pop_instance_event(inst_obj,
event_obj)
self.assertEqual(result, event)
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_clear_events_for_instance(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.clear_events_for_instance(
inst_obj)
self.assertEqual(result, {'test-event': event})
lock_name_mock.assert_called_once_with(inst_obj)
def test_instance_events_lock_name(self):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events._lock_name(inst_obj)
self.assertEqual(result, 'foo-events')
def test_prepare_for_instance_event_again(self):
inst_obj = objects.Instance(uuid='foo')
self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
# A second attempt will avoid creating a new list; make sure we
# get the current list
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
def test_process_instance_event(self):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
self.compute._process_instance_event(inst_obj, event_obj)
self.assertTrue(event.ready())
self.assertEqual(event_obj, event.wait())
self.assertEqual({}, self.compute.instance_events._events)
def test_process_instance_vif_deleted_event(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid')
inst_obj = objects.Instance(id=3, uuid='uuid', info_cache=info_cache)
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface')
def do_test(detach_interface, update_instance_cache_with_nw_info):
self.compute._process_instance_vif_deleted_event(self.context,
inst_obj,
vif2['id'])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
inst_obj,
nw_info=[vif1])
detach_interface.assert_called_once_with(inst_obj, vif2)
do_test()
def test_external_instance_event(self):
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2'),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid2',
tag='tag2'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid3',
tag='tag3')]
@mock.patch.object(self.compute, '_process_instance_vif_deleted_event')
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
_process_instance_vif_deleted_event):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
_process_instance_event.assert_called_once_with(instances[1],
events[1])
_process_instance_vif_deleted_event.assert_called_once_with(
self.context, instances[2], events[2].tag)
do_test()
def test_external_instance_event_with_exception(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid2')
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2', info_cache=info_cache),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid2',
tag='2'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid3',
tag='tag3')]
        # Make sure all three events are handled despite the exceptions
        # raised while processing events 1 and 2
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface',
side_effect=exception.NovaException)
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
side_effect=exception.InstanceInfoCacheNotFound(
instance_uuid='uuid1'))
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
detach_interface, update_instance_cache_with_nw_info):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
instances[1],
nw_info=[vif1])
detach_interface.assert_called_once_with(instances[1], vif2)
_process_instance_event.assert_called_once_with(instances[2],
events[2])
do_test()
def test_cancel_all_events(self):
inst = objects.Instance(uuid='uuid')
fake_eventlet_event = mock.MagicMock()
self.compute.instance_events._events = {
inst.uuid: {
'network-vif-plugged-bar': fake_eventlet_event,
}
}
self.compute.instance_events.cancel_all_events()
self.assertTrue(fake_eventlet_event.send.called)
event = fake_eventlet_event.send.call_args_list[0][0][0]
self.assertEqual('network-vif-plugged', event.name)
self.assertEqual('bar', event.tag)
self.assertEqual('failed', event.status)
def test_cleanup_cancels_all_events(self):
with mock.patch.object(self.compute, 'instance_events') as mock_ev:
self.compute.cleanup_host()
mock_ev.cancel_all_events.assert_called_once_with()
def test_cleanup_blocks_new_events(self):
instance = objects.Instance(uuid='uuid')
self.compute.instance_events.cancel_all_events()
callback = mock.MagicMock()
body = mock.MagicMock()
with self.compute.virtapi.wait_for_instance_event(
instance, ['network-vif-plugged-bar'],
error_callback=callback):
body()
self.assertTrue(body.called)
callback.assert_called_once_with('network-vif-plugged-bar', instance)
def test_pop_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
event = mock.MagicMock()
self.compute.instance_events._events = None
self.assertIsNone(
self.compute.instance_events.pop_instance_event(inst, event))
def test_clear_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
self.compute.instance_events._events = None
self.assertEqual(
self.compute.instance_events.clear_events_for_instance(inst), {})
def test_retry_reboot_pending_soft(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'SOFT')
def test_retry_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_soft_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_no_reboot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = 'bar'
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.objects.Instance._from_db_object')
def test_remove_volume_connection(self, inst_from_db, detach, bdm_get):
bdm = mock.sentinel.bdm
inst_obj = mock.sentinel.inst_obj
bdm_get.return_value = bdm
inst_from_db.return_value = inst_obj
with mock.patch.object(self.compute, 'volume_api'):
self.compute.remove_volume_connection(self.context, 'vol',
inst_obj)
detach.assert_called_once_with(self.context, inst_obj, bdm)
def test_detach_volume(self):
self._test_detach_volume()
def test_detach_volume_not_destroy_bdm(self):
self._test_detach_volume(destroy_bdm=False)
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.compute.manager.ComputeManager.'
'_notify_about_instance_usage')
def _test_detach_volume(self, notify_inst_usage, detach,
bdm_get, destroy_bdm=True):
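        # Common helper: detach a volume and verify the driver detach, the
        # Cinder terminate_connection/detach calls, the usage notification
        # and (optionally) the BDM destruction.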
volume_id = '123'
inst_obj = mock.sentinel.inst_obj
bdm = mock.MagicMock(spec=objects.BlockDeviceMapping)
bdm.device_name = 'vdb'
bdm_get.return_value = bdm
with mock.patch.object(self.compute, 'volume_api') as volume_api:
with mock.patch.object(self.compute, 'driver') as driver:
connector_sentinel = mock.sentinel.connector
driver.get_volume_connector.return_value = connector_sentinel
self.compute._detach_volume(self.context, volume_id,
inst_obj,
destroy_bdm=destroy_bdm)
detach.assert_called_once_with(self.context, inst_obj, bdm)
driver.get_volume_connector.assert_called_once_with(inst_obj)
volume_api.terminate_connection.assert_called_once_with(
self.context, volume_id, connector_sentinel)
volume_api.detach.assert_called_once_with(mock.ANY, volume_id)
notify_inst_usage.assert_called_once_with(
self.context, inst_obj, "volume.detach",
extra_usage_info={'volume_id': volume_id}
)
if destroy_bdm:
bdm.destroy.assert_called_once_with()
else:
self.assertFalse(bdm.destroy.called)
def _test_rescue(self, clean_shutdown=True):
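        # Common helper: rescue an ACTIVE instance and verify the resulting
        # instance fields, the notifications and the power-off and driver
        # rescue calls.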
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE)
fake_nw_info = network_model.NetworkInfo()
rescue_image_meta = {'id': 'fake', 'name': 'fake'}
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_get_rescue_image',
return_value=rescue_image_meta),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_power_off_instance'),
mock.patch.object(self.compute.driver, 'rescue'),
mock.patch.object(compute_utils, 'notify_usage_exists'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
get_rescue_image, notify_instance_usage, power_off_instance,
driver_rescue, notify_usage_exists, get_power_state, instance_save
):
self.compute.rescue_instance(
self.context, instance, rescue_password='verybadpass',
rescue_image_ref=None, clean_shutdown=clean_shutdown)
# assert the field values on the instance object
self.assertEqual(vm_states.RESCUED, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
self.assertIsNotNone(instance.launched_at)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
get_rescue_image.assert_called_once_with(
self.context, instance, None)
extra_usage_info = {'rescue_image_name': 'fake'}
notify_calls = [
mock.call(self.context, instance, "rescue.start",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info),
mock.call(self.context, instance, "rescue.end",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
power_off_instance.assert_called_once_with(self.context, instance,
clean_shutdown)
driver_rescue.assert_called_once_with(
self.context, instance, fake_nw_info, rescue_image_meta,
'verybadpass')
notify_usage_exists.assert_called_once_with(self.compute.notifier,
self.context, instance, current_period=True)
instance_save.assert_called_once_with(
expected_task_state=task_states.RESCUING)
def test_rescue(self):
self._test_rescue()
def test_rescue_forced_shutdown(self):
self._test_rescue(clean_shutdown=False)
def test_unrescue(self):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.RESCUED)
fake_nw_info = network_model.NetworkInfo()
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute.driver, 'unrescue'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
notify_instance_usage, driver_unrescue, get_power_state,
instance_save
):
self.compute.unrescue_instance(self.context, instance)
# assert the field values on the instance object
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
notify_calls = [
mock.call(self.context, instance, "unrescue.start",
network_info=fake_nw_info),
mock.call(self.context, instance, "unrescue.end",
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
driver_unrescue.assert_called_once_with(instance, fake_nw_info)
instance_save.assert_called_once_with(
expected_task_state=task_states.UNRESCUING)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.Instance, 'save')
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
def test_set_admin_password(self, gen_password_mock,
instance_save_mock, power_state_mock):
# Ensure instance can have its admin password set.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password')
def do_test(driver_mock, elevated_mock):
# call the manager method
self.compute.set_admin_password(self.context, instance, None)
# make our assertions
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
power_state_mock.assert_called_once_with(self.context, instance)
driver_mock.assert_called_once_with(instance, 'fake-pass')
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
do_test()
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.NOSTATE)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def test_set_admin_password_bad_state(self, add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock):
# Test setting password while instance is rebuilding.
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self.context, 'elevated',
return_value=self.context):
# call the manager method
self.assertRaises(exception.InstancePasswordSetFailed,
self.compute.set_admin_password,
self.context, instance, None)
# make our assertions
power_state_mock.assert_called_once_with(self.context, instance)
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def _do_test_set_admin_password_driver_error(self, exc,
expected_vm_state,
expected_task_state,
expected_exception,
add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock,
gen_password_mock):
# Ensure expected exception is raised if set_admin_password fails.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password',
side_effect=exc)
def do_test(driver_mock, elevated_mock):
            # An error raised by the driver must not reveal internal
            # information, so a new error is raised in its place
self.assertRaises(expected_exception,
self.compute.set_admin_password,
self.context,
instance=instance,
new_pass=None)
if expected_exception == NotImplementedError:
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
else:
# setting the instance to error state
instance_save_mock.assert_called_once_with()
self.assertEqual(expected_vm_state, instance.vm_state)
# check revert_task_state decorator
update_mock.assert_called_once_with(
self.context, instance, task_state=expected_task_state)
# check wrap_instance_fault decorator
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
do_test()
def test_set_admin_password_driver_not_authorized(self):
# Ensure expected exception is raised if set_admin_password not
# authorized.
exc = exception.Forbidden('Internal error')
expected_exception = exception.InstancePasswordSetFailed
self._do_test_set_admin_password_driver_error(
exc, vm_states.ERROR, None, expected_exception)
def test_set_admin_password_driver_not_implemented(self):
# Ensure expected exception is raised if set_admin_password not
# implemented by driver.
exc = NotImplementedError()
expected_exception = NotImplementedError
self._do_test_set_admin_password_driver_error(
exc, vm_states.ACTIVE, None, expected_exception)
def test_destroy_evacuated_instances(self):
our_host = self.compute.host
instance_1 = objects.Instance(self.context)
instance_1.uuid = 'foo'
instance_1.task_state = None
instance_1.vm_state = vm_states.ACTIVE
instance_1.host = 'not-' + our_host
instance_2 = objects.Instance(self.context)
instance_2.uuid = 'bar'
instance_2.task_state = None
instance_2.vm_state = vm_states.ACTIVE
instance_2.host = 'not-' + our_host
# Only instance 2 has a migration record
migration = objects.Migration(instance_uuid=instance_2.uuid)
with contextlib.nested(
mock.patch.object(self.compute, '_get_instances_on_driver',
return_value=[instance_1,
instance_2]),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.compute, '_get_instance_block_device_info',
return_value={}),
mock.patch.object(self.compute, '_is_instance_storage_shared',
return_value=False),
mock.patch.object(self.compute.driver, 'destroy'),
mock.patch('nova.objects.MigrationList.get_by_filters'),
mock.patch('nova.objects.Migration.save')
) as (_get_instances_on_driver, get_instance_nw_info,
_get_instance_block_device_info, _is_instance_storage_shared,
destroy, migration_list, migration_save):
migration_list.return_value = [migration]
self.compute._destroy_evacuated_instances(self.context)
# Only instance 2 should be deleted. Instance 1 is still running
# here, but no migration from our host exists, so ignore it
destroy.assert_called_once_with(self.context, instance_2, None,
{}, True)
@mock.patch('nova.compute.manager.ComputeManager.'
'_destroy_evacuated_instances')
@mock.patch('nova.compute.manager.LOG')
def test_init_host_foreign_instance(self, mock_log, mock_destroy):
inst = mock.MagicMock()
inst.host = self.compute.host + '-alt'
self.compute._init_instance(mock.sentinel.context, inst)
self.assertFalse(inst.save.called)
self.assertTrue(mock_log.warning.called)
msg = mock_log.warning.call_args_list[0]
self.assertIn('appears to not be owned by this host', msg[0][0])
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_not_implemented_err(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, instance_state=vm_states.STOPPED):
raise NotImplementedError('test')
self.assertRaises(NotImplementedError, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance,
vm_state=vm_states.STOPPED, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_inst_fault_rollback(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(self.context,
instance):
raise exception.InstanceFaultRollback(
inner_exception=test.TestingException('test'))
self.assertRaises(test.TestingException, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance,
vm_state=vm_states.ACTIVE, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager.'
'_set_instance_obj_error_state')
def test_error_out_instance_on_exception_unknown_with_quotas(self,
set_error):
instance = fake_instance.fake_instance_obj(self.context)
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, quotas):
raise test.TestingException('test')
self.assertRaises(test.TestingException, do_test)
self.assertEqual(1, len(quotas.method_calls))
self.assertEqual(mock.call.rollback(), quotas.method_calls[0])
set_error.assert_called_once_with(self.context, instance)
def test_cleanup_volumes(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_do_not_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': False})
bdm_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_do_not_delete_dict, bdm_delete_dict])
with mock.patch.object(self.compute.volume_api,
'delete') as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms)
volume_delete.assert_called_once_with(self.context,
bdms[1].volume_id)
def test_cleanup_volumes_exception_do_not_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms,
raise_exc=False)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_cleanup_volumes_exception_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.assertRaises(test.TestingException,
self.compute._cleanup_volumes, self.context, instance.uuid,
bdms)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_stop_instance_task_state_none_power_state_shutdown(self):
        # Tests that stop_instance does not fail when the instance
        # power_state is SHUTDOWN and the task_state is None.
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE,
task_state=None, power_state=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_notify_about_instance_usage')
@mock.patch.object(self.compute, '_power_off_instance')
@mock.patch.object(instance, 'save')
def do_test(save_mock, power_off_mock, notify_mock, get_state_mock):
# run the code
self.compute.stop_instance(self.context, instance, True)
# assert the calls
self.assertEqual(2, get_state_mock.call_count)
notify_mock.assert_has_calls([
mock.call(self.context, instance, 'power_off.start'),
mock.call(self.context, instance, 'power_off.end')
])
power_off_mock.assert_called_once_with(
self.context, instance, True)
save_mock.assert_called_once_with(
expected_task_state=[task_states.POWERING_OFF, None])
self.assertEqual(power_state.SHUTDOWN, instance.power_state)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.STOPPED, instance.vm_state)
do_test()
def test_reset_network_driver_not_implemented(self):
instance = fake_instance.fake_instance_obj(self.context)
@mock.patch.object(self.compute.driver, 'reset_network',
side_effect=NotImplementedError())
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def do_test(mock_add_fault, mock_reset):
self.assertRaises(messaging.ExpectedException,
self.compute.reset_network,
self.context,
instance)
self.compute = utils.ExceptionHelper(self.compute)
self.assertRaises(NotImplementedError,
self.compute.reset_network,
self.context,
instance)
do_test()
def test_rebuild_default_impl(self):
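        # The default rebuild implementation should power off the instance
        # before block devices are detached, destroy it only afterwards, and
        # pass the re-attached block device info through to spawn.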
def _detach(context, bdms):
            # NOTE(rpodolyaka): check that the instance has been powered off
            # by the time we detach block devices; the exact call arguments
            # are checked below
self.assertTrue(mock_power_off.called)
self.assertFalse(mock_destroy.called)
def _attach(context, instance, bdms, do_check_attach=True):
return {'block_device_mapping': 'shared_block_storage'}
def _spawn(context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self.assertEqual(block_device_info['block_device_mapping'],
'shared_block_storage')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'destroy',
return_value=None),
mock.patch.object(self.compute.driver, 'spawn',
side_effect=_spawn),
mock.patch.object(objects.Instance, 'save',
return_value=None),
mock.patch.object(self.compute, '_power_off_instance',
return_value=None)
        ) as (
mock_destroy,
mock_spawn,
mock_save,
mock_power_off
):
instance = fake_instance.fake_instance_obj(self.context)
instance.task_state = task_states.REBUILDING
instance.save(expected_task_state=[task_states.REBUILDING])
self.compute._rebuild_default_impl(self.context,
instance,
None,
[],
admin_password='new_pass',
bdms=[],
detach_block_devices=_detach,
attach_block_devices=_attach,
network_info=None,
recreate=False,
block_device_info=None,
preserve_ephemeral=False)
self.assertTrue(mock_save.called)
self.assertTrue(mock_spawn.called)
mock_destroy.assert_called_once_with(
self.context, instance,
network_info=None, block_device_info=None)
mock_power_off.assert_called_once_with(
self.context, instance, clean_shutdown=True)
@mock.patch.object(utils, 'last_completed_audit_period',
return_value=(0, 0))
@mock.patch.object(time, 'time', side_effect=[10, 20, 21])
@mock.patch.object(objects.InstanceList, 'get_by_host', return_value=[])
@mock.patch.object(objects.BandwidthUsage, 'get_by_instance_uuid_and_mac')
@mock.patch.object(db, 'bw_usage_update')
def test_poll_bandwidth_usage(self, bw_usage_update, get_by_uuid_mac,
get_by_host, time, last_completed_audit):
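        # The stored usage (bw_in=3, bw_out=4) plus the driver counters
        # (1, 2) should produce totals of 4 and 6 in the bw_usage_update
        # call asserted below.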
bw_counters = [{'uuid': 'fake-uuid', 'mac_address': 'fake-mac',
'bw_in': 1, 'bw_out': 2}]
usage = objects.BandwidthUsage()
usage.bw_in = 3
usage.bw_out = 4
usage.last_ctr_in = 0
usage.last_ctr_out = 0
self.flags(bandwidth_poll_interval=1)
get_by_uuid_mac.return_value = usage
_time = timeutils.utcnow()
bw_usage_update.return_value = {'uuid': '', 'mac': '',
'start_period': _time, 'last_refreshed': _time, 'bw_in': 0,
'bw_out': 0, 'last_ctr_in': 0, 'last_ctr_out': 0, 'deleted': 0,
'created_at': _time, 'updated_at': _time, 'deleted_at': _time}
with mock.patch.object(self.compute.driver,
'get_all_bw_counters', return_value=bw_counters):
self.compute._poll_bandwidth_usage(self.context)
get_by_uuid_mac.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', start_period=0, use_slave=True)
        # NOTE(sdague): bw_usage_update happens at some time in the future,
        # so the exact value of last_refreshed is irrelevant.
bw_usage_update.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', 0, 4, 6, 1, 2,
last_refreshed=mock.ANY,
update_cells=False)
def test_reverts_task_state_instance_not_found(self):
        # Tests that the reverts_task_state decorator in the compute manager
        # does not log a traceback when an InstanceNotFound is raised.
instance = objects.Instance(uuid='fake')
instance_update_mock = mock.Mock(
side_effect=exception.InstanceNotFound(instance_id=instance.uuid))
self.compute._instance_update = instance_update_mock
log_mock = mock.Mock()
manager.LOG = log_mock
@manager.reverts_task_state
def fake_function(self, context, instance):
raise test.TestingException()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance)
self.assertFalse(log_mock.called)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_update_scheduler_instance_info(self, mock_update):
instance = objects.Instance(uuid='fake')
self.compute._update_scheduler_instance_info(self.context, instance)
self.assertEqual(mock_update.call_count, 1)
args = mock_update.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
        # Send a single instance; check that the method converts it to an
        # InstanceList
self.assertIsInstance(args[2], objects.InstanceList)
self.assertEqual(args[2].objects[0], instance)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
def test_delete_scheduler_instance_info(self, mock_delete):
self.compute._delete_scheduler_instance_info(self.context,
mock.sentinel.inst_uuid)
self.assertEqual(mock_delete.call_count, 1)
args = mock_delete.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
self.assertEqual(args[2], mock.sentinel.inst_uuid)
@mock.patch.object(nova.context.RequestContext, 'elevated')
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
def test_sync_scheduler_instance_info(self, mock_sync, mock_get_by_host,
mock_elevated):
inst1 = objects.Instance(uuid='fake1')
inst2 = objects.Instance(uuid='fake2')
inst3 = objects.Instance(uuid='fake3')
exp_uuids = [inst.uuid for inst in [inst1, inst2, inst3]]
mock_get_by_host.return_value = objects.InstanceList(
objects=[inst1, inst2, inst3])
fake_elevated = context.get_admin_context()
mock_elevated.return_value = fake_elevated
self.compute._sync_scheduler_instance_info(self.context)
mock_get_by_host.assert_called_once_with(
fake_elevated, self.compute.host, expected_attrs=[],
use_slave=True)
mock_sync.assert_called_once_with(fake_elevated, self.compute.host,
exp_uuids)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_scheduler_info_updates_off(self, mock_update, mock_delete,
mock_sync):
mgr = self.compute
mgr.send_instance_updates = False
mgr._update_scheduler_instance_info(self.context,
mock.sentinel.instance)
mgr._delete_scheduler_instance_info(self.context,
mock.sentinel.instance_uuid)
mgr._sync_scheduler_instance_info(self.context)
# None of the calls should have been made
self.assertFalse(mock_update.called)
self.assertFalse(mock_delete.called)
self.assertFalse(mock_sync.called)
def test_refresh_instance_security_rules_takes_non_object(self):
inst = fake_instance.fake_db_instance()
with mock.patch.object(self.compute.driver,
'refresh_instance_security_rules') as mock_r:
self.compute.refresh_instance_security_rules(self.context, inst)
self.assertIsInstance(mock_r.call_args_list[0][0][0],
objects.Instance)
def test_set_instance_obj_error_state_with_clean_task_state(self):
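        # clean_task_state=True should clear the task_state in addition to
        # setting the vm_state to ERROR.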
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.BUILDING, task_state=task_states.SPAWNING)
with mock.patch.object(instance, 'save'):
self.compute._set_instance_obj_error_state(self.context, instance,
clean_task_state=True)
self.assertEqual(vm_states.ERROR, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_set_instance_obj_error_state_by_default(self):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.BUILDING, task_state=task_states.SPAWNING)
with mock.patch.object(instance, 'save'):
self.compute._set_instance_obj_error_state(self.context, instance)
self.assertEqual(vm_states.ERROR, instance.vm_state)
self.assertEqual(task_states.SPAWNING, instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_instance_update(self, mock_save):
instance = objects.Instance(task_state=task_states.SCHEDULING,
vm_state=vm_states.BUILDING)
updates = {'task_state': None, 'vm_state': vm_states.ERROR}
with mock.patch.object(self.compute,
'_update_resource_tracker') as mock_rt:
self.compute._instance_update(self.context, instance, **updates)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
mock_save.assert_called_once_with()
mock_rt.assert_called_once_with(self.context, instance)
class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerBuildInstanceTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.admin_pass = 'pass'
self.injected_files = []
self.image = {}
self.node = 'fake-node'
self.limits = {}
self.requested_networks = []
self.security_groups = []
self.block_device_mapping = []
self.filter_properties = {'retry': {'num_attempts': 1,
'hosts': [[self.compute.host,
'fake-node']]}}
def fake_network_info():
return network_model.NetworkInfo([{'address': '1.2.3.4'}])
self.network_info = network_model.NetworkInfoAsyncWrapper(
fake_network_info)
self.block_device_info = self.compute._prep_block_device(context,
self.instance, self.block_device_mapping)
# override tracker with a version that doesn't need the database:
fake_rt = fake_resource_tracker.FakeResourceTracker(self.compute.host,
self.compute.driver, self.node)
self.compute._resource_tracker_dict[self.node] = fake_rt
def _do_build_instance_update(self, reschedule_update=False):
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save(
expected_task_state=(task_states.SCHEDULING, None)).AndReturn(
self.instance)
if reschedule_update:
self.instance.save().AndReturn(self.instance)
def _build_and_run_instance_update(self):
self.mox.StubOutWithMock(self.instance, 'save')
self._build_resources_instance_update(stub=False)
self.instance.save(expected_task_state=
task_states.BLOCK_DEVICE_MAPPING).AndReturn(self.instance)
def _build_resources_instance_update(self, stub=True):
if stub:
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save().AndReturn(self.instance)
def _notify_about_instance_usage(self, event, stub=True, **kwargs):
if stub:
self.mox.StubOutWithMock(self.compute,
'_notify_about_instance_usage')
self.compute._notify_about_instance_usage(self.context, self.instance,
event, **kwargs)
def _instance_action_events(self):
self.mox.StubOutWithMock(objects.InstanceActionEvent, 'event_start')
self.mox.StubOutWithMock(objects.InstanceActionEvent,
'event_finish_with_failure')
objects.InstanceActionEvent.event_start(
self.context, self.instance.uuid, mox.IgnoreArg(),
want_result=False)
objects.InstanceActionEvent.event_finish_with_failure(
self.context, self.instance.uuid, mox.IgnoreArg(),
exc_val=mox.IgnoreArg(), exc_tb=mox.IgnoreArg(),
want_result=False)
@staticmethod
def _assert_build_instance_hook_called(mock_hooks, result):
# NOTE(coreywright): we want to test the return value of
# _do_build_and_run_instance, but it doesn't bubble all the way up, so
# mock the hooking, which allows us to test that too, though a little
# too intimately
mock_hooks.setdefault().run_post.assert_called_once_with(
'build_instance', result, mock.ANY, mock.ANY, f=None)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_called_with_proper_args(self, mock_spawn,
mock_hooks):
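        # Make spawn_n call its target synchronously so the build runs
        # inline within the test.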
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.ACTIVE)
    # This tests that, when an icehouse-compatible RPC call is sent to a
    # juno compute node, the NetworkRequest object can be loaded from a
    # three-item tuple.
@mock.patch('nova.objects.Instance.save')
@mock.patch('nova.compute.manager.ComputeManager._build_and_run_instance')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_with_icehouse_requested_network(
self, mock_spawn, mock_build_and_run, mock_save):
fake_server_actions.stub_out_action_events(self.stubs)
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
mock_save.return_value = self.instance
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=[objects.NetworkRequest(
network_id='fake_network_id',
address='10.0.0.1',
port_id='fake_port_id')],
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
requested_network = mock_build_and_run.call_args[0][5][0]
self.assertEqual('fake_network_id', requested_network.network_id)
self.assertEqual('10.0.0.1', str(requested_network.address))
self.assertEqual('fake_port_id', requested_network.port_id)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_abort_exception(self, mock_spawn, mock_hooks):
def fake_spawn(f, *args, **kwargs):
            # NOTE(danms): Simulate the detached nature of spawn so that we
            # can confirm the inner task contains the fault handling logic
try:
return f(*args, **kwargs)
except Exception:
pass
mock_spawn.side_effect = fake_spawn
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.BuildAbortException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute._set_instance_obj_error_state(self.context, self.instance,
clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.network_api.cleanup_instance_network_on_host(self.context,
self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def test_rescheduled_exception_with_non_ascii_exception(self):
exc = exception.NovaException(u's\xe9quence')
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
@mock.patch.object(manager.ComputeManager, '_build_and_run_instance')
@mock.patch.object(conductor_api.ComputeTaskAPI, 'build_instances')
@mock.patch.object(network_api.API, 'cleanup_instance_network_on_host')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.InstanceActionEvent, 'event_start')
@mock.patch.object(objects.InstanceActionEvent,
'event_finish_with_failure')
@mock.patch.object(virt_driver.ComputeDriver, 'macs_for_instance')
def test_rescheduled_exception_with_network_allocated(self,
mock_macs_for_instance, mock_event_finish,
mock_event_start, mock_ins_save, mock_cleanup_network,
mock_build_ins, mock_build_and_run):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
system_metadata={'network_allocated': 'True'},
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
mock_ins_save.return_value = instance
mock_macs_for_instance.return_value = []
mock_build_and_run.side_effect = exception.RescheduledException(
reason='', instance_uuid=self.instance.uuid)
self.compute._do_build_and_run_instance(self.context, instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
mock_build_and_run.assert_called_once_with(self.context,
instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
mock_cleanup_network.assert_called_once_with(
self.context, instance, self.compute.host)
mock_build_ins.assert_called_once_with(self.context,
[instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_without_retry(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
{}).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
compute_utils.add_instance_fault_from_exc(self.context, self.instance,
mox.IgnoreArg(), mox.IgnoreArg())
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute._set_instance_obj_error_state(self.context, self.instance,
clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties={},
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_do_not_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(True)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def _test_build_and_run_exceptions(self, exc, set_error=False,
cleanup_volumes=False, nil_out_host_and_node=False):
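        # Common helper: make _build_and_run_instance raise the given
        # exception and verify the cleanup steps selected by the keyword
        # flags are performed.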
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(exc)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
if cleanup_volumes:
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
if nil_out_host_and_node:
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.compute._nil_out_instance_obj_host_and_node(self.instance)
if set_error:
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.mox.StubOutWithMock(compute_utils,
'add_instance_fault_from_exc')
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_obj_error_state(self.context,
self.instance, clean_task_state=True)
self._instance_action_events()
self.mox.ReplayAll()
with contextlib.nested(
mock.patch('nova.utils.spawn_n'),
mock.patch('nova.hooks._HOOKS')
) as (
mock_spawn,
mock_hooks
):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
def test_build_and_run_notfound_exception(self):
self._test_build_and_run_exceptions(exception.InstanceNotFound(
instance_id=''))
def test_build_and_run_unexpecteddeleting_exception(self):
self._test_build_and_run_exceptions(
exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={}))
def test_build_and_run_buildabort_exception(self):
self._test_build_and_run_exceptions(
exception.BuildAbortException(instance_uuid='', reason=''),
set_error=True, cleanup_volumes=True, nil_out_host_and_node=True)
def test_build_and_run_unhandled_exception(self):
self._test_build_and_run_exceptions(test.TestingException(),
set_error=True, cleanup_volumes=True,
nil_out_host_and_node=True)
def test_instance_not_found(self):
exc = exception.InstanceNotFound(instance_id=1)
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.end',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_reschedule_on_exception(self):
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
exc = test.TestingException()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_spawn_network_alloc_failure(self):
# Because network allocation is asynchronous, failures may not present
# themselves until the virt spawn method is called.
self._test_build_and_run_spawn_exceptions(exception.NoMoreNetworks())
def test_build_and_run_no_more_fixedips_exception(self):
self._test_build_and_run_spawn_exceptions(
            exception.NoMoreFixedIps("error message"))
def test_build_and_run_flavor_disk_smaller_image_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskSmallerThanImage(
flavor_size=0, image_size=1))
def test_build_and_run_flavor_disk_smaller_min_disk(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskSmallerThanMinDisk(
flavor_size=0, image_min_disk=1))
def test_build_and_run_flavor_memory_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorMemoryTooSmall())
def test_build_and_run_image_not_active_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageNotActive(image_id=self.image.get('id')))
def test_build_and_run_image_unacceptable_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageUnacceptable(image_id=self.image.get('id'),
reason=""))
def _test_build_and_run_spawn_exceptions(self, exc):
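        # Common helper: make driver.spawn raise exc and verify the build
        # aborts with the expected cleanup, notifications and instance saves.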
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn',
side_effect=exc),
mock.patch.object(self.instance, 'save',
side_effect=[self.instance, self.instance, self.instance]),
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute,
'_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_shutdown_instance'),
mock.patch.object(self.compute,
'_validate_instance_group_policy')
) as (spawn, save,
_build_networks_for_instance, _notify_about_instance_usage,
_shutdown_instance, _validate_instance_group_policy):
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
_validate_instance_group_policy.assert_called_once_with(
self.context, self.instance, self.filter_properties)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
_notify_about_instance_usage.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
extra_usage_info={'image_name': self.image.get('name')}),
mock.call(self.context, self.instance, 'create.error',
fault=exc)])
save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(
expected_task_state=task_states.BLOCK_DEVICE_MAPPING)])
spawn.assert_has_calls([mock.call(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info)])
_shutdown_instance.assert_called_once_with(self.context,
self.instance, self.block_device_mapping,
self.requested_networks, try_deallocate_networks=True)
@mock.patch('nova.utils.spawn_n')
def test_reschedule_on_resources_unavailable(self, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
reason = 'resource unavailable'
exc = exception.ComputeResourcesUnavailable(reason=reason)
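        # A minimal fake tracker whose instance_claim always fails with the
        # resources-unavailable error defined above.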
class FakeResourceTracker(object):
def instance_claim(self, context, instance, limits):
raise exc
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self.mox.StubOutWithMock(self.compute,
'_nil_out_instance_obj_host_and_node')
self.compute._get_resource_tracker(self.node).AndReturn(
FakeResourceTracker())
self._do_build_instance_update(reschedule_update=True)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute._nil_out_instance_obj_host_and_node(self.instance)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
def test_build_resources_buildabort_reraise(self):
exc = exception.BuildAbortException(
instance_uuid=self.instance.uuid, reason='')
self.mox.StubOutWithMock(self.compute, '_build_resources')
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance,
self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping,
self.node, self.limits, self.filter_properties)
mock_save.assert_called_once_with()
def test_build_resources_reraises_on_failed_bdm_prep(self):
self.mox.StubOutWithMock(self.compute, '_prep_block_device')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self._build_resources_instance_update()
self.compute._prep_block_device(self.context, self.instance,
self.block_device_mapping).AndRaise(test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_bdm_prep_from_delete_raises_unexpected(self):
with contextlib.nested(
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=self.network_info),
mock.patch.object(self.instance, 'save',
side_effect=exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})),
) as (_build_networks_for_instance, save):
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e,
exception.UnexpectedDeletingTaskStateError)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
save.assert_has_calls([mock.call()])
def test_build_resources_aborts_on_failed_network_alloc(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndRaise(
test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_network_alloc_from_delete_raises_unexpected(self):
with mock.patch.object(self.compute,
'_build_networks_for_instance') as _build_networks:
exc = exception.UnexpectedDeletingTaskStateError
_build_networks.side_effect = exc(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exc)
_build_networks.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
def test_build_resources_with_network_info_obj_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
network_model.NetworkInfo([{'address': '1.2.3.4'}]))
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
def test_build_resources_cleans_up_and_reraises_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_instance_not_found_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
expected_exc = exception.InstanceNotFound(
instance_id=self.instance.uuid)
mock_save.side_effect = expected_exc
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except Exception as e:
self.assertEqual(expected_exc, e)
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_unexpected_task_error_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = exception.UnexpectedTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_exception_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = Exception()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
def test_build_resources_aborts_on_cleanup_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False).AndRaise(
test.TestingException())
self._build_resources_instance_update()
self.mox.ReplayAll()
def fake_spawn():
raise test.TestingException()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_build_networks_if_not_allocated(self):
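        # With no 'network_allocated' flag in system_metadata a fresh
        # network allocation is expected.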
instance = fake_instance.fake_instance_obj(self.context,
system_metadata={},
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_build_networks_if_allocated_false(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='False'),
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_return_networks_if_found(self):
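        # When system_metadata marks the network as already allocated, the
        # existing network info is fetched and set up on the host instead of
        # allocating again.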
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='True'),
expected_attrs=['system_metadata'])
def fake_network_info():
return network_model.NetworkInfo([{'address': '123.123.123.123'}])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.mox.StubOutWithMock(self.compute.network_api,
'setup_instance_network_on_host')
self.compute.network_api.setup_instance_network_on_host(
self.context, instance, instance.host)
self.compute.network_api.get_instance_nw_info(
self.context, instance).AndReturn(
network_model.NetworkInfoAsyncWrapper(fake_network_info))
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_cleanup_allocated_networks_instance_not_found(self):
with contextlib.nested(
mock.patch.object(self.compute, '_deallocate_network'),
mock.patch.object(self.instance, 'save',
side_effect=exception.InstanceNotFound(instance_id=''))
) as (_deallocate_network, save):
            # Testing that this does not raise an exception
self.compute._cleanup_allocated_networks(self.context,
self.instance, self.requested_networks)
save.assert_called_once_with()
self.assertEqual('False',
self.instance.system_metadata['network_allocated'])
@mock.patch.object(manager.ComputeManager, '_instance_update')
def test_launched_at_in_create_end_notification(self,
mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
with contextlib.nested(
mock.patch.object(self.compute,
'_update_scheduler_instance_info'),
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_upd, mock_spawn, mock_networks, mock_save, mock_notify):
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', extra_usage_info={'message': u'Success'},
network_info=[])
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
@mock.patch.object(manager.ComputeManager, '_instance_update')
def test_create_end_on_instance_delete(self, mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
exc = exception.InstanceNotFound(instance_id='')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save',
side_effect=[None, None, None, exc]),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', fault=exc)
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
class ComputeManagerMigrationTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerMigrationTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.image = {}
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.migration = objects.Migration(context=self.context.elevated(),
new_instance_type_id=7)
self.migration.status = 'migrating'
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(objects.Migration, 'save')
@mock.patch.object(objects.Migration, 'obj_as_admin')
def test_errors_out_migration_decorator(self, mock_save,
mock_obj_as_admin):
        # Tests that the errors_out_migration decorator in the compute
        # manager sets the migration status to 'error' when an exception is
        # raised from the decorated method.
instance = fake_instance.fake_instance_obj(self.context)
migration = objects.Migration()
migration.instance_uuid = instance.uuid
migration.status = 'migrating'
migration.id = 0
@manager.errors_out_migration
def fake_function(self, context, instance, migration):
raise test.TestingException()
mock_obj_as_admin.return_value = mock.MagicMock()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance, migration)
self.assertEqual('error', migration.status)
mock_save.assert_called_once_with()
mock_obj_as_admin.assert_called_once_with()
def test_finish_resize_failure(self):
with contextlib.nested(
mock.patch.object(self.compute, '_finish_resize',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock())
) as (meth, fault_create, instance_update, instance_save,
migration_save, migration_obj_as_admin):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.finish_resize,
context=self.context, disk_info=[], image=self.image,
instance=self.instance, reservations=[],
migration=self.migration
)
self.assertEqual("error", self.migration.status)
migration_save.assert_called_once_with()
migration_obj_as_admin.assert_called_once_with()
def test_resize_instance_failure(self):
self.migration.dest_host = None
with contextlib.nested(
mock.patch.object(self.compute.driver,
'migrate_disk_and_power_off',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock()),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_get_instance_block_device_info',
return_value=None),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid',
return_value=None),
mock.patch.object(objects.Flavor,
'get_by_id',
return_value=None)
) as (meth, fault_create, instance_update,
migration_save, migration_obj_as_admin, nw_info, save_inst,
notify, vol_block_info, bdm, flavor):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.resize_instance,
context=self.context, instance=self.instance, image=self.image,
reservations=[], migration=self.migration,
instance_type='type', clean_shutdown=True)
self.assertEqual("error", self.migration.status)
self.assertEqual([mock.call(), mock.call()],
migration_save.mock_calls)
self.assertEqual([mock.call(), mock.call()],
migration_obj_as_admin.mock_calls)
def _test_revert_resize_instance_destroy_disks(self, is_shared=False):
        # This test asserts that _is_instance_storage_shared() is called
        # from revert_resize() and that its return value is passed to
        # driver.destroy(), so a regression in that behaviour is caught here.
@mock.patch.object(self.instance, 'revert_migration_context')
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_is_instance_storage_shared')
@mock.patch.object(self.compute, 'finish_revert_resize')
@mock.patch.object(self.compute, '_instance_update')
@mock.patch.object(self.compute, '_get_resource_tracker')
@mock.patch.object(self.compute.driver, 'destroy')
@mock.patch.object(self.compute.network_api, 'setup_networks_on_host')
@mock.patch.object(self.compute.network_api, 'migrate_instance_start')
@mock.patch.object(compute_utils, 'notify_usage_exists')
@mock.patch.object(self.migration, 'save')
@mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
def do_test(get_by_instance_uuid,
migration_save,
notify_usage_exists,
migrate_instance_start,
setup_networks_on_host,
destroy,
_get_resource_tracker,
_instance_update,
finish_revert_resize,
_is_instance_storage_shared,
get_instance_nw_info,
revert_migration_context):
self.migration.source_compute = self.instance['host']
# Inform compute that instance uses non-shared or shared storage
_is_instance_storage_shared.return_value = is_shared
self.compute.revert_resize(context=self.context,
migration=self.migration,
instance=self.instance,
reservations=None)
_is_instance_storage_shared.assert_called_once_with(
self.context, self.instance,
host=self.migration.source_compute)
# If instance storage is shared, driver destroy method
# should not destroy disks otherwise it should destroy disks.
destroy.assert_called_once_with(self.context, self.instance,
mock.ANY, mock.ANY, not is_shared)
do_test()
def test_revert_resize_instance_destroy_disks_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=True)
def test_revert_resize_instance_destroy_disks_non_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=False)
def test_consoles_enabled(self):
self.flags(enabled=False, group='vnc')
self.flags(enabled=False, group='spice')
self.flags(enabled=False, group='rdp')
self.flags(enabled=False, group='serial_console')
self.assertFalse(self.compute._consoles_enabled())
self.flags(enabled=True, group='vnc')
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group='vnc')
for console in ['spice', 'rdp', 'serial_console']:
self.flags(enabled=True, group=console)
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group=console)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_live_migration')
def _test_max_concurrent_live(self, mock_lm, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
@mock.patch('nova.objects.Migration.save')
def _do_it(mock_mig_save):
instance = objects.Instance(uuid=str(uuid.uuid4()))
migration = objects.Migration()
self.compute.live_migration(self.context,
mock.sentinel.dest,
instance,
mock.sentinel.block_migration,
migration,
mock.sentinel.migrate_data)
self.assertEqual('queued', migration.status)
migration.save.assert_called_once_with()
with mock.patch.object(self.compute,
'_live_migration_semaphore') as mock_sem:
for i in (1, 2, 3):
_do_it()
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_live_limited(self):
self.flags(max_concurrent_live_migrations=2)
self._test_max_concurrent_live()
def test_max_concurrent_live_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
self._test_max_concurrent_live()
def test_max_concurrent_live_semaphore_limited(self):
self.flags(max_concurrent_live_migrations=123)
self.assertEqual(
123,
manager.ComputeManager()._live_migration_semaphore.balance)
def test_max_concurrent_live_semaphore_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
def test_max_concurrent_live_semaphore_negative(self):
self.flags(max_concurrent_live_migrations=-2)
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
| apache-2.0 | 3,144,236,380,760,686,000 | 47.303053 | 79 | 0.589229 | false |
rcocetta/kano-profile | kano_profile/apps.py | 1 | 3777 | #!/usr/bin/env python
# apps.py
#
# Copyright (C) 2014, 2015 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU General Public License v2
#
import os
from kano.utils import read_json, write_json, get_date_now, ensure_dir, \
chown_path, run_print_output_error
from kano.logging import logger
from .paths import apps_dir, xp_file, kanoprofile_dir, app_profiles_file
def get_app_dir(app_name):
app_dir = os.path.join(apps_dir, app_name)
return app_dir
def get_app_data_dir(app_name):
data_str = 'data'
app_data_dir = os.path.join(get_app_dir(app_name), data_str)
return app_data_dir
def get_app_state_file(app_name):
app_state_str = 'state.json'
app_state_file = os.path.join(get_app_dir(app_name), app_state_str)
return app_state_file
def load_app_state(app_name):
app_state_file = get_app_state_file(app_name)
app_state = read_json(app_state_file)
if not app_state:
app_state = dict()
return app_state
def load_app_state_variable(app_name, variable):
data = load_app_state(app_name)
if variable in data:
return data[variable]
def save_app_state(app_name, data):
""" Save a state of an application to the user's Kano profile.
:param app_name: The application that this data are associated with.
:type app_name: str
:param data: The data to be stored.
:type data: dict
"""
logger.debug('save_app_state {}'.format(app_name))
app_state_file = get_app_state_file(app_name)
data['save_date'] = get_date_now()
ensure_dir(get_app_dir(app_name))
write_json(app_state_file, data)
if 'SUDO_USER' in os.environ:
chown_path(kanoprofile_dir)
chown_path(apps_dir)
chown_path(get_app_dir(app_name))
chown_path(app_state_file)
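# Example usage (illustrative only; 'my_app' and the keys are hypothetical):
#
#   state = load_app_state('my_app')
#   state['level'] = 2
#   save_app_state('my_app', state)
#
# A 'save_date' timestamp is added automatically before the JSON is written.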
def save_app_state_variable(app_name, variable, value):
""" Save a state variable to the user's Kano profile.
:param app_name: The application that this variable is associated with.
:type app_name: str
:param variable: The name of the variable.
    :type variable: str
    :param value: The value to be stored.
    :type value: any
"""
msg = 'save_app_state_variable {} {} {}'.format(app_name, variable, value)
logger.debug(msg)
data = load_app_state(app_name)
data[variable] = value
save_app_state(app_name, data)
def increment_app_state_variable(app_name, variable, value):
logger.debug(
'increment_app_state_variable {} {} {}'.format(
app_name, variable, value))
data = load_app_state(app_name)
if variable not in data:
data[variable] = 0
data[variable] += value
save_app_state(app_name, data)
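# Example usage of the variable helpers (illustrative only; names are hypothetical):
#
#   save_app_state_variable('my_app', 'best_score', 10)
#   increment_app_state_variable('my_app', 'best_score', 5)
#   load_app_state_variable('my_app', 'best_score')  # -> 15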
def get_app_list():
if not os.path.exists(apps_dir):
return []
else:
return [p for p in os.listdir(apps_dir)
if os.path.isdir(os.path.join(apps_dir, p))]
def get_gamestate_variables(app_name):
allrules = read_json(xp_file)
if not allrules:
return list()
groups = allrules[app_name]
for group, rules in groups.iteritems():
if group == 'multipliers':
return [str(key) for key in rules.keys()]
def launch_project(app, filename, data_dir):
logger.info('launch_project: {} {} {}'.format(app, filename, data_dir))
app_profiles = read_json(app_profiles_file)
fullpath = os.path.join(data_dir, filename)
cmd = app_profiles[app]['cmd'].format(fullpath=fullpath, filename=filename)
_, _, rc = run_print_output_error(cmd)
return rc
def get_app_xp_for_challenge(app, challenge_no):
xp_file_json = read_json(xp_file)
try:
return xp_file_json[app]['level'][challenge_no]
except KeyError:
return 0
| gpl-2.0 | -652,654,093,269,193,900 | 25.787234 | 80 | 0.639396 | false |
sbkro/alc | src/tests/unit/test_formatter.py | 1 | 2802 | # -*- coding:utf-8 -*-
import calendar
from mock import patch
from datetime import datetime
from alc.formatter import CalendarFormatter
class TestDatetime:
'''
Unit test for *CalendarFormatter.datetime()*.
'''
def test_default(self):
'''
:type: normal
:case: *datetime_format* is None.
:expect: convert datetime using default format (%Y/%m/%d).
'''
cf = CalendarFormatter(datetime(2014, 7, 24, 23, 18, 00))
expect = '2014/07/24'
actual = cf.datetime()
assert expect == actual
def test_datetime_format_is_specified(self):
'''
:type: normal
:case: *datetime_format* is specified.
:expect: convert datetime using specified format.
'''
cf = CalendarFormatter(datetime(2014, 7, 24, 23, 18, 00))
expect = '2014/07/24 23:18:00'
actual = cf.datetime('%Y/%m/%d %H:%M:%S')
assert expect == actual
class TestWeekheader:
'''
Unit test for *CalendarFormatter.weekheader()*.
'''
def test_default(self):
'''
:type: normal
:case: call this method.
:expect: get a week header.
'''
expect = 'Mo\tTu\tWe\tTh\tFr\tSa\tSu'
actual = CalendarFormatter.weekheader()
assert expect == actual
def test_first_weekday_is_specified(self):
'''
:type: normal
:case: set the first week day in advance. (Sunday)
:expect: get a week header of start Sunday.
'''
calendar.setfirstweekday(calendar.SUNDAY)
expect = 'Su\tMo\tTu\tWe\tTh\tFr\tSa'
actual = CalendarFormatter.weekheader()
assert expect == actual
class TestWeekdays:
'''
Unit test for *CalendarFormatter.weekdays()*.
'''
def test_default(self):
'''
:type: normal
:case: call this method. datetime is '2014/07'.
:expect: get a calendar for '2014/07'.
'''
cf = CalendarFormatter(datetime(2014, 7, 24, 23, 18, 00))
expect = [
'\t01\t02\t03\t04\t05\t06\t',
'07\t08\t09\t10\t11\t12\t13\t',
'14\t15\t16\t17\t18\t19\t20\t',
'21\t22\t23\t24\t25\t26\t27\t',
'28\t29\t30\t31\t\t\t\t'
]
for i, w in enumerate(cf.weekdays()):
assert expect[i] == w
class TestSetfirstweekday:
'''
Unit test for *CalendarFormatter.setfirstweekday()*.
'''
@patch('calendar.setfirstweekday')
def test_default(self, m_setfirstweekday):
'''
:type: normal
:case: call this method.
:expect: call *calendar.setfirstweekday*.
'''
CalendarFormatter.setfirstweekday(calendar.MONDAY)
m_setfirstweekday.assert_called_once_with(calendar.MONDAY)
| bsd-3-clause | 5,329,593,480,155,695,000 | 25.186916 | 66 | 0.570307 | false |
mF2C/COMPSs | tests/sources/python/1_decorator_mpi/src/modules/testMpiDecorator.py | 1 | 4569 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
PyCOMPSs Testbench Tasks
========================
"""
# Imports
import unittest
import os
from pycompss.api.task import task
from pycompss.api.parameter import *
from pycompss.api.api import compss_barrier, compss_open, compss_wait_on
from pycompss.api.mpi import mpi
from pycompss.api.constraint import constraint
@mpi(binary="date", working_dir="/tmp", runner="mpirun")
@task()
def myDate(dprefix, param):
pass
@constraint(computing_units="2")
@mpi(binary="date", working_dir="/tmp", runner="mpirun", computing_nodes=2)
@task()
def myDateConstrained(dprefix, param):
pass
@constraint(computing_units="$CUS")
@mpi(binary="date", working_dir="/tmp", runner="mpirun", computing_nodes="$CUS")
@task()
def myDateConstrainedWithEnvVar(dprefix, param):
pass
@mpi(binary="sed", working_dir=".", runner="mpirun", computing_nodes="4")
@task(file=FILE_IN)
def mySedIN(expression, file):
pass
@mpi(binary="date", working_dir=".", runner="mpirun", computing_nodes=1)
@task(returns=int)
def myReturn():
pass
@mpi(binary="./private.sh", working_dir=os.getcwd() + '/src/scripts/', runner="mpirun", computing_nodes=1)
@task(returns=int)
def failedBinary(code):
pass
@mpi(binary="sed", working_dir=".", runner="mpirun")
@task(file=FILE_INOUT)
def mySedINOUT(flag, expression, file):
pass
@mpi(binary="grep", working_dir=".", runner="mpirun")
# @task(infile=Parameter(TYPE.FILE, DIRECTION.IN, STREAM.STDIN), result=Parameter(TYPE.FILE, DIRECTION.OUT, STREAM.STDOUT))
# @task(infile={Type:FILE_IN, Stream:STDIN}, result={Type:FILE_OUT, Stream:STDOUT})
@task(infile={Type: FILE_IN_STDIN}, result={Type: FILE_OUT_STDOUT})
def myGrepper(keyword, infile, result):
pass
@mpi(binary="ls", runner="mpirun", computing_nodes=2)
@task(hide={Type: FILE_IN, Prefix: "--hide="}, sort={Type: IN, Prefix: "--sort="})
def myLs(flag, hide, sort):
pass
@mpi(binary="ls", runner="mpirun", computing_nodes=2)
@task(hide={Type: FILE_IN, Prefix: "--hide="}, sort={Prefix: "--sort="})
def myLsWithoutType(flag, hide, sort):
pass
@mpi(binary="./checkNames.sh", working_dir=os.getcwd() + '/src/scripts/', runner="mpirun", computing_nodes=1)
@task(f=FILE_IN, fp={Type: FILE_IN, Prefix: "--prefix="}, fout={Type: FILE_OUT}, returns=int)
def checkFileNames(f, fp, name, fout):
pass
class testMpiDecorator(unittest.TestCase):
def testFunctionalUsage(self):
myDate("-d", "next friday")
compss_barrier()
def testFunctionalUsageWithConstraint(self):
myDateConstrained("-d", "next monday")
compss_barrier()
def testFunctionalUsageWithEnvVarConstraint(self):
myDateConstrainedWithEnvVar("-d", "next tuesday")
compss_barrier()
def testFileManagementIN(self):
infile = "src/infile"
mySedIN('s/Hi/HELLO/g', infile)
compss_barrier()
def testReturn(self):
ev = myReturn()
ev = compss_wait_on(ev)
self.assertEqual(ev, 0)
def testFailedBinaryExitValue(self):
ev = failedBinary(123)
ev = compss_wait_on(ev)
self.assertEqual(ev, 123)
@unittest.skip("UNSUPPORTED WITH GAT")
def testFileManagementINOUT(self):
inoutfile = "src/inoutfile"
mySedINOUT('-i', 's/Hi/HELLO/g', inoutfile)
with compss_open(inoutfile, "r") as finout_r:
content_r = finout_r.read()
# Check if there are no Hi words, and instead there is HELLO
if 'Hi' in content_r:
self.fail("INOUT File failed.")
def testFileManagement(self):
infile = "src/infile"
outfile = "src/grepoutfile"
myGrepper("Hi", infile, outfile)
compss_barrier()
def testFilesAndPrefix(self):
flag = '-l'
infile = "src/infile"
sort = "size"
myLs(flag, infile, sort)
compss_barrier()
def testFilesAndPrefixWithoutType(self):
flag = '-l'
infile = "src/inoutfile"
sort = "time"
myLsWithoutType(flag, infile, sort)
compss_barrier()
def testCheckFileNames(self):
f = "src/infile"
fp = "src/infile"
name = "infile"
fout = "checkFileNamesResult.txt"
exit_value = checkFileNames(f, fp, name, fout)
exit_value = compss_wait_on(exit_value)
with compss_open(fout) as result:
data = result.read()
print("CheckFileNamesResult: " + str(data))
self.assertEqual(exit_value, 0, "At least one file name is NOT as expected: {}, {}, {}".format(f, fp, name))
| apache-2.0 | 5,971,662,384,461,707,000 | 27.735849 | 123 | 0.64084 | false |
s34rching/python_classes | tests/test_contact_data.py | 1 | 2053 | import re
import random
def test_contact_data_from_home_page(app):
r_index = random.randrange(len(app.contact.get_contact_list()))
data_from_home_page = app.contact.get_contact_list()[r_index]
data_from_edit_page = app.contact.get_contact_info_from_edit_page(r_index)
assert data_from_home_page.firstname == data_from_edit_page.firstname
assert data_from_home_page.lastname == data_from_edit_page.lastname
assert data_from_home_page.address == data_from_edit_page.address
assert data_from_home_page.all_phones_from_homepage == merge_phones_like_on_homepage(data_from_edit_page)
assert data_from_home_page.all_emails_from_homepage == merge_emails_like_on_homepage(data_from_edit_page)
assert data_from_home_page.id == data_from_edit_page.id
def test_phones_from_view_page(app):
r_index = random.randrange(len(app.contact.get_contact_list()))
data_from_view_page = app.contact.get_contact_info_from_view_page(r_index)
data_from_edit_page = app.contact.get_contact_info_from_edit_page(r_index)
assert data_from_view_page.home_number == data_from_edit_page.home_number
assert data_from_view_page.mobile_number == data_from_edit_page.mobile_number
assert data_from_view_page.work_number == data_from_edit_page.work_number
assert data_from_view_page.secondary_number == data_from_edit_page.secondary_number
def clear(s):
return re.sub('[() -]', '', s)
def merge_phones_like_on_homepage(contact):
return '\n'.join(filter(lambda x: x!= '',
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.home_number, contact.work_number, contact.mobile_number, contact.secondary_number]))))
def merge_emails_like_on_homepage(contact):
return '\n'.join(filter(lambda x: x!= '',
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.email, contact.email2, contact.email3])))) | apache-2.0 | 5,947,474,044,754,886,000 | 50.35 | 131 | 0.656113 | false |
phbradley/tcr-dist | tcrdist/parse_cdr3.py | 1 | 4891 | import logging
logger = logging.getLogger('parse_cdr3.py')
from .all_genes import all_genes, gap_character
def get_cdr3_and_j_match_counts( organism, ab, qseq, j_gene, min_min_j_matchlen = 3,
extended_cdr3 = False ):
#fasta = all_fasta[organism]
jg = all_genes[organism][j_gene]
errors = []
## qseq starts at CA...
assert qseq[0] == 'C'
num_genome_j_positions_in_loop = len(jg.cdrs[0].replace(gap_character,''))-2
#num_genome_j_positions_in_loop = all_num_genome_j_positions_in_loop[organism][ab][j_gene]
if extended_cdr3: num_genome_j_positions_in_loop += 2 ## up to but not including GXG
## history: was only for alpha
aseq = qseq[:] ## starts at the C position
ja_gene = j_gene
#assert ja_gene in fasta
ja_seq = jg.protseq #fasta[ ja_gene ]
min_j_matchlen = min_min_j_matchlen+3
while min_j_matchlen >= min_min_j_matchlen:
ntrim =0
while ntrim+min_j_matchlen<len(ja_seq) and ja_seq[ntrim:ntrim+min_j_matchlen] not in aseq:
ntrim += 1
jatag = ja_seq[ntrim:ntrim+min_j_matchlen]
if jatag in aseq:
break
else:
min_j_matchlen -= 1
#print 'min_j_matchlen:',min_j_matchlen,'jatag:',jatag,'ntrim:',ntrim,'ja_seq:',ja_seq,'qseq',qseq
if jatag not in aseq:
logger.error('whoah %s %s %s',ab,aseq,ja_seq )
errors.append( 'j{}tag_not_in_aseq'.format(ab) )
return '-',[100,0],errors
elif ja_seq.count( jatag ) != 1:
logger.error( 'whoah2 %s %s %s',ab,aseq,ja_seq )
errors.append( 'multiple_j{}tag_in_jseq'.format(ab) )
return '-',[100,0],errors
else:
pos = aseq.find( jatag )
looplen = pos - ntrim + num_genome_j_positions_in_loop
if not extended_cdr3:
aseq = aseq[3:]
looplen -= 3 ## dont count CAX
if len(aseq)<looplen:
logger.error('short %s %s %s',ab,aseq,ja_seq )
errors.append( ab+'seq_too_short' )
return '-',[100,0],errors
cdrseq = aseq[:looplen ]
## now count mismatches in the J gene, beyond the cdrseq
j_seq = jg.protseq #fasta[ j_gene ] ## not sure why we do this again (old legacy code)
if qseq.count( cdrseq ) > 1:
logger.error('multiple cdrseq occurrences %s %s'%(qseq,cdrseq))
errors.append('multiple_cdrseq_occ')
return '-',[100,0],errors
assert qseq.count(cdrseq) == 1
start_counting_qseq = qseq.find(cdrseq)+len(cdrseq)
start_counting_jseq = num_genome_j_positions_in_loop
j_match_counts = [0,0]
#assert extended_cdr3 ## otherwise I think this count is not right?
#print 'here',start_counting_qseq,start_counting_jseq,len(qseq)
for qpos in range( start_counting_qseq, len(qseq)):
jpos = start_counting_jseq + (qpos-start_counting_qseq)
#print 'here',qpos,jpos
if jpos>= len(j_seq): break
if qseq[qpos] == j_seq[jpos]:
j_match_counts[1] += 1
else:
j_match_counts[0] += 1
return cdrseq, j_match_counts,errors
def parse_cdr3( organism, ab, qseq, v_gene, j_gene, q2v_align, extended_cdr3 = False ):
## v_align is a mapping from 0-indexed qseq positions to 0-indexed v_gene protseq positions
#fasta = all_fasta[ organism ]
#align_fasta = all_align_fasta[ organism ]
vg = all_genes[organism][v_gene]
errors = []
## what is the C position in this v gene?
v_seq = vg.protseq #fasta[ v_gene ]
v_alseq = vg.alseq #align_fasta[ v_gene ]
assert v_seq == v_alseq.replace(gap_character,'')
alseq_cpos = vg.cdr_columns[-1][0] - 1 ## now 0-indexed
#alseq_cpos = alseq_C_pos[organism][ab] - 1 ## now 0-indexed
numgaps = v_alseq[:alseq_cpos].count(gap_character)
cpos = alseq_cpos - numgaps ## 0-indexed
cpos_match = -1
v_match_counts = [0,0]
qseq_len = len(qseq)
for (qpos,vpos) in sorted( q2v_align.iteritems() ):
#print 'q2v-align:',qpos, vpos, cpos
if qpos == len(qseq):
continue ## from a partial codon at the end
if vpos == cpos:
cpos_match = qpos
elif vpos <= cpos:
## only count v mismatches here
if qseq[qpos] == v_seq[vpos]:
v_match_counts[1] += 1
else:
v_match_counts[0] += 1
if cpos_match<0 or qseq[ cpos_match ] != 'C':
## problemo
logger.error('failed to find blast match to C position')
errors.append('no_V{}_Cpos_blastmatch'.format(ab))
return '-',[100,0],[100,0],errors
cdrseq, j_match_counts, other_errors = get_cdr3_and_j_match_counts( organism, ab, qseq[ cpos_match: ], j_gene,
extended_cdr3 = extended_cdr3 )
return cdrseq, v_match_counts, j_match_counts, errors+other_errors
| mit | -8,223,407,647,342,972,000 | 34.963235 | 114 | 0.584339 | false |
QISKit/qiskit-sdk-py | test/python/test_qasm_parser.py | 1 | 2809 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test for the QASM parser"""
import unittest
import ply
from qiskit.qasm import Qasm, QasmError
from qiskit.qasm.node.node import Node
from qiskit.test import QiskitTestCase, Path
def parse(file_path, prec=15):
"""
Simple helper
- file_path: Path to the OpenQASM file
- prec: Precision for the returned string
"""
qasm = Qasm(file_path)
return qasm.parse().qasm(prec)
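# Example (illustrative; the path is hypothetical): dump the parsed AST of an
# OpenQASM file back to text with 5 digits of precision.
#
#   print(parse('examples/example.qasm', prec=5))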
class TestParser(QiskitTestCase):
"""QasmParser"""
def setUp(self):
self.qasm_file_path = self._get_resource_path('example.qasm', Path.QASMS)
self.qasm_file_path_fail = self._get_resource_path(
'example_fail.qasm', Path.QASMS)
self.qasm_file_path_if = self._get_resource_path(
'example_if.qasm', Path.QASMS)
def test_parser(self):
"""should return a correct response for a valid circuit."""
res = parse(self.qasm_file_path)
self.log.info(res)
# TODO: For now only some basic checks.
self.assertEqual(len(res), 1563)
self.assertEqual(res[:12], "OPENQASM 2.0")
self.assertEqual(res[14:41], "gate u3(theta,phi,lambda) q")
self.assertEqual(res[1547:1562], "measure r -> d;")
def test_parser_fail(self):
"""should fail a for a not valid circuit."""
self.assertRaisesRegex(QasmError, "Perhaps there is a missing",
parse, file_path=self.qasm_file_path_fail)
def test_all_valid_nodes(self):
"""Test that the tree contains only Node subclasses."""
def inspect(node):
"""Inspect node children."""
for child in node.children:
self.assertTrue(isinstance(child, Node))
inspect(child)
# Test the canonical example file.
qasm = Qasm(self.qasm_file_path)
res = qasm.parse()
inspect(res)
# Test a file containing if instructions.
qasm_if = Qasm(self.qasm_file_path_if)
res_if = qasm_if.parse()
inspect(res_if)
def test_get_tokens(self):
"""Test whether we get only valid tokens."""
qasm = Qasm(self.qasm_file_path)
for token in qasm.get_tokens():
self.assertTrue(isinstance(token, ply.lex.LexToken))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -5,361,655,547,522,064,000 | 31.287356 | 81 | 0.629405 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_07_01/aio/operations/_public_ip_prefixes_operations.py | 1 | 27147 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PublicIPPrefixesOperations:
"""PublicIPPrefixesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
public_ip_prefix_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
public_ip_prefix_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified public IP prefix.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_prefix_name: The name of the PublicIpPrefix.
:type public_ip_prefix_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
public_ip_prefix_name=public_ip_prefix_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
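    # Illustrative usage sketch (assuming an async NetworkManagementClient that
    # exposes this operation group as `client.public_ip_prefixes`; the resource
    # group and prefix names are hypothetical):
    #
    #   poller = await client.public_ip_prefixes.begin_delete('my-rg', 'my-prefix')
    #   await poller.result()   # completes once the prefix is deleted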
async def get(
self,
resource_group_name: str,
public_ip_prefix_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.PublicIPPrefix":
"""Gets the specified public IP prefix in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_prefix_name: The name of the public IP prefix.
:type public_ip_prefix_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPPrefix, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_07_01.models.PublicIPPrefix
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
public_ip_prefix_name: str,
parameters: "_models.PublicIPPrefix",
**kwargs
) -> "_models.PublicIPPrefix":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'PublicIPPrefix')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
public_ip_prefix_name: str,
parameters: "_models.PublicIPPrefix",
**kwargs
) -> AsyncLROPoller["_models.PublicIPPrefix"]:
"""Creates or updates a static or dynamic public IP prefix.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_prefix_name: The name of the public IP prefix.
:type public_ip_prefix_name: str
:param parameters: Parameters supplied to the create or update public IP prefix operation.
:type parameters: ~azure.mgmt.network.v2020_07_01.models.PublicIPPrefix
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PublicIPPrefix or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_07_01.models.PublicIPPrefix]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
public_ip_prefix_name=public_ip_prefix_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
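    # Illustrative usage sketch (model and field names are assumptions based on
    # this API version; check the models module before relying on them):
    #
    #   from azure.mgmt.network.v2020_07_01.models import PublicIPPrefix, PublicIPPrefixSku
    #   poller = await client.public_ip_prefixes.begin_create_or_update(
    #       'my-rg', 'my-prefix',
    #       PublicIPPrefix(location='westus',
    #                      sku=PublicIPPrefixSku(name='Standard'),
    #                      prefix_length=30))
    #   prefix = await poller.result()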
async def update_tags(
self,
resource_group_name: str,
public_ip_prefix_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.PublicIPPrefix":
"""Updates public IP prefix tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_prefix_name: The name of the public IP prefix.
:type public_ip_prefix_name: str
:param parameters: Parameters supplied to update public IP prefix tags.
:type parameters: ~azure.mgmt.network.v2020_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPPrefix, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_07_01.models.PublicIPPrefix
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
def list_all(
self,
**kwargs
) -> AsyncIterable["_models.PublicIPPrefixListResult"]:
"""Gets all the public IP prefixes in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPPrefixListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.PublicIPPrefixListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefixListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPPrefixListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPPrefixes'} # type: ignore
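    # Illustrative usage sketch: the returned AsyncItemPaged is consumed with
    # `async for`, which transparently follows next_link paging.
    #
    #   async for prefix in client.public_ip_prefixes.list_all():
    #       print(prefix.name)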
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.PublicIPPrefixListResult"]:
"""Gets all public IP prefixes in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPPrefixListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.PublicIPPrefixListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefixListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPPrefixListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes'} # type: ignore
| mit | 5,796,781,428,933,566,000 | 48.811009 | 200 | 0.644565 | false |
DevangS/CoralNet | accounts/migrations/0004_create_user_called_alleviate.py | 1 | 5492 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.conf import settings
class Migration(DataMigration):
# Create a dummy user called "Alleviate".
# This is the user under which Alleviate-accepted annotations
# will be added.
def forwards(self, orm):
username = "Alleviate"
print "-----"
try:
orm['auth.User'].objects.get(username=username)
except orm['auth.User'].DoesNotExist:
alleviateUser = orm['auth.User'](id=settings.ALLEVIATE_USER_ID,
username=username,
first_name="",
last_name="",
email="",
password="",
)
alleviateUser.save()
print "Created user with username %s." % username
else:
print "User with username %s already exists; nothing needs to be done." % username
print "-----"
def backwards(self, orm):
username = "Alleviate"
print (
"-----\n"
"NOTE: This migration rollback does nothing. "
"Deleting the %s user would delete all Alleviate annotations, "
"which would be very bad to do accidentally."
"\n-----" % username
)
models = {
'accounts.profile': {
'Meta': {'object_name': 'Profile'},
'about_me': ('django.db.models.fields.CharField', [], {'max_length': '45', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '5'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '45', 'blank': 'True'}),
'mugshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'registered'", 'max_length': '15'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'my_profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['accounts']
symmetrical = True
| bsd-2-clause | -8,000,994,628,860,161,000 | 53.376238 | 182 | 0.545157 | false |
tuanvu216/udacity-course | intro_to_machine_learning/lesson/lesson_14_evaluation_metrics/evaluate_poi_identifier.py | 1 | 2588 | #!/usr/bin/python
"""
starter code for the evaluation mini-project
start by copying your trained/tested POI identifier from
that you built in the validation mini-project
the second step toward building your POI identifier!
start by loading/formatting the data
"""
import pickle
import sys
sys.path.append("C:/Vindico/Projects/Code/Python/Python/Course/Udacity/Intro to Machine Learning/ud120-projects-master/tools/")
from feature_format import featureFormat, targetFeatureSplit
from sklearn.tree import DecisionTreeClassifier
from sklearn import cross_validation
import numpy as np
data_dict = pickle.load(open("C:/Vindico/Projects/Code/Python/Python/Course/Udacity/Intro to Machine Learning/ud120-projects-master/final_project/final_project_dataset.pkl", "r") )
### add more features to features_list!
features_list = ["poi", "salary"]
data = featureFormat(data_dict, features_list)
labels, features = targetFeatureSplit(data)
### your code goes here
features_train,features_test,labels_train,labels_test = cross_validation.train_test_split(features,labels,test_size=0.3,
random_state=42)
clf = DecisionTreeClassifier()
clf.fit(features_train,labels_train)
clf.score(features_test,labels_test)
# How many POIs are in the test set for your POI identifier?
pred = clf.predict(features_test)
sum(pred)
print len([e for e in labels_test if e == 1.0])
# How many people total are in your test set?
len(pred)
# If your identifier predicted 0. (not POI) for everyone in the test set, what would its accuracy be?
1.0 - 5.0/29
# Precision and recall can help illuminate your performance better.
# Use the precision_score and recall_score available in sklearn.metrics to compute those quantities.
# What’s the precision?
from sklearn.metrics import *
precision_score(labels_test, pred)
# What’s the recall?
recall_score(labels_test, pred)
# Here are some made-up predictions and true labels for a hypothetical test set;
# fill in the following boxes to practice identifying true positives, false positives, true negatives, and false negatives.
# Let’s use the convention that “1” signifies a positive result, and “0” a negative.
predictions = [0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1]
true_labels = [0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0]
# What's the precision of this classifier?
precision_score(true_labels, predictions)
# What's the recall of this classifier?
recall_score(true_labels, predictions)
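# Worked check of the counts behind those scores (computed by hand from the two
# lists above): a true positive is a position where both prediction and label
# are 1, a false positive where the prediction is 1 but the label is 0, etc.
#
#   tp = sum(1 for p, t in zip(predictions, true_labels) if p == 1 and t == 1)
#   fp = sum(1 for p, t in zip(predictions, true_labels) if p == 1 and t == 0)
#   fn = sum(1 for p, t in zip(predictions, true_labels) if p == 0 and t == 1)
#   tn = sum(1 for p, t in zip(predictions, true_labels) if p == 0 and t == 0)
#   precision = tp / float(tp + fp)   # should match precision_score(true_labels, predictions)
#   recall = tp / float(tp + fn)      # should match recall_score(true_labels, predictions)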
| mit | -3,788,225,604,616,323,000 | 34.260274 | 180 | 0.721445 | false |
mitmedialab/MediaCloud-Web-Tools | server/util/request.py | 1 | 3666 | import logging
import os
from functools import wraps
from flask import jsonify, request
from mediacloud.error import MCException
logger = logging.getLogger(__name__)
def validate_params_exist(form, params):
for param in params:
if param not in form:
raise ValueError('Missing required value for '+param)
def json_error_response(message, status_code=400):
response = jsonify({
'statusCode': status_code,
'message': message,
})
response.status_code = status_code
return response
def filters_from_args(request_args):
"""
Helper to centralize reading filters from url params
"""
timespans_id = safely_read_arg('timespanId')
snapshots_id = safely_read_arg('snapshotId')
foci_id = safely_read_arg('focusId')
q = request_args['q'] if ('q' in request_args) and (request_args['q'] != 'undefined') else None
return snapshots_id, timespans_id, foci_id, q
def arguments_required(*expected_args):
"""
Handy decorator for ensuring that request params exist
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
logger.debug(request.args)
validate_params_exist(request.args, expected_args)
return func(*args, **kwargs)
except ValueError as e:
logger.exception("Missing a required arg")
return json_error_response(e.args[0])
return wrapper
return decorator
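# Example usage on a Flask route (illustrative; the endpoint, handler, and
# parameter names are hypothetical, and `app` is assumed to be the Flask app):
#
#   @app.route('/api/topics/search')
#   @arguments_required('q', 'topicId')
#   @api_error_handler
#   def topic_search():
#       return jsonify(do_search(request.args['q'], request.args['topicId']))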
def form_fields_required(*expected_form_fields):
"""
Handy decorator for ensuring that the form has the fields you need
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
logger.debug(request.form)
validate_params_exist(request.form, expected_form_fields)
return func(*args, **kwargs)
except ValueError as e:
logger.exception("Missing a required form field")
return json_error_response(e.args[0])
return wrapper
return decorator
def api_error_handler(func):
"""
Handy decorator that catches any exception from the Media Cloud API and
sends it back to the browser as a nicely formatted JSON error. The idea is
that the client code can catch these at a low level and display error messages.
"""
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except MCException as e:
logger.exception(e)
return json_error_response(e.message, e.status_code)
return wrapper
def is_csv(filename):
filename, file_extension = os.path.splitext(filename)
return file_extension.lower() in ['.csv']
def csv_required(func):
"""
Validates a file is supplied in the request and that it has a csv extension.
"""
@wraps(func)
def wrapper(*args, **kwargs):
try:
if 'file' not in request.files:
return json_error_response('No file part')
uploaded_file = request.files['file']
if uploaded_file.filename == '':
return json_error_response('No selected file')
if not (uploaded_file and is_csv(uploaded_file.filename)):
return json_error_response('Invalid file')
return func(*args, **kwargs)
except MCException as e:
logger.exception(e)
return json_error_response(e.message, e.status_code)
return wrapper
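# Example usage (illustrative; the route and handler names are hypothetical):
#
#   @app.route('/api/sources/upload', methods=['POST'])
#   @csv_required
#   def upload_sources():
#       return handle_csv(request.files['file'])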
def safely_read_arg(arg_name, default=None):
return request.args[arg_name] if arg_name in request.args else default
| apache-2.0 | -5,131,921,134,876,014,000 | 30.603448 | 99 | 0.621386 | false |
iwschris/ezodf2 | tests/test_pages.py | 1 | 3894 | #!/usr/bin/env python
#coding:utf-8
# Purpose: test spreadpage body
# Created: 29.01.2011
# Copyright (C) 2011, Manfred Moitzi
# License: MIT
from __future__ import unicode_literals, print_function, division
__author__ = "mozman <[email protected]>"
# Standard Library
import unittest
# trusted or separately tested modules
from ezodf2.xmlns import CN
from lxml.etree import Element
from ezodf2.drawingpage import DrawingPage as Page
# objects to test
from ezodf2.pages import Pages
class TestPagesManagement(unittest.TestCase):
def setUp(self):
self.pages = Pages(Element(CN('office:drawing')))
def test_empty_body(self):
self.assertEqual(len(self.pages), 0)
def test_has_one_table(self):
self.pages.append(Page(name='Page1'))
self.assertEqual(len(self.pages), 1)
def test_get_page_by_name(self):
self.pages.append(Page(name='Page1'))
page = self.pages['Page1']
self.assertEqual(page.name, 'Page1')
def test_page_not_found_error(self):
with self.assertRaises(KeyError):
self.pages['Morgenstern']
def test_get_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
page = self.pages[2]
self.assertEqual(page.name, 'Page3')
def test_get_last_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
page = self.pages[-1]
self.assertEqual(page.name, 'Page3')
def test_page_index_0_error(self):
with self.assertRaises(IndexError):
self.pages[0]
def test_page_index_1_error(self):
self.pages += Page(name='Page1')
with self.assertRaises(IndexError):
self.pages[1]
def test_set_page_by_index(self):
self.pages += Page(name='Page1')
self.pages[0] = Page(name='Page2')
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_set_page_by_name(self):
self.pages += Page(name='Page1')
self.pages['Page1'] = Page(name='Page2')
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_remove_page_by_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
del self.pages[0]
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
    def test_remove_page_by_name(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
del self.pages['Page1']
self.assertEqual(len(self.pages), 1)
self.assertEqual(self.pages[0].name, 'Page2')
def test_is_same_object(self):
self.pages += Page(name='Page1')
object1 = self.pages['Page1']
object2 = self.pages['Page1']
self.assertTrue(object1 is object2)
def test_page_names(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
self.assertEqual(list(self.pages.names()), ['Page1', 'Page2', 'Page3'])
def test_page_index(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages += Page(name='Page3')
self.assertEqual(self.pages.index(self.pages['Page3']), 2)
def test_page_insert(self):
self.pages += Page(name='Page1')
self.pages += Page(name='Page2')
self.pages.insert(1, Page(name='Page3'))
self.assertEqual(self.pages[1].name, 'Page3')
self.assertEqual(len(self.pages), 3)
if __name__=='__main__':
unittest.main()
| mit | -2,706,917,590,960,068,000 | 28.186047 | 79 | 0.592193 | false |
cbrucks/Federated_Python-Swiftclient | swiftclient/contrib/federated/protocols/rax.py | 1 | 2519 | import urllib
import urllib2
import json
import getpass
import BaseHTTPServer
import os
import webbrowser
from swiftclient.contrib.federated import federated_exceptions, federated_utils
import ssl
## Sends the authentication request to the IdP along with the user's credentials
# @param idpEndpoint The IdP address
# @param idpRequest The authentication request returned by Keystone
def getIdPResponse(idpEndpoint, idpRequest, realm=None):
print "\nInitiating Authentication against: "+realm["name"]+"\n"
# Get the unscoped token
# 1. Get the user name
chosen = False
user = None
while not chosen:
try:
user = raw_input("Please enter your username: ")
chosen = True
except:
print "Invalid input, please try again"
# 2. Get the password
chosen = False
password = None
while not chosen:
try:
password = getpass.getpass()
chosen = True
except:
print "Invalid input, please try again"
# Insert creds
req = json.loads(idpRequest)
req['auth']['passwordCredentials']['username'] = user
req['auth']['passwordCredentials']['password'] = password
# Contact Keystone V2
unscoped = json.loads(request(idpEndpoint+'/tokens', method='POST', data=req).read())
print "Successfully Logged In\n"
# Get the list of tenants
tenants = json.loads(request(idpEndpoint+'/tenants', method='GET', header={'X-Auth-Token':unscoped['access']['token']['id']}).read())
# Offer the user the choice of tenants
tenant = federated_utils.selectTenantOrDomain(tenants['tenants'],serverName=realm["name"])
# Get the scoped token
newReq = {"auth":{"tenantName": tenant["name"], "token":{"id":unscoped["access"]["token"]["id"]}}}
scoped = json.loads(request(idpEndpoint+'/tokens', method='POST', data=newReq).read())
print "\nSuccessfully Authorised to access: "+tenant["name"]+"\n"
# Return scoped token
return scoped
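# Illustrative note (assumed shape, inferred from the key accesses above): the
# idpRequest template handed over by Keystone is expected to look roughly like
#   '{"auth": {"passwordCredentials": {"username": "", "password": ""}}}'
# so that the username and password collected at the prompt can be filled in
# before the request is POSTed to <idpEndpoint>/tokens.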
## Send a request that will be processed by the V2 Keystone
def request(keystoneEndpoint, data={}, method="GET", header={}):
headers = header
if method == "GET":
data = urllib.urlencode(data)
req = urllib2.Request(keystoneEndpoint + data, headers = header)
response = urllib2.urlopen(req)
elif method == "POST":
data = json.dumps(data)
headers['Content-Type'] = 'application/json'
req = urllib2.Request(keystoneEndpoint, data, header)
response = urllib2.urlopen(req)
return response
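# Illustrative usage sketch (endpoint and token values are placeholders): this
# is how getIdPResponse above drives the helper -- POST sends a JSON body,
# GET appends url-encoded parameters and passes auth via headers, e.g.
#
#   scoped = request('https://keystone.example.com/v2.0/tokens',
#                    method='POST', data={'auth': {}}).read()
#   tenants = request('https://keystone.example.com/v2.0/tenants',
#                     method='GET', header={'X-Auth-Token': 'TOKEN'}).read()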
| apache-2.0 | 9,062,573,529,326,963,000 | 37.166667 | 137 | 0.663358 | false |
NaturalHistoryMuseum/inselect | inselect/gui/views/boxes/box_item.py | 1 | 8872 | import sys
from itertools import chain
from PyQt5.QtCore import Qt, QRect, QRectF
from PyQt5.QtGui import QPen
from PyQt5.QtWidgets import QGraphicsItem, QGraphicsRectItem
from inselect.lib.utils import debug_print
from inselect.gui.colours import colour_scheme_choice
from inselect.gui.utils import painter_state
from .resize_handle import ResizeHandle
from .reticle import Reticle
class BoxItem(QGraphicsRectItem):
# Might be some relevant stuff here:
# http://stackoverflow.com/questions/10590881/events-and-signals-in-qts-qgraphicsitem-how-is-this-supposed-to-work
# The width of the line (in pixels) drawn around the box.
# A width of 1 on Mac OS X is too thin. 2 is too thick on Windows.
BOX_WIDTH = 2 if 'darwin' == sys.platform else 1
def __init__(self, x, y, w, h, isvalid, parent=None):
super(BoxItem, self).__init__(x, y, w, h, parent)
self.setFlags(QGraphicsItem.ItemIsFocusable |
QGraphicsItem.ItemIsSelectable |
QGraphicsItem.ItemSendsGeometryChanges |
QGraphicsItem.ItemIsMovable)
self.setCursor(Qt.OpenHandCursor)
self.setAcceptHoverEvents(True)
# True if the box has valid metadata
self._isvalid = isvalid
# Points of interest as represented by instances of Reticle
self._pois = []
# Resize handles
positions = (Qt.TopLeftCorner, Qt.TopRightCorner, Qt.BottomLeftCorner,
Qt.BottomRightCorner)
self._handles = []
self._handles = [self._create_handle(pos) for pos in positions]
self._layout_children()
self._set_z_index()
def paint(self, painter, option, widget=None):
"""QGraphicsRectItem virtual
"""
# TODO LH Is there a way to clip to overlapping
# QAbstractGraphicsItems with a larger zorder
# TODO LH Get pixmap without tight coupling to scene
if not self.has_mouse():
painter.drawPixmap(self.boundingRect(),
self.scene().pixmap,
self.sceneBoundingRect())
with painter_state(painter):
outline_colour, fill_colour = self.colours
# Cosmetic pens "...draw strokes that have a constant width
# regardless of any transformations applied to the QPainter they are
# used with."
pen = QPen(outline_colour, self.BOX_WIDTH, Qt.SolidLine)
pen.setCosmetic(True)
painter.setPen(pen)
r = self.boundingRect()
painter.drawRect(r)
if fill_colour:
painter.fillRect(r, fill_colour)
def has_mouse(self):
"""True if self or self._handles has grabbed the mouse
"""
return self.scene().mouseGrabberItem() in chain([self], self._handles)
@property
def colours(self):
"""Tuple of two QColors to use for the box's border and fill
respectively. Fill might be None.
"""
colours = colour_scheme_choice().current['Colours']
has_mouse = self.has_mouse()
if has_mouse:
outline = colours['Resizing']
elif self.isSelected():
outline = colours['Selected']
elif self._isvalid:
outline = colours['Valid']
else:
outline = colours['Invalid']
if not self._isvalid and not has_mouse:
fill = colours['InvalidFill']
else:
fill = None
return outline, fill
def update(self, rect=QRectF()):
"""QGraphicsRectItem function
"""
# TODO LH QGraphicsRectItem::update is not a virtual function - is it
# OK to implement this function and call the base class's
# implementation?
super(BoxItem, self).update(rect)
for item in self._handles:
item.update()
def hoverEnterEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverEnterEvent')
super(BoxItem, self).hoverEnterEvent(event)
self._set_handles_visible(True)
self._set_z_index()
self.update()
def hoverLeaveEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverLeaveEvent')
super(BoxItem, self).hoverLeaveEvent(event)
self._set_handles_visible(False)
self._set_z_index()
self.update()
def _set_handles_visible(self, visible):
for handle in self._handles:
handle.setVisible(visible)
def _create_handle(self, corner):
# Creates and returns a new ResizeHandle at the given Qt.Corner
handle = ResizeHandle(corner, self)
handle.setVisible(False)
handle.setFlags(QGraphicsItem.ItemStacksBehindParent |
QGraphicsItem.ItemIgnoresTransformations)
return handle
def _layout_children(self):
"""Moves child graphics items to the appropriate positions
"""
bounding = self.boundingRect()
for child in chain(self._handles, self._pois):
child.layout(bounding)
def setRect(self, rect):
"""QGraphicsRectItem function
"""
debug_print('BoxItem.setRect')
super(BoxItem, self).setRect(rect)
self._set_z_index()
self._layout_children()
def mousePressEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mousePressEvent')
super(BoxItem, self).mousePressEvent(event)
self._set_z_index()
if Qt.ShiftModifier == event.modifiers():
# Add a point of interest
self.append_point_of_interest(event.pos())
else:
# Starting a move
self.setCursor(Qt.ClosedHandCursor)
self.update()
def mouseReleaseEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mouseReleaseEvent')
super(BoxItem, self).mouseReleaseEvent(event)
self.setCursor(Qt.OpenHandCursor)
self._set_z_index()
self.update()
def itemChange(self, change, value):
"""QGraphicsItem virtual
"""
if change == self.ItemSelectedHasChanged:
# Clear points of interest
scene = self.scene()
while self._pois:
scene.removeItem(self._pois.pop())
# Item has gained or lost selection
self._set_z_index()
return super(BoxItem, self).itemChange(change, value)
def set_rect(self, new_rect):
"""Sets a new QRect in integer coordinates
"""
# Cumbersome conversion to ints
current = self.sceneBoundingRect()
current = QRect(current.left(), current.top(),
current.width(), current.height())
if current != new_rect:
msg = 'Update rect for [{0}] from [{1}] to [{2}]'
debug_print(msg.format(self, current, new_rect))
self.prepareGeometryChange()
# setrect() expects floating point rect
self.setRect(QRectF(new_rect))
def set_isvalid(self, isvalid):
"""Sets a new 'is valid'
"""
if isvalid != self._isvalid:
self._isvalid = isvalid
self.update()
def _set_z_index(self):
"""Updates the Z-index of the box
This sorts the boxes such that the bigger the area of a box, the lower
        its Z-index is; and boxes that are selected and have mouse or keyboard
focus are always above other boxes.
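        Worked example (illustrative numbers, not from the code): an
        unselected 100x50 box gets z = 1 + 1/5000 = 1.0002, a smaller
        unselected 10x10 box gets z = 1 + 1/100 = 1.01, and selecting the
        larger box lifts it to 2.0002, so it is drawn above both.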
"""
rect = self.rect()
# Smaller items have a higher z
z = 1.0
if rect.width() and rect.height():
            z += 1.0 / float(rect.width() * rect.height())
if self.isSelected():
z += 1.0
else:
# Newly created items have zero width and height
pass
self.setZValue(z)
def adjust_rect(self, dx1, dy1, dx2, dy2):
"""Adjusts rect
"""
r = self.rect()
r.adjust(dx1, dy1, dx2, dy2)
if r.width() > 1.0 and r.height() > 1.0:
self.prepareGeometryChange()
self.setRect(r)
def append_point_of_interest(self, pos):
"""Appends pos (a QPoint relative to the top-left of this box) to the
list of points of interest
"""
debug_print('New point of interest at [{0}]'.format(pos))
self._pois.append(Reticle(pos - self.boundingRect().topLeft(), self))
self._pois[-1].layout(self.boundingRect())
self._pois[-1].setFlags(QGraphicsItem.ItemIgnoresTransformations)
@property
def points_of_interest(self):
"""An iterable of QPointFs in item coordinates
"""
return [poi.offset for poi in self._pois]
| bsd-3-clause | -3,026,487,980,262,549,000 | 32.73384 | 118 | 0.593102 | false |
ragupta-git/ImcSdk | imcsdk/mometa/huu/HuuFirmwareCatalogComponent.py | 1 | 4134 | """This module contains the general information for HuuFirmwareCatalogComponent ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class HuuFirmwareCatalogComponentConsts:
pass
class HuuFirmwareCatalogComponent(ManagedObject):
"""This is HuuFirmwareCatalogComponent class."""
consts = HuuFirmwareCatalogComponentConsts()
naming_props = set([u'id'])
mo_meta = {
"classic": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version151f, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"]),
"modular": MoMeta("HuuFirmwareCatalogComponent", "huuFirmwareCatalogComponent", "id-[id]", VersionMeta.Version2013e, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'huuFirmwareCatalog'], [], ["Get"])
}
prop_meta = {
"classic": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"component_name": MoPropertyMeta("component_name", "componentName", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"description": MoPropertyMeta("description", "description", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version151f, MoPropertyMeta.NAMING, None, None, None, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
},
"modular": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"component_name": MoPropertyMeta("component_name", "componentName", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"description": MoPropertyMeta("description", "description", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, None, None, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
},
}
prop_map = {
"classic": {
"childAction": "child_action",
"componentName": "component_name",
"description": "description",
"dn": "dn",
"id": "id",
"rn": "rn",
"status": "status",
},
"modular": {
"childAction": "child_action",
"componentName": "component_name",
"description": "description",
"dn": "dn",
"id": "id",
"rn": "rn",
"status": "status",
},
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.component_name = None
self.description = None
self.status = None
ManagedObject.__init__(self, "HuuFirmwareCatalogComponent", parent_mo_or_dn, **kwargs)
| apache-2.0 | 3,600,132,035,256,602,000 | 50.037037 | 217 | 0.604983 | false |
rouault/Quantum-GIS | tests/src/python/test_qgsserver_wms_getfeatureinfo.py | 1 | 17012 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer GetFeatureInfo WMS.
From build dir, run: ctest -R PyQgsServerWMSGetFeatureInfo -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '11/03/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver_wms import TestQgsServerWMSTestBase
from qgis.core import QgsProject
class TestQgsServerWMSGetFeatureInfo(TestQgsServerWMSTestBase):
"""QGIS Server WMS Tests for GetFeatureInfo request"""
def testGetFeatureInfo(self):
# Test getfeatureinfo response xml
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
self.wms_request_compare('GetFeatureInfo',
'&layers=&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# Test getfeatureinfo response html
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-html')
# Test getfeatureinfo response html with geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo-text-html-geometry')
# Test getfeatureinfo response html with maptip
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_maptip=true',
'wms_getfeatureinfo-text-html-maptip')
# Test getfeatureinfo response text
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=text/plain',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo default info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo invalid info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=InvalidFormat',
'wms_getfeatureinfo-invalid-format')
# Test feature info request with filter geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A4326&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_geometry_filter')
# Test feature info request with filter geometry in non-layer CRS
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON ((913213.6839952 5606021.5399693, 913215.6988780 5606021.5399693, 913215.6988780 5606015.09643322, 913213.6839952 5606015.0964332, 913213.6839952 5606021.5399693))',
'wms_getfeatureinfo_geometry_filter_3857')
# Test feature info request with invalid query_layer
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=InvalidLayer&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_invalid_query_layers')
# Test feature info request with '+' instead of ' ' in layers and
# query_layers parameters
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer+%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer+%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# layer1 is a clone of layer0 but with a scale visibility. Thus,
# GetFeatureInfo response contains only a feature for layer0 and layer1
# is ignored for the required bbox. Without the scale visibility option,
# the feature for layer1 would have been in the response too.
mypath = self.testdata_path + "test_project_scalevisibility.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0,layer1&styles=&' +
'VERSION=1.1.0&' +
'info_format=text%2Fxml&' +
'width=500&height=500&srs=EPSG%3A4326' +
'&bbox=8.1976,44.8998,8.2100,44.9027&' +
'query_layers=layer0,layer1&X=235&Y=243',
'wms_getfeatureinfo_notvisible',
'test_project_scalevisibility.qgs')
# Test GetFeatureInfo resolves "value map" widget values
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&QUERY_LAYERS=layer0&I=487&J=308',
'wms_getfeatureinfo-values1-text-xml',
'test_project_values.qgs')
# TODO fix regression in QGIS 3 as the widget values don't get solved and enable test
@unittest.expectedFailure
def testGetFeatureInfoValueRelation(self):
"""Test GetFeatureInfo resolves "value relation" widget values"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer1&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer1&I=487&J=308',
'wms_getfeatureinfo-values1-text-xml',
'test_project_values.qgs')
# TODO make GetFeatureInfo show the dictionary values and enable test
@unittest.expectedFailure
def testGetFeatureInfoValueRelationArray(self):
"""Test GetFeatureInfo on "value relation" widget with array field (multiple selections)"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer3&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer3&I=487&J=308',
'wms_getfeatureinfo-values3-text-xml',
'test_project_values.qgs')
# TODO make GetFeatureInfo show what's in the display expression and enable test
@unittest.expectedFailure
def testGetFeatureInfoRelationReference(self):
"""Test GetFeatureInfo solves "relation reference" widget "display expression" values"""
mypath = self.testdata_path + "test_project_values.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer2&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer2&I=487&J=308',
'wms_getfeatureinfo-values2-text-xml',
'test_project_values.qgs')
def testGetFeatureInfoFilter(self):
# Test getfeatureinfo response xml
# Regression for #8656
# Mind the gap! (the space in the FILTER expression)
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter')
# Test a filter with NO condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' AND "utf8nameè" = \'no-results\''),
'wms_getfeatureinfo_filter_no_results')
# Test a filter with OR condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' OR "NAME" = \'three\''),
'wms_getfeatureinfo_filter_or')
# Test a filter with OR condition and UTF results
# Note that the layer name that contains utf-8 chars cannot be
# to upper case.
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\' OR "utf8nameè" = \'three èé↓\''),
'wms_getfeatureinfo_filter_or_utf8')
# Regression #18292 Server GetFeatureInfo FILTER search fails when WIDTH, HEIGHT are not specified
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' + urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter_no_width')
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 1,467,075,829,748,212,500 | 57.439863 | 236 | 0.499353 | false |
oppia/oppia-ml | core/domain/remote_access_services.py | 1 | 4226 | # coding: utf-8
#
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides interface to communicate with Oppia remotely."""
import base64
import hashlib
import hmac
import json
import requests
from core.domain.proto import training_job_response_payload_pb2
from core.platform import platform_services
import utils
import vmconf
metadata_services = platform_services.Registry.import_metadata_services()
def _get_url():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_COMMUNICATION_URL
return vmconf.SERVER_COMMUNICATION_URL
def _get_port():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_COMMUNICATION_PORT
return vmconf.SERVER_COMMUNICATION_PORT
def _get_vm_id():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_VM_ID
# Get VMID dynamically from metadata. HMAC module does not
# support unicode string. Hence we need to cast them to str.
return str(metadata_services.get_metadata_param(
vmconf.METADATA_VM_ID_PARAM_NAME))
def _get_shared_secret():
if vmconf.DEV_MODE:
return vmconf.DEFAULT_VM_SHARED_SECRET
# Get shared secret dynamically from metadata. HMAC module does not
# support unicode string. Hence we need to cast them to str.
return str(metadata_services.get_metadata_param(
vmconf.METADATA_SHARED_SECRET_PARAM_NAME))
def generate_signature(message, vm_id):
"""Generates digital signature for given message combined with vm_id.
Args:
message: bytes. Message string.
vm_id: str. ID of the VM that trained the job.
Returns:
str. The digital signature generated from request data.
"""
encoded_vm_id = vm_id.encode(encoding='utf-8')
msg = b'%s|%s' % (base64.b64encode(message), encoded_vm_id)
key = _get_shared_secret().encode(encoding='utf-8')
# Generate signature and return it.
return hmac.new(key, msg, digestmod=hashlib.sha256).hexdigest()
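# Illustrative sketch only -- this helper is not used elsewhere in the module.
# It shows the constant-time check that a receiver sharing the same secret
# (here, the Oppia server) could perform to validate a signature produced by
# generate_signature() above.
def _example_verify_signature(message, vm_id, signature):
    """Returns True if `signature` matches what this VM would produce for
    `message` and `vm_id`."""
    return hmac.compare_digest(generate_signature(message, vm_id), signature)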
def fetch_next_job_request():
"""Returns the next job request to be processed.
Returns:
dict. A dict retrieved remotely from database containing
job request data.
"""
request_url = "%s:%s/%s" % (
_get_url(), _get_port(), vmconf.FETCH_NEXT_JOB_REQUEST_HANDLER)
payload = {
'vm_id': _get_vm_id().encode(encoding='utf-8'),
'message': _get_vm_id().encode(encoding='utf-8'),
}
signature = generate_signature(payload['message'], payload['vm_id'])
payload['signature'] = signature
data = {
'payload': json.dumps(payload)
}
response = requests.post(request_url, data=data)
return utils.parse_data_received_from_server(response.text)
def store_trained_classifier_model(job_result):
"""Stores the result of processed job request.
Args:
job_result: TrainingJobResult. Domain object containing result of
training of classifier along with job_id and algorithm_id.
Returns:
int. Status code of the response.
"""
job_result.validate()
payload = training_job_response_payload_pb2.TrainingJobResponsePayload()
payload.job_result.CopyFrom(job_result.to_proto())
payload.vm_id = _get_vm_id().encode(encoding='utf-8')
message = payload.job_result.SerializeToString().encode(encoding='utf-8')
signature = generate_signature(message, payload.vm_id)
payload.signature = signature
data = payload.SerializeToString()
request_url = "%s:%s/%s" % (
_get_url(), _get_port(), vmconf.STORE_TRAINED_CLASSIFIER_MODEL_HANDLER)
response = requests.post(
request_url, data=data,
headers={'Content-Type': 'application/octet-stream'})
return response.status_code
| apache-2.0 | 7,458,767,984,366,599,000 | 30.537313 | 79 | 0.6938 | false |
google/clif | examples/wrapfunc/python/default_args_test.py | 1 | 1214 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for clif.examples.wrapfunc.python.default_args."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from wrapfunc.python import default_args
class DefaultArgTest(unittest.TestCase):
def testDefaultArgTest(self):
    self.assertEqual(default_args.Inc(5), 6)
    self.assertEqual(default_args.Inc(5, 2), 7)
    self.assertEqual(default_args.Scale(5), 10)
    self.assertEqual(default_args.Scale(5, offset=10), 30)
with self.assertRaises(ValueError):
default_args.ScaleWithRatios(5, offset=10)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -2,786,963,431,532,792,000 | 30.947368 | 74 | 0.73888 | false |
carolFrohlich/nipype | nipype/interfaces/mrtrix3/preprocess.py | 2 | 7501 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# -*- coding: utf-8 -*-
"""
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname(os.path.realpath(__file__ ))
>>> datadir = os.path.realpath(os.path.join(filepath,
... '../../testing/data'))
>>> os.chdir(datadir)
"""
from __future__ import print_function, division, unicode_literals, absolute_import
import os.path as op
from ..traits_extension import isdefined
from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec,
File)
from .base import MRTrix3BaseInputSpec, MRTrix3Base
class ResponseSDInputSpec(MRTrix3BaseInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-2,
desc='input diffusion weighted images')
out_file = File(
'response.txt', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file containing SH coefficients')
# DW Shell selection options
shell = traits.List(traits.Float, sep=',', argstr='-shell %s',
desc='specify one or more dw gradient shells')
in_mask = File(exists=True, argstr='-mask %s',
desc='provide initial mask image')
max_sh = traits.Int(8, argstr='-lmax %d',
desc='maximum harmonic degree of response function')
out_sf = File('sf_mask.nii.gz', argstr='-sf %s',
desc='write a mask containing single-fibre voxels')
test_all = traits.Bool(False, argstr='-test_all',
desc='re-test all voxels at every iteration')
# Optimization
iterations = traits.Int(0, argstr='-max_iters %d',
desc='maximum number of iterations per pass')
max_change = traits.Float(
argstr='-max_change %f',
desc=('maximum percentile change in any response function coefficient;'
' if no individual coefficient changes by more than this '
'fraction, the algorithm is terminated.'))
# Thresholds
vol_ratio = traits.Float(
.15, argstr='-volume_ratio %f',
desc=('maximal volume ratio between the sum of all other positive'
' lobes in the voxel and the largest FOD lobe'))
disp_mult = traits.Float(
1., argstr='-dispersion_multiplier %f',
desc=('dispersion of FOD lobe must not exceed some threshold as '
'determined by this multiplier and the FOD dispersion in other '
'single-fibre voxels. The threshold is: (mean + (multiplier * '
'(mean - min))); default = 1.0. Criterion is only applied in '
'second pass of RF estimation.'))
int_mult = traits.Float(
2., argstr='-integral_multiplier %f',
desc=('integral of FOD lobe must not be outside some range as '
'determined by this multiplier and FOD lobe integral in other'
' single-fibre voxels. The range is: (mean +- (multiplier * '
'stdev)); default = 2.0. Criterion is only applied in second '
'pass of RF estimation.'))
class ResponseSDOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
out_sf = File(desc=('mask containing single-fibre voxels'))
class ResponseSD(MRTrix3Base):
"""
Generate an appropriate response function from the image data for
spherical deconvolution.
.. [1] Tax, C. M.; Jeurissen, B.; Vos, S. B.; Viergever, M. A. and
Leemans, A., Recursive calibration of the fiber response function
for spherical deconvolution of diffusion MRI data. NeuroImage,
2014, 86, 67-80
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> resp = mrt.ResponseSD()
>>> resp.inputs.in_file = 'dwi.mif'
>>> resp.inputs.in_mask = 'mask.nii.gz'
>>> resp.inputs.grad_fsl = ('bvecs', 'bvals')
>>> resp.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'dwi2response -fslgrad bvecs bvals -mask mask.nii.gz dwi.mif response.txt'
>>> resp.run() # doctest: +SKIP
"""
_cmd = 'dwi2response'
input_spec = ResponseSDInputSpec
output_spec = ResponseSDOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
if isdefined(self.inputs.out_sf):
outputs['out_sf'] = op.abspath(self.inputs.out_sf)
return outputs
class ACTPrepareFSLInputSpec(CommandLineInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-2,
desc='input anatomical image')
out_file = File(
'act_5tt.mif', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file after processing')
class ACTPrepareFSLOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
class ACTPrepareFSL(CommandLine):
"""
Generate anatomical information necessary for Anatomically
Constrained Tractography (ACT).
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> prep = mrt.ACTPrepareFSL()
>>> prep.inputs.in_file = 'T1.nii.gz'
>>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'act_anat_prepare_fsl T1.nii.gz act_5tt.mif'
>>> prep.run() # doctest: +SKIP
"""
_cmd = 'act_anat_prepare_fsl'
input_spec = ACTPrepareFSLInputSpec
output_spec = ACTPrepareFSLOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
return outputs
class ReplaceFSwithFIRSTInputSpec(CommandLineInputSpec):
in_file = File(exists=True, argstr='%s', mandatory=True, position=-4,
desc='input anatomical image')
in_t1w = File(exists=True, argstr='%s', mandatory=True, position=-3,
desc='input T1 image')
in_config = File(exists=True, argstr='%s', position=-2,
desc='connectome configuration file')
out_file = File(
'aparc+first.mif', argstr='%s', mandatory=True, position=-1,
usedefault=True, desc='output file after processing')
class ReplaceFSwithFIRSTOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='the output response file')
class ReplaceFSwithFIRST(CommandLine):
"""
Replace deep gray matter structures segmented with FSL FIRST in a
FreeSurfer parcellation.
Example
-------
>>> import nipype.interfaces.mrtrix3 as mrt
>>> prep = mrt.ReplaceFSwithFIRST()
>>> prep.inputs.in_file = 'aparc+aseg.nii'
>>> prep.inputs.in_t1w = 'T1.nii.gz'
>>> prep.inputs.in_config = 'mrtrix3_labelconfig.txt'
>>> prep.cmdline # doctest: +ELLIPSIS +IGNORE_UNICODE
'fs_parc_replace_sgm_first aparc+aseg.nii T1.nii.gz \
mrtrix3_labelconfig.txt aparc+first.mif'
>>> prep.run() # doctest: +SKIP
"""
_cmd = 'fs_parc_replace_sgm_first'
input_spec = ReplaceFSwithFIRSTInputSpec
output_spec = ReplaceFSwithFIRSTOutputSpec
def _list_outputs(self):
outputs = self.output_spec().get()
outputs['out_file'] = op.abspath(self.inputs.out_file)
return outputs
| bsd-3-clause | -1,332,776,987,818,199,800 | 36.318408 | 87 | 0.614985 | false |
sam-m888/gramps | gramps/plugins/drawreport/ancestortree.py | 1 | 42337 | # Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007-2012 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2014 Paul Franklin
# Copyright (C) 2010-2015 Craig J. Anderson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Reports/Graphical Reports/Ancestor Tree"""
#------------------------------------------------------------------------
#
# Python modules
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.errors import ReportError
from gramps.gen.plug.menu import (TextOption, NumberOption, BooleanOption,
EnumeratedListOption, StringOption,
PersonOption)
from gramps.gen.plug.report import Report, MenuReportOptions, stdoptions
from gramps.gen.plug.docgen import (FontStyle, ParagraphStyle, GraphicsStyle,
FONT_SANS_SERIF, PARA_ALIGN_CENTER)
from gramps.plugins.lib.libtreebase import *
from gramps.plugins.lib.librecurse import AscendPerson
from gramps.gen.proxy import CacheProxyDb
from gramps.gen.display.name import displayer as _nd
PT2CM = utils.pt2cm
#cm2pt = utils.cm2pt
#------------------------------------------------------------------------
#
# Constants
#
#------------------------------------------------------------------------
_BORN = _("birth abbreviation|b.")
_DIED = _("death abbreviation|d.")
_MARR = _("marriage abbreviation|m.")
LVL_GEN, LVL_INDX, LVL_Y = range(3)
#------------------------------------------------------------------------
#
# Box classes
#
#------------------------------------------------------------------------
class PersonBox(BoxBase):
"""
Calculates information about the box that will print on a page
"""
def __init__(self, level):
BoxBase.__init__(self)
self.boxstr = "AC2-box"
#self.level = (level[0]-1, level[1])
self.level = level
def __lt__(self, other):
return self.level[LVL_Y] < other.level[LVL_Y]
class FamilyBox(BoxBase):
"""
Calculates information about the box that will print on a page
"""
def __init__(self, level):
BoxBase.__init__(self)
self.boxstr = "AC2-fam-box"
#self.level = (level[0]-1, level[1])
self.level = level
def __lt__(self, other):
return self.level[LVL_Y] < other.level[LVL_Y]
#------------------------------------------------------------------------
#
# Titles Class(es)
#
#------------------------------------------------------------------------
class TitleN(TitleNoDisplay):
"""No Title class for the report """
def __init__(self, doc, locale):
TitleNoDisplay.__init__(self, doc, "AC2-Title-box")
self._ = locale.translation.sgettext
def calc_title(self, center):
"""Calculate the title of the report"""
#we want no text, but need a text for the TOC in a book!
self.mark_text = self._("Ancestor Graph")
self.text = ''
class TitleA(TitleBox):
"""Title class for the report """
def __init__(self, doc, locale, name_displayer):
self._nd = name_displayer
TitleBox.__init__(self, doc, "AC2-Title-box")
self._ = locale.translation.sgettext
def calc_title(self, center):
"""Calculate the title of the report"""
name = ""
if center is not None:
name = self._nd.display(center)
# feature request 2356: avoid genitive form
self.text = self._("Ancestor Graph for %s") % name
self.set_box_height_width()
#------------------------------------------------------------------------
#
# CalcItems (helper class to calculate text)
# make_ancestor_tree (main recursive functions)
#
#------------------------------------------------------------------------
class CalcItems:
""" A helper class to calculate the default box text
and text for each person / marriage
"""
def __init__(self, dbase):
_gui = GUIConnect()
self._gui = _gui
#calculate the printed lines for each box
#str = ""
#if self.get_val('miss_val'):
# str = "_____"
display_repl = _gui.get_val("replace_list")
self.__calc_l = CalcLines(dbase, display_repl, _gui.locale, _gui.n_d)
self.__blank_father = None
self.__blank_mother = None
self.__blank_father = \
self.__calc_l.calc_lines(None, None, _gui.get_val("father_disp"))
self.__blank_mother = \
self.__calc_l.calc_lines(None, None, _gui.get_val("mother_disp"))
self.center_use = _gui.get_val("center_uses")
self.disp_father = _gui.get_val("father_disp")
self.disp_mother = _gui.get_val("mother_disp")
self.disp_marr = [_gui.get_val("marr_disp")]
self.__blank_marriage = \
self.__calc_l.calc_lines(None, None, self.disp_marr)
def calc_person(self, index, indi_handle, fams_handle):
working_lines = ""
if index[1] % 2 == 0 or (index[1] == 1 and self.center_use == 0):
if indi_handle == fams_handle is None:
working_lines = self.__calc_l.calc_lines(
None, None, self._gui.get_val("father_disp"))
else:
working_lines = self.disp_father
else:
if indi_handle == fams_handle is None:
working_lines = self.__calc_l.calc_lines(
None, None, self._gui.get_val("mother_disp"))
else:
working_lines = self.disp_mother
if indi_handle == fams_handle is None:
return working_lines
else:
return self.__calc_l.calc_lines(indi_handle, fams_handle,
working_lines)
def calc_marriage(self, indi_handle, fams_handle):
if indi_handle == fams_handle is None:
return self.__blank_marriage
else:
return self.__calc_l.calc_lines(indi_handle, fams_handle,
self.disp_marr)
class MakeAncestorTree(AscendPerson):
"""
The main procedure to use recursion to make the tree based off of a person.
    The order in which people are inserted is important;
    this class makes sure that the order is handled correctly.
"""
def __init__(self, dbase, canvas):
_gui = GUIConnect()
AscendPerson.__init__(self, dbase, _gui.maxgen(), _gui.fill_out())
self.database = dbase
self.canvas = canvas
self.inlc_marr = _gui.inc_marr()
self.inc_sib = _gui.inc_sib()
self.compress_tree = _gui.compress_tree()
self.center_family = None
self.lines = [None] * (_gui.maxgen() + 1)
self.max_generation = 0
self.calc_items = CalcItems(self.database)
def add_person(self, index, indi_handle, fams_handle):
""" Makes a person box and add that person into the Canvas. """
#print str(index) + " add_person " + str(indi_handle)
myself = PersonBox((index[0] - 1,) + index[1:])
if index[LVL_GEN] == 1: # Center Person
self.center_family = fams_handle
if index[LVL_GEN] > self.max_generation:
self.max_generation = index[LVL_GEN]
myself.text = self.calc_items.calc_person(index,
indi_handle, fams_handle)
# myself.text[0] = myself.text[0] + ' ' + repr(index) # for debugging
if indi_handle is not None: # None is legal for an empty box
myself.add_mark(self.database,
self.database.get_person_from_handle(indi_handle))
self.canvas.add_box(myself)
#make the lines
indx = index[LVL_GEN]
self.lines[indx] = myself
if indx > 1:
if self.lines[indx - 1].line_to is None:
line = LineBase(self.lines[indx - 1])
self.lines[indx - 1].line_to = line
self.canvas.add_line(line)
else:
line = self.lines[indx - 1].line_to
line.add_to(myself)
return myself
def add_person_again(self, index, indi_handle, fams_handle):
self.add_person(index, indi_handle, fams_handle)
def add_marriage(self, index, indi_handle, fams_handle):
""" Makes a marriage box and add that person into the Canvas. """
if not self.inlc_marr:
return
myself = FamilyBox((index[0] - 1,) + index[1:])
#calculate the text.
myself.text = self.calc_items.calc_marriage(indi_handle, fams_handle)
self.canvas.add_box(myself)
def y_index(self, x_level, index):
""" Calculate the column or generation that this person is in.
x_level -> 0 to max_gen-1
index -> 1 to (self.max_generation**2)-1
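        Worked example (illustrative, assuming max_generation == 3, so the
        table has 2**3 == 8 rows numbered 0-7): the centre person
        (x_level=0, index=1) lands on row 3, the parents (x_level=1,
        index=2 and 3) on rows 1 and 5, and the four grandparents
        (x_level=2, index=4..7) on rows 0, 2, 4 and 6.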
"""
#Calculate which row in the column of people.
tmp_y = index - (2**x_level)
#Calculate which row in the table (yes table) of people.
delta = (2**self.max_generation) // (2**(x_level))
return int((delta / 2) + (tmp_y * delta)) - 1
def do_y_indx(self):
''' Make the y_index for all boxes
        first off of a formula, then remove blank areas around the edges,
then compress the tree if desired
'''
min_y = self.y_index(self.canvas.boxes[0].level[LVL_GEN],
self.canvas.boxes[0].level[LVL_INDX])
for box in self.canvas.boxes:
if "fam" in box.boxstr:
box.level = box.level + \
(self.y_index(box.level[LVL_GEN] - 1,
int(box.level[LVL_INDX] / 2)),)
else:
box.level = box.level + \
(self.y_index(box.level[LVL_GEN], box.level[LVL_INDX]),)
min_y = min(min_y, box.level[LVL_Y])
#print (str(box.level))
#if a last father (of fathers) does not have a father/parents
#Then there could be a gap. Remove this gap
if min_y > 0:
for box in self.canvas.boxes:
box.level = box.level[:LVL_Y] + (box.level[LVL_Y] - min_y,)
#Now that we have y_index, lets see if we need to squish the tree
self.canvas.boxes.sort() # Sort them on the y_index
if not self.compress_tree:
return
#boxes are already in top down [LVL_Y] form so lets
#set the box in the correct y level depending on compress_tree
y_level = 0
current_y = self.canvas.boxes[0].level[LVL_Y]
for box in self.canvas.boxes:
y_index = box.level[LVL_Y]
if y_index > current_y:
current_y = y_index
y_level += 1
box.level = box.level[:LVL_Y] + (y_level,)
def do_sibs(self):
if not self.inc_sib or self.center_family is None:
return
family = self.database.get_family_from_handle(self.center_family)
mykids = [kid.ref for kid in family.get_child_ref_list()]
if len(mykids) == 1: # No other siblings. Don't do anything.
return
        # The first person is the center person and he/she has our information
center = self.canvas.boxes.pop(self.canvas.boxes.index(self.lines[1]))
line = center.line_to
level = center.level[LVL_Y]
move = level - (len(mykids) // 2) + ((len(mykids) + 1) % 2)
if move < 0:
# more kids than parents. ran off the page. Move them all down
for box in self.canvas.boxes:
box.level = (box.level[0], box.level[1], box.level[2] - move)
move = 0
line.start = []
rrr = -1 # if len(mykids)%2 == 1 else 0
for kid in mykids:
rrr += 1
mee = self.add_person((1, 1, move + rrr), kid, self.center_family)
line.add_from(mee)
#mee.level = (0, 1, level - (len(mykids)//2)+rrr)
mee.line_to = line
def start(self, person_id):
""" go ahead and make it happen """
center = self.database.get_person_from_gramps_id(person_id)
if center is None:
raise ReportError(
_("Person %s is not in the Database") % person_id)
center_h = center.get_handle()
#Step 1. Get the people
self.recurse(center_h)
#Step 2. Calculate the y_index for everyone
self.do_y_indx()
#Step 3. Siblings of the center person
self.do_sibs()
#------------------------------------------------------------------------
#
# Transform Classes
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# Class lr_Transform
#------------------------------------------------------------------------
class LRTransform:
"""
    setup all of the boxes on the canvas for a left/right report
"""
def __init__(self, canvas, max_generations):
self.canvas = canvas
self.rept_opts = canvas.report_opts
self.y_offset = (self.rept_opts.littleoffset * 2 +
self.canvas.title.height)
def _place(self, box):
""" put the box in it's correct spot """
#1. cm_x
box.x_cm = self.rept_opts.littleoffset
box.x_cm += (box.level[LVL_GEN] *
(self.rept_opts.col_width + self.rept_opts.max_box_width))
#2. cm_y
box.y_cm = self.rept_opts.max_box_height + self.rept_opts.box_pgap
box.y_cm *= box.level[LVL_Y]
box.y_cm += self.y_offset
#if box.height < self.rept_opts.max_box_height:
# box.y_cm += ((self.rept_opts.max_box_height - box.height) /2)
def place(self):
""" Step through boxes so they can be put in the right spot """
#prime the pump
self.__last_y_level = self.canvas.boxes[0].level[LVL_Y]
#go
for box in self.canvas.boxes:
self._place(box)
#------------------------------------------------------------------------
#
# class make_report
#
#------------------------------------------------------------------------
class MakeReport:
def __init__(self, dbase, doc, canvas, font_normal):
self.database = dbase
self.doc = doc
self.canvas = canvas
self.font_normal = font_normal
_gui = GUIConnect()
self.inlc_marr = _gui.inc_marr()
self.compress_tree = _gui.compress_tree()
self.mother_ht = self.father_ht = 0
self.max_generations = 0
def get_height_width(self, box):
"""
obtain width information for each level (x)
obtain height information for each item
"""
self.canvas.set_box_height_width(box)
if box.width > self.canvas.report_opts.max_box_width:
self.canvas.report_opts.max_box_width = box.width # + box.shadow
if box.level[LVL_Y] > 0:
if box.level[LVL_INDX] % 2 == 0 and box.height > self.father_ht:
self.father_ht = box.height
elif box.level[LVL_INDX] % 2 == 1 and box.height > self.mother_ht:
self.mother_ht = box.height
if box.level[LVL_GEN] > self.max_generations:
self.max_generations = box.level[LVL_GEN]
def get_generations(self):
return self.max_generations
def start(self):
# __gui = GUIConnect()
# 1.
#set the sizes for each box and get the max_generations.
self.father_ht = 0.0
self.mother_ht = 0.0
for box in self.canvas.boxes:
self.get_height_width(box)
if self.compress_tree and not self.inlc_marr:
self.canvas.report_opts.max_box_height = \
min(self.father_ht, self.mother_ht)
else:
self.canvas.report_opts.max_box_height = \
max(self.father_ht, self.mother_ht)
#At this point we know everything we need to make the report.
#Size of each column of people - self.rept_opt.box_width
#size of each column (or row) of lines - self.rept_opt.col_width
#size of each row - self.rept_opt.box_height
#go ahead and set it now.
for box in self.canvas.boxes:
box.width = self.canvas.report_opts.max_box_width
# 2.
#setup the transform class to move around the boxes on the canvas
transform = LRTransform(self.canvas, self.max_generations)
transform.place()
class GUIConnect:
""" This is a BORG object. There is ONLY one.
    This gives some common routines that EVERYONE can use, like
    getting the value from a GUI variable
"""
__shared_state = {}
def __init__(self): # We are BORG!
self.__dict__ = self.__shared_state
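    # Illustrative sketch (not part of the original class): because every
    # instance shares __shared_state, options set through one instance are
    # visible through any other, e.g.
    #
    #   GUIConnect().set__opts(menu, locale, name_displayer)  # done once
    #   maxgen = GUIConnect().get_val("maxgen")                # seen everywhere
    #
    # The menu, locale and name_displayer names above are stand-ins for
    # whatever the report passes in; they are assumptions for the example.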
def set__opts(self, options, locale, name_displayer):
""" Set only once as we are BORG. """
self.__opts = options
self.locale = locale
self.n_d = name_displayer
def get_val(self, val):
""" Get a GUI value. """
value = self.__opts.get_option_by_name(val)
if value:
return value.get_value()
else:
            return False
def title_class(self, doc):
""" Return a class that holds the proper title based off of the
GUI options """
title_type = self.get_val('report_title')
if title_type:
return TitleA(doc, self.locale, self.n_d)
else:
return TitleN(doc, self.locale)
def inc_marr(self):
return self.get_val("inc_marr")
def inc_sib(self):
return self.get_val("inc_siblings")
def maxgen(self):
return self.get_val("maxgen")
def fill_out(self):
return self.get_val("fill_out")
def compress_tree(self):
return self.get_val("compress_tree")
#------------------------------------------------------------------------
#
# AncestorTree
#
#------------------------------------------------------------------------
class AncestorTree(Report):
""" AncestorTree Report """
def __init__(self, database, options, user):
"""
Create AncestorTree object that produces the report.
The arguments are:
database - the Gramps database instance
options - instance of the Options class for this report
user - a gen.user.User() instance
"""
Report.__init__(self, database, options, user)
self.options = options
self._user = user
self.set_locale(options.menu.get_option_by_name('trans').get_value())
stdoptions.run_date_format_option(self, options.menu)
stdoptions.run_private_data_option(self, options.menu)
stdoptions.run_living_people_option(self, options.menu, self._locale)
self.database = CacheProxyDb(self.database)
stdoptions.run_name_format_option(self, options.menu)
self._nd = self._name_display
def begin_report(self):
"""
This report needs the following parameters (class variables)
that come in the options class.
max_generations - Maximum number of generations to include.
pagebbg - Whether to include page breaks between generations.
dispf - Display format for the output box.
scale_report - Whether to scale the report to fit the width or all.
indblank - Whether to include blank pages.
compress - Whether to compress chart.
incl_private - Whether to include private data
living_people - How to handle living people
years_past_death - Consider as living this many years after death
        We will set up:
1. a canvas in its full one-page size
2. a page that we wish to print on
scale up/down either or both of the above as needed/desired.
almost all of this should be moved into Canvas!
"""
database = self.database
self.connect = GUIConnect()
self.connect.set__opts(self.options.menu, self._locale, self._nd)
#Set up the canvas that we will print on.
style_sheet = self.doc.get_style_sheet()
font_normal = style_sheet.get_paragraph_style("AC2-Normal").get_font()
#The canvas that we will put our report on and print off of
self.canvas = Canvas(self.doc,
ReportOptions(self.doc, font_normal, 'AC2-line'))
self.canvas.report_opts.box_shadow *= \
self.connect.get_val('shadowscale')
self.canvas.report_opts.box_pgap *= self.connect.get_val('box_Yscale')
self.canvas.report_opts.box_mgap *= self.connect.get_val('box_Yscale')
with self._user.progress(_('Ancestor Tree'),
_('Making the Tree...'), 4) as step:
#make the tree onto the canvas
# inlc_marr = self.connect.get_val("inc_marr")
self.max_generations = self.connect.get_val('maxgen')
tree = MakeAncestorTree(database, self.canvas)
tree.start(self.connect.get_val('pid'))
tree = None
step()
#Title
title = self.connect.title_class(self.doc)
center = self.database.get_person_from_gramps_id(
self.connect.get_val('pid'))
title.calc_title(center)
self.canvas.add_title(title)
#make the report as big as it wants to be.
report = MakeReport(database, self.doc, self.canvas, font_normal)
report.start()
self.max_generations = report.get_generations() # already know
report = None
step()
#Note?
if self.connect.get_val("inc_note"):
note_box = NoteBox(self.doc, "AC2-note-box",
self.connect.get_val("note_place"))
subst = SubstKeywords(self.database, self._locale, self._nd,
None, None)
note_box.text = subst.replace_and_clean(
self.connect.get_val('note_disp'))
self.canvas.add_note(note_box)
#Now we have the report in its full size.
#Do we want to scale the report?
one_page = self.connect.get_val("resize_page")
scale_report = self.connect.get_val("scale_tree")
scale = self.canvas.scale_report(one_page,
scale_report != 0,
scale_report == 2)
step()
if scale != 1 or self.connect.get_val('shadowscale') != 1.0:
self.scale_styles(scale)
def write_report(self):
one_page = self.connect.get_val("resize_page")
#scale_report = self.connect.get_val("scale_tree")
#inlc_marr = self.connect.get_val("inc_marr")
inc_border = self.connect.get_val('inc_border')
incblank = self.connect.get_val("inc_blank")
prnnum = self.connect.get_val("inc_pagenum")
#####################
#Setup page information
colsperpage = self.doc.get_usable_width()
colsperpage += self.canvas.report_opts.col_width
colsperpage = int(
colsperpage / (self.canvas.report_opts.max_box_width +
self.canvas.report_opts.col_width))
colsperpage = colsperpage or 1
#####################
#Vars
if prnnum:
page_num_box = PageNumberBox(self.doc, 'AC2-box', self._locale)
#TODO - Here
#####################
#ok, everyone is now ready to print on the canvas. Paginate?
self.canvas.paginate(colsperpage, one_page)
#####################
#Yeah!!!
#lets finally make some pages!!!
#####################
pages = self.canvas.page_count(incblank)
with self._user.progress(_('Ancestor Tree'),
_('Printing the Tree...'), pages) as step:
for page in self.canvas.page_iter_gen(incblank):
self.doc.start_page()
#do we need to print a border?
if inc_border:
page.draw_border('AC2-line')
#Do we need to print the page number?
if prnnum:
page_num_box.display(page)
#Print the individual people and lines
page.display()
step()
self.doc.end_page()
def scale_styles(self, scale):
"""
Scale the styles for this report.
"""
style_sheet = self.doc.get_style_sheet()
graph_style = style_sheet.get_draw_style("AC2-box")
graph_style.set_shadow(graph_style.get_shadow(),
self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-box", graph_style)
graph_style = style_sheet.get_draw_style("AC2-fam-box")
graph_style.set_shadow(graph_style.get_shadow(),
self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-fam-box", graph_style)
graph_style = style_sheet.get_draw_style("AC2-note-box")
#graph_style.set_shadow(graph_style.get_shadow(),
# self.canvas.report_opts.box_shadow * scale)
graph_style.set_line_width(graph_style.get_line_width() * scale)
style_sheet.add_draw_style("AC2-note-box", graph_style)
para_style = style_sheet.get_paragraph_style("AC2-Normal")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Normal", para_style)
para_style = style_sheet.get_paragraph_style("AC2-Note")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Note", para_style)
para_style = style_sheet.get_paragraph_style("AC2-Title")
font = para_style.get_font()
font.set_size(font.get_size() * scale)
para_style.set_font(font)
style_sheet.add_paragraph_style("AC2-Title", para_style)
graph_style = GraphicsStyle()
width = graph_style.get_line_width()
width = width * scale
graph_style.set_line_width(width)
style_sheet.add_draw_style("AC2-line", graph_style)
self.doc.set_style_sheet(style_sheet)
#------------------------------------------------------------------------
#
# AncestorTreeOptions
#
#------------------------------------------------------------------------
class AncestorTreeOptions(MenuReportOptions):
"""
Defines options and provides handling interface.
"""
def __init__(self, name, dbase):
self.__db = dbase
self.__pid = None
self.box_Y_sf = None
self.box_shadow_sf = None
MenuReportOptions.__init__(self, name, dbase)
def get_subject(self):
""" Return a string that describes the subject of the report. """
gid = self.__pid.get_value()
person = self.__db.get_person_from_gramps_id(gid)
return _nd.display(person)
def add_menu_options(self, menu):
##################
category_name = _("Tree Options")
self.__pid = PersonOption(_("Center Person"))
self.__pid.set_help(_("The center person for the tree"))
menu.add_option(category_name, "pid", self.__pid)
siblings = BooleanOption(
_('Include siblings of the center person'), False)
siblings.set_help(
_("Whether to only display the center person or all "
"of his/her siblings too"))
menu.add_option(category_name, "inc_siblings", siblings)
self.max_gen = NumberOption(_("Generations"), 10, 1, 50)
self.max_gen.set_help(_("The number of generations to include "
"in the tree"))
menu.add_option(category_name, "maxgen", self.max_gen)
self.fillout = EnumeratedListOption(_("Display unknown\ngenerations"),
0)
self.fillout.set_help(_("The number of generations of empty "
"boxes that will be displayed"))
menu.add_option(category_name, "fill_out", self.fillout)
self.max_gen.connect('value-changed', self.__fillout_vals)
self.__fillout_vals()
compress = BooleanOption(_('Compress tree'), True)
compress.set_help(
_("Whether to remove any extra blank spaces set "
"aside for people that are unknown"))
menu.add_option(category_name, "compress_tree", compress)
#better to 'Show siblings of\nthe center person
#Spouse_disp = EnumeratedListOption(_("Show spouses of\nthe center "
# "person"), 0)
#Spouse_disp.add_item(0, _("No. Do not show Spouses"))
#Spouse_disp.add_item(1, _("Yes, and use the Main Display Format"))
#Spouse_disp.add_item(2, _("Yes, and use the Secondary "
# "Display Format"))
#Spouse_disp.set_help(_("Show spouses of the center person?"))
#menu.add_option(category_name, "Spouse_disp", Spouse_disp)
##################
category_name = _("Report Options")
self.title = EnumeratedListOption(_("Report Title"), 0)
self.title.add_item(0, _("Do not include a title"))
self.title.add_item(1, _("Include Report Title"))
self.title.set_help(_("Choose a title for the report"))
menu.add_option(category_name, "report_title", self.title)
border = BooleanOption(_('Include a border'), False)
border.set_help(_("Whether to make a border around the report."))
menu.add_option(category_name, "inc_border", border)
prnnum = BooleanOption(_('Include Page Numbers'), False)
prnnum.set_help(_("Whether to print page numbers on each page."))
menu.add_option(category_name, "inc_pagenum", prnnum)
self.scale = EnumeratedListOption(_("Scale tree to fit"), 0)
self.scale.add_item(0, _("Do not scale tree"))
self.scale.add_item(1, _("Scale tree to fit page width only"))
self.scale.add_item(2, _("Scale tree to fit the size of the page"))
self.scale.set_help(
_("Whether to scale the tree to fit a specific paper size"))
menu.add_option(category_name, "scale_tree", self.scale)
self.scale.connect('value-changed', self.__check_blank)
if "BKI" not in self.name.split(","):
self.__onepage = BooleanOption(
_("Resize Page to Fit Tree size\n"
"\n"
"Note: Overrides options in the 'Paper Option' tab"
),
False)
self.__onepage.set_help(
_("Whether to resize the page to fit the size \n"
"of the tree. Note: the page will have a \n"
"non standard size.\n"
"\n"
"With this option selected, the following will happen:\n"
"\n"
"With the 'Do not scale tree' option the page\n"
" is resized to the height/width of the tree\n"
"\n"
"With 'Scale tree to fit page width only' the height of\n"
" the page is resized to the height of the tree\n"
"\n"
"With 'Scale tree to fit the size of the page' the page\n"
" is resized to remove any gap in either height or width"
))
menu.add_option(category_name, "resize_page", self.__onepage)
self.__onepage.connect('value-changed', self.__check_blank)
else:
self.__onepage = None
self.__blank = BooleanOption(_('Include Blank Pages'), True)
self.__blank.set_help(_("Whether to include pages that are blank."))
menu.add_option(category_name, "inc_blank", self.__blank)
self.__check_blank()
##################
category_name = _("Report Options (2)")
stdoptions.add_name_format_option(menu, category_name)
stdoptions.add_living_people_option(menu, category_name)
stdoptions.add_private_data_option(menu, category_name)
locale_opt = stdoptions.add_localization_option(menu, category_name)
stdoptions.add_date_format_option(menu, category_name, locale_opt)
##################
category_name = _("Display")
disp = TextOption(_("Father\nDisplay Format"),
["$n",
"%s $b" % _BORN,
"-{%s $d}" % _DIED])
disp.set_help(_("Display format for the fathers box."))
menu.add_option(category_name, "father_disp", disp)
#Will add when libsubstkeyword supports it.
#missing = EnumeratedListOption(_("Replace missing\nplaces\\dates \
# with"), 0)
#missing.add_item(0, _("Does not display anything"))
#missing.add_item(1, _("Displays '_____'"))
#missing.set_help(_("What will print when information is not known"))
#menu.add_option(category_name, "miss_val", missing)
disp_mom = TextOption(_("Mother\nDisplay Format"),
["$n",
"%s $b" % _BORN,
"%s $m" % _MARR,
"-{%s $d}" % _DIED])
disp_mom.set_help(_("Display format for the mothers box."))
menu.add_option(category_name, "mother_disp", disp_mom)
center_disp = EnumeratedListOption(_("Center person uses\n"
"which format"), 0)
center_disp.add_item(0, _("Use Fathers Display format"))
center_disp.add_item(1, _("Use Mothers display format"))
center_disp.set_help(_("The display format for the center person"))
menu.add_option(category_name, "center_uses", center_disp)
self.incmarr = BooleanOption(_('Include Marriage box'), False)
self.incmarr.set_help(
_("Whether to include a separate marital box in the report"))
menu.add_option(category_name, "inc_marr", self.incmarr)
self.incmarr.connect('value-changed', self._incmarr_changed)
self.marrdisp = StringOption(_("Marriage\nDisplay Format"),
"%s $m" % _MARR)
self.marrdisp.set_help(_("Display format for the marital box."))
menu.add_option(category_name, "marr_disp", self.marrdisp)
self._incmarr_changed()
##################
category_name = _("Advanced")
repldisp = TextOption(
_("Replace Display Format:\n'Replace this'/' with this'"),
[])
repldisp.set_help(_("i.e.\nUnited States of America/U.S.A"))
menu.add_option(category_name, "replace_list", repldisp)
# TODO this code is never used and so I conclude it is for future use
# self.__include_images = BooleanOption(
# _('Include thumbnail images of people'), False)
# self.__include_images.set_help(
# _("Whether to include thumbnails of people."))
# menu.add_option(category_name, "includeImages",
# self.__include_images)
self.usenote = BooleanOption(_('Include a note'), False)
self.usenote.set_help(_("Whether to include a note on the report."))
menu.add_option(category_name, "inc_note", self.usenote)
self.usenote.connect('value-changed', self._usenote_changed)
self.notedisp = TextOption(_("Note"), [])
self.notedisp.set_help(_("Add a note\n\n"
"$T inserts today's date"))
menu.add_option(category_name, "note_disp", self.notedisp)
locales = NoteType(0, 1)
self.notelocal = EnumeratedListOption(_("Note Location"), 0)
for num, text in locales.note_locals():
self.notelocal.add_item(num, text)
self.notelocal.set_help(_("Where to place the note."))
menu.add_option(category_name, "note_place", self.notelocal)
self._usenote_changed()
self.box_Y_sf = NumberOption(_("inter-box scale factor"),
1.00, 0.10, 2.00, 0.01)
self.box_Y_sf.set_help(
_("Make the inter-box spacing bigger or smaller"))
menu.add_option(category_name, "box_Yscale", self.box_Y_sf)
self.box_shadow_sf = NumberOption(_("box shadow scale factor"),
1.00, 0.00, 2.00, 0.01) # down to 0
self.box_shadow_sf.set_help(_("Make the box shadow bigger or smaller"))
menu.add_option(category_name, "shadowscale", self.box_shadow_sf)
def _incmarr_changed(self):
"""
If Marriage box is not enabled, disable Marriage Display Format box
"""
value = self.incmarr.get_value()
self.marrdisp.set_available(value)
def _usenote_changed(self):
"""
If Note box is not enabled, disable Note Location box
"""
value = self.usenote.get_value()
self.notelocal.set_available(value)
def __check_blank(self):
if self.__onepage:
value = not self.__onepage.get_value()
else:
value = True
off = value and (self.scale.get_value() != 2)
self.__blank.set_available(off)
def __fillout_vals(self):
max_gen = self.max_gen.get_value()
old_val = self.fillout.get_value()
item_list = []
item_list.append([0, _("No generations of empty boxes "
"for unknown ancestors")])
if max_gen > 1:
item_list.append([1, _("One Generation of empty boxes "
"for unknown ancestors")])
item_list.extend(
[itr, str(itr) +
_(" Generations of empty boxes for unknown ancestors")]
for itr in range(2, max_gen))
self.fillout.set_items(item_list)
if old_val + 2 > len(item_list):
self.fillout.set_value(len(item_list) - 2)
def make_default_style(self, default_style):
"""Make the default output style for the Ancestor Tree."""
# Paragraph Styles:
font = FontStyle()
font.set_size(9)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_description(
_('The basic style used for the text display.'))
default_style.add_paragraph_style("AC2-Normal", para_style)
box_shadow = PT2CM(font.get_size()) * .6
font = FontStyle()
font.set_size(9)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_description(
_('The basic style used for the note display.'))
default_style.add_paragraph_style("AC2-Note", para_style)
font = FontStyle()
font.set_size(16)
font.set_type_face(FONT_SANS_SERIF)
para_style = ParagraphStyle()
para_style.set_font(font)
para_style.set_alignment(PARA_ALIGN_CENTER)
para_style.set_description(_('The style used for the title.'))
default_style.add_paragraph_style("AC2-Title", para_style)
# Draw styles
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Normal")
graph_style.set_shadow(1, box_shadow) # shadow set by text size
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-box", graph_style)
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Normal")
#graph_style.set_shadow(0, PT2CM(9)) # shadow set by text size
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-fam-box", graph_style)
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Note")
graph_style.set_fill_color((255, 255, 255))
default_style.add_draw_style("AC2-note-box", graph_style)
# TODO this seems meaningless, as only the text is displayed
graph_style = GraphicsStyle()
graph_style.set_paragraph_style("AC2-Title")
graph_style.set_color((0, 0, 0))
graph_style.set_fill_color((255, 255, 255))
graph_style.set_line_width(0)
graph_style.set_description(_("Cannot edit this reference"))
default_style.add_draw_style("AC2-Title-box", graph_style)
graph_style = GraphicsStyle()
default_style.add_draw_style("AC2-line", graph_style)
#=====================================
#But even if you should suffer for what is right, you are blessed.
#"Do not fear what they fear ; do not be frightened."
#Take Courage
#1 Peter 3:14
| gpl-2.0 | 6,894,781,434,802,397,000 | 37.038634 | 79 | 0.546709 | false |
lgfausak/sqlbridge | sqlbridge/twisted/dbengine.py | 1 | 6399 | #!/usr/bin/env python
###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
import sys,os,argparse,six
from twisted.python import log
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.endpoints import clientFromString
#from myapprunner import MyApplicationRunner
from autobahn.twisted.wamp import ApplicationSession,ApplicationRunner
from autobahn import util
from autobahn.wamp import auth
from autobahn.wamp import types
from autobahn.wamp.exception import ApplicationError
from autobahn.twisted import wamp, websocket
from autobahn.twisted.wamp import ApplicationSession
class DB(ApplicationSession):
"""
An application component providing db access
"""
def __init__(self, *args, **kwargs):
log.msg("__init__")
self.db = {}
self.svar = {}
log.msg("got args {}, kwargs {}".format(args,kwargs))
# reap init variables meant only for us
for i in ( 'engine', 'topic_base', 'dsn', 'authinfo', 'debug', ):
if i in kwargs:
if kwargs[i] is not None:
self.svar[i] = kwargs[i]
del kwargs[i]
log.msg("sending to super.init args {}, kwargs {}".format(args,kwargs))
ApplicationSession.__init__(self, *args, **kwargs)
def onConnect(self):
log.msg("onConnect")
auth_type = 'none'
auth_user = 'anon'
if 'authinfo' in self.svar:
auth_type = self.svar['authinfo']['auth_type']
auth_user = self.svar['authinfo']['auth_user']
log.msg("onConnect with {} {}".format(auth_type, auth_user))
self.join(self.config.realm, [six.u(auth_type)], six.u(auth_user))
def onChallenge(self, challenge):
log.msg("onChallenge - maynard")
password = 'unknown'
if 'authinfo' in self.svar:
password = self.svar['authinfo']['auth_password']
log.msg("onChallenge with password {}".format(password))
if challenge.method == u'wampcra':
if u'salt' in challenge.extra:
key = auth.derive_key(password.encode('utf8'),
challenge.extra['salt'].encode('utf8'),
challenge.extra.get('iterations', None),
challenge.extra.get('keylen', None))
else:
key = password.encode('utf8')
signature = auth.compute_wcs(key, challenge.extra['challenge'].encode('utf8'))
return signature.decode('ascii')
else:
raise Exception("don't know how to compute challenge for authmethod {}".format(challenge.method))
@inlineCallbacks
def onJoin(self, details):
log.msg("db:onJoin session attached {}".format(details))
if 'engine' in self.svar and 'topic_base' in self.svar:
if self.svar['engine'] == 'PG9_4' or self.svar['engine'] == 'PG':
from .db import postgres
dbo = postgres.PG9_4(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
elif self.svar['engine'] == 'MYSQL14_14' or self.svar['engine'] == 'MYSQL':
from .db import mysql
dbo = mysql.MYSQL14_14(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
elif self.svar['engine'] == 'SQLITE3_3_8_2' or self.svar['engine'] == 'SQLITE3' or self.svar['engine'] == 'SQLITE':
from .db import ausqlite3
dbo = ausqlite3.SQLITE3_3_8_2(topic_base = self.svar['topic_base'], app_session = self, debug = self.svar['debug'])
else:
raise Exception("Unsupported dbtype {} ".format(self.svar['engine']))
else:
raise Exception("when instantiating this class DB you must provide engine=X and topic_base=Y")
self.db = { 'instance': dbo }
self.db['registration'] = {}
r = types.RegisterOptions(details_arg = 'details')
self.db['registration']['connect'] = yield self.register(dbo.connect, self.svar['topic_base']+'.connect', options = r)
self.db['registration']['disconnect'] = yield self.register(dbo.disconnect, self.svar['topic_base']+'.disconnect', options = r)
self.db['registration']['query'] = yield self.register(dbo.query, self.svar['topic_base']+'.query', options = r)
self.db['registration']['operation'] = yield self.register(dbo.operation, self.svar['topic_base']+'.operation', options = r)
self.db['registration']['watch'] = yield self.register(dbo.watch, self.svar['topic_base']+'.watch', options = r)
self.db['registration']['info'] = yield self.register(dbo.info, self.svar['topic_base']+'.info', options = r)
if 'dsn' in self.svar:
log.msg("db:onJoin connecting... {}".format(self.svar['dsn']))
yield self.call(self.svar['topic_base'] + '.connect', self.svar['dsn'])
log.msg("db:onJoin connecting established")
log.msg("db bootstrap procedures registered")
    @inlineCallbacks
    def onLeave(self, details):
        print("onLeave: {}".format(details))
yield self.db['registration']['connect'].unregister()
yield self.db['registration']['disconnect'].unregister()
yield self.db['registration']['query'].unregister()
yield self.db['registration']['operation'].unregister()
yield self.db['registration']['watch'].unregister()
yield self.db['registration']['info'].unregister()
del self.db
self.disconnect()
return
def onDisconnect(self):
print("onDisconnect:")
reactor.stop()
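# Illustrative sketch only (not part of sqlbridge): once onJoin has registered
# the procedures above under topic_base, another WAMP session on the same realm
# could drive the bridge roughly like this. 'com.db' is a made-up topic_base,
# the DSN is a placeholder, and the exact query() arguments are assumptions.
#
#   yield self.call('com.db.connect', 'dbname=test host=localhost')
#   rows = yield self.call('com.db.query', 'select * from login order by id')
#   yield self.call('com.db.disconnect')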
| apache-2.0 | 2,118,608,279,539,695,600 | 42.828767 | 135 | 0.606188 | false |
cjaymes/pyscap | src/scap/model/ocil_2_0/ItemBaseType.py | 1 | 1052 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.Model import Model
import logging
logger = logging.getLogger(__name__)
class ItemBaseType(Model):
MODEL_MAP = {
'elements': [
{'tag_name': 'notes', 'list': 'notes', 'type': 'StringType', 'min': 0, 'max': None},
],
'attributes': {
'revision': {'type': 'NonNegativeIntegerType', 'default': 0},
}
}
| gpl-3.0 | 7,049,207,792,901,553,000 | 34.066667 | 96 | 0.680608 | false |
mancoast/CPythonPyc_test | fail/311_test_funcattrs.py | 1 | 9974 | from test import support
import types
import unittest
class FuncAttrsTest(unittest.TestCase):
def setUp(self):
class F:
def a(self):
pass
def b():
return 3
self.fi = F()
self.F = F
self.b = b
def cannot_set_attr(self, obj, name, value, exceptions):
try:
setattr(obj, name, value)
except exceptions:
pass
else:
self.fail("shouldn't be able to set %s to %r" % (name, value))
try:
delattr(obj, name)
except exceptions:
pass
else:
self.fail("shouldn't be able to del %s" % name)
class FunctionPropertiesTest(FuncAttrsTest):
# Include the external setUp method that is common to all tests
def test_module(self):
self.assertEqual(self.b.__module__, __name__)
def test_dir_includes_correct_attrs(self):
self.b.known_attr = 7
self.assertTrue('known_attr' in dir(self.b),
"set attributes not in dir listing of method")
# Test on underlying function object of method
self.F.a.known_attr = 7
self.assertTrue('known_attr' in dir(self.fi.a), "set attribute on function "
"implementations, should show up in next dir")
def test_duplicate_function_equality(self):
# Body of `duplicate' is the exact same as self.b
def duplicate():
'my docstring'
return 3
self.assertNotEqual(self.b, duplicate)
def test_copying___code__(self):
def test(): pass
self.assertEqual(test(), None)
test.__code__ = self.b.__code__
self.assertEqual(test(), 3) # self.b always returns 3, arbitrarily
def test___globals__(self):
self.assertEqual(self.b.__globals__, globals())
self.cannot_set_attr(self.b, '__globals__', 2, (AttributeError, TypeError))
def test___name__(self):
self.assertEqual(self.b.__name__, 'b')
self.b.__name__ = 'c'
self.assertEqual(self.b.__name__, 'c')
self.b.__name__ = 'd'
self.assertEqual(self.b.__name__, 'd')
        # __name__ must be a string
self.cannot_set_attr(self.b, '__name__', 7, TypeError)
# __name__ must be available when in restricted mode. Exec will raise
# AttributeError if __name__ is not available on f.
s = """def f(): pass\nf.__name__"""
exec(s, {'__builtins__': {}})
# Test on methods, too
self.assertEqual(self.fi.a.__name__, 'a')
self.cannot_set_attr(self.fi.a, "__name__", 'a', AttributeError)
def test___code__(self):
num_one, num_two = 7, 8
def a(): pass
def b(): return 12
def c(): return num_one
def d(): return num_two
def e(): return num_one, num_two
for func in [a, b, c, d, e]:
self.assertEqual(type(func.__code__), types.CodeType)
self.assertEqual(c(), 7)
self.assertEqual(d(), 8)
d.__code__ = c.__code__
self.assertEqual(c.__code__, d.__code__)
self.assertEqual(c(), 7)
# self.assertEqual(d(), 7)
try: b.__code__ = c.__code__
except ValueError: pass
else: self.fail(
"__code__ with different numbers of free vars should not be "
"possible")
try: e.__code__ = d.__code__
except ValueError: pass
else: self.fail(
"__code__ with different numbers of free vars should not be "
"possible")
def test_blank_func_defaults(self):
self.assertEqual(self.b.__defaults__, None)
del self.b.__defaults__
self.assertEqual(self.b.__defaults__, None)
def test_func_default_args(self):
def first_func(a, b):
return a+b
def second_func(a=1, b=2):
return a+b
self.assertEqual(first_func.__defaults__, None)
self.assertEqual(second_func.__defaults__, (1, 2))
first_func.__defaults__ = (1, 2)
self.assertEqual(first_func.__defaults__, (1, 2))
self.assertEqual(first_func(), 3)
self.assertEqual(first_func(3), 5)
self.assertEqual(first_func(3, 5), 8)
del second_func.__defaults__
self.assertEqual(second_func.__defaults__, None)
try: second_func()
except TypeError: pass
else: self.fail(
"func_defaults does not update; deleting it does not remove "
"requirement")
class ImplicitReferencesTest(FuncAttrsTest):
def test___class__(self):
self.assertEqual(self.fi.a.__self__.__class__, self.F)
self.cannot_set_attr(self.fi.a, "__class__", self.F, TypeError)
def test___func__(self):
self.assertEqual(self.fi.a.__func__, self.F.a)
self.cannot_set_attr(self.fi.a, "__func__", self.F.a, AttributeError)
def test___self__(self):
self.assertEqual(self.fi.a.__self__, self.fi)
self.cannot_set_attr(self.fi.a, "__self__", self.fi, AttributeError)
def test___func___non_method(self):
# Behavior should be the same when a method is added via an attr
# assignment
self.fi.id = types.MethodType(id, self.fi)
self.assertEqual(self.fi.id(), id(self.fi))
# Test usage
try: self.fi.id.unknown_attr
except AttributeError: pass
else: self.fail("using unknown attributes should raise AttributeError")
# Test assignment and deletion
self.cannot_set_attr(self.fi.id, 'unknown_attr', 2, AttributeError)
class ArbitraryFunctionAttrTest(FuncAttrsTest):
def test_set_attr(self):
self.b.known_attr = 7
self.assertEqual(self.b.known_attr, 7)
try: self.fi.a.known_attr = 7
except AttributeError: pass
else: self.fail("setting attributes on methods should raise error")
def test_delete_unknown_attr(self):
try: del self.b.unknown_attr
except AttributeError: pass
else: self.fail("deleting unknown attribute should raise TypeError")
def test_unset_attr(self):
for func in [self.b, self.fi.a]:
try: func.non_existent_attr
except AttributeError: pass
else: self.fail("using unknown attributes should raise "
"AttributeError")
class FunctionDictsTest(FuncAttrsTest):
def test_setting_dict_to_invalid(self):
self.cannot_set_attr(self.b, '__dict__', None, TypeError)
from collections import UserDict
d = UserDict({'known_attr': 7})
self.cannot_set_attr(self.fi.a.__func__, '__dict__', d, TypeError)
def test_setting_dict_to_valid(self):
d = {'known_attr': 7}
self.b.__dict__ = d
# Test assignment
self.assertEqual(d, self.b.__dict__)
# ... and on all the different ways of referencing the method's func
self.F.a.__dict__ = d
self.assertEqual(d, self.fi.a.__func__.__dict__)
self.assertEqual(d, self.fi.a.__dict__)
# Test value
self.assertEqual(self.b.known_attr, 7)
self.assertEqual(self.b.__dict__['known_attr'], 7)
# ... and again, on all the different method's names
self.assertEqual(self.fi.a.__func__.known_attr, 7)
self.assertEqual(self.fi.a.known_attr, 7)
def test_delete___dict__(self):
try: del self.b.__dict__
except TypeError: pass
else: self.fail("deleting function dictionary should raise TypeError")
def test_unassigned_dict(self):
self.assertEqual(self.b.__dict__, {})
def test_func_as_dict_key(self):
value = "Some string"
d = {}
d[self.b] = value
self.assertEqual(d[self.b], value)
class FunctionDocstringTest(FuncAttrsTest):
def test_set_docstring_attr(self):
self.assertEqual(self.b.__doc__, None)
docstr = "A test method that does nothing"
self.b.__doc__ = docstr
self.F.a.__doc__ = docstr
self.assertEqual(self.b.__doc__, docstr)
self.assertEqual(self.fi.a.__doc__, docstr)
self.cannot_set_attr(self.fi.a, "__doc__", docstr, AttributeError)
def test_delete_docstring(self):
self.b.__doc__ = "The docstring"
del self.b.__doc__
self.assertEqual(self.b.__doc__, None)
def cell(value):
"""Create a cell containing the given value."""
def f():
print(a)
a = value
return f.__closure__[0]
def empty_cell(empty=True):
"""Create an empty cell."""
def f():
print(a)
# the intent of the following line is simply "if False:"; it's
# spelt this way to avoid the danger that a future optimization
# might simply remove an "if False:" code block.
if not empty:
a = 1729
return f.__closure__[0]
class CellTest(unittest.TestCase):
def test_comparison(self):
# These tests are here simply to exercise the comparison code;
# their presence should not be interpreted as providing any
# guarantees about the semantics (or even existence) of cell
# comparisons in future versions of CPython.
self.assertTrue(cell(2) < cell(3))
self.assertTrue(empty_cell() < cell('saturday'))
self.assertTrue(empty_cell() == empty_cell())
self.assertTrue(cell(-36) == cell(-36.0))
self.assertTrue(cell(True) > empty_cell())
class StaticMethodAttrsTest(unittest.TestCase):
def test_func_attribute(self):
def f():
pass
c = classmethod(f)
self.assertTrue(c.__func__ is f)
s = staticmethod(f)
self.assertTrue(s.__func__ is f)
def test_main():
support.run_unittest(FunctionPropertiesTest, ImplicitReferencesTest,
ArbitraryFunctionAttrTest, FunctionDictsTest,
FunctionDocstringTest, CellTest,
StaticMethodAttrsTest)
if __name__ == "__main__":
test_main()
| gpl-3.0 | 3,026,376,331,994,264,000 | 35.137681 | 84 | 0.579707 | false |
daniel1yuan/Persist | Persist/webapp/views.py | 1 | 6162 | from django.shortcuts import redirect,render
from django.http import Http404, JsonResponse, HttpResponseForbidden, HttpResponse
from django.contrib.auth import authenticate, login, logout
from webapp.models import User, Customer, Habit
from django.core import serializers
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from datetime import datetime
from webapp.helper import habits_arr, arr_str
import json
import os
# Create your views here.
def index(request):
context = {
'title': 'Persist'
}
if request.user.is_authenticated():
return redirect("home")
else:
return render(request, 'webapp/landing.html', context)
def home(request):
if request.user.is_authenticated():
return render(request, 'webapp/home.html')
else:
return redirect("login_page")
def login_page(request):
if request.user.is_authenticated():
return redirect("home")
context = {
'title': 'Persist'
}
return render(request, 'webapp/login.html', context)
#Authentication Views
@csrf_exempt
def login_user(request):
username = request.POST['username']
password = request.POST['password']
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
return HttpResponse(json.dumps({"success": True}))
else:
return HttpResponse(json.dumps({"success": False}))
def logout_user(request):
logout(request)
return HttpResponse(json.dumps({"success": True}))
@csrf_exempt
def add_user(request):
username = request.POST['username']
password = request.POST['password']
user = User.objects.create_user(username=username, password=password)
customer = Customer(user=user, habits="")
customer.save()
user.save()
return HttpResponse(json.dumps({"success": True}))
def del_cur_user(request):
if request.user.is_authenticated():
user = request.user
user.delete()
return HttpResponse(json.dumps({"success": True}))
else:
return HttpResponse(json.dumps({"success": False}))
def del_user(request):
user = request.user
#Check if the admin is logged on
if user.is_authenticated() and user.has_perm('webapp'):
username = request.POST['username']
user = User.objects.get(username=username)
user.delete()
return HttpResponse(json.dumps({"success": True}))
return HttpResponse(json.dumps({"success": False}))
def is_logged_in(request):
if (request.user.is_authenticated()):
return HttpResponse(json.dumps({"success": True, "logged_in": True}))
else:
return HttpResponse(json.dumps({"success": True, "logged_in": False}))
return HttpResponse(json.dumps({"success": False}))
def get_habit(request):
habit_id = int(request.POST['habit_id'])
try:
habit_obj = Habit.objects.get(pk=habit_id)
print habit_obj.monetary_amount
habit_serial = serializers.serialize('json', [habit_obj])
#[1:-1] to remove brackets?
return HttpResponse(json.dumps(habit_serial[1:-1]), content_type='application/json')
except Habit.DoesNotExist:
return HttpResponse(json.dumps({"pk": -1}))
def create_habit(request):
name = request.POST['name']
description = request.POST['description']
monetary_amount = int(request.POST['monetary_amount'])
end_date = int(int((request.POST['end_date']))/(1000.0))
start_date = int((datetime.utcnow()-datetime(1970,1,1)).total_seconds())
last_clicked = int((datetime.utcnow()-datetime(1970,1,1)).total_seconds())
status = int(request.POST['success_status'])
charity = int(request.POST['charity'])
user = request.user
if (not user.is_authenticated()):
return HttpResponse(json.dumps({"success": False}))
habit = Habit(name=name,description=description,monetary_amount=monetary_amount,end_date=end_date,status=status,charity=charity,user=user,start_date=start_date,last_clicked=last_clicked)
print habit.start_date
habit.save()
user.customer.habits += "," + str(habit.pk)
user.customer.save()
return HttpResponse(json.dumps({"success": True,"pk":habit.pk}))
def delete_habit(request):
try:
user = request.user
customer = user.customer
pk = request.POST['id']
habit = Habit.objects.get(pk=pk)
habits = habits_arr(customer.habits)
index = habits.index(int(pk))
del(habits[index])
customer.habits = arr_str(habits)
customer.save()
habit.delete()
return HttpResponse(json.dumps({"success": True}))
except:
return HttpResponse(json.dumps({"success": False}))
def change_habit(request):
pk = request.POST['id']
habit = Habit.objects.get(pk=pk)
if habit is None:
return HttpResponse(json.dumps({"success": False}))
else:
try:
habit.name = request.POST['name']
except:
habit.name = habit.name
try:
habit.description = request.POST['description']
except:
habit.description = habit.description
try:
habit.monetary_amount = request.POST['monetary_amount']
except:
habit.monetary_amount = habit.monetary_amount
try:
habit.end_date = int((request.POST['end_date']))/(1000.0)
except:
habit.end_date = habit.end_date
try:
habit.last_clicked = int((request.POST['last_clicked']))/(1000.0)
except:
habit.last_clicked = habit.last_clicked
try:
habit.status = request.POST['success_status']
except:
habit.status = habit.status
try:
habit.charity = request.POST['charity']
except:
habit.charity = habit.charity
habit.save()
return HttpResponse(json.dumps({"success": True}))
def get_all_habits(request):
if request.user.is_authenticated():
habits = habits_arr(request.user.customer.habits)
json_dict = {}
for idx in habits:
cur_habit = Habit.objects.get(pk=idx)
cur_serial = serializers.serialize('json',[cur_habit])[1:-1]
json_dict[idx] = cur_serial
return HttpResponse(json.dumps(json_dict))
else:
return HttpResponse(json.dumps({"success": False}))
def get_username(request):
if request.user.is_authenticated():
return HttpResponse(json.dumps({"success": True, "username": request.user.username}))
else:
return HttpResponse(json.dumps({"success": False}))
| mit | 303,794,903,503,346,800 | 31.431579 | 188 | 0.693281 | false |
alertedsnake/pycrust | pycrust/__init__.py | 1 | 5061 | """
Pycrust
A collection of CherryPy extensions
See also the following submodules:
pycrust.auth
pycrust.saplugin
pycrust.satool
pycrust.tools
"""
__author__ = 'Michael Stella <[email protected]>'
__version__ = '1.0.0'
import inspect, logging, os, sys
import cherrypy
import codecs
class BaseHandler(object):
"""A Base class for web handler objects."""
_cp_config = {}
def log(self, msg, severity=logging.INFO, context=None):
"""Logs to the Cherrypy error log but in a much more pretty way,
with the handler name and line number
"""
if not context:
context = inspect.getouterframes(inspect.currentframe())[1]
cherrypy.log.error(msg=msg.strip().replace('\n', '; '), severity=severity,
context='HANDLER ({}:{}:{})'.format(
self.__class__.__name__, context[3], context[2]))
def log_debug(self, msg):
return self.log(msg, severity=logging.DEBUG,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_info(self, msg):
return self.log(msg, severity=logging.INFO,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_warn(self, msg):
return self.log(msg, severity=logging.WARN,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_error(self, msg):
return self.log(msg, severity=logging.ERROR,
context=inspect.getouterframes(inspect.currentframe())[1])
def log_fatal(self, msg):
return self.log(msg, severity=logging.FATAL,
context=inspect.getouterframes(inspect.currentframe())[1])
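# Hypothetical usage sketch (the handler name and route are made up, not part
# of pycrust): a handler that inherits BaseHandler gets log lines tagged with
# its own class name plus the calling method and line number.
#
#   class WidgetHandler(BaseHandler):
#       @cherrypy.expose
#       def index(self):
#           self.log_info("rendering widget index")
#           # -> logged with context HANDLER (WidgetHandler:index:<lineno>)
#           return "ok"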
def url(*args, **kwargs):
"""Find the given URL using routes. Throws an exception
if you're not using routes.
"""
import routes
if 'absolute' in kwargs and kwargs['absolute']:
del(kwargs['absolute'])
return cherrypy.url(routes.url_for(*args, **kwargs))
return routes.url_for(*args, **kwargs)
def dump_request(*args, **kwargs):
"""Dumps the request out to a file in /tmp, for debugging
Enable by setting, in your config file:
tools.debug_request.on = True
"""
with codecs.open('/tmp/request.%s.txt' % cherrypy.request.method, 'w', encoding='utf-8') as f:
f.write(cherrypy.request.request_line)
f.write("\n")
# write headers
for (k,v) in cherrypy.request.headers.items():
f.write('%s: %s\n' % (k,v))
f.write("\n")
# dump out the POST data when submitted
if ('Content-Type' in cherrypy.request.headers and
'application/x-www-form-urlencoded' in cherrypy.request.headers['Content-Type']):
for (k,v) in cherrypy.request.params.items():
f.write('%s: %s\n' % (k,v))
# otherwise, dump the body
elif cherrypy.request.body:
with cherrypy.request.body.make_file() as fin:
f.write(str(fin.read()))
def dump_response(*args, **kwargs):
"""Dumps the response out to a file in /tmp, for debugging.
Enable by setting, in your config file:
tools.debug_response.on = True
"""
# when a 500 error is displayed, cherrypy handles this
# differently, and we don't really need to dump it out
if not cherrypy.response.status:
return
status = 200
if isinstance(cherrypy.response.status, int):
status = cherrypy.response.status
elif isinstance(cherrypy.response.status, str):
status = int(cherrypy.response.status.split(' ', 1)[0])
with codecs.open('/tmp/response.%d.txt' % status, 'w', encoding='utf-8') as f:
f.write("HTTP/1.1 %s\n" % cherrypy.response.status)
for (k,v) in cherrypy.response.headers.items():
f.write('%s: %s\n' % (k,v))
f.write("Status: %d\n\n" % status)
if cherrypy.response.body:
if sys.version < '3':
f.write(str(cherrypy.response.collapse_body().decode()))
else:
f.write(str(cherrypy.response.collapse_body()))
cherrypy.tools.debug_request = cherrypy.Tool('before_handler', dump_request, priority=1)
cherrypy.tools.debug_response = cherrypy.Tool('on_end_resource', dump_response)
def load_class(fullname):
"""Loads a class given the full dotted class name"""
assert fullname is not None, "fullname must not be None"
modulename, classname = fullname.rsplit('.', 1)
try:
module = __import__(modulename, globals(), locals(), [classname])
except ImportError as e:
cherrypy.log("Error loading module {}".format(modulename), context='ENGINE', severity=loging.ERROR)
raise
try:
cls = getattr(module, classname)
except AttributeError as e:
cherrypy.log("Error loading class {} from module {}".format(classname, modulename),
context='ENGINE', severity=logging.ERROR)
return None
return cls
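# Minimal sketch of how load_class() is meant to be used; the dotted name and
# config below are examples, not something pycrust ships:
#
#   cls = load_class('myapp.handlers.RootHandler')
#   if cls is not None:
#       cherrypy.tree.mount(cls(), '/', config)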
| mit | 8,430,111,610,056,274,000 | 31.031646 | 107 | 0.610354 | false |
ayseyo/oclapi | django-nonrel/ocl/oclapi/settings/common.py | 1 | 10523 | import os
from configurations import Configuration
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class Common(Configuration):
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Jon Payne', '[email protected]'),
('PK Shiu', '[email protected]'),
)
MANAGERS = ADMINS
DEFAULT_FROM_EMAIL = '[email protected]'
EMAIL_HOST = 'openconceptlab.org'
EMAIL_SUBJECT_PREFIX = '[openconceptlab.org] '
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.3/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
DEFAULT_LOCALE = 'en'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# In the deployment environment, comment out the above line, and uncomment the one below
#STATIC_ROOT = '/usr/local/wsgi/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '+p+lx2*o3ywq+z)%f7929b6)93)^mcc9-0eu9ynq77gc+pe=ck'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'oclapi.middlewares.RequestLogMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'corsheaders',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
# Core OCL app
'oclapi',
# Third-party apps:
'djangotoolbox',
'django_mongodb_engine',
'rest_framework',
'rest_framework.authtoken',
'haystack',
# Project-specific apps:
'users',
'orgs',
'sources',
'concepts',
'collection',
'mappings',
'integration_tests',
)
# Django Rest Framework configuration
REST_FRAMEWORK = {
# Default to token-based authentication; fall back on session-based
# A user gets a unique token upon account creation (residing in the authtoken_token data store).
# To pass an authentication token along with your request, include the following header:
# Authorization: Token [TOKEN_VALUE]
# e.g.
# Authorization: Token ad73f481096c3b6202bce395820199
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
'oclapi.renderers.ZippedJSONRenderer',
),
'DEFAULT_CONTENT_NEGOTIATION_CLASS': 'oclapi.negotiation.OptionallyCompressContentNegotiation',
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.HyperlinkedModelSerializer',
'DEFAULT_PAGINATION_SERIALIZER_CLASS':
'oclapi.serializers.HeaderPaginationSerializer',
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
#'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
'rest_framework.permissions.IsAuthenticated',
],
'PAGINATE_BY': 10, # Default to 10
'PAGINATE_BY_PARAM': 'limit', # Allow client to override, using `?limit=xxx`.
'MAX_PAGINATE_BY': 100 # Maximum limit allowed when using `?limit=xxx`.
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'oclapi.search_backends.OCLSolrEngine',
'URL': 'http://solr.openconceptlab.org:8983/solr/collection1'
# ...or for multicore...
# 'URL': 'http://127.0.0.1:8983/solr/mysite',
},
}
DATABASES = {
'default': {
'ENGINE': 'django_mongodb_engine',
'HOST': 'mongo.openconceptlab.org',
'NAME': 'ocl',
}
}
BROKER_URL = 'redis://redis.openconceptlab.org:6379/0'
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_METHODS = (
'GET',
)
# CORS_ORIGIN_WHITELIST = (
# 'google.com',
# 'hostname.example.com',
# )
# Haystack processor determines when/how updates to mongo are indexed by Solr
# RealtimeSignalProcessor will update the index for every mongo update, sometimes at
# the cost of performance. BaseSignalProcessor does not update the index at all, which
# means the index must be updated manually (e.g. using the haystack update_index command).
HAYSTACK_SIGNAL_PROCESSOR = 'haystack.signals.RealtimeSignalProcessor'
HAYSTACK_ITERATOR_LOAD_PER_QUERY = 25
HAYSTACK_SEARCH_RESULTS_PER_PAGE = 25
# Celery settings
CELERY_RESULT_BACKEND = 'redis://redis.openconceptlab.org:6379/0'
# Set these in your postactivate hook if you use virtualenvwrapper
AWS_ACCESS_KEY_ID=os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY=os.environ.get('AWS_SECRET_ACCESS_KEY', '')
AWS_STORAGE_BUCKET_NAME=os.environ.get('AWS_STORAGE_BUCKET_NAME', '')
# Model that stores auxiliary user profile attributes.
# A user must have a profile in order to access the system.
# (A profile is created automatically for any user created using the 'POST /users' endpoint.)
AUTH_PROFILE_MODULE = 'users.UserProfile'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'normal': {
'format': "[%(asctime)s] %(levelname)-8s: %(message)s",
'datefmt': "%Y/%m/%d %H:%M:%S"
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'null': {
'class': 'django.utils.log.NullHandler',
},
'console': {
'class': 'logging.StreamHandler',
'formatter': 'normal',
},
'logfile': {
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler',
'when': 'midnight',
'filename': os.path.join(BASE_DIR, 'ocl_api.log'),
'formatter': 'normal',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'oclapi': {
'handlers': ['console', 'logfile'],
'level': 'DEBUG',
},
'request_logger': {
'handlers': ['console', 'logfile'],
'level': 'INFO',
},
}
}
| mpl-2.0 | -5,339,695,786,148,565,000 | 35.922807 | 104 | 0.615699 | false |
stuart-knock/tvb-framework | tvb_test/adapters/visualizers/eegmonitor_test.py | 1 | 4412 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <[email protected]>
"""
import os
import unittest
import demo_data.sensors as sensors_dataset
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.adapters.visualizers.eeg_monitor import EegMonitor
from tvb.datatypes.surfaces import CorticalSurface
from tvb.datatypes.connectivity import Connectivity
from tvb.datatypes.sensors import SensorsEEG
from tvb_test.core.test_factory import TestFactory
from tvb_test.datatypes.datatypes_factory import DatatypesFactory
from tvb_test.core.base_testcase import TransactionalTestCase
class EEGMonitorTest(TransactionalTestCase):
"""
Unit-tests for EEG Viewer.
"""
def setUp(self):
"""
Sets up the environment for running the tests;
creates a test user, a test project, a connectivity and a surface;
imports a CFF data-set
"""
self.datatypeFactory = DatatypesFactory()
self.test_project = self.datatypeFactory.get_project()
self.test_user = self.datatypeFactory.get_user()
TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
self.assertTrue(self.connectivity is not None)
self.surface = TestFactory.get_entity(self.test_project, CorticalSurface())
self.assertTrue(self.surface is not None)
def tearDown(self):
"""
Clean-up tests data
"""
FilesHelper().remove_project_structure(self.test_project.name)
def test_launch(self):
"""
Check that all required keys are present in output from BrainViewer launch.
"""
zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),
'EEG_unit_vectors_BrainProducts_62.txt.bz2')
TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
viewer = EegMonitor()
result = viewer.launch(time_series)
expected_keys = ['tsStateVars', 'tsModes', 'translationStep', 'total_length', 'title',
'timeSetPaths', 'number_of_visible_points', 'normalizedSteps', 'noOfChannels',
'labelsForCheckBoxes', 'label_x', 'graphLabels', 'entities', 'channelsPage']
for key in expected_keys:
self.assertTrue(key in result)
def suite():
"""
Gather all the tests in a test suite.
"""
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(EEGMonitorTest))
return test_suite
if __name__ == "__main__":
#So you can run tests from this package individually.
TEST_RUNNER = unittest.TextTestRunner()
TEST_SUITE = suite()
TEST_RUNNER.run(TEST_SUITE) | gpl-2.0 | -6,400,699,188,668,995,000 | 41.028571 | 103 | 0.694923 | false |
lilydjwg/you-get | src/you_get/extractors/netease.py | 1 | 9738 | #!/usr/bin/env python
from json import loads
import hashlib
import base64
import os
import binascii
try:
from Crypto.Cipher import AES
import xml.etree.ElementTree as ET
has_crypto = True
except ImportError:
has_crypto = False
from ..common import *
from ..extractor import VideoExtractor
from ..util import log
def netease_hymn():
return """
player's Game Over,
u can abandon.
u get pissed,
get pissed,
Hallelujah my King!
errr oh! fuck ohhh!!!!
"""
def encrypted_id(dfsId):
x = [ord(i[0]) for i in netease_hymn().split()]
y = ''.join([chr(i - 61) if i > 96 else chr(i + 32) for i in x])
byte1 = bytearray(y, encoding='ascii')
byte2 = bytearray(str(dfsId), encoding='ascii')
for i in range(len(byte2)):
byte2[i] ^= byte1[i % len(byte1)]
m = hashlib.md5()
m.update(byte2)
result = base64.b64encode(m.digest()).decode('ascii')
result = result.replace('/', '_')
result = result.replace('+', '-')
return result
def make_url(songNet, dfsId):
encId = encrypted_id(dfsId)
mp3_url = "http://%s/%s/%s.mp3" % (songNet, encId, dfsId)
return mp3_url
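# Rough illustration (the host and dfsId are placeholders, not a real song):
# encrypted_id() turns a numeric dfsId into the hashed path segment of the CDN
# URL, and make_url() joins it with the host returned by the song API.
#
#   make_url('m1.music.126.net', 1234567)
#   # -> 'http://m1.music.126.net/<base64-md5-of-xor>/1234567.mp3'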
# for http://open.163.com/movie/2014/12/I/9/MAD7EMDVE_MAD7K95I9.html
keys = ["4fxGZqoGmesXqg2o", "3fxVNqoPmesAqg2o"]
def decrypto_video_url(data, whichkey):
key = keys[whichkey - 1]
cipher = AES.new(key, mode=AES.MODE_ECB)
ciphertext = binascii.a2b_hex(data)
cleartext = cipher.decrypt(ciphertext)
padding = cleartext[-1]
cleartext = cleartext[:-padding]
return cleartext.decode('ascii')
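# Sketch of the decryption round-trip (the hex string is fabricated): the
# open.163.com XML carries an <encrypt> flag selecting which AES-128-ECB key
# from `keys` was used, and the playurl nodes hold the hex-encoded ciphertext.
#
#   clear_url = decrypto_video_url('aabbccddeeff...', whichkey=1)
#   # -> plain http(s) URL of the mp4 stream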
class NetEase(VideoExtractor):
# test URLs:
# http://live.ws.126.net/movie/I/9/2_MAD7EMDVE_MAD7K95I9.xml
# http://live.ws.126.net/movie/V/H/2_MB3M6LDG1_MB3OBKTVH.xml
name = '网易'
if has_crypto:
stream_types = [
{'id': 'SHD', 'video_profile': '超清'},
{'id': 'HD', 'video_profile': '高清'},
{'id': 'SD', 'video_profile': '标清'},
]
else:
stream_types = [
{'id': 'default'},
]
def prepare(self, **kwargs):
# compatibility for _cloud_music_prepare
        self.output_dir = kwargs.get('output_dir')
        self.info_only = kwargs.get('info_only')
        self.caption = kwargs.get('caption')
self.subs = []
self.lyrics = None
url = self.url
if "163.fm" in url:
url = get_location(url)
if "music.163.com" in url:
self._cloud_music_prepare(url)
elif has_crypto:
self._crypto_prepare(url)
else:
log.w('PyCrypto not found, '
'high resolution videos may be unavailable.')
self._legacy_prepare(url)
def _crypto_prepare(self, url):
if url.startswith('http://swf.ws.126.net/openplayer/'):
video_id = url.split('-')[2]
assert video_id.startswith('2_')
video_id = video_id[2:]
else:
# http://open.163.com/movie/2015/10/V/H/MB3M6LDG1_MB3OBKTVH.html
video_id = url.split('/')[-1].split('.')[0]
xml = self._get_xml_for_video_id(video_id)
encrypt_key = int(xml.find('encrypt').text)
playurl = xml.find('playurl_origin')
if len(playurl) == 0:
playurl = xml.find('playurl')
streams = {}
for stream in self.stream_types:
e = playurl.find('./%s/mp4' % stream['id'])
if e is not None:
url = decrypto_video_url(e.text, encrypt_key)
streams[stream['id']] = {
'url': url,
'video_profile': stream['video_profile'],
'size': url_size(url),
}
self.streams = streams
for sub in xml.findall('subs/*'):
name = sub.find('name').text
url = sub.find('url').text
self.subs.append((name, url))
def _legacy_prepare(self, url):
if url.startswith('http://swf.ws.126.net/openplayer/'):
video_id = url.split('-')[2]
assert video_id.startswith('2_')
video_id = video_id[2:]
xml = self._get_xml_for_video_id(video_id)
url = xml.find('pageUrl').text
html = get_decoded_html(url)
title = match1(html, "movieDescription='([^']+)'") or \
match1(html, '<title>(.+)</title>')
self.title = title.strip()
src = match1(html, r'<source src="([^"]+)"') or \
match1(html, r'<source type="[^"]+" src="([^"]+)"')
if src:
url = src
else:
url = (match1(html, r'["\'](.+)-list.m3u8["\']') or
match1(html, r'["\'](.+).m3u8["\']')) + ".mp4"
self.streams['default'] = {
'url': url,
}
def _cloud_music_prepare(self, url):
rid = match1(url, r'id=(.*)')
output_dir = self.output_dir
info_only = self.info_only
if rid is None:
rid = match1(url, r'/(\d+)/?$')
if "album" in url:
# FIXME: only last is processed
j = loads(get_content("http://music.163.com/api/album/%s?id=%s&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
artist_name = j['album']['artists'][0]['name']
album_name = j['album']['name']
new_dir = output_dir + '/' + "%s - %s" % (artist_name, album_name)
if not os.path.exists(new_dir):
os.mkdir(new_dir)
if not info_only:
cover_url = j['album']['picUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
for i in j['album']['songs']:
self._song_prepare(i)
try: # download lyrics
                        assert self.caption
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(i, l["lrc"]["lyric"])
except: pass
elif "playlist" in url:
# FIXME: only last is processed
j = loads(get_content("http://music.163.com/api/playlist/detail?id=%s&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
new_dir = output_dir + '/' + j['result']['name']
if not os.path.exists(new_dir):
os.mkdir(new_dir)
if not info_only:
cover_url = j['result']['coverImgUrl']
download_urls([cover_url], "cover", "jpg", 0, new_dir)
for i in j['result']['tracks']:
self._song_prepare(i)
try: # download lyrics
                        assert self.caption
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % i['id'], headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(i, l["lrc"]["lyric"])
except: pass
elif "song" in url:
j = loads(get_content("http://music.163.com/api/song/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
self._song_prepare(j["songs"][0])
try: # download lyrics
l = loads(get_content("http://music.163.com/api/song/lyric/?id=%s&lv=-1&csrf_token=" % rid, headers={"Referer": "http://music.163.com/"}))
self._lyrics_prepare(j["songs"][0], l["lrc"]["lyric"])
except:
pass
elif "mv" in url:
j = loads(get_content("http://music.163.com/api/mv/detail/?id=%s&ids=[%s]&csrf_token=" % (rid, rid), headers={"Referer": "http://music.163.com/"}))
self._video_prepare(j['data'])
def _song_prepare(self, song):
# test URL: http://music.163.com/#/song?id=29043459
self.title = "%s. %s" % (song['position'], song['name'])
songNet = 'p' + song['mp3Url'].split('/')[2][1:]
s = self.streams
if 'hMusic' in song and song['hMusic'] is not None:
s['hMusic'] = {'url': make_url(songNet, song['hMusic']['dfsId'])}
if 'mp3Url' in song:
s['mp3Url'] = {'url': song['mp3Url']}
if 'bMusic' in song:
s['bMusic'] = {'url': make_url(songNet, song['bMusic']['dfsId'])}
self.stream_types = [
{'id': x} for x in ['hMusic', 'mp3Url', 'bMusic']
]
def _video_prepare(self, vinfo):
# test URL: http://music.163.com/#/mv/343100/
self.title = "%s - %s" % (vinfo['name'], vinfo['artistName'])
s = self.streams
for bitrate, url in vinfo['brs'].items():
s[bitrate] = {'url': url}
self.stream_types = [
{'id': x} for x in sorted(s, key=int, reverse=True)
]
def _lyrics_prepare(self, song, lyrics):
# test URL: http://music.163.com/#/song?id=29043459
title = "%s. %s" % (song['position'], song['name'])
filename = '%s.lrc' % get_filename(title)
self.plain_files.append({
'filename': filename,
'content': lyrics,
})
def _get_xml_for_video_id(self, vid):
xml_url = 'http://live.ws.126.net/movie/%s/%s/2_%s.xml' % (
vid[-2], vid[-1], vid)
xml = get_content(xml_url)
e = ET.fromstring(xml)
self.title = e.find('title').text
return e
def extract(self, **kwargs):
for i in self.streams:
s = self.streams[i]
_, s['container'], s['size'] = url_info(s['url'])
s['src'] = [s['url']]
for name, url in self.subs:
self.caption_tracks[name] = get_content(url)
site = NetEase()
download = site.download_by_url
| mit | -3,439,591,755,401,592,300 | 35.548872 | 162 | 0.518103 | false |
jlaunonen/kirppu | kirppu/views/csv_utils.py | 1 | 1469 | # -*- coding: utf-8 -*-
import functools
import html
import io
from urllib.parse import quote
from django.conf import settings
from django.http import HttpResponse, StreamingHttpResponse
def strip_generator(fn):
@functools.wraps(fn)
def inner(output, event, generator=False):
if generator:
# Return the generator object only when using StringIO.
return fn(output, event)
for _ in fn(output, event):
pass
return inner
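# Illustrative use of strip_generator (the ``event_csv`` writer is an assumption,
# not part of this module): decorate a writer that yields between rows, then either
# drain it in place or ask for the raw generator when streaming.
#
#     @strip_generator
#     def event_csv(output, event):
#         output.write("header\r\n")
#         yield
#
#     event_csv(buf, event)                        # runs to completion
#     gen = event_csv(buf, event, generator=True)  # generator object for streaming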
def csv_streamer_view(request, generator, filename_base):
debug = settings.DEBUG and request.GET.get("debug") is not None
def streamer():
if debug:
yield "<!DOCTYPE html>\n<html>\n<body>\n<pre>"
output = io.StringIO()
for a_string in generator(output):
val = output.getvalue()
if debug:
yield html.escape(val, quote=False)
else:
yield val
output.truncate(0)
output.seek(0)
if debug:
yield "</pre>\n</body>\n</html>"
if debug:
response = HttpResponse("".join(streamer()))
else:
response = StreamingHttpResponse(streamer(), content_type="text/plain; charset=utf-8")
if request.GET.get("download") is not None:
response["Content-Disposition"] = 'attachment; filename="%s.csv"' % quote(filename_base, safe="")
response["Content-Type"] = "text/csv; charset=utf-8"
return response
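# Minimal sketch of wiring this into a view (``item_csv`` and the view name are
# assumptions): the generator argument only needs to accept the StringIO buffer.
#
#     def export_items(request, event):
#         return csv_streamer_view(request,
#                                  lambda output: item_csv(output, event, generator=True),
#                                  "items")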
| mit | 4,648,484,677,998,597,000 | 28.979592 | 105 | 0.605174 | false |
bossjones/scarlett | scarlett/listener/gstlistenerfsm.py | 1 | 17131 | # -*- coding: UTF-8 -*-
import scarlett
import pygst
pygst.require('0.10')
import gobject
gobject.threads_init()
import dbus
import dbus.service
# TODO: Figure out if we need this or not, re dbus threading
# from dbus.mainloop.glib import DBusGMainLoop
# dbus_loop = DBusGMainLoop(set_as_default=True)
# dsession = SessionBus(mainloop=dbus_loop)
dbus.mainloop.glib.threads_init()
import gst
import os
import threading
import time
import logging
from transitions import Machine
from scarlett.events import scarlett_event
from colorama import init, Fore, Back, Style
from scarlett.constants import (
EVENT_SCARLETT_START,
EVENT_SCARLETT_STOP,
EVENT_STATE_CHANGED,
EVENT_TIME_CHANGED,
EVENT_CALL_SERVICE,
EVENT_SERVICE_EXECUTED,
EVENT_SERVICE_REGISTER,
EVENT_PLATFORM_DISCOVERED,
EVENT_SCARLETT_SAY,
EVENT_BRAIN_UPDATE,
EVENT_BRAIN_CHECK
)
SCARLETT_ROLE = 'listener'
CORE_OBJECT = 'GstlistenerFSM'
_INSTANCE = None
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-9s) %(message)s',)
def setup_core(ss):
# logging.info("attempting to setup GstlistenerFSM")
global _INSTANCE
if _INSTANCE is None:
_INSTANCE = GstlistenerFSM()
return _INSTANCE
class GstlistenerFSM(gobject.GObject):
"""GstListener with FSM. Preforms pocketsphinx speech recognition.
"""
__gproperties__ = {
'override_parse': (
gobject.TYPE_STRING, # type
'Override parse', # nick name
# description
'Allows you to override what the gst parse line looks like',
'', # default value
gobject.PARAM_READWRITE
),
'failed': (
gobject.TYPE_INT, # type
'Failed', # nick name
'Number of times recognition failed', # description
0, # min value
5, # max value
0, # default value
gobject.PARAM_READWRITE
),
'kw_found': (
gobject.TYPE_INT, # type
'Keyword Match', # nick name
'int value for keyword', # description
0, # min value
5, # max value
0, # default value
gobject.PARAM_READWRITE
)
}
__gsignals__ = {
'gst-started': (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,)
),
'kw-found-ps': (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,)
),
'failed-ps': (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,)
)
}
capability = []
states = ['initalize', 'ready', 'running',
'is_checking_states', 'time_change', 'done_checking_states']
DBUS_NAME = 'org.scarlettapp.scarlettdaemon'
DBUS_PATH = '/org/scarlettapp/scarlettdaemon'
def __init__(self, *args, **kwargs):
gobject.GObject.__init__(self)
# managed by gproperties
self.override_parse = ''
self.failed = 0
self.kw_found = 0
self.wit_thread = None
self.loop = None
self.config = scarlett.config
self.name = 'GstlistenerFSM'
        # Initialize the state machine
self.machine = Machine(
model=self,
# TODO: Fix this, its def not working
states=self.states,
initial='initalize')
# startup transition
self.machine.add_transition(
trigger='startup',
source='initalize',
dest='ready')
# checking_states transition
self.machine.add_transition(
trigger='checking_states',
source='ready',
dest='is_checking_states',
conditions=['is_ready'])
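        # transitions' Machine attaches these triggers to the model, so
        # self.startup() / self.checking_states() become callable; the
        # 'is_ready' condition is resolved as an attribute on this model.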
# array / dict of state machines connected to scarlett
self._machines = {}
# Check interval, in seconds
self.interval = 1
# bus = dbus.SessionBus()
# self.remote = bus.get_object(GstlistenerFSM.DBUS_NAME,
# GstlistenerFSM.DBUS_PATH)
# "/usr/local/share/pocketsphinx/model/hmm/en_US/hub4wsj_sc_8k"
self.ps_hmm = self.get_hmm_full_path()
self.ps_dict = self.get_dict_full_path()
self.ps_lm = self.get_lm_full_path()
self.ps_device = self.config.get('audio', 'usb_input_device')
self.speech_system = self.config.get('speech', 'system')
# default, use what we have set
self.parse_launch_array = self._get_pocketsphinx_definition(
self.override_parse)
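        # Joined with ' ! ' below, this becomes a gst-launch style description,
        # roughly: alsasrc ! queue ! audioconvert ! audioresample ! caps
        # ! audioresample ! caps ! vader ! pocketsphinx name=listener ! fakesink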
scarlett.log.debug(
Fore.YELLOW +
'Initializing gst-parse-launch -------->')
self.pipeline = gst.parse_launch(
' ! '.join(self.parse_launch_array))
listener = self.pipeline.get_by_name('listener')
listener.connect('result', self.__result__)
listener.set_property('configured', True)
scarlett.log.debug(
Fore.YELLOW +
"Initializing connection to vader element -------->")
# TODO: Play with vader object some more
# vader = self.pipeline.get_by_name("vader")
# vader.connect("vader-start", self._on_vader_start)
# vader.connect("vader-stop", self._on_vader_stop)
scarlett.log.debug(Fore.YELLOW + "Initializing Bus -------->")
bus = self.pipeline.get_bus()
bus.add_signal_watch()
scarlett.log.debug(Fore.YELLOW + "Sending Message to Bus ---------->")
bus.connect('message::application', self.__application_message__)
# logging.debug('running with %s and %s', args, kwargs)
# TODO: Uncomment this when we're ready to try this
        ss_listener = threading.Thread(name='Scarlett Listener', target=self.start_listener)
ss_listener.daemon = True
ss_listener.start()
# GObject translates all the underscore characters to hyphen
# characters so if you have a property called background_color,
# its internal and valid name will be background-color.
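    # e.g. the 'kw_found' gproperty declared above is matched here as
    # property.name == 'kw-found'.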
def do_get_property(self, property):
if property.name == 'kw-found':
return self.kw_found
elif property.name == 'failed':
return self.failed
elif property.name == 'override-parse':
return self.override_parse
else:
raise AttributeError('unknown property %s' % property.name)
    def do_set_property(self, property, value):
        if property.name == 'kw-found':
            self.kw_found = value
        elif property.name == 'failed':
            self.failed = value
        elif property.name == 'override-parse':
            self.override_parse = value
        else:
            raise AttributeError('unknown property %s' % property.name)
def start_listener(self):
global CORE_OBJECT
# register service start
listener_connect = scarlett_event(
'service_state',
data=CORE_OBJECT
)
# idle_emit since this is something with low priority
gobject.idle_add(
self.emit,
'gst-started', listener_connect
)
self.pipeline.set_state(gst.STATE_PLAYING)
scarlett.log.debug(
Fore.YELLOW +
'KEYWORD: ' +
self.config.get('scarlett', 'owner')
)
self.loop = gobject.MainLoop()
self.loop.run()
def stop(self):
self.pipeline.set_state(gst.STATE_NULL)
if self.loop is not None:
self.loop.quit()
def scarlett_start_listen(self):
self.pipeline.set_state(gst.STATE_PLAYING)
def scarlett_stop_listen(self):
self.pipeline.set_state(gst.STATE_READY)
def scarlett_pause_listen(self):
self.pipeline.set_state(gst.STATE_PAUSED)
def scarlett_reset_listen(self):
self.do_set_property('failed', 0)
self.do_set_property('kw-found', 0)
def partial_result(self, asr, text, uttid):
"""Forward partial result signals on the bus to the main thread."""
pass
def result(self, hyp, uttid):
"""Forward result signals on the bus to the main thread."""
scarlett.log.debug(Fore.YELLOW + "Inside result function")
if hyp in self.config.get('scarlett', 'keywords'):
scarlett.log.debug(
Fore.YELLOW +
"HYP-IS-SOMETHING: " +
hyp +
"\n\n\n")
scarlett.log.debug(
Fore.YELLOW +
"UTTID-IS-SOMETHING:" +
uttid +
"\n")
self.do_set_property('failed', 0)
self.do_set_property('kw-found', 1)
# TODO: Change this to emit to main thread
# scarlett.basics.voice.play_block('pi-listening')
else:
failed_temp = self.do_get_property('failed') + 1
self.do_set_property('failed', failed_temp)
scarlett.log.debug(
Fore.YELLOW +
"self.failed = %i" %
(self.do_get_property('failed')))
if self.do_get_property('failed') > 4:
# reset pipline
self.scarlett_reset_listen()
# TODO: Change this to emit text data to main thread
# ScarlettTalk.speak(
# " %s , if you need me, just say my name." %
# (self.config.get('scarlett', 'owner')))
def run_cmd(self, hyp, uttid):
scarlett.log.debug(Fore.YELLOW + "Inside run_cmd function")
scarlett.log.debug(Fore.YELLOW + "KEYWORD IDENTIFIED BABY")
scarlett.log.debug(
Fore.RED +
"self.kw_found = %i" %
(self.do_get_property('kw-found')))
if hyp == 'CANCEL':
self.cancel_listening()
else:
# TODO: Change this into emit hypothesis instead
hyp_event = scarlett_event(
"listener_hyp",
data=hyp
)
self.emit('kw-found-ps', hyp_event)
current_kw_identified = self.do_get_property('kw-found')
self.do_set_property('kw-found', current_kw_identified)
scarlett.log.debug(
Fore.RED +
"AFTER run_cmd, self.kw_found = %i" %
(self.do_get_property('kw-found')))
def hello(self):
print 'hello hello hello!'
def listen(self, valve, vader):
scarlett.log.debug(Fore.YELLOW + "Inside listen function")
# TODO: have this emit pi-listening to mainthread
# scarlett.basics.voice.play_block('pi-listening')
valve.set_property('drop', False)
valve.set_property('drop', True)
def cancel_listening(self):
scarlett.log.debug(Fore.YELLOW + "Inside cancel_listening function")
self.scarlett_reset_listen()
scarlett.log.debug(Fore.YELLOW + "self.failed = %i" % (self.failed))
scarlett.log.debug(
Fore.RED +
"self.keyword_identified = %i" %
(self.do_get_property('kw-found')))
def get_hmm_full_path(self):
if os.environ.get('SCARLETT_HMM'):
_hmm_full_path = os.environ.get('SCARLETT_HMM')
else:
_hmm_full_path = self.config.get('pocketsphinx', 'hmm')
return _hmm_full_path
def get_lm_full_path(self):
if os.environ.get('SCARLETT_LM'):
_lm_full_path = os.environ.get('SCARLETT_LM')
else:
_lm_full_path = self.config.get('pocketsphinx', 'lm')
return _lm_full_path
def get_dict_full_path(self):
if os.environ.get('SCARLETT_DICT'):
_dict_full_path = os.environ.get('SCARLETT_DICT')
else:
_dict_full_path = self.config.get('pocketsphinx', 'dict')
return _dict_full_path
def get_pipeline(self):
scarlett.log.debug(Fore.YELLOW + "Inside get_pipeline")
return self.pipeline
def get_voice(self):
scarlett.log.debug(Fore.YELLOW + "Inside get_voice")
return self.voice
def get_pipeline_state(self):
return self.pipeline.get_state()
    def _get_pocketsphinx_definition(self, override_parse):
        """Return ``pocketsphinx`` definition for :func:`gst.parse_launch`."""
        scarlett.log.debug(Fore.YELLOW + "Inside _get_pocketsphinx_definition")
# default, use what we have set
if override_parse == '':
return [
'alsasrc device=' +
self.ps_device,
'queue silent=false leaky=2 max-size-buffers=0 max-size-time=0 max-size-bytes=0', # noqa
'audioconvert',
'audioresample',
'audio/x-raw-int, rate=16000, width=16, depth=16, channels=1',
'audioresample',
'audio/x-raw-int, rate=8000',
'vader name=vader auto-threshold=true',
'pocketsphinx lm=' +
self.ps_lm +
' dict=' +
self.ps_dict +
' hmm=' +
self.ps_hmm +
' name=listener',
'fakesink dump=1']
# NOTE, I commented out the refrence to the tee
# 'fakesink dump=1 t.'
else:
return override_parse
    def _get_vader_definition(self):
        """Return ``vader`` definition for :func:`gst.parse_launch`."""
        scarlett.log.debug(Fore.YELLOW + "Inside _get_vader_definition")
# source: https://github.com/bossjones/eshayari/blob/master/eshayari/application.py # noqa
# Convert noise level from spin button range [0,32768] to gstreamer
# element's range [0,1]. Likewise, convert silence from spin button's
# milliseconds to gstreamer element's nanoseconds.
# MY DEFAULT VADER DEFINITON WAS: vader name=vader auto-threshold=true
# vader name=vader auto-threshold=true
        noise = 256.0 / 32768  # float division; integer division would give 0 on Python 2
silence = 300 * 1000000
return ("vader "
+ "name=vader "
+ "auto-threshold=false "
+ "threshold=%.9f " % noise
+ "run-length=%d " % silence
)
    def _on_vader_start(self, vader, pos):
        """Send start position as a message on the bus."""
        scarlett.log.debug(Fore.YELLOW + "Inside _on_vader_start")
import gst
struct = gst.Structure("start")
pos = pos / 1000000000 # ns to s
struct.set_value("start", pos)
vader.post_message(gst.message_new_application(vader, struct))
    def _on_vader_stop(self, vader, pos):
        """Send stop position as a message on the bus."""
        scarlett.log.debug(Fore.YELLOW + "Inside _on_vader_stop")
        import gst
        struct = gst.Structure("stop")
        pos = pos / 1000000000 # ns to s
        struct.set_value("stop", pos)
        vader.post_message(gst.message_new_application(vader, struct))
def __result__(self, listener, text, uttid):
"""We're inside __result__"""
scarlett.log.debug(Fore.YELLOW + "Inside __result__")
import gst
struct = gst.Structure('result')
struct.set_value('hyp', text)
struct.set_value('uttid', uttid)
listener.post_message(gst.message_new_application(listener, struct))
def __partial_result__(self, listener, text, uttid):
"""We're inside __partial_result__"""
scarlett.log.debug(Fore.YELLOW + "Inside __partial_result__")
struct = gst.Structure('partial_result')
struct.set_value('hyp', text)
struct.set_value('uttid', uttid)
listener.post_message(gst.message_new_application(listener, struct))
def __run_cmd__(self, listener, text, uttid):
"""We're inside __run_cmd__"""
import gst
scarlett.log.debug(Fore.YELLOW + "Inside __run_cmd__")
struct = gst.Structure('result')
struct.set_value('hyp', text)
struct.set_value('uttid', uttid)
listener.post_message(gst.message_new_application(listener, struct))
def __application_message__(self, bus, msg):
msgtype = msg.structure.get_name()
scarlett.log.debug(Fore.YELLOW + "msgtype: " + msgtype)
if msgtype == 'partial_result':
self.partial_result(msg.structure['hyp'], msg.structure['uttid'])
elif msgtype == 'result':
if self.do_get_property('kw-found') == 1:
self.run_cmd(msg.structure['hyp'], msg.structure['uttid'])
else:
self.result(msg.structure['hyp'], msg.structure['uttid'])
elif msgtype == 'run_cmd':
self.run_cmd(msg.structure['hyp'], msg.structure['uttid'])
elif msgtype == gst.MESSAGE_EOS:
pass
# TODO: SEE IF WE NEED THIS
# self.pipeline.set_state(gst.STATE_NULL)
elif msgtype == gst.MESSAGE_ERROR:
            (err, debug) = msg.parse_error()
            scarlett.log.debug(Fore.RED + "Error: %s (%s)" % (err, debug))
pass
# Register to be able to emit signals
gobject.type_register(GstlistenerFSM)
| mit | -6,935,489,890,912,596,000 | 32.855731 | 104 | 0.566692 | false |
wbrp/dnsimple-zoneimport | setup.py | 1 | 1349 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from dnsimple_zoneimport import meta
f = open('requirements.txt', 'r')
lines = f.readlines()
requirements = [l.strip().strip('\n') for l in lines if l.strip() and not l.strip().startswith('#')]
readme = open('README.rst').read()
setup(name='dnsimple-zoneimport',
version=meta.version,
description=meta.description,
author=meta.author,
author_email=meta.author_email,
url='https://github.com/wbrp/dnsimple-zoneimport',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
license=meta.license,
keywords='dnsimple dns "zone files" bind import api',
long_description=readme,
install_requires=requirements,
entry_points={
'console_scripts': [
'%s = dnsimple_zoneimport.importer:main' % meta.title.replace('-', '_'),
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: Name Service (DNS)',
'Topic :: Terminals',
],
)
| mit | -6,276,857,376,273,034,000 | 32.725 | 100 | 0.594514 | false |